/*
* MuninMX
* Written by Enrico Kern, kern@clavain.com
* www.clavain.com
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.clavain.utils;
import com.clavain.jobs.CheckJob;
import com.clavain.jobs.CustomJob;
import com.clavain.jobs.MuninJob;
import com.clavain.json.ScheduledJob;
import com.clavain.json.ServiceCheck;
import com.clavain.munin.MuninNode;
import com.clavain.munin.MuninPlugin;
import static org.quartz.SimpleScheduleBuilder.simpleSchedule;
import org.quartz.Trigger;
import static com.clavain.muninmxcd.logger;
import static com.clavain.muninmxcd.sched;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.quartz.impl.matchers.GroupMatcher;
import org.quartz.JobDetail;
import org.quartz.SchedulerException;
import static org.quartz.JobBuilder.*;
import org.quartz.JobKey;
import static org.quartz.TriggerBuilder.newTrigger;
import static org.quartz.impl.matchers.GroupMatcher.groupEquals;
import static com.clavain.utils.Database.getMuninPluginForCustomJobFromDb;
import static com.clavain.utils.Database.getServiceCheckFromDatabase;
import static com.clavain.utils.Generic.getStampFromTimeAndZone;
import static com.clavain.utils.Generic.getMuninPluginForCustomJob;
import java.text.SimpleDateFormat;
import java.util.TimeZone;
import org.quartz.CronScheduleBuilder;
/**
 * Static helpers for scheduling and unscheduling the MuninMX Quartz jobs: munin node
 * polling, service checks and custom interval jobs.
 *
 * @author enricokern
 */
public class Quartz {
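// Illustrative usage sketch (an assumption for illustration only: it presumes the Quartz
// schedulers in com.clavain.muninmxcd are already started and that the MuninNode instance
// comes from the node registry):
//
//   MuninNode node = ...;                 // obtained elsewhere
//   Quartz.scheduleJob(node);             // polls the node every getQueryInterval() minutes
//   ...
//   Quartz.unscheduleCheck(node.getNode_id().toString(), node.getUser_id().toString());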
public static boolean scheduleServiceCheck(Integer p_cid)
{
ServiceCheck sc = getServiceCheckFromDatabase(p_cid);
if(sc == null)
{
return false;
}
return scheduleServiceCheck(sc);
}
// schedule a new check
public static boolean scheduleServiceCheck(ServiceCheck p_sc)
{
boolean l_retVal = false;
Trigger trigger = newTrigger().withIdentity("checktrigger", p_sc.getUser_id().toString() + p_sc.getCid() + System.currentTimeMillis())
        .startNow()
        .withSchedule(simpleSchedule().withIntervalInMinutes(p_sc.getInterval()).repeatForever().withMisfireHandlingInstructionFireNow())
        .build();
JobDetail job = newJob(CheckJob.class).withIdentity(p_sc.getCid().toString(), p_sc.getUser_id().toString()).usingJobData("cid", p_sc.getCid()).build();
try
{
com.clavain.muninmxcd.sched_checks.scheduleJob(job, trigger);
l_retVal = true;
} catch (Exception ex)
{
com.clavain.muninmxcd.logger.error(ex);
}
return l_retVal;
}
// delete a job
public static boolean unscheduleServiceCheck(String p_cid, String p_uid)
{
boolean l_retVal = false;
JobKey jk = new JobKey(p_cid, p_uid);
try {
com.clavain.muninmxcd.sched_checks.deleteJob(jk);
l_retVal = true;
} catch (SchedulerException ex) {
com.clavain.muninmxcd.logger.error(ex.getLocalizedMessage());
}
return l_retVal;
}
public static boolean isServiceCheckScheduled(int p_cid)
{
boolean retval = false;
String match = p_cid+"";
for(ScheduledJob sj : getScheduledServiceChecks())
{
if(sj.getJobName().equals(match))
{
return true;
}
}
return retval;
}
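/**
 * Enumerates every job currently registered with the service check scheduler
 * (sched_checks) and reports its name, group and next fire time.
 */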
public static ArrayList<ScheduledJob> getScheduledServiceChecks()
{
ArrayList<ScheduledJob> retval = new ArrayList<>();
try {
for (String groupName : com.clavain.muninmxcd.sched_checks.getJobGroupNames()) {
for (JobKey jobKey : com.clavain.muninmxcd.sched_checks.getJobKeys(GroupMatcher.jobGroupEquals(groupName))) {
String jobName = jobKey.getName();
String jobGroup = jobKey.getGroup();
//get job's trigger
List<Trigger> triggers = (List<Trigger>) com.clavain.muninmxcd.sched_checks.getTriggersOfJob(jobKey);
Date nextFireTime = triggers.get(0).getNextFireTime();
ScheduledJob sj = new ScheduledJob();
sj.setJobName(jobName);
sj.setGroupName(jobGroup);
sj.setNextFireTime(nextFireTime.toString());
retval.add(sj);
}
}
} catch (SchedulerException ex) {
com.clavain.muninmxcd.logger.error(ex.getLocalizedMessage());
ex.printStackTrace();
}
return retval;
}
// schedule a new check
public static boolean scheduleJob(MuninNode mn)
{
boolean l_retVal = false;
String uid = mn.getUser_id().toString();
Trigger trigger = newTrigger().withIdentity("trigger", uid + mn.getNode_id() + System.currentTimeMillis())
        .startNow()
        .withSchedule(simpleSchedule().withIntervalInMinutes(mn.getQueryInterval()).repeatForever().withMisfireHandlingInstructionFireNow())
        .build();
JobDetail job = newJob(MuninJob.class).withIdentity(mn.getNode_id().toString(), mn.getUser_id().toString()).usingJobData("nodeId", mn.getNode_id()).build();
try
{
sched.scheduleJob(job, trigger);
logger.info("Scheduled Job for Node: " + mn.getHostname() + " with interval: " + mn.getQueryInterval() + " minutes");
l_retVal = true;
} catch (Exception ex)
{
logger.error("Unable to Schedule Job for Node: " + mn.getHostname());
logger.error(ex);
}
return l_retVal;
}
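/**
 * Checks the munin node scheduler for a job whose key matches "userId.nodeId".
 * Quartz renders a JobKey as "group.name", and scheduleJob() uses the user id as the
 * group and the node id as the name, so the two formats line up.
 */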
public static boolean isScheduled(Integer p_nodeid, Integer p_user_id)
{
boolean l_retVal = false;
String jobSearch = p_user_id + "." + p_nodeid;
try {
// enumerate each job group
for(String group: com.clavain.muninmxcd.sched.getJobGroupNames()) {
// enumerate each job in group
for(JobKey jobKey : com.clavain.muninmxcd.sched.getJobKeys((GroupMatcher<JobKey>) groupEquals(group))) {
if(jobKey.toString().equals(jobSearch))
{
l_retVal = true;
}
}
}
} catch (SchedulerException ex) {
logger.error("Error in isScheduled: " + ex.getLocalizedMessage());
}
return l_retVal;
}
public static boolean isJobScheduled(int p_nodeid)
{
boolean retval = false;
String match = p_nodeid+"";
for(ScheduledJob sj : getScheduledJobs())
{
if(sj.getJobName().equals(match))
{
return true;
}
}
return retval;
}
public static boolean unscheduleCheck(String p_nodeid, String p_uid)
{
boolean l_retVal = false;
JobKey jk = new JobKey(p_nodeid,p_uid);
try {
com.clavain.muninmxcd.sched.deleteJob(jk);
l_retVal = true;
} catch (SchedulerException ex) {
logger.error("Error in unscheduleCheck: " + ex.getLocalizedMessage());
}
return l_retVal;
}
public static boolean isCustomJobScheduled(int p_cid)
{
boolean retval = false;
String match = p_cid+"";
for(ScheduledJob sj : getScheduledCustomJobs())
{
if(sj.getJobName().equals(match))
{
return true;
}
}
return retval;
}
public static boolean unscheduleCustomJob(String p_cid, String p_uid)
{
boolean l_retVal = false;
JobKey jk = new JobKey(p_cid,p_uid);
try {
com.clavain.muninmxcd.sched_custom.deleteJob(jk);
com.clavain.muninmxcd.v_cinterval_plugins.remove(getMuninPluginForCustomJob(Integer.parseInt(p_cid)));
l_retVal = true;
} catch (SchedulerException ex) {
logger.error("Error in unscheduleCustomJob: " + ex.getLocalizedMessage());
}
return l_retVal;
}
// schedule a custom interval check
public static boolean scheduleCustomIntervalJob(Integer p_cid)
{
boolean retval = false;
MuninPlugin l_mp = getMuninPluginForCustomJobFromDb(p_cid);
if(l_mp != null)
{
String uid = l_mp.getUser_id().toString();
String cinterval = "";
// build trigger
Trigger trigger;
// crontab trigger
if(!l_mp.getCrontab().equals("false"))
{
// fixed start/end ?
if(l_mp.getFrom_time() == 0)
{
trigger = newTrigger().withIdentity("trigger", uid + l_mp.getCustomId() + System.currentTimeMillis()).withSchedule(CronScheduleBuilder.cronSchedule(l_mp.getCrontab()).withMisfireHandlingInstructionFireAndProceed().inTimeZone(TimeZone.getTimeZone(l_mp.getTimezone()))).build();
cinterval = " every " + l_mp.getQuery_interval() + " seconds with crontab: " + l_mp.getCrontab();
}
else
{
long a = l_mp.getFrom_time();
long b = l_mp.getTo_time();
Date startDate = new Date(a*1000L);
Date endDate = new Date(b*1000L);
trigger = newTrigger().withIdentity("trigger", uid + l_mp.getCustomId() + System.currentTimeMillis()).startAt(startDate).withSchedule(CronScheduleBuilder.cronSchedule(l_mp.getCrontab()).withMisfireHandlingInstructionFireAndProceed().inTimeZone(TimeZone.getTimeZone(l_mp.getTimezone()))).endAt(endDate).build();
cinterval = " every " + l_mp.getQuery_interval() + " seconds with crontab: " + l_mp.getCrontab() + " from " + startDate + " to " + endDate;
}
}
else
{
// standard repeat forever trigger
if(l_mp.getFrom_time() == 0)
{
trigger = newTrigger().withIdentity("trigger", uid + l_mp.getCustomId() + System.currentTimeMillis()).startNow().withSchedule(simpleSchedule().withIntervalInSeconds(l_mp.getQuery_interval()).repeatForever().withMisfireHandlingInstructionFireNow()).build();
cinterval = " every " + l_mp.getQuery_interval() + " seconds";
}
// daterange trigger, ignore missfire
else
{
long a = l_mp.getFrom_time();
long b = l_mp.getTo_time();
Date startDate = new Date(a*1000L);
Date endDate = new Date(b*1000L);
long cur = (System.currentTimeMillis() / 1000L);
trigger = newTrigger().withIdentity("trigger", uid + l_mp.getCustomId() + System.currentTimeMillis()).startAt(startDate).withSchedule(simpleSchedule().withIntervalInSeconds(l_mp.getQuery_interval()).repeatForever().withMisfireHandlingInstructionIgnoreMisfires()).endAt(endDate).build();
cinterval = " every " + l_mp.getQuery_interval() + " seconds from " + startDate + " to " + endDate;
}
}
JobDetail job = newJob(CustomJob.class).withIdentity(l_mp.getCustomId() + "", l_mp.getUser_id().toString()).usingJobData("customId", l_mp.getCustomId()).build();
try
{
com.clavain.muninmxcd.sched_custom.scheduleJob(job, trigger);
logger.info("Scheduled CustomJob for custom interval: " + l_mp.getCustomId() + " with interval " + cinterval);
retval = true;
com.clavain.muninmxcd.v_cinterval_plugins.add(l_mp);
} catch (Exception ex)
{
logger.error("Unable to Schedule Job for custom interval: " + l_mp.getCustomId() + " with interval " + cinterval);
logger.error(ex.getLocalizedMessage());
}
}
return retval;
}
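/**
 * Enumerates all jobs currently registered with the munin node scheduler; the
 * getScheduledCheckJobs() and getScheduledCustomJobs() variants below do the same for the
 * service check and custom interval schedulers.
 */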
public static ArrayList<ScheduledJob> getScheduledJobs()
{
ArrayList<ScheduledJob> retval = new ArrayList<>();
try {
for (String groupName : com.clavain.muninmxcd.sched.getJobGroupNames()) {
for (JobKey jobKey : com.clavain.muninmxcd.sched.getJobKeys(GroupMatcher.jobGroupEquals(groupName))) {
String jobName = jobKey.getName();
String jobGroup = jobKey.getGroup();
//get job's trigger
List<Trigger> triggers = (List<Trigger>) com.clavain.muninmxcd.sched.getTriggersOfJob(jobKey);
Date nextFireTime = triggers.get(0).getNextFireTime();
ScheduledJob sj = new ScheduledJob();
sj.setJobName(jobName);
sj.setGroupName(jobGroup);
sj.setNextFireTime(nextFireTime.toString());
retval.add(sj);
}
}
} catch (SchedulerException ex) {
logger.error("Error in getScheduledJobs(): " + ex.getLocalizedMessage());
}
return retval;
}
public static ArrayList<ScheduledJob> getScheduledCheckJobs()
{
ArrayList<ScheduledJob> retval = new ArrayList<>();
try {
for (String groupName : com.clavain.muninmxcd.sched_checks.getJobGroupNames()) {
for (JobKey jobKey : com.clavain.muninmxcd.sched_checks.getJobKeys(GroupMatcher.jobGroupEquals(groupName))) {
String jobName = jobKey.getName();
String jobGroup = jobKey.getGroup();
//get job's trigger
List<Trigger> triggers = (List<Trigger>) com.clavain.muninmxcd.sched_checks.getTriggersOfJob(jobKey);
Date nextFireTime = triggers.get(0).getNextFireTime();
ScheduledJob sj = new ScheduledJob();
sj.setJobName(jobName);
sj.setGroupName(jobGroup);
sj.setNextFireTime(nextFireTime.toString());
retval.add(sj);
}
}
} catch (SchedulerException ex) {
logger.error("Error in getScheduledCheckJobs(): " + ex.getLocalizedMessage());
}
return retval;
}
public static ArrayList<ScheduledJob> getScheduledCustomJobs()
{
ArrayList<ScheduledJob> retval = new ArrayList<>();
try {
for (String groupName : com.clavain.muninmxcd.sched_custom.getJobGroupNames()) {
for (JobKey jobKey : com.clavain.muninmxcd.sched_custom.getJobKeys(GroupMatcher.jobGroupEquals(groupName))) {
String jobName = jobKey.getName();
String jobGroup = jobKey.getGroup();
//get job's trigger
List<Trigger> triggers = (List<Trigger>) com.clavain.muninmxcd.sched_custom.getTriggersOfJob(jobKey);
Date nextFireTime = triggers.get(0).getNextFireTime();
ScheduledJob sj = new ScheduledJob();
sj.setJobName(jobName);
sj.setGroupName(jobGroup);
sj.setNextFireTime(nextFireTime.toString());
retval.add(sj);
}
}
} catch (SchedulerException ex) {
logger.error("Error in getScheduledCustomJobs(): " + ex.getLocalizedMessage());
}
return retval;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.tier.sockets;
import static java.util.Map.Entry;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.apache.geode.test.dunit.LogWriterUtils.getLogWriter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.client.PoolManager;
import org.apache.geode.cache.client.internal.PoolImpl;
import org.apache.geode.cache.client.internal.QueueStateImpl.SequenceIdAndExpirationObject;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.internal.AvailablePort;
import org.apache.geode.internal.cache.ClientServerObserver;
import org.apache.geode.internal.cache.ClientServerObserverAdapter;
import org.apache.geode.internal.cache.ClientServerObserverHolder;
import org.apache.geode.internal.cache.ha.HAHelper;
import org.apache.geode.internal.cache.ha.HARegionQueue;
import org.apache.geode.internal.cache.ha.ThreadIdentifier;
import org.apache.geode.test.awaitility.GeodeAwaitility;
import org.apache.geode.test.dunit.Assert;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.IgnoredException;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.NetworkUtils;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.WaitCriterion;
import org.apache.geode.test.dunit.internal.JUnit4DistributedTestCase;
import org.apache.geode.test.junit.categories.ClientServerTest;
/**
* Tests the reliable messaging functionality - Client sends a periodic ack to the primary server
* for the messages received.
*/
@Category({ClientServerTest.class})
public class ReliableMessagingDUnitTest extends JUnit4DistributedTestCase {
static VM server1 = null;
static VM server2 = null;
/** the cache */
private static Cache cache = null;
/** port for the cache server */
private static int PORT1;
private static int PORT2;
static PoolImpl pool = null;
static ThreadIdentifier tid = null;
static Long seqid = null;
static long creationTime = 0;
static int CLIENT_ACK_INTERVAL = 5000;
/** name of the test region */
private static final String REGION_NAME =
ReliableMessagingDUnitTest.class.getSimpleName() + "_Region";
/**
 * Verifies that the client sends a periodic ack to the primary server for the messages it
 * has received.
 */
@Test
public void testPeriodicAckSendByClient() throws Exception {
createEntries();
server1.invoke(() -> ReliableMessagingDUnitTest.putOnServer());
waitForServerUpdate();
setCreationTimeTidAndSeq();
waitForClientAck();
server1.invoke(() -> ReliableMessagingDUnitTest.checkTidAndSeq());
}
/**
 * If the primary server fails before it receives an ack for the messages it delivered, the
 * client should send that ack to the new primary so that the new primary can send QRMs to
 * the other redundant servers.
 */
@Test
public void testPeriodicAckSendByClientPrimaryFailover() throws Exception {
IgnoredException.addIgnoredException("java.net.ConnectException");
createEntries();
setClientServerObserverForBeforeSendingClientAck();
server1.invoke(() -> ReliableMessagingDUnitTest.putOnServer());
LogWriterUtils.getLogWriter().info("Entering waitForServerUpdate");
waitForServerUpdate();
LogWriterUtils.getLogWriter().info("Entering waitForCallback");
waitForCallback();
LogWriterUtils.getLogWriter().info("Entering waitForClientAck");
waitForClientAck();
server2.invoke(() -> ReliableMessagingDUnitTest.checkTidAndSeq());
}
/**
 * Waits for the acknowledgment from the client and verifies that the recorded creation time
 * matches the one held by the pool.
 */
public static void waitForClientAck() throws Exception {
final long maxWaitTime = 30000;
final long start = System.currentTimeMillis();
Iterator iter = pool.getThreadIdToSequenceIdMap().entrySet().iterator();
SequenceIdAndExpirationObject seo = null;
if (!iter.hasNext()) {
fail("map is empty");
}
Map.Entry entry = (Map.Entry) iter.next();
seo = (SequenceIdAndExpirationObject) entry.getValue();
for (;;) {
if (seo.getAckSend()) {
break;
}
assertTrue("Waited over " + maxWaitTime + " ms for client ack",
    (System.currentTimeMillis() - start) < maxWaitTime);
sleep(1000);
}
LogWriterUtils.getLogWriter().info("seo = " + seo);
assertTrue(
    "Creation time " + creationTime + " expected to equal seo creation time " + seo.getCreationTime(),
    creationTime == seo.getCreationTime());
}
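/**
 * Waits until the client pool has at least one thread-id to sequence-id entry, checks that
 * no ack has been sent yet, records the entry's creation time and pushes its thread id and
 * sequence id to both servers for later verification by checkTidAndSeq().
 */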
public static void setCreationTimeTidAndSeq() {
final Map map = pool.getThreadIdToSequenceIdMap();
WaitCriterion ev = new WaitCriterion() {
@Override
public boolean done() {
synchronized (map) {
return map.entrySet().size() > 0;
}
}
@Override
public String description() {
return null;
}
};
GeodeAwaitility.await().untilAsserted(ev);
Entry entry;
synchronized (map) {
Iterator iter = map.entrySet().iterator();
entry = (Entry) iter.next();
}
SequenceIdAndExpirationObject seo = (SequenceIdAndExpirationObject) entry.getValue();
assertFalse(seo.getAckSend());
creationTime = seo.getCreationTime();
getLogWriter().info("seo is " + seo.toString());
assertTrue("Creation time not set", creationTime != 0);
Object args[] = new Object[] {((ThreadIdentifier) entry.getKey()).getMembershipID(),
new Long(((ThreadIdentifier) entry.getKey()).getThreadID()), new Long(seo.getSequenceId())};
server1.invoke(ReliableMessagingDUnitTest.class, "setTidAndSeq", args);
server2.invoke(ReliableMessagingDUnitTest.class, "setTidAndSeq", args);
}
public static void checkEmptyDispatchedMsgs() {
assertEquals(0, HARegionQueue.getDispatchedMessagesMapForTesting().size());
}
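/**
 * Invoked on a server VM: verifies that the dispatched-messages map records, for the thread
 * id captured on the client, the same sequence id the client last observed.
 */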
public static void checkTidAndSeq() {
Map map = HARegionQueue.getDispatchedMessagesMapForTesting();
assertTrue(map.size() > 0);
Iterator iter = map.entrySet().iterator();
if (!iter.hasNext()) {
fail("Dispatched messages is empty");
}
Map.Entry entry = (Map.Entry) iter.next();
Map dispMap = HAHelper.getDispatchMessageMap(entry.getValue());
assertEquals(seqid, dispMap.get(tid));
}
public static void setTidAndSeq(byte[] membershipId, Long threadId, Long sequenceId) {
tid = new ThreadIdentifier(membershipId, threadId.longValue());
seqid = sequenceId;
}
public static void createEntries() throws Exception {
creationTime = 0;
Region r1 = cache.getRegion(Region.SEPARATOR + REGION_NAME);
String keyPrefix = "server-";
for (int i = 0; i < 5; i++) {
r1.create(keyPrefix + i, "val");
}
}
public static void putOnServer() throws Exception {
Region r1 = cache.getRegion(Region.SEPARATOR + REGION_NAME);
String keyPrefix = "server-";
for (int i = 0; i < 5; i++) {
r1.put(keyPrefix + i, "val-" + i);
}
}
private static void sleep(int ms) {
try {
Thread.sleep(ms);
} catch (InterruptedException e) {
Assert.fail("Interrupted", e);
}
}
public static void checkServerCount(int expectedDeadServers, int expectedLiveServers) {
final long maxWaitTime = 60000;
long start = System.currentTimeMillis();
for (;;) {
if (pool.getConnectedServerCount() == expectedLiveServers) {
break; // met
}
assertTrue(
    "Waited over " + maxWaitTime + " ms for the number of active servers to become " + expectedLiveServers,
    (System.currentTimeMillis() - start) < maxWaitTime);
sleep(2000);
}
}
public static void stopServer() {
try {
Iterator iter = cache.getCacheServers().iterator();
if (iter.hasNext()) {
CacheServer server = (CacheServer) iter.next();
server.stop();
}
} catch (Exception e) {
Assert.fail("failed while stopServer()", e);
}
}
/**
* Wait for new value on cache server to become visible in this cache
*/
public static void waitForServerUpdate() {
Region r1 = cache.getRegion(Region.SEPARATOR + REGION_NAME);
assertNotNull(r1);
final long maxWaitTime = 60000;
final long start = System.currentTimeMillis();
for (;;) {
if (r1.getEntry("server-4").getValue().equals("val-4")) {
break;
}
assertTrue("Waited over " + maxWaitTime + " ms for entry to be refreshed",
(System.currentTimeMillis() - start) < maxWaitTime);
sleep(1000);
}
}
public static void setClientServerObserverForBeforeSendingClientAck() throws Exception {
PoolImpl.BEFORE_SENDING_CLIENT_ACK_CALLBACK_FLAG = true;
origObserver = ClientServerObserverHolder.setInstance(new ClientServerObserverAdapter() {
@Override
public void beforeSendingClientAck() {
LogWriterUtils.getLogWriter().info("beforeSendingClientAck invoked");
setCreationTimeTidAndSeq();
server1.invoke(() -> ReliableMessagingDUnitTest.stopServer());
checkServerCount(1, 1);
server2.invoke(() -> ReliableMessagingDUnitTest.checkEmptyDispatchedMsgs());
PoolImpl.BEFORE_SENDING_CLIENT_ACK_CALLBACK_FLAG = false;
LogWriterUtils.getLogWriter().info("end of beforeSendingClientAck");
}
});
}
/**
 * Waits until the beforeSendingClientAck observer callback has run and cleared
 * BEFORE_SENDING_CLIENT_ACK_CALLBACK_FLAG.
 */
public static void waitForCallback() {
final long maxWaitTime = 60000;
final long start = System.currentTimeMillis();
for (;;) {
if (!PoolImpl.BEFORE_SENDING_CLIENT_ACK_CALLBACK_FLAG) {
break;
}
assertTrue("Waited over " + maxWaitTime + " ms for the client to send an ack",
    (System.currentTimeMillis() - start) < maxWaitTime);
sleep(2000);
}
}
@Override
public final void postSetUp() throws Exception {
final Host host = Host.getHost(0);
server1 = host.getVM(0);
server2 = host.getVM(1);
PORT1 =
((Integer) server1.invoke(() -> ReliableMessagingDUnitTest.createServerCache())).intValue();
PORT2 =
((Integer) server2.invoke(() -> ReliableMessagingDUnitTest.createServerCache())).intValue();
CacheServerTestUtil.disableShufflingOfEndpoints();
createClientCache(PORT1, PORT2);
}
private Cache createCache(Properties props) throws Exception {
DistributedSystem ds = getSystem(props);
ds.disconnect();
ds = getSystem(props);
Cache result = null;
result = CacheFactory.create(ds);
if (result == null) {
throw new Exception("CacheFactory.create() returned null ");
}
return result;
}
public static Integer createServerCache() throws Exception {
ReliableMessagingDUnitTest test = new ReliableMessagingDUnitTest();
Properties props = new Properties();
cache = test.createCache(props);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setDataPolicy(DataPolicy.REPLICATE);
RegionAttributes attrs = factory.create();
cache.setMessageSyncInterval(25);
cache.createRegion(REGION_NAME, attrs);
CacheServer server = cache.addCacheServer();
int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
server.setPort(port);
server.setNotifyBySubscription(true);
server.start();
LogWriterUtils.getLogWriter().info("Server started at PORT = " + port);
return new Integer(server.getPort());
}
public static void createClientCache(int port1, int port2) throws Exception {
ReliableMessagingDUnitTest test = new ReliableMessagingDUnitTest();
Properties props = new Properties();
props.setProperty(MCAST_PORT, "0");
props.setProperty(LOCATORS, "");
cache = test.createCache(props);
String host = NetworkUtils.getServerHostName(Host.getHost(0));
PoolImpl p = (PoolImpl) PoolManager.createFactory().addServer(host, PORT1)
.addServer(host, PORT2).setSubscriptionEnabled(true).setSubscriptionRedundancy(1)
.setMinConnections(6).setReadTimeout(20000)
.setPingInterval(10000).setRetryAttempts(5).setSubscriptionAckInterval(CLIENT_ACK_INTERVAL)
.create("ReliableMessagingDUnitTestPool");
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setPoolName(p.getName());
RegionAttributes attrs = factory.create();
Region region = cache.createRegion(REGION_NAME, attrs);
region.registerInterest("ALL_KEYS");
pool = p;
}
@Override
public final void preTearDown() throws Exception {
creationTime = 0;
closeCache();
server1.invoke(() -> ReliableMessagingDUnitTest.closeCache());
server2.invoke(() -> ReliableMessagingDUnitTest.closeCache());
CacheServerTestUtil.resetDisableShufflingOfEndpointsFlag();
}
public static void closeCache() {
if (cache != null && !cache.isClosed()) {
cache.close();
cache.getDistributedSystem().disconnect();
}
}
private static ClientServerObserver origObserver;
public static void resetCallBack() {
ClientServerObserverHolder.setInstance(origObserver);
}
}
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.maven.dom;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiManager;
import org.jetbrains.idea.maven.dom.model.MavenDomProfilesModel;
import org.jetbrains.idea.maven.dom.references.MavenPropertyPsiReference;
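/**
 * Tests completion, resolution, renaming and find-usages of ${...} property references in
 * Maven-filtered resource files (project properties, parent properties, profiles.xml
 * properties and filter files).
 */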
public class MavenFilteredPropertiesCompletionAndResolutionTest extends MavenDomTestCase {
public void testBasic() throws Exception {
createProjectSubDir("res");
importProject("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<build>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile f = createProjectSubFile("res/foo.properties",
"foo=abc${project<caret>.version}abc");
assertResolved(f, findTag("project.version"));
}
public void testCorrectlyCalculatingBaseDir() throws Exception {
createProjectSubDir("res");
importProject("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<build>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile f = createProjectSubFile("res/foo.properties",
"foo=abc${basedir<caret>}abc");
PsiDirectory baseDir = PsiManager.getInstance(myProject).findDirectory(myProjectPom.getParent());
assertResolved(f, baseDir);
}
public void testResolvingToNonManagedParentProperties() throws Exception {
createProjectSubDir("res");
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<parent>" +
" <groupId>test</groupId>" +
" <artifactId>parent</artifactId>" +
" <version>1</version>" +
" <relativePath>parent/pom.xml</relativePath>" +
"</parent>" +
"<build>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile parent = createModulePom("parent",
"<groupId>test</groupId>" +
"<artifactId>parent</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<properties>" +
" <parentProp>value</parentProp>" +
"</properties>");
importProject();
VirtualFile f = createProjectSubFile("res/foo.properties",
"foo=${parentProp<caret>}");
assertResolved(f, findTag(parent, "project.properties.parentProp"));
}
public void testResolvingToProfilesXmlProperties() throws Exception {
createProjectSubDir("res");
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<build>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile profiles = createProfilesXml("<profile>" +
" <id>one</id>" +
" <properties>" +
" <profileProp>value</profileProp>" +
" </properties>" +
"</profile>");
importProjectWithProfiles("one");
VirtualFile f = createProjectSubFile("res/foo.properties",
"foo=${profileProp<caret>}");
assertResolved(f, findTag(profiles, "profilesXml.profiles[0].properties.profileProp", MavenDomProfilesModel.class));
}
public void testDoNotResolveOutsideResources() throws Exception {
createProjectSubDir("res");
importProject("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<build>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile f = createProjectSubFile("foo.properties",
"foo=abc${project<caret>.version}abc");
assertNoReferences(f, MavenPropertyPsiReference.class);
}
public void testDoNotResolveNonFilteredResources() throws Exception {
createProjectSubDir("res");
importProject("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<build>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>false</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile f = createProjectSubFile("res/foo.properties",
"foo=abc${project<caret>.version}abc");
assertNoReferences(f, MavenPropertyPsiReference.class);
}
public void testUsingFilters() throws Exception {
VirtualFile filter = createProjectSubFile("filters/filter.properties", "xxx=1");
importProject("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<build>" +
" <filters>" +
" <filter>filters/filter.properties</filter>" +
" </filters>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile f = createProjectSubFile("res/foo.properties",
"foo=abc${xx<caret>x}abc");
assertResolved(f, MavenDomUtil.findProperty(myProject, filter, "xxx"));
}
public void testCompletionFromFilters() throws Exception {
createProjectSubFile("filters/filter1.properties", "xxx=1");
createProjectSubFile("filters/filter2.properties", "yyy=1");
importProject("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<build>" +
" <filters>" +
" <filter>filters/filter1.properties</filter>" +
" <filter>filters/filter2.properties</filter>" +
" </filters>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile f = createProjectSubFile("res/foo.properties", "foo=abc${<caret>}abc");
assertCompletionVariantsInclude(f, "xxx", "yyy");
}
public void testSearchingFromFilters() throws Exception {
VirtualFile filter = createProjectSubFile("filters/filter.properties", "xxx=1");
importProject("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<build>" +
" <filters>" +
" <filter>filters/filter.properties</filter>" +
" </filters>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile f = createProjectSubFile("res/foo.properties",
"foo=${xxx}");
filter = createProjectSubFile("filters/filter.properties", "xx<caret>x=1");
assertSearchResultsInclude(filter, MavenDomUtil.findPropertyValue(myProject, f, "foo"));
}
public void testCompletionAfterOpenBrace() throws Exception {
createProjectSubDir("res");
importProject("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<build>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile f = createProjectSubFile("res/foo.properties",
"foo=abc${<caret>");
assertCompletionVariantsInclude(f, "project.version");
}
public void testCompletionAfterOpenBraceInTheBeginningOfFile() throws Exception {
createProjectSubDir("res");
importProject("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<build>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile f = createProjectSubFile("res/foo.txt",
"${<caret>");
assertCompletionVariantsInclude(f, "project.version");
}
public void testCompletionAfterOpenBraceInTheBeginningOfPropertiesFile() throws Exception {
if (ignore()) return;
createProjectSubDir("res");
importProject("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<build>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile f = createProjectSubFile("res/foo.properties",
"${<caret>");
assertCompletionVariantsInclude(f, "project.version");
}
public void testCompletionInEmptyFile() throws Exception {
createProjectSubDir("res");
importProject("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<build>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile f = createProjectSubFile("res/foo.properties",
"<caret>");
assertCompletionVariantsDoNotInclude(f, "project.version");
}
public void testRenaming() throws Exception {
createProjectSubDir("res");
importProject("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<properties>" +
" <foo>value</foo>" +
"</properties>" +
"<build>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile f = createProjectSubFile("res/foo.properties",
"foo=abc${f<caret>oo}abc");
assertResolved(f, findTag("project.properties.foo"));
doRename(f, "bar");
assertEquals(createPomXml("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<properties>" +
" <bar>value</bar>" +
"</properties>" +
"<build>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>"),
findPsiFile(myProjectPom).getText());
assertEquals("foo=abc${bar}abc", findPsiFile(f).getText());
}
public void testRenamingFilteredProperty() throws Exception {
VirtualFile filter = createProjectSubFile("filters/filter.properties", "xxx=1");
createProjectSubDir("res");
importProject("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<build>" +
" <filters>" +
" <filter>filters/filter.properties</filter>" +
" </filters>" +
" <resources>" +
" <resource>" +
" <directory>res</directory>" +
" <filtering>true</filtering>" +
" </resource>" +
" </resources>" +
"</build>");
VirtualFile f = createProjectSubFile("res/foo.properties",
"foo=abc${x<caret>xx}abc");
assertResolved(f, MavenDomUtil.findProperty(myProject, filter, "xxx"));
doRename(f, "bar");
assertEquals("foo=abc${bar}abc", findPsiFile(f).getText());
assertEquals("bar=1", findPsiFile(filter).getText());
}
public void testFilteredPropertiesUsages() throws Exception {
}
}
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.network.resource;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import javax.naming.ConfigurationException;
import org.apache.log4j.Logger;
import com.cloud.agent.IAgentControl;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.Command;
import com.cloud.agent.api.DirectNetworkUsageAnswer;
import com.cloud.agent.api.DirectNetworkUsageCommand;
import com.cloud.agent.api.MaintainAnswer;
import com.cloud.agent.api.MaintainCommand;
import com.cloud.agent.api.PingCommand;
import com.cloud.agent.api.ReadyAnswer;
import com.cloud.agent.api.ReadyCommand;
import com.cloud.agent.api.RecurringNetworkUsageAnswer;
import com.cloud.agent.api.RecurringNetworkUsageCommand;
import com.cloud.agent.api.StartupCommand;
import com.cloud.agent.api.StartupTrafficMonitorCommand;
import com.cloud.host.Host;
import com.cloud.resource.ServerResource;
import com.cloud.utils.exception.ExecutionException;
public class TrafficSentinelResource implements ServerResource {
private String _name;
private String _zoneId;
private String _ip;
private String _guid;
private String _url;
private String _inclZones;
private String _exclZones;
private static final Logger s_logger = Logger.getLogger(TrafficSentinelResource.class);
@Override
public boolean configure(String name, Map<String, Object> params) throws ConfigurationException {
try {
_name = name;
_zoneId = (String) params.get("zone");
if (_zoneId == null) {
throw new ConfigurationException("Unable to find zone");
}
_ip = (String) params.get("ipaddress");
if (_ip == null) {
throw new ConfigurationException("Unable to find IP");
}
_guid = (String)params.get("guid");
if (_guid == null) {
throw new ConfigurationException("Unable to find the guid");
}
_url = (String)params.get("url");
if (_url == null) {
throw new ConfigurationException("Unable to find url");
}
_inclZones = (String)params.get("inclZones");
_exclZones = (String)params.get("exclZones");
return true;
} catch (Exception e) {
throw new ConfigurationException(e.getMessage());
}
}
@Override
public StartupCommand[] initialize() {
StartupTrafficMonitorCommand cmd = new StartupTrafficMonitorCommand();
cmd.setName(_name);
cmd.setDataCenter(_zoneId);
cmd.setPod("");
cmd.setPrivateIpAddress(_ip);
cmd.setStorageIpAddress("");
cmd.setVersion(TrafficSentinelResource.class.getPackage().getImplementationVersion());
cmd.setGuid(_guid);
return new StartupCommand[]{cmd};
}
@Override
public Host.Type getType() {
return Host.Type.TrafficMonitor;
}
@Override
public String getName() {
return _name;
}
@Override
public PingCommand getCurrentStatus(final long id) {
return new PingCommand(Host.Type.TrafficMonitor, id);
}
@Override
public boolean start() {
return true;
}
@Override
public boolean stop() {
return true;
}
@Override
public void disconnected() {
return;
}
@Override
public IAgentControl getAgentControl() {
return null;
}
@Override
public void setAgentControl(IAgentControl agentControl) {
return;
}
@Override
public Answer executeRequest(Command cmd) {
if (cmd instanceof ReadyCommand) {
return execute((ReadyCommand) cmd);
} else if (cmd instanceof MaintainCommand) {
return execute((MaintainCommand) cmd);
} else if (cmd instanceof DirectNetworkUsageCommand) {
return execute((DirectNetworkUsageCommand) cmd);
} else if (cmd instanceof RecurringNetworkUsageCommand) {
return execute((RecurringNetworkUsageCommand) cmd);
} else {
return Answer.createUnsupportedCommandAnswer(cmd);
}
}
private Answer execute(ReadyCommand cmd) {
return new ReadyAnswer(cmd);
}
private synchronized RecurringNetworkUsageAnswer execute(RecurringNetworkUsageCommand cmd) {
return new RecurringNetworkUsageAnswer(cmd);
}
private synchronized DirectNetworkUsageAnswer execute(DirectNetworkUsageCommand cmd) {
try {
return getPublicIpBytesSentAndReceived(cmd);
} catch (ExecutionException e) {
return new DirectNetworkUsageAnswer(cmd, e);
}
}
private Answer execute(MaintainCommand cmd) {
return new MaintainAnswer(cmd);
}
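/**
 * Queries the Traffic Sentinel /inmsf/Query endpoint with the script produced by getScript()
 * and parses each comma-separated "publicIp,bytesSent,bytesReceived" line of the response
 * into the answer.
 */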
private DirectNetworkUsageAnswer getPublicIpBytesSentAndReceived(DirectNetworkUsageCommand cmd) throws ExecutionException {
DirectNetworkUsageAnswer answer = new DirectNetworkUsageAnswer(cmd);
try {
//Direct Network Usage
URL trafficSentinel;
//Use Global include/exclude zones if there are no per TS zones
if(_inclZones == null){
_inclZones = cmd.getIncludeZones();
}
if(_exclZones == null){
_exclZones = cmd.getExcludeZones();
}
try {
//Query traffic Sentinel
trafficSentinel = new URL(_url+"/inmsf/Query?script="+URLEncoder.encode(getScript(cmd.getPublicIps(), cmd.getStart(), cmd.getEnd()),"UTF-8")
+"&authenticate=basic&resultFormat=txt");
BufferedReader in = new BufferedReader(
new InputStreamReader(trafficSentinel.openStream()));
String inputLine;
while ((inputLine = in.readLine()) != null){
//Parse the script output
StringTokenizer st = new StringTokenizer(inputLine, ",");
if(st.countTokens() == 3){
String publicIp = st.nextToken();
long bytesSent;
long bytesRcvd;
try {
    bytesSent = Long.parseLong(st.nextToken().trim());
    bytesRcvd = Long.parseLong(st.nextToken().trim());
} catch (NumberFormatException nfe) {
    // Skip malformed rows instead of failing the whole usage answer
    s_logger.debug("Incorrect bytes for IP: " + publicIp);
    continue;
}
long[] bytesSentAndReceived = new long[2];
bytesSentAndReceived[0] = bytesSent;
bytesSentAndReceived[1] = bytesRcvd;
answer.put(publicIp, bytesSentAndReceived);
}
}
in.close();
} catch (MalformedURLException e1) {
s_logger.info("Invalid Traffic Sentinel URL",e1);
throw new ExecutionException(e1.getMessage());
} catch (IOException e) {
s_logger.debug("Error in direct network usage accounting",e);
throw new ExecutionException(e.getMessage());
}
} catch (Exception e) {
s_logger.debug(e);
throw new ExecutionException(e.getMessage());
}
return answer;
}
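/**
 * Builds the Traffic Sentinel query script: two topN queries over the "historytrmx" table
 * total bytes by source IP (traffic sent) and by destination IP (traffic received) for the
 * given public IPs and time window, then print one "address,sent,received" line per address.
 */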
private String getScript(List<String> Ips, Date start, Date end){
String IpAddresses = "";
for(int i=0; i<Ips.size(); i++ ){
IpAddresses += Ips.get(i);
if(i != (Ips.size() - 1)){
// Append comma for all Ips except the last Ip
IpAddresses += ",";
}
}
String destZoneCondition = "";
if(_inclZones !=null && !_inclZones.isEmpty()){
destZoneCondition = " & destinationzone = "+_inclZones;
}
if(_exclZones !=null && !_exclZones.isEmpty()){
destZoneCondition += " & destinationzone != "+_exclZones;
}
String srcZoneCondition = "";
if(_inclZones !=null && !_inclZones.isEmpty()){
srcZoneCondition = " & sourcezone = "+_inclZones;
}
if(_exclZones !=null && !_exclZones.isEmpty()){
srcZoneCondition += " & sourcezone != "+_exclZones;
}
String startDate = getDateString(start);
String endDate = getDateString(end);
StringBuffer sb = new StringBuffer();
sb.append("var q = Query.topN(\"historytrmx\",");
sb.append(" \"ipsource,bytes\",");
sb.append(" \"ipsource = "+IpAddresses+destZoneCondition+"\",");
sb.append(" \""+startDate+", "+endDate+"\",");
sb.append(" \"bytes\",");
sb.append(" 100000);");
sb.append("var totalsSent = {};");
sb.append("var t = q.run(");
sb.append(" function(row,table) {");
sb.append(" if(row[0]) { ");
sb.append(" totalsSent[row[0]] = row[1];");
sb.append(" }");
sb.append(" });");
sb.append("var q = Query.topN(\"historytrmx\",");
sb.append(" \"ipdestination,bytes\",");
sb.append(" \"ipdestination = "+IpAddresses+srcZoneCondition+"\",");
sb.append(" \""+startDate+", "+endDate+"\",");
sb.append(" \"bytes\",");
sb.append(" 100000);");
sb.append("var totalsRcvd = {};");
sb.append("var t = q.run(");
sb.append(" function(row,table) {");
sb.append(" if(row[0]) {");
sb.append(" totalsRcvd[row[0]] = row[1];");
sb.append(" }");
sb.append(" });");
sb.append("for (var addr in totalsSent) {");
sb.append(" var TS = 0;");
sb.append(" var TR = 0;");
sb.append(" if(totalsSent[addr]) TS = totalsSent[addr];");
sb.append(" if(totalsRcvd[addr]) TR = totalsRcvd[addr];");
sb.append(" println(addr + \",\" + TS + \",\" + TR);");
sb.append("}");
return sb.toString();
}
private String getDateString(Date date){
DateFormat dfDate = new SimpleDateFormat("yyyyMMdd HH:mm:ss");
return dfDate.format(date);
}
@Override
public void setName(String name) {
// TODO Auto-generated method stub
}
@Override
public void setConfigParams(Map<String, Object> params) {
// TODO Auto-generated method stub
}
@Override
public Map<String, Object> getConfigParams() {
// TODO Auto-generated method stub
return null;
}
@Override
public int getRunLevel() {
// TODO Auto-generated method stub
return 0;
}
@Override
public void setRunLevel(int level) {
// TODO Auto-generated method stub
}
}
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.svn.dialogs;
import java.awt.BorderLayout;
import java.awt.Component;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.List;
import javax.annotation.Nonnull;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTree;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import javax.annotation.Nullable;
import org.jetbrains.idea.svn.SvnVcs;
import org.jetbrains.idea.svn.browse.DirectoryEntry;
import org.jetbrains.idea.svn.dialogs.browserCache.Expander;
import org.jetbrains.idea.svn.history.SvnFileRevision;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.wc.SVNRevision;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import consulo.util.dataholder.Key;
import com.intellij.openapi.vcs.vfs.VcsFileSystem;
import com.intellij.openapi.vcs.vfs.VcsVirtualFile;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.NavigatableAdapter;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.SpeedSearchComparator;
import com.intellij.ui.TreeSpeedSearch;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.EditSourceOnDoubleClickHandler;
import com.intellij.util.NotNullFunction;
import com.intellij.util.containers.Convertor;
/**
* @author alex
*/
public class RepositoryBrowserComponent extends JPanel implements Disposable, DataProvider {
private JTree myRepositoryTree;
private final SvnVcs myVCS;
public RepositoryBrowserComponent(@Nonnull SvnVcs vcs) {
myVCS = vcs;
createComponent();
}
public JTree getRepositoryTree() {
return myRepositoryTree;
}
@Nonnull
public Project getProject() {
return myVCS.getProject();
}
public void setRepositoryURLs(SVNURL[] urls, final boolean showFiles) {
setRepositoryURLs(urls, showFiles, null, false);
}
public void setRepositoryURLs(SVNURL[] urls,
final boolean showFiles,
@Nullable NotNullFunction<RepositoryBrowserComponent, Expander> defaultExpanderFactory,
boolean expandFirst) {
RepositoryTreeModel model = new RepositoryTreeModel(myVCS, showFiles, this);
if (defaultExpanderFactory != null) {
model.setDefaultExpanderFactory(defaultExpanderFactory);
}
model.setRoots(urls);
Disposer.register(this, model);
myRepositoryTree.setModel(model);
if (expandFirst) {
myRepositoryTree.expandRow(0);
}
}
public void setRepositoryURL(SVNURL url, boolean showFiles, final NotNullFunction<RepositoryBrowserComponent, Expander> defaultExpanderFactory) {
RepositoryTreeModel model = new RepositoryTreeModel(myVCS, showFiles, this);
model.setDefaultExpanderFactory(defaultExpanderFactory);
model.setSingleRoot(url);
Disposer.register(this, model);
myRepositoryTree.setModel(model);
myRepositoryTree.setRootVisible(true);
myRepositoryTree.setSelectionRow(0);
}
public void setRepositoryURL(SVNURL url, boolean showFiles) {
RepositoryTreeModel model = new RepositoryTreeModel(myVCS, showFiles, this);
model.setSingleRoot(url);
Disposer.register(this, model);
myRepositoryTree.setModel(model);
myRepositoryTree.setRootVisible(true);
myRepositoryTree.setSelectionRow(0);
}
public void expandNode(@Nonnull final TreeNode treeNode) {
final TreeNode[] pathToNode = ((RepositoryTreeModel) myRepositoryTree.getModel()).getPathToRoot(treeNode);
if ((pathToNode != null) && (pathToNode.length > 0)) {
final TreePath treePath = new TreePath(pathToNode);
myRepositoryTree.expandPath(treePath);
}
}
public Collection<TreeNode> getExpandedSubTree(@Nonnull final TreeNode treeNode) {
final TreeNode[] pathToNode = ((RepositoryTreeModel) myRepositoryTree.getModel()).getPathToRoot(treeNode);
final Enumeration<TreePath> expanded = myRepositoryTree.getExpandedDescendants(new TreePath(pathToNode));
final List<TreeNode> result = new ArrayList<>();
if (expanded != null) {
while (expanded.hasMoreElements()) {
final TreePath treePath = expanded.nextElement();
result.add((TreeNode) treePath.getLastPathComponent());
}
}
return result;
}
public boolean isExpanded(@Nonnull final TreeNode treeNode) {
final TreeNode[] pathToNode = ((RepositoryTreeModel) myRepositoryTree.getModel()).getPathToRoot(treeNode);
return (pathToNode != null) && (pathToNode.length > 0) && myRepositoryTree.isExpanded(new TreePath(pathToNode));
}
public void addURL(String url) {
try {
((RepositoryTreeModel) myRepositoryTree.getModel()).addRoot(SVNURL.parseURIEncoded(url));
} catch (SVNException e) {
// ignore: the URL could not be parsed
}
}
public void removeURL(String url) {
try {
((RepositoryTreeModel) myRepositoryTree.getModel()).removeRoot(SVNURL.parseURIEncoded(url));
} catch (SVNException e) {
// ignore: the URL could not be parsed
}
}
@Nullable
public DirectoryEntry getSelectedEntry() {
TreePath selection = myRepositoryTree.getSelectionPath();
if (selection == null) {
return null;
}
Object element = selection.getLastPathComponent();
if (element instanceof RepositoryTreeNode) {
RepositoryTreeNode node = (RepositoryTreeNode) element;
return node.getSVNDirEntry();
}
return null;
}
@Nullable
public String getSelectedURL() {
SVNURL selectedUrl = getSelectedSVNURL();
return selectedUrl == null ? null : selectedUrl.toString();
}
@Nullable
public SVNURL getSelectedSVNURL() {
TreePath selection = myRepositoryTree.getSelectionPath();
if (selection == null) {
return null;
}
Object element = selection.getLastPathComponent();
if (element instanceof RepositoryTreeNode) {
RepositoryTreeNode node = (RepositoryTreeNode) element;
return node.getURL();
}
return null;
}
public void addChangeListener(TreeSelectionListener l) {
myRepositoryTree.addTreeSelectionListener(l);
}
public void removeChangeListener(TreeSelectionListener l) {
myRepositoryTree.removeTreeSelectionListener(l);
}
public Component getPreferredFocusedComponent() {
return myRepositoryTree;
}
private void createComponent() {
setLayout(new BorderLayout());
myRepositoryTree = new Tree();
myRepositoryTree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
myRepositoryTree.setRootVisible(false);
myRepositoryTree.setShowsRootHandles(true);
JScrollPane scrollPane =
ScrollPaneFactory.createScrollPane(myRepositoryTree, JScrollPane.VERTICAL_SCROLLBAR_ALWAYS, JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS);
add(scrollPane, BorderLayout.CENTER);
myRepositoryTree.setCellRenderer(new SvnRepositoryTreeCellRenderer());
TreeSpeedSearch search = new TreeSpeedSearch(myRepositoryTree, new Convertor<TreePath, String>() {
@Override
public String convert(TreePath o) {
Object component = o.getLastPathComponent();
if (component instanceof RepositoryTreeNode) {
return ((RepositoryTreeNode)component).getURL().toDecodedString();
}
return null;
}
});
search.setComparator(new SpeedSearchComparator(false, true));
EditSourceOnDoubleClickHandler.install(myRepositoryTree);
}
@Nullable
public RepositoryTreeNode getSelectedNode() {
TreePath selection = myRepositoryTree.getSelectionPath();
if (selection != null && selection.getLastPathComponent() instanceof RepositoryTreeNode) {
return (RepositoryTreeNode) selection.getLastPathComponent();
}
return null;
}
public void setSelectedNode(@Nonnull final TreeNode node) {
final TreeNode[] pathNodes = ((RepositoryTreeModel) myRepositoryTree.getModel()).getPathToRoot(node);
myRepositoryTree.setSelectionPath(new TreePath(pathNodes));
}
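/**
 * Wraps the currently selected repository file entry into a VcsVirtualFile so its content
 * can be shown; returns null for directories, binary files, files without an extension and
 * empty selections.
 */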
@Nullable
public VirtualFile getSelectedVcsFile() {
final RepositoryTreeNode node = getSelectedNode();
if (node == null) return null;
DirectoryEntry entry = node.getSVNDirEntry();
if (entry == null || !entry.isFile()) {
return null;
}
String name = entry.getName();
FileTypeManager manager = FileTypeManager.getInstance();
if (entry.getName().lastIndexOf('.') > 0 && !manager.getFileTypeByFileName(name).isBinary()) {
SVNURL url = node.getURL();
final SvnFileRevision revision = new SvnFileRevision(myVCS, SVNRevision.UNDEFINED, SVNRevision.HEAD, url.toString(),
entry.getAuthor(), entry.getDate(), null, null);
return new VcsVirtualFile(node.getSVNDirEntry().getName(), revision, VcsFileSystem.getInstance());
} else {
return null;
}
}
@Nullable
@Override
public Object getData(Key<?> dataId) {
if (CommonDataKeys.NAVIGATABLE == dataId) {
final Project project = myVCS.getProject();
if (project == null || project.isDefault()) {
return null;
}
final VirtualFile vcsFile = getSelectedVcsFile();
// Do not return an OpenFileDescriptor instance here: in that case SelectInAction would be enabled and invoking it
// (from the keyboard) would raise an error - see IDEA-104113 - caused by the following operations inside
// SelectInAction.actionPerformed():
// - first the VcsVirtualFile content is loaded, which for svn shows a progress dialog
// - then the DataContext from SelectInAction is accessed, which fails because the current event count has already
//   changed (because of the progress dialog)
return vcsFile != null ? new NavigatableAdapter() {
@Override
public void navigate(boolean requestFocus) {
navigate(project, vcsFile, requestFocus);
}
} : null;
} else if (CommonDataKeys.PROJECT == dataId) {
return myVCS.getProject();
}
return null;
}
public void dispose() {
}
public void setLazyLoadingExpander(final NotNullFunction<RepositoryBrowserComponent, Expander> expanderFactory) {
((RepositoryTreeModel) myRepositoryTree.getModel()).setDefaultExpanderFactory(expanderFactory);
}
}
package com.lieverandiver.thesisproject.adapter;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.support.v7.widget.RecyclerView;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.lieverandiver.thesisproject.R;
import com.remswork.project.alice.exception.GradingFactorException;
import com.remswork.project.alice.model.ActivityResult;
import com.remswork.project.alice.model.Student;
import com.remswork.project.alice.service.ActivityService;
import com.remswork.project.alice.service.impl.ActivityServiceImpl;
import java.util.ArrayList;
import java.util.List;
public class ActivityResultAdapter extends RecyclerView
.Adapter<ActivityResultAdapter.SimpleActivityViewHolder> {
private LayoutInflater layoutInflater;
private List<ActivityResult> resultList;
private Context context;
private ActivityAdapter.OnClickListener onClickListener;
private int totalItem;
public ActivityResultAdapter(Context context, List<ActivityResult> resultList, int totalItem) {
layoutInflater = LayoutInflater.from(context);
this.resultList = resultList;
this.context = context;
this.totalItem = totalItem;
}
@Override
public SimpleActivityViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view = layoutInflater.inflate(R.layout.activity_all_result_cardview, parent,false);
return new SimpleActivityViewHolder(view);
}
@Override
public void onBindViewHolder(SimpleActivityViewHolder holder, int position) {
holder.setView(resultList.get(position), position);
}
@Override
public int getItemCount() {
return resultList.size();
}
class SimpleActivityViewHolder extends RecyclerView.ViewHolder {
private TextView txName;
private TextView txScore;
private TextView txInit;
private Button btnSave;
private Button btnDelete;
private Button btnCancel;
LinearLayout laOptionPane;
ImageView imgThree;
private ActivityResult result;
private int position;
SimpleActivityViewHolder(View itemView) {
super(itemView);
txName = (TextView) itemView.findViewById(R.id.result_cardview_name);
txScore = (TextView) itemView.findViewById(R.id.result_cardview_score);
txInit = (TextView) itemView.findViewById(R.id.result_cardview_init);
laOptionPane = (LinearLayout) itemView.findViewById(R.id.result_linearoption);
imgThree = (ImageView) itemView.findViewById(R.id.result_option);
btnSave = (Button) itemView.findViewById(R.id.result_cardview_save);
btnDelete = (Button) itemView.findViewById(R.id.result_cardview_delete);
btnCancel = (Button) itemView.findViewById(R.id.result_cardview_cancel);
}
void setView(final ActivityResult result, final int position) {
this.result = result;
this.position = position;
if(result != null) {
Student student = result.getStudent();
String name = String.format("%s, %s %s.",
student.getLastName(),
student.getFirstName(),
student.getMiddleName().substring(0, 1)
);
final String score = String.valueOf(result.getScore());
String init = student.getLastName().substring(0, 1);
txName.setText(name);
txScore.setText(score);
txInit.setText(init);
txScore.addTextChangedListener(textWatcher);
imgThree.setOnClickListener(new Button.OnClickListener(){
@Override
public void onClick(View v) {
laOptionPane.setVisibility(View.VISIBLE);
txScore.setEnabled(true);
}
});
btnCancel.setOnClickListener(new Button.OnClickListener(){
@Override
public void onClick(View v) {
laOptionPane.setVisibility(View.GONE);
txScore.setEnabled(false);
txScore.setText(score);
}
});
btnDelete.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
getDialog().show();
}
});
btnSave.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try{
ActivityService activityService = new ActivityServiceImpl();
activityService.updateActivityResultByActivityAndStudentId(
Integer.parseInt(txScore.getText().toString()),
result.getActivity().getId(), result.getStudent().getId());
resultList.get(position).setScore(Integer.parseInt(txScore.getText().toString()));
laOptionPane.setVisibility(View.GONE);
txScore.setEnabled(false);
notifyDataSetChanged();
}catch (GradingFactorException e){
e.printStackTrace();
}
}
});
}
}
private AlertDialog getDialog()
{
AlertDialog dialog = new AlertDialog.Builder(context)
.setTitle("Delete")
.setMessage("Do you want to Delete")
.setIcon(R.drawable.delete)
.setPositiveButton("Delete", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
try{
ActivityService activityService = new ActivityServiceImpl();
activityService.deleteActivityResultByActivityAndStudentId(
result.getActivity().getId(), result.getStudent().getId());
List<ActivityResult> cResultList = new ArrayList<>();
resultList.remove(position);
notifyDataSetChanged();
for(int i=0;i<resultList.size();i++) {
cResultList.add(resultList.get(i));
}
resultList = cResultList;
}catch (GradingFactorException e){
e.printStackTrace();
}
dialog.dismiss();
}
}).setNegativeButton("cancel", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
})
.create();
return dialog;
}
TextWatcher textWatcher = new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {
}
@Override
public void afterTextChanged(Editable s) {
            String text = txScore.getText().toString().trim();
            int score = Integer.parseInt(text.isEmpty() ? "0" : text);
            btnSave.setEnabled(score <= totalItem);
}
};
}
}
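// A minimal, hypothetical wiring sketch (not part of the original sources): how a screen might
// attach ActivityResultAdapter to a RecyclerView. The view id R.id.recycler_results and the
// already-loaded result list are assumptions used purely for illustration.
class ActivityResultListBinder {
    static void bind(android.app.Activity activity, List<ActivityResult> results, int totalItems) {
        RecyclerView recyclerView =
                (RecyclerView) activity.findViewById(R.id.recycler_results); // assumed view id
        recyclerView.setLayoutManager(new android.support.v7.widget.LinearLayoutManager(activity));
        recyclerView.setAdapter(new ActivityResultAdapter(activity, results, totalItems));
    }
}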
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sejda.sambox.input;
import static org.sejda.sambox.util.CharUtils.ASCII_SPACE;
import static org.sejda.sambox.util.CharUtils.isEOF;
import static org.sejda.sambox.util.CharUtils.isEOL;
import static org.sejda.sambox.util.CharUtils.isNul;
import static org.sejda.sambox.util.CharUtils.isSpace;
import static org.sejda.sambox.util.CharUtils.isWhitespace;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import org.sejda.commons.FastByteArrayOutputStream;
import org.sejda.commons.util.IOUtils;
import org.sejda.io.SeekableSource;
import org.sejda.io.SeekableSources;
import org.sejda.sambox.contentstream.PDContentStream;
import org.sejda.sambox.contentstream.operator.Operator;
import org.sejda.sambox.contentstream.operator.OperatorName;
import org.sejda.sambox.cos.COSBase;
import org.sejda.sambox.cos.COSDictionary;
import org.sejda.sambox.cos.COSName;
/**
 * Component responsible for parsing a content stream to extract operands and operators.
*
* @author Andrea Vacondio
*/
public class ContentStreamParser extends SourceReader
{
private ContentStreamCOSParser cosParser;
private List<Object> tokens = new ArrayList<>();
public ContentStreamParser(PDContentStream stream) throws IOException
{
this(SeekableSources.inMemorySeekableSourceFrom(stream.getContents()));
}
public ContentStreamParser(SeekableSource source)
{
super(source);
this.cosParser = new ContentStreamCOSParser(source());
}
/**
* @return a list of tokens retrieved parsing the source this parser was created from.
* @throws IOException
*/
public List<Object> tokens() throws IOException
{
tokens.clear();
Object token;
while ((token = nextParsedToken()) != null)
{
tokens.add(token);
}
return Collections.unmodifiableList(tokens);
}
/**
* @return the next token parsed from the content stream
* @throws IOException
*/
public Object nextParsedToken() throws IOException
{
skipSpaces();
long pos = position();
COSBase token = cosParser.nextParsedToken();
if (token != null)
{
return token;
}
position(pos);
return nextOperator();
}
private Object nextOperator() throws IOException
{
if ('B' == (char) source().peek())
{
Operator operator = Operator.getOperator(readToken());
if (OperatorName.BEGIN_INLINE_IMAGE.equals(operator.getName()))
{
nextInlineImage(operator);
}
return operator;
}
return Optional.ofNullable(readToken()).filter(s -> s.length() > 0)
.map(Operator::getOperator).orElse(null);
}
private void nextInlineImage(Operator operator) throws IOException
{
COSDictionary imageParams = new COSDictionary();
operator.setImageParameters(imageParams);
COSBase nextToken = null;
long position = position();
while ((nextToken = cosParser.nextParsedToken()) instanceof COSName)
{
imageParams.setItem((COSName) nextToken, cosParser.nextParsedToken());
position = position();
}
position(position);
operator.setImageData(nextImageData());
}
/**
* Reads data until it finds an "EI" operator followed by a whitespace.
*
* @return the image data
* @throws IOException
*/
private byte[] nextImageData() throws IOException
{
skipSpaces();
skipExpected(OperatorName.BEGIN_INLINE_IMAGE_DATA);
if (!isWhitespace(source().read()))
{
source().back();
}
try (FastByteArrayOutputStream imageData = new FastByteArrayOutputStream())
{
int current;
while ((current = source().read()) != -1)
{
long position = source().position();
if ((current == 'E' && isEndOfImageFrom(position - 1))
|| (isWhitespace(current) && isEndOfImageFrom(position)))
{
break;
}
imageData.write(current);
}
return imageData.toByteArray();
}
}
private boolean isEndOfImageFrom(long position) throws IOException
{
long currentPosition = source().position();
source().position(position);
int current = source().read();
if (current == 'E')
{
current = source().read();
            // if it is not an EI we restore the position and go on
if (current == 'I' && (isEndOfImage() || isEOF(source().peek())))
{
return true;
}
}
source().position(currentPosition);
return false;
}
private boolean isEndOfImage() throws IOException
{
long currentPosition = source().position();
try
{
int current = source().read();
// we do what PDF.js does
if (isSpace(current) || isEOL(current))
{
// from PDF.js: Let's check the next ten bytes are ASCII... just be sure.
for (int i = 0; i < 10; i++)
{
current = source().read();
if (isNul(current) && !isNul(source().peek()))
{
// from PDF.js: NUL bytes are not supposed to occur *outside* of inline
// images, but some PDF generators violate that assumption,
// thus breaking the EI detection heuristics used below.
//
// However, we can't unconditionally treat NUL bytes as "ASCII",
// since that *could* result in inline images being truncated.
//
// To attempt to address this, we'll still treat any *sequence*
// of NUL bytes as non-ASCII, but for a *single* NUL byte we'll
// continue checking the `followingBytes` (fixes issue8823.pdf).
continue;
}
if (!isEOF(current) && !isEOL(current)
&& (current < ASCII_SPACE || current > 0x7F))
{
// from PDF.js: Not a LF, CR, SPACE or any visible ASCII character, i.e. it's binary stuff.
return false;
}
}
return true;
}
return false;
}
finally
{
source().position(currentPosition);
}
}
@Override
public void close() throws IOException
{
super.close();
IOUtils.closeQuietly(cosParser);
}
}
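/*
 * A minimal usage sketch (an assumption, not part of the original sources): tokenize a raw
 * content stream and separate operators from operands using only the API visible above. The
 * byte[] overload of SeekableSources.inMemorySeekableSourceFrom is assumed to be available.
 */
class ContentStreamParserUsageExample
{
    static void dumpTokens(byte[] rawContentStream) throws IOException
    {
        ContentStreamParser parser = new ContentStreamParser(
                SeekableSources.inMemorySeekableSourceFrom(rawContentStream));
        try
        {
            for (Object token : parser.tokens())
            {
                if (token instanceof Operator)
                {
                    // operators carry their name and, for BI, the inline image parameters/data
                    System.out.println("operator: " + ((Operator) token).getName());
                }
                else
                {
                    // everything else is an operand (COS numbers, names, strings, arrays, ...)
                    System.out.println("operand: " + token);
                }
            }
        }
        finally
        {
            parser.close();
        }
    }
}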
|
|
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.andes.client.handler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.andes.AMQException;
import org.wso2.andes.client.protocol.AMQProtocolSession;
import org.wso2.andes.client.state.AMQMethodNotImplementedException;
import org.wso2.andes.client.state.AMQStateManager;
import org.wso2.andes.framing.*;
import java.util.HashMap;
import java.util.Map;
/**
 * Base client-side dispatcher that routes received AMQP method bodies to their handlers.
 * Protocol-version-specific subclasses are obtained via {@link #newMethodDispatcher}.
 */
public class ClientMethodDispatcherImpl implements MethodDispatcher
{
private static final BasicCancelOkMethodHandler _basicCancelOkMethodHandler = BasicCancelOkMethodHandler.getInstance();
private static final BasicDeliverMethodHandler _basicDeliverMethodHandler = BasicDeliverMethodHandler.getInstance();
private static final BasicReturnMethodHandler _basicReturnMethodHandler = BasicReturnMethodHandler.getInstance();
private static final ChannelCloseMethodHandler _channelCloseMethodHandler = ChannelCloseMethodHandler.getInstance();
private static final ChannelCloseOkMethodHandler _channelCloseOkMethodHandler = ChannelCloseOkMethodHandler.getInstance();
private static final ChannelFlowOkMethodHandler _channelFlowOkMethodHandler = ChannelFlowOkMethodHandler.getInstance();
private static final ChannelFlowMethodHandler _channelFlowMethodHandler = ChannelFlowMethodHandler.getInstance();
private static final ConnectionCloseMethodHandler _connectionCloseMethodHandler = ConnectionCloseMethodHandler.getInstance();
private static final ConnectionOpenOkMethodHandler _connectionOpenOkMethodHandler = ConnectionOpenOkMethodHandler.getInstance();
private static final ConnectionRedirectMethodHandler _connectionRedirectMethodHandler = ConnectionRedirectMethodHandler.getInstance();
private static final ConnectionSecureMethodHandler _connectionSecureMethodHandler = ConnectionSecureMethodHandler.getInstance();
private static final ConnectionStartMethodHandler _connectionStartMethodHandler = ConnectionStartMethodHandler.getInstance();
private static final ConnectionTuneMethodHandler _connectionTuneMethodHandler = ConnectionTuneMethodHandler.getInstance();
private static final ExchangeBoundOkMethodHandler _exchangeBoundOkMethodHandler = ExchangeBoundOkMethodHandler.getInstance();
private static final QueueDeleteOkMethodHandler _queueDeleteOkMethodHandler = QueueDeleteOkMethodHandler.getInstance();
private static final Logger _logger = LoggerFactory.getLogger(ClientMethodDispatcherImpl.class);
private static interface DispatcherFactory
{
public ClientMethodDispatcherImpl createMethodDispatcher(AMQProtocolSession session);
}
private static final Map<ProtocolVersion, DispatcherFactory> _dispatcherFactories =
new HashMap<ProtocolVersion, DispatcherFactory>();
static
{
_dispatcherFactories.put(ProtocolVersion.v8_0,
new DispatcherFactory()
{
public ClientMethodDispatcherImpl createMethodDispatcher(AMQProtocolSession session)
{
return new ClientMethodDispatcherImpl_8_0(session);
}
});
_dispatcherFactories.put(ProtocolVersion.v0_9,
new DispatcherFactory()
{
public ClientMethodDispatcherImpl createMethodDispatcher(AMQProtocolSession session)
{
return new ClientMethodDispatcherImpl_0_9(session);
}
});
_dispatcherFactories.put(ProtocolVersion.v0_91,
new DispatcherFactory()
{
public ClientMethodDispatcherImpl createMethodDispatcher(AMQProtocolSession session)
{
return new ClientMethodDispatcherImpl_0_91(session);
}
});
}
public static ClientMethodDispatcherImpl newMethodDispatcher(ProtocolVersion version, AMQProtocolSession session)
{
if (_logger.isDebugEnabled())
{
_logger.debug("New Method Dispatcher:" + session);
}
DispatcherFactory factory = _dispatcherFactories.get(version);
return factory.createMethodDispatcher(session);
}
AMQProtocolSession _session;
public ClientMethodDispatcherImpl(AMQProtocolSession session)
{
_session = session;
}
public AMQStateManager getStateManager()
{
return _session.getStateManager();
}
public boolean dispatchAccessRequestOk(AccessRequestOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchBasicCancelOk(BasicCancelOkBody body, int channelId) throws AMQException
{
_basicCancelOkMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchBasicConsumeOk(BasicConsumeOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchBasicDeliver(BasicDeliverBody body, int channelId) throws AMQException
{
_basicDeliverMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchBasicGetEmpty(BasicGetEmptyBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchBasicGetOk(BasicGetOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchBasicQosOk(BasicQosOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchBasicReturn(BasicReturnBody body, int channelId) throws AMQException
{
_basicReturnMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchChannelClose(ChannelCloseBody body, int channelId) throws AMQException
{
_channelCloseMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchChannelCloseOk(ChannelCloseOkBody body, int channelId) throws AMQException
{
_channelCloseOkMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchChannelFlow(ChannelFlowBody body, int channelId) throws AMQException
{
_channelFlowMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchChannelFlowOk(ChannelFlowOkBody body, int channelId) throws AMQException
{
_channelFlowOkMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchChannelOpenOk(ChannelOpenOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchConnectionClose(ConnectionCloseBody body, int channelId) throws AMQException
{
_connectionCloseMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchConnectionCloseOk(ConnectionCloseOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchConnectionOpenOk(ConnectionOpenOkBody body, int channelId) throws AMQException
{
_connectionOpenOkMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchConnectionRedirect(ConnectionRedirectBody body, int channelId) throws AMQException
{
_connectionRedirectMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchConnectionSecure(ConnectionSecureBody body, int channelId) throws AMQException
{
_connectionSecureMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchConnectionStart(ConnectionStartBody body, int channelId) throws AMQException
{
_connectionStartMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchConnectionTune(ConnectionTuneBody body, int channelId) throws AMQException
{
_connectionTuneMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchQueueDeleteOk(QueueDeleteOkBody body, int channelId) throws AMQException
{
_queueDeleteOkMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchQueuePurgeOk(QueuePurgeOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchStreamCancelOk(StreamCancelOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchStreamConsumeOk(StreamConsumeOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchAccessRequest(AccessRequestBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchBasicAck(BasicAckBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchBasicCancel(BasicCancelBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchBasicConsume(BasicConsumeBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchBasicGet(BasicGetBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchBasicPublish(BasicPublishBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchBasicQos(BasicQosBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchBasicRecover(BasicRecoverBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchBasicReject(BasicRejectBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchChannelOpen(ChannelOpenBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchConnectionOpen(ConnectionOpenBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchConnectionSecureOk(ConnectionSecureOkBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchConnectionStartOk(ConnectionStartOkBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchConnectionTuneOk(ConnectionTuneOkBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchDtxSelect(DtxSelectBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchDtxStart(DtxStartBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchExchangeBound(ExchangeBoundBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchExchangeDeclare(ExchangeDeclareBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchExchangeDelete(ExchangeDeleteBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchFileAck(FileAckBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchFileCancel(FileCancelBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchFileConsume(FileConsumeBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchFilePublish(FilePublishBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchFileQos(FileQosBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchFileReject(FileRejectBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchQueueBind(QueueBindBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchQueueDeclare(QueueDeclareBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchQueueDelete(QueueDeleteBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchQueuePurge(QueuePurgeBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchStreamCancel(StreamCancelBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchStreamConsume(StreamConsumeBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchStreamPublish(StreamPublishBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchStreamQos(StreamQosBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchTunnelRequest(TunnelRequestBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchTxCommit(TxCommitBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchTxRollback(TxRollbackBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchTxSelect(TxSelectBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchDtxSelectOk(DtxSelectOkBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchDtxStartOk(DtxStartOkBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchExchangeBoundOk(ExchangeBoundOkBody body, int channelId) throws AMQException
{
_exchangeBoundOkMethodHandler.methodReceived(_session, body, channelId);
return true;
}
public boolean dispatchExchangeDeclareOk(ExchangeDeclareOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchExchangeDeleteOk(ExchangeDeleteOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchFileCancelOk(FileCancelOkBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchFileConsumeOk(FileConsumeOkBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchFileDeliver(FileDeliverBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchFileOpen(FileOpenBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchFileOpenOk(FileOpenOkBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchFileQosOk(FileQosOkBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchFileReturn(FileReturnBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchFileStage(FileStageBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchQueueBindOk(QueueBindOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchQueueDeclareOk(QueueDeclareOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchStreamDeliver(StreamDeliverBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchStreamQosOk(StreamQosOkBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchStreamReturn(StreamReturnBody body, int channelId) throws AMQException
{
throw new AMQMethodNotImplementedException(body);
}
public boolean dispatchTxCommitOk(TxCommitOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchTxRollbackOk(TxRollbackOkBody body, int channelId) throws AMQException
{
return false;
}
public boolean dispatchTxSelectOk(TxSelectOkBody body, int channelId) throws AMQException
{
return false;
}
}
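/*
 * A minimal, hypothetical sketch (not part of the original sources): obtain the version-specific
 * dispatcher through the factory method above and dispatch a received method body. The session,
 * body and channel id are assumed to come from the surrounding protocol layer.
 */
class ClientMethodDispatcherUsageExample
{
    static boolean handleChannelClose(AMQProtocolSession session, ChannelCloseBody body, int channelId)
            throws AMQException
    {
        MethodDispatcher dispatcher =
                ClientMethodDispatcherImpl.newMethodDispatcher(ProtocolVersion.v0_91, session);
        // returns true when a handler consumed the frame, false when the caller may ignore it
        return dispatcher.dispatchChannelClose(body, channelId);
    }
}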
|
|
/**
* Copyright (C) 2009-2012 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.restygwt.client.basic;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.shared.EventBus;
import com.google.gwt.event.shared.SimpleEventBus;
import com.google.gwt.junit.client.GWTTestCase;
import com.google.gwt.user.client.Timer;
import org.fusesource.restygwt.client.Defaults;
import org.fusesource.restygwt.client.Method;
import org.fusesource.restygwt.client.MethodCallback;
import org.fusesource.restygwt.client.Resource;
import org.fusesource.restygwt.client.RestServiceProxy;
import org.fusesource.restygwt.client.cache.QueueableCacheStorage;
import org.fusesource.restygwt.client.cache.VolatileQueueableCacheStorage;
import org.fusesource.restygwt.client.callback.CachingCallbackFilter;
import org.fusesource.restygwt.client.callback.CallbackFactory;
import org.fusesource.restygwt.client.callback.DefaultFilterawareRequestCallback;
import org.fusesource.restygwt.client.callback.FilterawareRequestCallback;
import org.fusesource.restygwt.client.callback.ModelChangeCallbackFilter;
import org.fusesource.restygwt.client.dispatcher.CachingDispatcherFilter;
import org.fusesource.restygwt.client.dispatcher.DefaultFilterawareDispatcher;
import org.fusesource.restygwt.client.dispatcher.FilterawareDispatcher;
/**
 * Test to check that {@link CachingCallbackFilter}, {@link QueueableCacheStorage}
 * and the caching machinery as a whole work as expected.
*
* @author <a href="mailto:andi.balke@gmail.com">andi</a>
*/
public class CacheCallbackTestGwt extends GWTTestCase {
private BlockingTimeoutService service;
private final int TESTCLASS_DELAY_TIMEOUT = 15000;
@Override
public String getModuleName() {
return "org.fusesource.restygwt.CachingTestGwt";
}
/**
 * Prove that all callbacks are registered, called and unregistered without
 * using the cache; in this test all calls will reach the server.
 *
 * This is done by calling the same method multiple times.
*/
public void testNonCachingCallback() {
service.noncachingCall(0, new MethodCallback<Void>() {
@Override
public void onSuccess(Method method, Void response) {
GWT.log("passing first call");
}
@Override
public void onFailure(Method method, Throwable exception) {
fail("failure on read: " + exception.getMessage());
}
});
service.noncachingCall(1, new MethodCallback<Void>() {
@Override
public void onSuccess(Method method, Void response) {
GWT.log("passing second call");
}
@Override
public void onFailure(Method method, Throwable exception) {
fail("failure on read: " + exception.getMessage());
}
});
service.noncachingCall(2, new MethodCallback<Void>() {
@Override
public void onSuccess(Method method, Void response) {
GWT.log("passing third call");
finishTest();
}
@Override
public void onFailure(Method method, Throwable exception) {
fail("failure on read: " + exception.getMessage());
}
});
// wait... we are in async testing...
delayTestFinish(TESTCLASS_DELAY_TIMEOUT);
}
/**
 * Prove that all callbacks are registered, performed and unregistered when
 * using the cache.
 *
 * Not all calls will reach the server; {@link VolatileQueueableCacheStorage} will
 * need to handle some of the callbacks on its own.
 *
 * This is done by calling the same method multiple times.
 *
 * First the simple case:
 * use the cache once the first method call has returned from the backend, so there
 * won't be any callback queuing yet.
*/
public void testSequential_NonQueuing_CachingCallback() {
// backend reaching call
service.cachingCall(0, new MethodCallback<Void>() {
@Override
public void onSuccess(Method method, Void response) {
GWT.log("passing first non-queuing call");
}
@Override
public void onFailure(Method method, Throwable exception) {
fail("failure on read: " + exception.getMessage());
}
});
// wait a second for callback to be back for sure
        // usually there would be something like Thread.sleep, but that's not possible here
new Timer() {
@Override
public void run() {
/*
* two calls that are handled directly by the cache
* (no backend interaction at all)
*/
service.cachingCall(0, new MethodCallback<Void>() {
@Override
public void onSuccess(Method method, Void response) {
GWT.log("passing second non-queuing call");
}
@Override
public void onFailure(Method method, Throwable exception) {
fail("failure on read: " + exception.getMessage());
}
});
}
}.schedule(1000);
// this is the third one, started in 3 seconds
new Timer() {
@Override
public void run() {
service.cachingCall(0, new MethodCallback<Void>() {
@Override
public void onSuccess(Method method, Void response) {
GWT.log("passing third non-queuing call");
finishTest();
}
@Override
public void onFailure(Method method, Throwable exception) {
fail("failure on read: " + exception.getMessage());
}
});
}
}.schedule(3000);
// wait... we are in async testing...
delayTestFinish(TESTCLASS_DELAY_TIMEOUT);
}
public void testSequential_Queuing_CachingCallback() {
// backend reaching call
service.cachingQueuingCall(2, new MethodCallback<Void>() {
@Override
public void onSuccess(Method method, Void response) {
GWT.log("passing first queuing call");
}
@Override
public void onFailure(Method method, Throwable exception) {
fail("failure on read: " + exception.getMessage());
}
});
/*
* same call again to get this callback queued
* and called when the first is back from backend
*/
service.cachingQueuingCall(2, new MethodCallback<Void>() {
@Override
public void onSuccess(Method method, Void response) {
GWT.log("passing second queuing call");
finishTest();
}
@Override
public void onFailure(Method method, Throwable exception) {
fail("failure on read: " + exception.getMessage());
}
});
// wait... we are in async testing...
delayTestFinish(TESTCLASS_DELAY_TIMEOUT);
}
/**
 * Usually this wiring is done by Gin in a real application, or at least in a
 * central place rather than in the activity itself.
*/
@Override
public void gwtSetUp() {
/*
* configure RESTY to use cache, usually done in gin
*/
final EventBus eventBus = new SimpleEventBus();
final QueueableCacheStorage cacheStorage = new VolatileQueueableCacheStorage();
FilterawareDispatcher dispatcher = new DefaultFilterawareDispatcher();
dispatcher.addFilter(new CachingDispatcherFilter(cacheStorage, new CallbackFactory() {
@Override
public FilterawareRequestCallback createCallback(Method method) {
FilterawareRequestCallback retryingCallback = new DefaultFilterawareRequestCallback(method);
retryingCallback.addFilter(new CachingCallbackFilter(cacheStorage));
retryingCallback.addFilter(new ModelChangeCallbackFilter(eventBus));
return retryingCallback;
}
}));
Defaults.setDispatcher(dispatcher);
/*
* setup the service, usually done in gin
*/
Resource resource = new Resource(GWT.getModuleBaseURL());
service = GWT.create(BlockingTimeoutService.class);
((RestServiceProxy) service).setResource(resource);
}
}
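/*
 * Hypothetical sketch (not the real interface): a RestyGWT service such as BlockingTimeoutService
 * is typically declared as a RestService with JAX-RS annotations and an asynchronous
 * MethodCallback parameter. The path and parameter name below are assumptions for illustration only.
 */
interface BlockingTimeoutServiceSketch extends org.fusesource.restygwt.client.RestService {
    @javax.ws.rs.GET
    @javax.ws.rs.Path("/blocking/{delay}") // assumed path
    void noncachingCall(@javax.ws.rs.PathParam("delay") int delay, MethodCallback<Void> callback);
}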
|
|
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.samples.apps.iosched.gcm;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.text.TextUtils;
import com.google.samples.apps.iosched.BuildConfig;
import com.google.samples.apps.iosched.util.AccountUtils;
import java.io.IOException;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import static com.google.samples.apps.iosched.util.LogUtils.LOGD;
import static com.google.samples.apps.iosched.util.LogUtils.LOGE;
import static com.google.samples.apps.iosched.util.LogUtils.LOGI;
import static com.google.samples.apps.iosched.util.LogUtils.LOGV;
import static com.google.samples.apps.iosched.util.LogUtils.makeLogTag;
/**
* Helper class used to communicate with the demo server.
*/
public final class ServerUtilities {
private static final String TAG = makeLogTag("GCMs");
private static final String PREFERENCES = "com.google.samples.apps.iosched.gcm";
private static final String PROPERTY_REGISTERED_TS = "registered_ts";
private static final String PROPERTY_REG_ID = "reg_id";
private static final String PROPERTY_GCM_KEY = "gcm_key";
private static final int MAX_ATTEMPTS = 5;
private static final int BACKOFF_MILLI_SECONDS = 2000;
private static final Random sRandom = new Random();
private static boolean checkGcmEnabled() {
if (TextUtils.isEmpty(BuildConfig.GCM_SERVER_URL)) {
LOGD(TAG, "GCM feature disabled (no URL configured)");
return false;
} else if (TextUtils.isEmpty(BuildConfig.GCM_API_KEY)) {
LOGD(TAG, "GCM feature disabled (no API key configured)");
return false;
} else if (TextUtils.isEmpty(BuildConfig.GCM_SENDER_ID)) {
LOGD(TAG, "GCM feature disabled (no sender ID configured)");
return false;
}
return true;
}
/**
* Register this account/device pair within the server.
*
* @param context Current context
* @param gcmId The GCM registration ID for this device
* @param gcmKey The GCM key with which to register.
* @return whether the registration succeeded or not.
*/
public static boolean register(final Context context, final String gcmId, final String gcmKey) {
if (!checkGcmEnabled()) {
return false;
}
LOGI(TAG, "registering device (gcm_id = " + gcmId + ")");
String serverUrl = BuildConfig.GCM_SERVER_URL + "/register";
LOGI(TAG, "registering on GCM with GCM key: " + AccountUtils.sanitizeGcmKey(gcmKey));
Map<String, String> params = new HashMap<String, String>();
params.put("gcm_id", gcmId);
params.put("gcm_key", gcmKey);
long backoff = BACKOFF_MILLI_SECONDS + sRandom.nextInt(1000);
// Once GCM returns a registration id, we need to register it in the
// demo server. As the server might be down, we will retry it a couple
// times.
for (int i = 1; i <= MAX_ATTEMPTS; i++) {
LOGV(TAG, "Attempt #" + i + " to register");
try {
post(serverUrl, params, BuildConfig.GCM_API_KEY);
setRegisteredOnServer(context, true, gcmId, gcmKey);
return true;
} catch (IOException e) {
// Here we are simplifying and retrying on any error; in a real
// application, it should retry only on unrecoverable errors
// (like HTTP error code 503).
LOGE(TAG, "Failed to register on attempt " + i, e);
if (i == MAX_ATTEMPTS) {
break;
}
try {
LOGV(TAG, "Sleeping for " + backoff + " ms before retry");
Thread.sleep(backoff);
} catch (InterruptedException e1) {
// Activity finished before we complete - exit.
LOGD(TAG, "Thread interrupted: abort remaining retries!");
Thread.currentThread().interrupt();
return false;
}
// increase backoff exponentially
backoff *= 2;
}
}
return false;
}
/**
* Unregister this account/device pair within the server.
*
* @param context Current context
* @param gcmId The GCM registration ID for this device
*/
static void unregister(final Context context, final String gcmId) {
if (!checkGcmEnabled()) {
return;
}
LOGI(TAG, "unregistering device (gcmId = " + gcmId + ")");
String serverUrl = BuildConfig.GCM_SERVER_URL + "/unregister";
Map<String, String> params = new HashMap<String, String>();
params.put("gcm_id", gcmId);
try {
post(serverUrl, params, BuildConfig.GCM_API_KEY);
setRegisteredOnServer(context, false, gcmId, null);
} catch (IOException e) {
// At this point the device is unregistered from GCM, but still
// registered in the server.
// We could try to unregister again, but it is not necessary:
// if the server tries to send a message to the device, it will get
// a "NotRegistered" error message and should unregister the device.
LOGD(TAG, "Unable to unregister from application server", e);
} finally {
// Regardless of server success, clear local settings_prefs
setRegisteredOnServer(context, false, null, null);
}
}
/**
* Request user data sync.
*
* @param context Current context
*/
public static void notifyUserDataChanged(final Context context) {
if (!checkGcmEnabled()) {
return;
}
LOGI(TAG, "Notifying GCM that user data changed");
String serverUrl = BuildConfig.GCM_SERVER_URL + "/send/self/sync_user";
try {
String gcmKey = AccountUtils.getGcmKey(context, AccountUtils.getActiveAccountName(context));
if (gcmKey != null) {
post(serverUrl, new HashMap<String, String>(), gcmKey);
}
} catch (IOException e) {
LOGE(TAG, "Unable to notify GCM about user data change", e);
}
}
/**
* Sets whether the device was successfully registered in the server side.
*
* @param context Current context
* @param flag True if registration was successful, false otherwise
 * @param gcmId The GCM registration ID for this device
 * @param gcmKey The GCM key with which the device was registered
*/
private static void setRegisteredOnServer(Context context, boolean flag, String gcmId, String gcmKey) {
final SharedPreferences prefs = context.getSharedPreferences(
PREFERENCES, Context.MODE_PRIVATE);
LOGD(TAG, "Setting registered on server status as: " + flag + ", gcmKey="
+ AccountUtils.sanitizeGcmKey(gcmKey));
Editor editor = prefs.edit();
if (flag) {
editor.putLong(PROPERTY_REGISTERED_TS, new Date().getTime());
editor.putString(PROPERTY_GCM_KEY, gcmKey == null ? "" : gcmKey);
editor.putString(PROPERTY_REG_ID, gcmId);
} else {
editor.remove(PROPERTY_REG_ID);
}
editor.apply();
}
/**
* Checks whether the device was successfully registered in the server side.
*
 * @param context Current context
 * @param gcmKey The GCM key to compare against the locally stored registration
 * @return True if registration was successful, false otherwise
*/
public static boolean isRegisteredOnServer(Context context, String gcmKey) {
final SharedPreferences prefs = context.getSharedPreferences(
PREFERENCES, Context.MODE_PRIVATE);
// Find registration threshold
Calendar cal = Calendar.getInstance();
cal.add(Calendar.DATE, -1);
long yesterdayTS = cal.getTimeInMillis();
long regTS = prefs.getLong(PROPERTY_REGISTERED_TS, 0);
gcmKey = gcmKey == null ? "" : gcmKey;
if (regTS > yesterdayTS) {
LOGV(TAG, "GCM registration current. regTS=" + regTS + " yesterdayTS=" + yesterdayTS);
final String registeredGcmKey = prefs.getString(PROPERTY_GCM_KEY, "");
if (registeredGcmKey.equals(gcmKey)) {
LOGD(TAG, "GCM registration is valid and for the correct gcm key: "
+ AccountUtils.sanitizeGcmKey(registeredGcmKey));
return true;
}
LOGD(TAG, "GCM registration is for DIFFERENT gcm key "
+ AccountUtils.sanitizeGcmKey(registeredGcmKey) + ". We were expecting "
+ AccountUtils.sanitizeGcmKey(gcmKey));
return false;
} else {
LOGV(TAG, "GCM registration expired. regTS=" + regTS + " yesterdayTS=" + yesterdayTS);
return false;
}
}
public static String getGcmId(Context context) {
final SharedPreferences prefs = context.getSharedPreferences(PREFERENCES, Context.MODE_PRIVATE);
return prefs.getString(PROPERTY_REG_ID, null);
}
/**
* Unregister the current GCM ID when we sign-out
*
* @param context Current context
*/
public static void onSignOut(Context context) {
String gcmId = getGcmId(context);
if (gcmId != null) {
unregister(context, gcmId);
}
}
/**
* Issue a POST request to the server.
*
* @param endpoint POST address.
* @param params request parameters.
* @throws java.io.IOException propagated from POST.
*/
private static void post(String endpoint, Map<String, String> params, String key)
throws IOException {
URL url;
try {
url = new URL(endpoint);
} catch (MalformedURLException e) {
throw new IllegalArgumentException("invalid url: " + endpoint);
}
params.put("key", key);
StringBuilder bodyBuilder = new StringBuilder();
Iterator<Entry<String, String>> iterator = params.entrySet().iterator();
// constructs the POST body using the parameters
while (iterator.hasNext()) {
Entry<String, String> param = iterator.next();
bodyBuilder.append(param.getKey()).append('=')
.append(param.getValue());
if (iterator.hasNext()) {
bodyBuilder.append('&');
}
}
String body = bodyBuilder.toString();
LOGV(TAG, "Posting '" + body + "' to " + url);
HttpURLConnection conn = null;
try {
conn = (HttpURLConnection) url.openConnection();
conn.setDoOutput(true);
conn.setUseCaches(false);
conn.setChunkedStreamingMode(0);
conn.setRequestMethod("POST");
conn.setRequestProperty("Content-Type",
"application/x-www-form-urlencoded;charset=UTF-8");
conn.setRequestProperty("Content-Length",
Integer.toString(body.length()));
// post the request
OutputStream out = conn.getOutputStream();
out.write(body.getBytes());
out.close();
// handle the response
int status = conn.getResponseCode();
if (status != 200) {
throw new IOException("Post failed with error code " + status);
}
} finally {
if (conn != null) {
conn.disconnect();
}
}
}
}
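// A minimal, hypothetical caller sketch (not part of the original sources): consult the locally
// stored registration state before re-registering the device. gcmId and gcmKey are assumed to
// come from the GCM client library and the account layer respectively.
class GcmRegistrationExample {
    static void ensureRegistered(Context context, String gcmId, String gcmKey) {
        if (!ServerUtilities.isRegisteredOnServer(context, gcmKey)) {
            boolean ok = ServerUtilities.register(context, gcmId, gcmKey);
            LOGD("GcmRegistrationExample", "registration attempt finished, success=" + ok);
        }
    }
}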
|
|
/*
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Bob Jervis
* Google Inc.
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package com.google.javascript.rhino.jstype;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.Lists;
import com.google.javascript.rhino.ErrorReporter;
import com.google.javascript.rhino.Node;
import java.util.List;
/**
* A {@code NamedType} is a named reference to some other type. This provides
* a convenient mechanism for implementing forward references to types; a
* {@code NamedType} can be used as a placeholder until its reference is
* resolved. It is also useful for representing type names in JsDoc type
* annotations, some of which may never be resolved (as they may refer to
* types in host systems not yet supported by JSCompiler, such as the JVM.)<p>
*
* An important distinction: {@code NamedType} is a type name reference,
* whereas {@link ObjectType} is a named type object, such as an Enum name.
* The Enum itself is typically used only in a dot operator to name one of its
* constants, or in a declaration, where its name will appear in a
* NamedType.<p>
*
* A {@code NamedType} is not currently a full-fledged typedef, because it
* cannot resolve to any JavaScript type. It can only resolve to a named
* {@link JSTypeRegistry} type, or to {@link FunctionType} or
* {@link EnumType}.<p>
*
* If full typedefs are to be supported, then each method on each type class
* needs to be reviewed to make sure that everything works correctly through
* typedefs. Alternatively, we would need to walk through the parse tree and
* unroll each reference to a {@code NamedType} to its resolved type before
* applying the rest of the analysis.<p>
*
* TODO(user): Revisit all of this logic.<p>
*
* The existing typing logic is hacky. Unresolved types should get processed
* in a more consistent way, but with the Rhino merge coming, there will be
* much that has to be changed.<p>
*
*/
class NamedType extends ProxyObjectType {
private static final long serialVersionUID = 1L;
private final String reference;
private final String sourceName;
private final int lineno;
private final int charno;
/**
* Validates the type resolution.
*/
private Predicate<JSType> validator;
/**
* Property-defining continuations.
*/
private List<PropertyContinuation> propertyContinuations = null;
/**
* Create a named type based on the reference.
*/
NamedType(JSTypeRegistry registry, String reference,
String sourceName, int lineno, int charno) {
super(registry, registry.getNativeObjectType(JSTypeNative.UNKNOWN_TYPE));
Preconditions.checkNotNull(reference);
this.reference = reference;
this.sourceName = sourceName;
this.lineno = lineno;
this.charno = charno;
}
@Override
boolean defineProperty(String propertyName, JSType type,
boolean inferred, Node propertyNode) {
if (!isResolved()) {
// If this is an unresolved object type, we need to save all its
// properties and define them when it is resolved.
if (propertyContinuations == null) {
propertyContinuations = Lists.newArrayList();
}
propertyContinuations.add(
new PropertyContinuation(
propertyName, type, inferred, propertyNode));
return true;
} else {
return super.defineProperty(
propertyName, type, inferred, propertyNode);
}
}
private void finishPropertyContinuations() {
ObjectType referencedObjType = getReferencedObjTypeInternal();
if (referencedObjType != null && !referencedObjType.isUnknownType()) {
if (propertyContinuations != null) {
for (PropertyContinuation c : propertyContinuations) {
c.commit(this);
}
}
}
propertyContinuations = null;
}
/** Returns the type to which this refers (which is unknown if unresolved). */
public JSType getReferencedType() {
return getReferencedTypeInternal();
}
@Override
public String getReferenceName() {
return reference;
}
@Override
String toStringHelper(boolean forAnnotations) {
return reference;
}
@Override
public boolean hasReferenceName() {
return true;
}
@Override
boolean isNamedType() {
return true;
}
@Override
public boolean isNominalType() {
return true;
}
@Override
public int hashCode() {
return reference.hashCode();
}
/**
* Resolve the referenced type within the enclosing scope.
*/
@Override
JSType resolveInternal(ErrorReporter t, StaticScope<JSType> enclosing) {
// TODO(user): Investigate whether it is really necessary to keep two
// different mechanisms for resolving named types, and if so, which order
// makes more sense. Now, resolution via registry is first in order to
// avoid triggering the warnings built into the resolution via properties.
boolean resolved = resolveViaRegistry(t, enclosing);
if (detectImplicitPrototypeCycle()) {
handleTypeCycle(t);
}
if (resolved) {
super.resolveInternal(t, enclosing);
finishPropertyContinuations();
return registry.isLastGeneration() ?
getReferencedType() : this;
}
resolveViaProperties(t, enclosing);
if (detectImplicitPrototypeCycle()) {
handleTypeCycle(t);
}
super.resolveInternal(t, enclosing);
if (isResolved()) {
finishPropertyContinuations();
}
return registry.isLastGeneration() ?
getReferencedType() : this;
}
/**
* Resolves a named type by looking it up in the registry.
* @return True if we resolved successfully.
*/
private boolean resolveViaRegistry(
ErrorReporter t, StaticScope<JSType> enclosing) {
JSType type = registry.getType(reference);
if (type != null) {
setReferencedAndResolvedType(type, t, enclosing);
return true;
}
return false;
}
/**
* Resolves a named type by looking up its first component in the scope, and
* subsequent components as properties. The scope must have been fully
* parsed and a symbol table constructed.
*/
private void resolveViaProperties(ErrorReporter t,
StaticScope<JSType> enclosing) {
JSType value = lookupViaProperties(t, enclosing);
// last component of the chain
if (value != null && value.isFunctionType() &&
(value.isConstructor() || value.isInterface())) {
FunctionType functionType = value.toMaybeFunctionType();
setReferencedAndResolvedType(
functionType.getInstanceType(), t, enclosing);
} else if (value != null && value.isNoObjectType()) {
setReferencedAndResolvedType(
registry.getNativeFunctionType(
JSTypeNative.NO_OBJECT_TYPE).getInstanceType(), t, enclosing);
} else if (value instanceof EnumType) {
setReferencedAndResolvedType(
((EnumType) value).getElementsType(), t, enclosing);
} else {
// We've been running into issues where people forward-declare
// non-named types. (This is legitimate...our dependency management
// code doubles as our forward-declaration code.)
//
// So if the type does resolve to an actual value, but it's not named,
// then don't respect the forward declaration.
handleUnresolvedType(t, value == null || value.isUnknownType());
}
}
/**
* Resolves a type by looking up its first component in the scope, and
* subsequent components as properties. The scope must have been fully
* parsed and a symbol table constructed.
* @return The type of the symbol, or null if the type could not be found.
*/
private JSType lookupViaProperties( ErrorReporter t,
StaticScope<JSType> enclosing) {
String[] componentNames = reference.split("\\.", -1);
if (componentNames[0].length() == 0) {
return null;
}
StaticSlot<JSType> slot = enclosing.getSlot(componentNames[0]);
if (slot == null) {
return null;
}
// If the first component has a type of 'Unknown', then any type
// names using it should be regarded as silently 'Unknown' rather than be
// noisy about it.
JSType slotType = slot.getType();
if (slotType == null || slotType.isAllType() || slotType.isNoType()) {
return null;
}
JSType value = getTypedefType(t, slot, componentNames[0]);
if (value == null) {
return null;
}
// resolving component by component
for (int i = 1; i < componentNames.length; i++) {
ObjectType parentClass = ObjectType.cast(value);
if (parentClass == null) {
return null;
}
if (componentNames[i].length() == 0) {
return null;
}
value = parentClass.getPropertyType(componentNames[i]);
}
return value;
}
private void setReferencedAndResolvedType(JSType type, ErrorReporter t,
StaticScope<JSType> enclosing) {
if (validator != null) {
validator.apply(type);
}
setReferencedType(type);
checkEnumElementCycle(t);
setResolvedTypeInternal(getReferencedType());
}
private void handleTypeCycle(ErrorReporter t) {
setReferencedType(
registry.getNativeObjectType(JSTypeNative.UNKNOWN_TYPE));
t.warning("Cycle detected in inheritance chain of type " + reference,
sourceName, lineno, charno);
setResolvedTypeInternal(getReferencedType());
}
private void checkEnumElementCycle(ErrorReporter t) {
JSType referencedType = getReferencedType();
if (referencedType instanceof EnumElementType &&
((EnumElementType) referencedType).getPrimitiveType() == this) {
handleTypeCycle(t);
}
}
// Warns about this type being unresolved iff it's not a forward-declared
// type name.
private void handleUnresolvedType(
ErrorReporter t, boolean ignoreForwardReferencedTypes) {
if (registry.isLastGeneration()) {
boolean isForwardDeclared =
ignoreForwardReferencedTypes &&
registry.isForwardDeclaredType(reference);
if (!isForwardDeclared && registry.isLastGeneration()) {
t.warning("Bad type annotation. Unknown type " + reference,
sourceName, lineno, charno);
} else {
setReferencedType(
registry.getNativeObjectType(
JSTypeNative.NO_RESOLVED_TYPE));
if (registry.isLastGeneration() && validator != null) {
validator.apply(getReferencedType());
}
}
setResolvedTypeInternal(getReferencedType());
} else {
setResolvedTypeInternal(this);
}
}
JSType getTypedefType(ErrorReporter t, StaticSlot<JSType> slot, String name) {
JSType type = slot.getType();
if (type != null) {
return type;
}
handleUnresolvedType(t, true);
return null;
}
@Override
public boolean setValidator(Predicate<JSType> validator) {
// If the type is already resolved, we can validate it now. If
    // the type has not been resolved yet, we need to wait until it's
// resolved before we can validate it.
if (this.isResolved()) {
return super.setValidator(validator);
} else {
this.validator = validator;
return true;
}
}
/** Store enough information to define a property at a later time. */
private static final class PropertyContinuation {
private final String propertyName;
private final JSType type;
private final boolean inferred;
private final Node propertyNode;
private PropertyContinuation(
String propertyName,
JSType type,
boolean inferred,
Node propertyNode) {
this.propertyName = propertyName;
this.type = type;
this.inferred = inferred;
this.propertyNode = propertyNode;
}
void commit(ObjectType target) {
target.defineProperty(
propertyName, type, inferred, propertyNode);
}
}
}
|
|
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java.intellij;
import static com.facebook.buck.testutil.MoreAsserts.assertListEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import com.facebook.buck.android.AndroidBinary;
import com.facebook.buck.android.AndroidBinaryBuilder;
import com.facebook.buck.android.AndroidLibraryBuilder;
import com.facebook.buck.android.AndroidResourceRuleBuilder;
import com.facebook.buck.android.NdkLibrary;
import com.facebook.buck.android.NdkLibraryBuilder;
import com.facebook.buck.cli.BuildTargetNodeToBuildRuleTransformer;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.jvm.core.JavaPackageFinder;
import com.facebook.buck.jvm.java.FakeJavaPackageFinder;
import com.facebook.buck.jvm.java.JavaLibraryBuilder;
import com.facebook.buck.jvm.java.JavaTestBuilder;
import com.facebook.buck.jvm.java.KeystoreBuilder;
import com.facebook.buck.jvm.java.PrebuiltJarBuilder;
import com.facebook.buck.jvm.java.intellij.SerializableModule.SourceFolder;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.InMemoryBuildFileTree;
import com.facebook.buck.model.Pair;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.ActionGraph;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.FakeSourcePath;
import com.facebook.buck.rules.ProjectConfig;
import com.facebook.buck.rules.ProjectConfigBuilder;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.testutil.BuckTestConstant;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import org.easymock.EasyMock;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import javax.annotation.Nullable;
public class ProjectTest {
private static final Path PATH_TO_GUAVA_JAR = Paths.get("third_party/guava/guava-10.0.1.jar");
@SuppressWarnings("PMD.UnusedPrivateField")
private BuildRule guava;
/**
* Creates an ActionGraph with two android_binary rules, each of which depends on the same
* android_library. The difference between the two is that one lists Guava in its no_dx list and
* the other does not.
* <p>
   * The ActionGraph also includes five project_config rules: one for the java_library, one for
   * the android_library, one for the android_resource, and one for each of the android_binary
   * rules.
*/
public Pair<ProjectWithModules, BuildRuleResolver> createActionGraphForTesting(
@Nullable JavaPackageFinder javaPackageFinder) throws Exception {
BuildRuleResolver ruleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
// prebuilt_jar //third_party/guava:guava
guava = PrebuiltJarBuilder
.createBuilder(BuildTargetFactory.newInstance("//third_party/guava:guava"))
.setBinaryJar(PATH_TO_GUAVA_JAR)
.build(ruleResolver);
    // android_resource //android_res/base:res
BuildRule androidResRule = ruleResolver.addToIndex(
AndroidResourceRuleBuilder.newBuilder()
.setResolver(new SourcePathResolver(ruleResolver))
.setBuildTarget(BuildTargetFactory.newInstance("//android_res/base:res"))
.setRes(new FakeSourcePath("android_res/base/res"))
.setRDotJavaPackage("com.facebook")
.build());
// project_config android_res/base:res
ProjectConfig projectConfigForResource = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//android_res/base:project_config"))
.setSrcRule(androidResRule.getBuildTarget())
.setSrcRoots(ImmutableList.of("res"))
.build(ruleResolver);
// java_library //java/src/com/facebook/grandchild:grandchild
BuildTarget grandchildTarget =
BuildTargetFactory.newInstance("//java/src/com/facebook/grandchild:grandchild");
BuildRule grandchild = JavaLibraryBuilder
.createBuilder(grandchildTarget)
.addSrc(Paths.get("Grandchild.java"))
.build(ruleResolver);
// java_library //java/src/com/facebook/child:child
BuildRule childRule = JavaLibraryBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/src/com/facebook/child:child"))
.addSrc(Paths.get("Child.java"))
.addDep(grandchild.getBuildTarget())
.build(ruleResolver);
// java_library //java/src/com/facebook/exportlib:exportlib
BuildRule exportLib = JavaLibraryBuilder
.createBuilder(
BuildTargetFactory.newInstance("//java/src/com/facebook/exportlib:exportlib"))
.addSrc(Paths.get("ExportLib.java"))
.addDep(guava.getBuildTarget())
.addExportedDep(guava.getBuildTarget())
.build(ruleResolver);
// android_library //java/src/com/facebook/base:base
BuildRule baseRule = AndroidLibraryBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/src/com/facebook/base:base"))
.addSrc(Paths.get("Base.java"))
.addDep(exportLib.getBuildTarget())
.addDep(childRule.getBuildTarget())
.addDep(androidResRule.getBuildTarget())
.build(ruleResolver);
// project_config //java/src/com/facebook/base:project_config
ProjectConfig projectConfigForLibrary = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance(
"//java/src/com/facebook/base:project_config"))
.setSrcRule(baseRule.getBuildTarget())
.setSrcRoots(ImmutableList.of("src", "src-gen"))
.build(ruleResolver);
ProjectConfig projectConfigForExportLibrary = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//java/src/com/facebook/exportlib:project_config"))
.setSrcRule(exportLib.getBuildTarget())
.setSrcRoots(ImmutableList.of("src")).build(ruleResolver);
// keystore //keystore:debug
BuildTarget keystoreTarget = BuildTargetFactory.newInstance("//keystore:debug");
BuildRule keystore = KeystoreBuilder.createBuilder(keystoreTarget)
.setStore(new FakeSourcePath("keystore/debug.keystore"))
.setProperties(new FakeSourcePath("keystore/debug.keystore.properties"))
.build(ruleResolver);
// android_binary //foo:app
ImmutableSortedSet<BuildTarget> androidBinaryRuleDepsTarget =
ImmutableSortedSet.of(baseRule.getBuildTarget());
AndroidBinary androidBinaryRule = (AndroidBinary) AndroidBinaryBuilder.createBuilder(
BuildTargetFactory.newInstance("//foo:app"))
.setOriginalDeps(androidBinaryRuleDepsTarget)
.setManifest(new FakeSourcePath("foo/AndroidManifest.xml"))
.setKeystore(keystore.getBuildTarget())
.setBuildTargetsToExcludeFromDex(
ImmutableSet.of(
BuildTargetFactory.newInstance("//third_party/guava:guava")))
.build(ruleResolver);
// project_config //foo:project_config
ProjectConfig projectConfigUsingNoDx = (ProjectConfig) ProjectConfigBuilder
.createBuilder(BuildTargetFactory.newInstance("//foo:project_config"))
.setSrcRule(androidBinaryRule.getBuildTarget())
.build(ruleResolver);
// android_binary //bar:app
ImmutableSortedSet<BuildTarget> barAppBuildRuleDepsTarget =
ImmutableSortedSet.of(baseRule.getBuildTarget());
AndroidBinary barAppBuildRule = (AndroidBinary) AndroidBinaryBuilder.createBuilder(
BuildTargetFactory.newInstance("//bar:app"))
.setOriginalDeps(barAppBuildRuleDepsTarget)
.setManifest(new FakeSourcePath("foo/AndroidManifest.xml"))
.setKeystore(keystore.getBuildTarget())
.build(ruleResolver);
// project_config //bar:project_config
ProjectConfig projectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(BuildTargetFactory.newInstance("//bar:project_config"))
.setSrcRule(barAppBuildRule.getBuildTarget())
.build(ruleResolver);
return new Pair<>(getModulesForActionGraph(
ruleResolver,
ImmutableSortedSet.of(
projectConfigForExportLibrary,
projectConfigForLibrary,
projectConfigForResource,
projectConfigUsingNoDx,
projectConfig),
javaPackageFinder),
ruleResolver);
}
@Test
public void testGenerateRelativeGenPath() {
Path basePathOfModule = Paths.get("android_res/com/facebook/gifts/");
Path expectedRelativePathToGen =
Paths.get("../../../../buck-out/android/android_res/com/facebook/gifts/gen");
assertEquals(
expectedRelativePathToGen, Project.generateRelativeGenPath(basePathOfModule));
}
/**
* This is an important test that verifies that the {@code no_dx} argument for an
* {@code android_binary} is handled appropriately when generating an IntelliJ project.
*/
@Test
public void testProject() throws Exception {
JavaPackageFinder javaPackageFinder = EasyMock.createMock(JavaPackageFinder.class);
EasyMock
.expect(javaPackageFinder.findJavaPackage(Paths.get("foo/module_foo.iml")))
.andReturn("");
EasyMock
.expect(javaPackageFinder.findJavaPackage(Paths.get("bar/module_bar.iml")))
.andReturn("");
EasyMock.replay(javaPackageFinder);
Pair<ProjectWithModules, BuildRuleResolver> projectWithModules =
createActionGraphForTesting(javaPackageFinder);
Project project = projectWithModules.getFirst().project;
BuildRuleResolver resolver = projectWithModules.getSecond();
List<SerializableModule> modules = projectWithModules.getFirst().modules;
assertEquals("Should be one module for the java_library, one for the android_library, " +
"one module for the android_resource, and one for each android_binary",
5,
modules.size());
    // Check the values of the module that corresponds to the java_library.
SerializableModule javaLibraryModule = modules.get(4);
assertSame(
getRuleByBuildTarget("//java/src/com/facebook/exportlib:exportlib", resolver),
javaLibraryModule.srcRule);
assertEquals("module_java_src_com_facebook_exportlib", javaLibraryModule.name);
assertEquals(
Paths.get("java/src/com/facebook/exportlib/module_java_src_com_facebook_exportlib.iml"),
javaLibraryModule.pathToImlFile);
assertListEquals(
ImmutableList.of(SerializableModule.SourceFolder.SRC),
javaLibraryModule.sourceFolders);
// Check the dependencies.
SerializableDependentModule inheritedJdk = SerializableDependentModule.newInheritedJdk();
SerializableDependentModule guavaAsProvidedDep = SerializableDependentModule.newLibrary(
guava.getBuildTarget(), "buck_out_gen_third_party_guava_guava_jar");
guavaAsProvidedDep.scope = "PROVIDED";
assertListEquals(
ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
guavaAsProvidedDep,
SerializableDependentModule.newInheritedJdk()),
javaLibraryModule.getDependencies());
// Check the values of the module that corresponds to the android_library.
SerializableModule androidLibraryModule = modules.get(3);
assertSame(
getRuleByBuildTarget("//java/src/com/facebook/base:base", resolver),
androidLibraryModule.srcRule);
assertEquals("module_java_src_com_facebook_base", androidLibraryModule.name);
assertEquals(
Paths.get("java/src/com/facebook/base/module_java_src_com_facebook_base.iml"),
androidLibraryModule.pathToImlFile);
assertListEquals(
ImmutableList.of(
SerializableModule.SourceFolder.SRC,
new SourceFolder("file://$MODULE_DIR$/src-gen", false /* isTestSource */),
SerializableModule.SourceFolder.GEN),
androidLibraryModule.sourceFolders);
assertEquals(Boolean.TRUE, androidLibraryModule.hasAndroidFacet);
assertEquals(Boolean.TRUE, androidLibraryModule.isAndroidLibraryProject);
assertEquals(null, androidLibraryModule.proguardConfigPath);
assertEquals(null, androidLibraryModule.resFolder);
// Check the dependencies.
SerializableDependentModule androidResourceAsProvidedDep =
SerializableDependentModule.newModule(
BuildTargetFactory.newInstance("//android_res/base:res"),
"module_android_res_base");
SerializableDependentModule childAsProvidedDep = SerializableDependentModule.newModule(
BuildTargetFactory.newInstance("//java/src/com/facebook/child:child"),
"module_java_src_com_facebook_child");
SerializableDependentModule exportDepsAsProvidedDep = SerializableDependentModule.newModule(
BuildTargetFactory.newInstance("//java/src/com/facebook/exportlib:exportlib"),
"module_java_src_com_facebook_exportlib");
assertListEquals(
ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
guavaAsProvidedDep,
androidResourceAsProvidedDep,
childAsProvidedDep,
exportDepsAsProvidedDep,
inheritedJdk),
androidLibraryModule.getDependencies());
    // Check the values of the module that corresponds to the android_resource.
SerializableModule androidResourceModule = modules.get(0);
assertSame(
getRuleByBuildTarget("//android_res/base:res", resolver),
androidResourceModule.srcRule);
assertEquals(Paths.get("res"), androidResourceModule.resFolder);
// Check the values of the module that corresponds to the android_binary that uses no_dx.
SerializableModule androidBinaryModuleNoDx = modules.get(2);
assertSame(getRuleByBuildTarget("//foo:app", resolver), androidBinaryModuleNoDx.srcRule);
assertEquals("module_foo", androidBinaryModuleNoDx.name);
assertEquals(Paths.get("foo/module_foo.iml"), androidBinaryModuleNoDx.pathToImlFile);
assertListEquals(
ImmutableList.of(SerializableModule.SourceFolder.GEN),
androidBinaryModuleNoDx.sourceFolders);
assertEquals(Boolean.TRUE, androidBinaryModuleNoDx.hasAndroidFacet);
assertEquals(Boolean.FALSE, androidBinaryModuleNoDx.isAndroidLibraryProject);
assertEquals(null, androidBinaryModuleNoDx.proguardConfigPath);
assertEquals(null, androidBinaryModuleNoDx.resFolder);
assertEquals(Paths.get("../keystore/debug.keystore"), androidBinaryModuleNoDx.keystorePath);
// Check the moduleDependencies.
SerializableDependentModule grandchildAsProvidedDep = SerializableDependentModule.newModule(
BuildTargetFactory.newInstance("//java/src/com/facebook/grandchild:grandchild"),
"module_java_src_com_facebook_grandchild");
SerializableDependentModule androidLibraryDep = SerializableDependentModule.newModule(
androidLibraryModule.srcRule.getBuildTarget(), "module_java_src_com_facebook_base");
assertEquals(
ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
guavaAsProvidedDep,
androidLibraryDep,
androidResourceAsProvidedDep,
childAsProvidedDep,
exportDepsAsProvidedDep,
grandchildAsProvidedDep,
inheritedJdk),
androidBinaryModuleNoDx.getDependencies());
// Check the values of the module that corresponds to the android_binary with an empty no_dx.
SerializableModule androidBinaryModuleEmptyNoDx = modules.get(1);
assertSame(getRuleByBuildTarget("//bar:app", resolver), androidBinaryModuleEmptyNoDx.srcRule);
assertEquals("module_bar", androidBinaryModuleEmptyNoDx.name);
assertEquals(Paths.get("bar/module_bar.iml"), androidBinaryModuleEmptyNoDx.pathToImlFile);
assertListEquals(
ImmutableList.of(SerializableModule.SourceFolder.GEN),
androidBinaryModuleEmptyNoDx.sourceFolders);
assertEquals(Boolean.TRUE, androidBinaryModuleEmptyNoDx.hasAndroidFacet);
assertEquals(Boolean.FALSE, androidBinaryModuleEmptyNoDx.isAndroidLibraryProject);
assertEquals(null, androidBinaryModuleEmptyNoDx.proguardConfigPath);
assertEquals(null, androidBinaryModuleEmptyNoDx.resFolder);
assertEquals(
Paths.get("../keystore/debug.keystore"),
androidBinaryModuleEmptyNoDx.keystorePath);
// Check the moduleDependencies.
SerializableDependentModule guavaAsCompiledDep = SerializableDependentModule.newLibrary(
guava.getBuildTarget(), "buck_out_gen_third_party_guava_guava_jar");
assertEquals("Important that Guava is listed as a 'COMPILED' dependency here because it is " +
"only listed as a 'PROVIDED' dependency earlier.",
ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
guavaAsCompiledDep,
androidLibraryDep,
androidResourceAsProvidedDep,
childAsProvidedDep,
exportDepsAsProvidedDep,
grandchildAsProvidedDep,
inheritedJdk),
androidBinaryModuleEmptyNoDx.getDependencies());
// Check that the correct data was extracted to populate the .idea/libraries directory.
BuildRule guava = getRuleByBuildTarget("//third_party/guava:guava", resolver);
assertSame(guava, Iterables.getOnlyElement(project.getLibraryJars()));
}
@Test
public void testPrebuiltJarIncludesDeps() throws Exception {
BuildRuleResolver ruleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
    // Build up the graph that corresponds to:
//
// android_library(
// name = 'example',
// deps = [
// ':easymock',
// ],
// )
//
// prebuilt_jar(
// name = 'easymock',
// binary_jar = 'easymock.jar',
// deps = [
// ':cglib',
// ':objenesis',
// ],
// )
//
// prebuilt_jar(
// name = 'cglib',
// binary_jar = 'cglib.jar',
// )
//
// prebuilt_jar(
// name = 'objenesis',
// binary_jar = 'objenesis.jar',
// )
//
// project_config(
// src_target = ':example',
// )
BuildRule cglib = PrebuiltJarBuilder
.createBuilder(BuildTargetFactory.newInstance("//third_party/java/easymock:cglib"))
.setBinaryJar(Paths.get("third_party/java/easymock/cglib.jar"))
.build(ruleResolver);
BuildRule objenesis = PrebuiltJarBuilder
.createBuilder(BuildTargetFactory.newInstance("//third_party/java/easymock:objenesis"))
.setBinaryJar(Paths.get("third_party/java/easymock/objenesis.jar"))
.build(ruleResolver);
BuildRule easymock = PrebuiltJarBuilder
.createBuilder(BuildTargetFactory.newInstance("//third_party/java/easymock:easymock"))
.setBinaryJar(Paths.get("third_party/java/easymock/easymock.jar"))
.addDep(cglib.getBuildTarget())
.addDep(objenesis.getBuildTarget())
.build(ruleResolver);
BuildTarget easyMockExampleTarget = BuildTargetFactory.newInstance(
"//third_party/java/easymock:example");
BuildRule mockRule = AndroidLibraryBuilder.createBuilder(easyMockExampleTarget)
.addDep(easymock.getBuildTarget())
.build(ruleResolver);
ProjectConfig projectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//third_party/java/easymock:project_config"))
.setSrcRule(mockRule.getBuildTarget())
.build(ruleResolver);
ProjectWithModules projectWithModules = getModulesForActionGraph(
ruleResolver,
ImmutableSortedSet.of(projectConfig),
null /* javaPackageFinder */);
List<SerializableModule> modules = projectWithModules.modules;
// Verify that the single Module that is created transitively includes all JAR files.
assertEquals("Should be one module for the android_library", 1, modules.size());
SerializableModule androidLibraryModule = Iterables.getOnlyElement(modules);
assertThat(
androidLibraryModule.getDependencies(),
Matchers.containsInAnyOrder(
SerializableDependentModule.newSourceFolder(),
SerializableDependentModule.newLibrary(
easymock.getBuildTarget(),
"buck_out_gen_third_party_java_easymock_easymock_jar"),
SerializableDependentModule.newLibrary(
cglib.getBuildTarget(),
"buck_out_gen_third_party_java_easymock_cglib_jar"),
SerializableDependentModule.newLibrary(
objenesis.getBuildTarget(),
"buck_out_gen_third_party_java_easymock_objenesis_jar"),
SerializableDependentModule.newInheritedJdk()));
}
@Test
public void testIfModuleIsBothTestAndCompileDepThenTreatAsCompileDep() throws Exception {
BuildRuleResolver ruleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
// Create a java_library() and a java_test() that both depend on Guava.
// When they are part of the same project_config() rule, then the resulting module should
// include Guava as scope="COMPILE" in IntelliJ.
BuildRule guava = PrebuiltJarBuilder
.createBuilder(BuildTargetFactory.newInstance("//third_party/java/guava:guava"))
.setBinaryJar(Paths.get("third_party/java/guava.jar"))
.build(ruleResolver);
BuildRule baseBuildRule = JavaLibraryBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/com/example/base:base"))
.addDep(guava.getBuildTarget())
.build(ruleResolver);
BuildRule testBuildRule = JavaTestBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/com/example/base:tests"))
.addDep(guava.getBuildTarget())
.build(ruleResolver);
String jdkName = "1.8";
String jdkType = "JavaSDK";
ProjectConfig projectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//java/com/example/base:project_config"))
.setSrcRule(baseBuildRule.getBuildTarget())
.setTestRule(testBuildRule.getBuildTarget())
.setTestRoots(ImmutableList.of("tests"))
.setJdkName(jdkName)
.setJdkType(jdkType)
.build(ruleResolver);
ProjectWithModules projectWithModules = getModulesForActionGraph(
ruleResolver,
ImmutableSortedSet.of(projectConfig),
null /* javaPackageFinder */);
List<SerializableModule> modules = projectWithModules.modules;
assertEquals(1, modules.size());
SerializableModule comExampleBaseModule = Iterables.getOnlyElement(modules);
assertListEquals(
ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
SerializableDependentModule.newLibrary(
guava.getBuildTarget(),
"buck_out_gen_third_party_java_guava_guava_jar"),
SerializableDependentModule.newStandardJdk(jdkName, jdkType)),
comExampleBaseModule.getDependencies());
}
/**
* In the context of Robolectric, httpcore-4.0.1.jar needs to be loaded before the android.jar
* associated with the Android SDK. Both httpcore-4.0.1.jar and android.jar define
* org.apache.http.params.BasicHttpParams; however, only httpcore-4.0.1.jar contains a real
* implementation of BasicHttpParams whereas android.jar contains a stub implementation of
* BasicHttpParams.
* <p>
* One way to fix this problem would be to "tag" httpcore-4.0.1.jar to indicate that it must
* appear before the Android SDK (or anything that transitively depends on the Android SDK) when
* listing dependencies for IntelliJ. This would be a giant kludge to the prebuilt_jar rule, so
   * instead we just list jars before modules within an {@code <orderEntry scope="TEST"/>} or an
   * {@code <orderEntry scope="COMPILE"/>} group.
*/
@Test
public void testThatJarsAreListedBeforeModules() throws Exception {
BuildRuleResolver ruleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
BuildRule supportV4 = JavaLibraryBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/com/android/support/v4:v4"))
.build(ruleResolver);
BuildRule httpCore = PrebuiltJarBuilder
.createBuilder(BuildTargetFactory.newInstance("//third_party/java/httpcore:httpcore"))
.setBinaryJar(Paths.get("httpcore-4.0.1.jar"))
.build(ruleResolver);
// The support-v4 library is loaded as a java_library() rather than a prebuilt_jar() because it
// contains our local changes to the library.
BuildTarget robolectricTarget =
BuildTargetFactory.newInstance("//third_party/java/robolectric:robolectric");
BuildRule robolectricRule = JavaLibraryBuilder
.createBuilder(robolectricTarget)
.addDep(supportV4.getBuildTarget())
.addDep(httpCore.getBuildTarget())
.build(ruleResolver);
ProjectConfig projectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//third_party/java/robolectric:project_config"))
.setSrcRule(robolectricRule.getBuildTarget())
.setSrcRoots(ImmutableList.of("src/main/java"))
.build(ruleResolver);
ProjectWithModules projectWithModules = getModulesForActionGraph(
ruleResolver,
ImmutableSortedSet.of(projectConfig),
null /* javaPackageFinder */);
List<SerializableModule> modules = projectWithModules.modules;
assertEquals("Should be one module for the android_library", 1, modules.size());
SerializableModule robolectricModule = Iterables.getOnlyElement(modules);
assertListEquals(
"It is imperative that httpcore-4.0.1.jar be listed before the support v4 library, " +
"or else when robolectric is listed as a dependency, " +
"org.apache.http.params.BasicHttpParams will be loaded from android.jar instead of " +
"httpcore-4.0.1.jar.",
ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
SerializableDependentModule.newLibrary(
httpCore.getBuildTarget(),
"buck_out_gen_third_party_java_httpcore_httpcore_jar"),
SerializableDependentModule.newModule(
supportV4.getBuildTarget(), "module_java_com_android_support_v4"),
SerializableDependentModule.newInheritedJdk()),
robolectricModule.getDependencies());
}
@Test
public void testCreatePathToProjectDotPropertiesFileForModule() {
SerializableModule rootModule = new SerializableModule(null /* buildRule */,
BuildTargetFactory.newInstance("//:project_config"));
rootModule.pathToImlFile = Paths.get("fb4a.iml");
assertEquals("project.properties", Project.createPathToProjectDotPropertiesFileFor(rootModule));
SerializableModule someModule = new SerializableModule(null /* buildRule */,
BuildTargetFactory.newInstance("//java/com/example/base:project_config"));
someModule.pathToImlFile = Paths.get("java/com/example/base/base.iml");
assertEquals("java/com/example/base/project.properties",
Project.createPathToProjectDotPropertiesFileFor(someModule));
}
/**
* A project_config()'s src_roots argument can be {@code None}, {@code []}, or a non-empty array.
* Each of these should be treated differently.
*/
@Test
public void testSrcRoots() throws Exception {
// Create a project_config() with src_roots=None.
BuildRuleResolver ruleResolver1 =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
BuildRule resBuildRule = ruleResolver1.addToIndex(
AndroidResourceRuleBuilder.newBuilder()
.setResolver(new SourcePathResolver(ruleResolver1))
.setBuildTarget(BuildTargetFactory.newInstance("//resources/com/example:res"))
.build());
ProjectConfig projectConfigNullSrcRoots = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//resources/com/example:project_config"))
.setSrcRule(resBuildRule.getBuildTarget())
.setSrcRoots(null)
.build(ruleResolver1);
ProjectWithModules projectWithModules1 = getModulesForActionGraph(
ruleResolver1,
ImmutableSortedSet.of(projectConfigNullSrcRoots),
null /* javaPackageFinder */);
// Verify that the correct source folders are created.
assertEquals(1, projectWithModules1.modules.size());
SerializableModule moduleNoJavaSource = projectWithModules1.modules.get(0);
assertListEquals(
"Only source tmp should be gen/ when setSrcRoots(null) is specified.",
ImmutableList.of(SerializableModule.SourceFolder.GEN),
moduleNoJavaSource.sourceFolders);
// Create a project_config() with src_roots=[].
BuildRuleResolver ruleResolver2 =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
BuildRule baseBuildRule = AndroidLibraryBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/com/example/base:base"))
.build(ruleResolver2);
ProjectConfig inPackageProjectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//java/com/example/base:project_config"))
.setSrcRule(baseBuildRule.getBuildTarget())
.setSrcRoots(ImmutableList.<String>of())
.build(ruleResolver2);
// Verify that the correct source folders are created.
JavaPackageFinder javaPackageFinder = EasyMock.createMock(JavaPackageFinder.class);
EasyMock
.expect(
javaPackageFinder.findJavaPackage(
Paths.get("java/com/example/base/module_java_com_example_base.iml")))
.andReturn("com.example.base");
EasyMock.replay(javaPackageFinder);
ProjectWithModules projectWithModules2 = getModulesForActionGraph(
ruleResolver2,
ImmutableSortedSet.of(inPackageProjectConfig),
javaPackageFinder);
EasyMock.verify(javaPackageFinder);
assertEquals(1, projectWithModules2.modules.size());
SerializableModule moduleWithPackagePrefix = projectWithModules2.modules.get(0);
assertListEquals(
"The current directory should be a source tmp with a package prefix " +
"as well as the gen/ directory.",
ImmutableList.of(
new SourceFolder("file://$MODULE_DIR$", false /* isTestSource */, "com.example.base"),
SerializableModule.SourceFolder.GEN),
moduleWithPackagePrefix.sourceFolders);
// Create a project_config() with src_roots=['src'].
BuildRuleResolver ruleResolver3 =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
BuildRule baseBuildRule3 = AndroidLibraryBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/com/example/base:base"))
.build(ruleResolver3);
ProjectConfig hasSrcFolderProjectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//java/com/example/base:project_config"))
.setSrcRule(baseBuildRule3.getBuildTarget())
.setSrcRoots(ImmutableList.of("src"))
.build(ruleResolver3);
ProjectWithModules projectWithModules3 = getModulesForActionGraph(
ruleResolver3,
ImmutableSortedSet.of(hasSrcFolderProjectConfig),
null /* javaPackageFinder */);
// Verify that the correct source folders are created.
assertEquals(1, projectWithModules3.modules.size());
SerializableModule moduleHasSrcFolder = projectWithModules3.modules.get(0);
assertListEquals(
"Both src/ and gen/ should be source folders.",
ImmutableList.of(
new SourceFolder("file://$MODULE_DIR$/src", false /* isTestSource */),
SerializableModule.SourceFolder.GEN),
moduleHasSrcFolder.sourceFolders);
}
private static class ProjectWithModules {
private final Project project;
private final ImmutableList<SerializableModule> modules;
private ProjectWithModules(Project project, ImmutableList<SerializableModule> modules) {
this.project = project;
this.modules = modules;
}
}
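  /**
   * Builds a {@link Project} from the given project_config rules, using mocked
   * {@link ExecutionContext} and {@link ProjectFilesystem} instances, and returns the Project
   * together with the serializable modules it creates.
   */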
private ProjectWithModules getModulesForActionGraph(
BuildRuleResolver ruleResolver,
ImmutableSortedSet<ProjectConfig> projectConfigs,
@Nullable JavaPackageFinder javaPackageFinder) throws IOException {
if (javaPackageFinder == null) {
javaPackageFinder = new FakeJavaPackageFinder();
}
ActionGraph actionGraph = new ActionGraph(ruleResolver.getBuildRules());
// Create the Project.
ExecutionContext executionContext = EasyMock.createMock(ExecutionContext.class);
ProjectFilesystem projectFilesystem = EasyMock.createMock(ProjectFilesystem.class);
EasyMock.expect(projectFilesystem.getRelativizer()).andStubReturn(
new Function<Path, Path>() {
@Override
public Path apply(@Nullable Path input) {
return Paths.get("").toAbsolutePath().relativize(input);
}
});
EasyMock.expect(projectFilesystem.getRootPath()).andStubReturn(Paths.get("").toAbsolutePath());
Properties keystoreProperties = new Properties();
keystoreProperties.put("key.alias", "androiddebugkey");
keystoreProperties.put("key.store.password", "android");
keystoreProperties.put("key.alias.password", "android");
EasyMock.expect(projectFilesystem.readPropertiesFile(
Paths.get("keystore/debug.keystore.properties").toAbsolutePath()))
.andReturn(keystoreProperties).anyTimes();
ImmutableMap<Path, String> basePathToAliasMap = ImmutableMap.of();
Project project = new Project(
new SourcePathResolver(ruleResolver),
projectConfigs,
actionGraph,
basePathToAliasMap,
javaPackageFinder,
executionContext,
new InMemoryBuildFileTree(
Iterables.transform(
actionGraph.getNodes(),
BuildTarget.TO_TARGET)),
projectFilesystem,
/* pathToDefaultAndroidManifest */ Optional.<String>absent(),
new IntellijConfig(FakeBuckConfig.builder().build()),
/* pathToPostProcessScript */ Optional.<String>absent(),
BuckTestConstant.PYTHON_INTERPRETER,
new ObjectMapper(),
true);
// Execute Project's business logic.
EasyMock.replay(executionContext, projectFilesystem);
List<SerializableModule> modules = new ArrayList<>(project.createModulesForProjectConfigs());
EasyMock.verify(executionContext, projectFilesystem);
return new ProjectWithModules(project, ImmutableList.copyOf(modules));
}
private static BuildRule getRuleByBuildTarget(String buildTarget, BuildRuleResolver resolver)
throws NoSuchBuildTargetException {
BuildRule rule = resolver.requireRule(BuildTargetFactory.newInstance(buildTarget));
Preconditions.checkNotNull(rule, "No rule for %s", buildTarget);
return rule;
}
@Test
public void testNdkLibraryHasCorrectPath() throws Exception {
BuildRuleResolver ruleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
    // Build up the graph that corresponds to:
//
// ndk_library(
// name = 'foo-jni'
// )
//
// project_config(
// src_target = ':foo-jni',
// )
ProjectFilesystem projectFilesystem = EasyMock.createMock(ProjectFilesystem.class);
BuildTarget fooJni = BuildTargetFactory.newInstance("//third_party/java/foo/jni:foo-jni");
NdkLibrary ndkLibrary =
(NdkLibrary) new NdkLibraryBuilder(fooJni)
.build(ruleResolver, projectFilesystem);
ProjectConfig ndkProjectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance(
"//third_party/java/foo/jni:project_config"))
.setSrcRule(ndkLibrary.getBuildTarget())
.build(ruleResolver);
ProjectWithModules projectWithModules = getModulesForActionGraph(
ruleResolver,
ImmutableSortedSet.of(ndkProjectConfig),
null /* javaPackageFinder */);
List<SerializableModule> modules = projectWithModules.modules;
assertEquals("Should be one module for the ndk_library.", 1, modules.size());
SerializableModule androidLibraryModule = Iterables.getOnlyElement(modules);
assertListEquals(ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
SerializableDependentModule.newInheritedJdk()),
androidLibraryModule.getDependencies());
assertEquals(
Paths.get(String.format("../../../../%s", ndkLibrary.getLibraryPath())),
androidLibraryModule.nativeLibs);
}
@Test
public void testDoNotIgnoreAllOfBuckOut() {
SourcePathResolver resolver =
new SourcePathResolver(
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer()));
ProjectFilesystem projectFilesystem = EasyMock.createMock(ProjectFilesystem.class);
ImmutableSet<Path> ignorePaths = ImmutableSet.of(Paths.get("buck-out"), Paths.get(".git"));
EasyMock.expect(projectFilesystem.getRootPath()).andStubReturn(Paths.get("/opt/src/buck"));
EasyMock.expect(projectFilesystem.getIgnorePaths()).andReturn(ignorePaths);
EasyMock.replay(projectFilesystem);
BuildTarget buildTarget = BuildTarget.builder(
projectFilesystem.getRootPath(),
"//",
"base").build();
BuildRule buildRule = new FakeBuildRule(buildTarget, resolver);
SerializableModule module = new SerializableModule(buildRule, buildTarget);
Project.addRootExcludes(module, buildRule, projectFilesystem);
ImmutableSortedSet<SourceFolder> expectedExcludeFolders =
ImmutableSortedSet.orderedBy(SerializableModule.ALPHABETIZER)
.add(new SourceFolder("file://$MODULE_DIR$/.git", /* isTestSource */ false))
.add(new SourceFolder("file://$MODULE_DIR$/buck-out/bin", /* isTestSource */ false))
.add(new SourceFolder("file://$MODULE_DIR$/buck-out/log", /* isTestSource */ false))
.add(new SourceFolder("file://$MODULE_DIR$/buck-out/tmp", /* isTestSource */ false))
.build();
assertEquals("Specific subfolders of buck-out should be excluded rather than all of buck-out.",
expectedExcludeFolders,
module.excludeFolders);
EasyMock.verify(projectFilesystem);
}
}
|
|
package com.zimbra.cs.service.authenticator;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
import com.zimbra.common.account.Key.AccountBy;
import com.zimbra.common.account.Key.DomainBy;
import com.zimbra.common.service.ServiceException;
import com.zimbra.common.util.HttpUtil;
import com.zimbra.common.util.ZimbraLog;
import com.zimbra.cs.account.Account;
import com.zimbra.cs.account.Entry;
import com.zimbra.cs.account.NamedEntry;
import com.zimbra.cs.account.Provisioning;
import com.zimbra.cs.account.SearchAccountsOptions;
import com.zimbra.cs.ldap.ZLdapFilterFactory.FilterId;
import com.zimbra.cs.service.authenticator.SSOAuthenticator.ZimbraPrincipal;
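/**
 * Parses the value of {@code zimbraMailSSLClientCertPrincipalMap} into an ordered list of
 * rules that map a client-certificate field to a Zimbra account key. For example, a value
 * such as {@code SUBJECTALTNAME_OTHERNAME_UPN=zimbraForeignPrincipal,SUBJECT_EMAILADDRESS=name}
 * (an illustrative value, not a recommended configuration) would yield two field-map rules;
 * an entry that starts with {@code (} is instead parsed as a raw LDAP filter rule.
 */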
public class ClientCertPrincipalMap {
static final String LOG_PREFIX = ClientCertAuthenticator.LOG_PREFIX;
    private static final String RULE_DELIMITER = ","; // separates each rule
private static final char LDAP_FILTER_LEADING_CHAR = '(';
    private static final String MAP_DELIMITER = "="; // separates cert field and zimbra key
static abstract class CertField {
abstract String getName();
}
// a fixed, known certificate field
static class KnownCertField extends CertField {
static enum Field {
SUBJECT_DN,
SUBJECTALTNAME_OTHERNAME_UPN,
SUBJECTALTNAME_RFC822NAME;
private KnownCertField knownCertField;
private Field() {
knownCertField = new KnownCertField(this);
}
private KnownCertField getKnownCertField() {
return knownCertField;
}
private static String names() {
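                // Builds a pipe-delimited list of the field names, e.g.
                // "SUBJECT_DN|SUBJECTALTNAME_OTHERNAME_UPN|SUBJECTALTNAME_RFC822NAME".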
StringBuilder str = new StringBuilder();
int i = 0;
for (Field type : Field.values()) {
if (i++ > 0) str.append('|');
str.append(type.name());
}
return str.toString();
}
};
private Field field;
private KnownCertField(Field field) {
this.field = field;
}
private static KnownCertField parse(String fieldStr) {
try {
Field parsedField = Field.valueOf(fieldStr);
return parsedField.getKnownCertField();
} catch (IllegalArgumentException e) {
return null;
}
}
static String names() {
return Field.names();
}
Field getField() {
return field;
}
@Override
String getName() {
return field.name();
}
}
// a RDN in Subject
static class SubjectCertField extends CertField {
private static final String PREFIX = "SUBJECT_";
private static final int PREFIX_LEN = PREFIX.length();
// default if no mapping configured
private static final SubjectCertField EMAILADDRESS = new SubjectCertField(CertUtil.ATTR_EMAILADDRESS);
String rdnAttrType;
private SubjectCertField(String rdnType) {
this.rdnAttrType = rdnType;
}
static SubjectCertField parse(String fieldStr) {
if (fieldStr.startsWith(PREFIX) && fieldStr.length() > PREFIX_LEN) {
String rdnType = fieldStr.substring(PREFIX_LEN);
return new SubjectCertField(rdnType);
}
return null;
}
static String names() {
return PREFIX + "{an RDN attr, e.g. CN}";
}
String getRDNAttrType() {
return rdnAttrType;
}
@Override
String getName() {
return PREFIX + rdnAttrType;
}
}
static enum ZimbraKey {
        // Note: do NOT support searching for a Zimbra account by DN because:
        // (1) DoS risk (a non-existing DN would cause repeated LDAP searches)
        // and
        // (2) the Subject DN in the certificate most likely will not be an
        //     exact match of a Zimbra account DN.
// dn,
name,
zimbraId,
zimbraForeignPrincipal;
}
static abstract class Rule {
abstract String getName();
abstract ZimbraPrincipal apply(X509Certificate cert) throws ServiceException;
}
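    // Rule that expands %{CERT_FIELD} placeholders in a configured LDAP filter with values
    // read from the client certificate and then searches the directory for a single matching
    // account. A hypothetical filter such as (mail=%{SUBJECT_EMAILADDRESS}), shown here only
    // for illustration, would have the placeholder replaced by the certificate's subject
    // email address before the search runs.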
static class LdapFilterRule extends Rule {
private static Pattern pattern = Pattern.compile("\\%\\{([^\\}]*)\\}");
private String filter;
private LdapFilterRule(String filter) {
this.filter = filter;
}
String getFilter() {
return filter;
}
@Override
String getName() {
return filter;
}
@Override
ZimbraPrincipal apply(X509Certificate cert) throws ServiceException {
String filter = expandFilter(cert);
ZimbraLog.account.debug(LOG_PREFIX +
"search account by expanded filter(prepended with account objectClass filter): " + filter);
SearchAccountsOptions searchOpts = new SearchAccountsOptions();
searchOpts.setMaxResults(1);
searchOpts.setFilterString(FilterId.ACCOUNT_BY_SSL_CLENT_CERT_PRINCIPAL_MAP, filter);
// should return at most one entry. If more than one entries were matched,
// TOO_MANY_SEARCH_RESULTS will be thrown
List<NamedEntry> entries = Provisioning.getInstance().searchDirectory(searchOpts);
if (entries.size() == 1) {
Account acct = (Account) entries.get(0);
return new ZimbraPrincipal(filter, acct);
} else {
return null;
}
}
private String expandFilter(X509Certificate cert) throws ServiceException {
CertUtil certUtil = new CertUtil(cert);
Matcher matcher = pattern.matcher(getFilter());
StringBuffer sb = new StringBuffer();
while (matcher.find()) {
String rawCertField = matcher.group(1);
CertField certField = parseCertField(rawCertField);
String certFieldValue = certUtil.getCertField(certField);
matcher.appendReplacement(sb, certFieldValue);
}
matcher.appendTail(sb);
return sb.toString();
}
}
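    // Rule that maps a single certificate field to a Zimbra account key. The default rule,
    // SUBJECT_EMAILADDRESS=name, looks up the account whose name matches the EMAILADDRESS
    // attribute of the certificate's Subject.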
static class FieldMapRule extends Rule {
private CertField certField;
private ZimbraKey zimbraKey;
private FieldMapRule(CertField certField, ZimbraKey zimbraKey) {
this.certField = certField;
this.zimbraKey = zimbraKey;
}
CertField getCertField() {
return certField;
}
ZimbraKey getZimbraKey() {
return zimbraKey;
}
@Override
String getName() {
return certField.getName() + MAP_DELIMITER + zimbraKey.name();
}
@Override
ZimbraPrincipal apply(X509Certificate cert) throws ServiceException {
CertUtil certUtil = new CertUtil(cert);
String certFieldValue = certUtil.getCertField(getCertField());
if (certFieldValue != null) {
Account acct = getZimbraAccount(getZimbraKey(), getCertField(), certFieldValue);
if (acct != null) {
return new ZimbraPrincipal(certFieldValue, acct);
}
}
return null;
}
private Account getZimbraAccount(ZimbraKey zimbraKey, CertField certField, String certFieldValue) {
ZimbraLog.account.debug(LOG_PREFIX + "get account by " +
zimbraKey.name() + ", " + certField.getName() + "=" + certFieldValue);
Provisioning prov = Provisioning.getInstance();
Account acct = null;
try {
switch (zimbraKey) {
case name:
acct = prov.get(AccountBy.name, certFieldValue);
break;
case zimbraId:
acct = prov.get(AccountBy.id, certFieldValue);
break;
case zimbraForeignPrincipal:
String foreignPrincipal =
String.format(Provisioning.FP_PREFIX_CERT, certField.getName(),certFieldValue);
acct = prov.get(AccountBy.foreignPrincipal, foreignPrincipal);
break;
}
} catch (ServiceException e) {
ZimbraLog.account.debug(LOG_PREFIX + "no matching account by " +
zimbraKey.name() + ", " + certField.getName() + "=" + certFieldValue, e);
}
return acct;
}
}
private List<Rule> rules;
ClientCertPrincipalMap(HttpServletRequest req) throws ServiceException {
String rawRules = getMappingConfig(req);
rules = parse(rawRules);
}
List<Rule> getRules() {
return rules;
}
private String getMappingConfig(HttpServletRequest req) throws ServiceException {
Provisioning prov = Provisioning.getInstance();
String virtualHostName = HttpUtil.getVirtualHost(req);
Entry entry = prov.get(DomainBy.virtualHostname, virtualHostName);
if (entry == null) {
entry = prov.getConfig();
}
return entry.getAttr(Provisioning.A_zimbraMailSSLClientCertPrincipalMap);
}
private List<Rule> parse(String rawRules) throws ServiceException {
List<Rule> parsedRules = new ArrayList<Rule>();
if (rawRules == null) {
// default to SUBJECT_EMAILADDRESS=name
Rule rule = new FieldMapRule(SubjectCertField.EMAILADDRESS, ZimbraKey.name);
ZimbraLog.account.warn(LOG_PREFIX + "No " + Provisioning.A_zimbraMailSSLClientCertPrincipalMap +
" configured, default to " + rule.getName());
parsedRules.add(rule);
} else {
boolean ldapFilterRuleEnabled =
Provisioning.getInstance().getConfig().isMailSSLClientCertPrincipalMapLdapFilterEnabled();
String[] rules = rawRules.split(RULE_DELIMITER);
for (String rawRule : rules) {
Rule rule = null;
if (LDAP_FILTER_LEADING_CHAR == rawRule.charAt(0)) {
if (!ldapFilterRuleEnabled) {
throw ServiceException.FAILURE("LDAP filter is not allowed: " + rawRule, null);
}
rule = new LdapFilterRule(rawRule);
} else {
rule = parseFieldMapRule(rawRule);
}
parsedRules.add(rule);
}
}
return parsedRules;
}
private Rule parseFieldMapRule(String rawRule) throws ServiceException {
String[] parts = rawRule.split(MAP_DELIMITER);
if (parts.length != 2) {
throw ServiceException.FAILURE("Invalid config:" + rawRule +
" in " + Provisioning.A_zimbraMailSSLClientCertPrincipalMap, null);
}
try {
String certPart = parts[0].trim();
String zimbraPart = parts[1].trim();
CertField certField = parseCertField(certPart);
ZimbraKey zimbraKey = ZimbraKey.valueOf(zimbraPart);
Rule rule = new FieldMapRule(certField, zimbraKey);
return rule;
} catch (ServiceException e) {
throw ServiceException.FAILURE("Invalid config:" + rawRule +
" in " + Provisioning.A_zimbraMailSSLClientCertPrincipalMap, e);
}
}
static CertField parseCertField(String rawCertField) throws ServiceException {
// see if it is a KnownCertField
CertField certField = KnownCertField.parse(rawCertField);
if (certField == null) {
// see if it is a SubjectCertField
certField = SubjectCertField.parse(rawCertField);
}
if (certField == null) {
throw ServiceException.FAILURE("Invalid cert field:" + rawCertField, null);
}
return certField;
}
}
|
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.logic.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
/** The AS2 agreement protocol settings. */
@Fluent
public final class AS2ProtocolSettings {
@JsonIgnore private final ClientLogger logger = new ClientLogger(AS2ProtocolSettings.class);
/*
* The message connection settings.
*/
@JsonProperty(value = "messageConnectionSettings", required = true)
private AS2MessageConnectionSettings messageConnectionSettings;
/*
* The acknowledgement connection settings.
*/
@JsonProperty(value = "acknowledgementConnectionSettings", required = true)
private AS2AcknowledgementConnectionSettings acknowledgementConnectionSettings;
/*
* The MDN settings.
*/
@JsonProperty(value = "mdnSettings", required = true)
private AS2MdnSettings mdnSettings;
/*
* The security settings.
*/
@JsonProperty(value = "securitySettings", required = true)
private AS2SecuritySettings securitySettings;
/*
* The validation settings.
*/
@JsonProperty(value = "validationSettings", required = true)
private AS2ValidationSettings validationSettings;
/*
* The envelope settings.
*/
@JsonProperty(value = "envelopeSettings", required = true)
private AS2EnvelopeSettings envelopeSettings;
/*
* The error settings.
*/
@JsonProperty(value = "errorSettings", required = true)
private AS2ErrorSettings errorSettings;
/**
* Get the messageConnectionSettings property: The message connection settings.
*
* @return the messageConnectionSettings value.
*/
public AS2MessageConnectionSettings messageConnectionSettings() {
return this.messageConnectionSettings;
}
/**
* Set the messageConnectionSettings property: The message connection settings.
*
* @param messageConnectionSettings the messageConnectionSettings value to set.
* @return the AS2ProtocolSettings object itself.
*/
public AS2ProtocolSettings withMessageConnectionSettings(AS2MessageConnectionSettings messageConnectionSettings) {
this.messageConnectionSettings = messageConnectionSettings;
return this;
}
/**
* Get the acknowledgementConnectionSettings property: The acknowledgement connection settings.
*
* @return the acknowledgementConnectionSettings value.
*/
public AS2AcknowledgementConnectionSettings acknowledgementConnectionSettings() {
return this.acknowledgementConnectionSettings;
}
/**
* Set the acknowledgementConnectionSettings property: The acknowledgement connection settings.
*
* @param acknowledgementConnectionSettings the acknowledgementConnectionSettings value to set.
* @return the AS2ProtocolSettings object itself.
*/
public AS2ProtocolSettings withAcknowledgementConnectionSettings(
AS2AcknowledgementConnectionSettings acknowledgementConnectionSettings) {
this.acknowledgementConnectionSettings = acknowledgementConnectionSettings;
return this;
}
/**
* Get the mdnSettings property: The MDN settings.
*
* @return the mdnSettings value.
*/
public AS2MdnSettings mdnSettings() {
return this.mdnSettings;
}
/**
* Set the mdnSettings property: The MDN settings.
*
* @param mdnSettings the mdnSettings value to set.
* @return the AS2ProtocolSettings object itself.
*/
public AS2ProtocolSettings withMdnSettings(AS2MdnSettings mdnSettings) {
this.mdnSettings = mdnSettings;
return this;
}
/**
* Get the securitySettings property: The security settings.
*
* @return the securitySettings value.
*/
public AS2SecuritySettings securitySettings() {
return this.securitySettings;
}
/**
* Set the securitySettings property: The security settings.
*
* @param securitySettings the securitySettings value to set.
* @return the AS2ProtocolSettings object itself.
*/
public AS2ProtocolSettings withSecuritySettings(AS2SecuritySettings securitySettings) {
this.securitySettings = securitySettings;
return this;
}
/**
* Get the validationSettings property: The validation settings.
*
* @return the validationSettings value.
*/
public AS2ValidationSettings validationSettings() {
return this.validationSettings;
}
/**
* Set the validationSettings property: The validation settings.
*
* @param validationSettings the validationSettings value to set.
* @return the AS2ProtocolSettings object itself.
*/
public AS2ProtocolSettings withValidationSettings(AS2ValidationSettings validationSettings) {
this.validationSettings = validationSettings;
return this;
}
/**
* Get the envelopeSettings property: The envelope settings.
*
* @return the envelopeSettings value.
*/
public AS2EnvelopeSettings envelopeSettings() {
return this.envelopeSettings;
}
/**
* Set the envelopeSettings property: The envelope settings.
*
* @param envelopeSettings the envelopeSettings value to set.
* @return the AS2ProtocolSettings object itself.
*/
public AS2ProtocolSettings withEnvelopeSettings(AS2EnvelopeSettings envelopeSettings) {
this.envelopeSettings = envelopeSettings;
return this;
}
/**
* Get the errorSettings property: The error settings.
*
* @return the errorSettings value.
*/
public AS2ErrorSettings errorSettings() {
return this.errorSettings;
}
/**
* Set the errorSettings property: The error settings.
*
* @param errorSettings the errorSettings value to set.
* @return the AS2ProtocolSettings object itself.
*/
public AS2ProtocolSettings withErrorSettings(AS2ErrorSettings errorSettings) {
this.errorSettings = errorSettings;
return this;
}
/**
* Validates the instance.
*
* @throws IllegalArgumentException thrown if the instance is not valid.
*/
public void validate() {
if (messageConnectionSettings() == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
"Missing required property messageConnectionSettings in model AS2ProtocolSettings"));
} else {
messageConnectionSettings().validate();
}
if (acknowledgementConnectionSettings() == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
"Missing required property acknowledgementConnectionSettings in model AS2ProtocolSettings"));
} else {
acknowledgementConnectionSettings().validate();
}
if (mdnSettings() == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException("Missing required property mdnSettings in model AS2ProtocolSettings"));
} else {
mdnSettings().validate();
}
if (securitySettings() == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
"Missing required property securitySettings in model AS2ProtocolSettings"));
} else {
securitySettings().validate();
}
if (validationSettings() == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
"Missing required property validationSettings in model AS2ProtocolSettings"));
} else {
validationSettings().validate();
}
if (envelopeSettings() == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
"Missing required property envelopeSettings in model AS2ProtocolSettings"));
} else {
envelopeSettings().validate();
}
if (errorSettings() == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
"Missing required property errorSettings in model AS2ProtocolSettings"));
} else {
errorSettings().validate();
}
}
}
|
|
/*
* Copyright (c) 2013-2016 Cinchapi Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cinchapi.concourse.server.storage.temp;
import java.io.File;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.Mockito;
import com.cinchapi.common.base.TernaryTruth;
import com.cinchapi.concourse.server.GlobalState;
import com.cinchapi.concourse.server.io.FileSystem;
import com.cinchapi.concourse.server.storage.PermanentStore;
import com.cinchapi.concourse.server.storage.Store;
import com.cinchapi.concourse.server.storage.temp.Buffer;
import com.cinchapi.concourse.server.storage.temp.Limbo;
import com.cinchapi.concourse.server.storage.temp.Write;
import com.cinchapi.concourse.test.Variables;
import com.cinchapi.concourse.time.Time;
import com.cinchapi.concourse.util.Convert;
import com.cinchapi.concourse.util.TestData;
import com.google.common.collect.Lists;
/**
* Unit tests for {@link Buffer}.
*
* @author Jeff Nelson
*/
public class BufferTest extends LimboTest {
private static PermanentStore MOCK_DESTINATION = Mockito
.mock(PermanentStore.class);
static {
// NOTE: The Buffer assumes it is transporting to a Database, but we
// cannot mock that class with Mockito since it is final. Mocking the
// PermanentStore interface does not pose a problem as long as tests
// don't do something that would cause the Database#triggerSync() method
// to be called (i.e. transporting more than a page worth of Writes).
//
        // So, please use the Buffer#canTransport() method to check whether it
        // is okay to do a transport without causing a triggerSync(). And do
        // not unit test streaming writes in this test class (do that at a
        // level above, where an actual Database is defined)!
Mockito.doNothing().when(MOCK_DESTINATION)
.accept(Mockito.any(Write.class));
}
private String current;
@Override
protected Buffer getStore() {
current = TestData.DATA_DIR + File.separator + Time.now();
return new Buffer(current);
}
@Override
protected void cleanup(Store store) {
FileSystem.deleteDirectory(current);
}
@Test
public void testBufferCanAddPageWhileServicingRead()
throws InterruptedException {
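        // A read that iterates the Buffer while a concurrent write forces a new
        // page to be added should not observe a ConcurrentModificationException.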
int count = 0;
while (!((Buffer) store).canTransport()) {
add("foo", Convert.javaToThrift(count), 1);
count++;
}
        // Now add a second page's worth of writes, but don't spill over into
// a third page yet
int max = 0;
for (int i = count; i < (count * 2) - 2; i++) {
add("foo", Convert.javaToThrift(i), 1);
max = i;
}
final int value = max + 1;
final AtomicBoolean caughtException = new AtomicBoolean(false);
Thread read = new Thread(new Runnable() {
@Override
public void run() {
try {
store.select("foo", 1);
}
catch (ConcurrentModificationException e) {
caughtException.set(true);
}
}
});
Thread write = new Thread(new Runnable() {
@Override
public void run() {
add("foo", Convert.javaToThrift(value + 1), 1);
}
});
read.start();
write.start();
write.join();
read.join();
Assert.assertFalse(caughtException.get());
}
@Test
public void testIteratorAfterTransport() {
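        // After transporting some writes to the destination, the Limbo iterator
        // should only reflect the writes that have not yet been transported.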
((Buffer) store).transportRateMultiplier = 1;
List<Write> writes = getWrites();
int j = 0;
for (Write write : writes) {
add(write.getKey().toString(), write.getValue().getTObject(), write
.getRecord().longValue());
Variables.register("write_" + j, write);
j++;
}
Variables.register("size_pre_transport", writes.size());
int div = Variables.register("div", (TestData.getScaleCount() % 9) + 1);
int count = Variables.register("count", writes.size() / div);
for (int i = 0; i < count; i++) {
if(((Buffer) store).canTransport()) {
((Buffer) store).transport(MOCK_DESTINATION);
writes.remove(0);
}
else {
break;
}
}
Variables.register("size_post_transport", writes.size());
Iterator<Write> it0 = ((Limbo) store).iterator();
Iterator<Write> it1 = writes.iterator();
while (it1.hasNext()) {
Assert.assertTrue(it0.hasNext());
Write w0 = it0.next();
Write w1 = it1.next();
Assert.assertEquals(w0, w1);
}
Assert.assertFalse(it0.hasNext());
}
@Test
public void testWaitUntilTransportable() throws InterruptedException {
final AtomicLong later = new AtomicLong(0);
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
((Buffer) store).waitUntilTransportable();
later.set(Time.now());
}
});
thread.start();
long before = Time.now();
while (!((Buffer) store).canTransport()) {
before = Time.now();
add(TestData.getString(), TestData.getTObject(), TestData.getLong());
}
thread.join(); // make sure thread finishes before comparing
Assert.assertTrue(later.get() > before);
}
@Test
@Ignore
public void testOnDiskIterator() {
Buffer buffer = (Buffer) store;
int count = TestData.getScaleCount();
List<Write> expected = Lists.newArrayList();
for (int i = 0; i < count; ++i) {
Write write = Write.add(TestData.getSimpleString(),
TestData.getTObject(), i);
buffer.insert(write);
expected.add(write);
Variables.register("expected_" + i, write);
}
buffer.stop();
Iterator<Write> it = Buffer.onDiskIterator(buffer.getBackingStore());
List<Write> stored = Lists.newArrayList();
int i = 0;
while (it.hasNext()) {
Write write = it.next();
stored.add(write);
Variables.register("actual_" + i, write);
++i;
}
Assert.assertEquals(expected, stored);
}
@Test
public void testVerifyFastTrue() {
Buffer buffer = (Buffer) store;
Write write = Write.add("foo", Convert.javaToThrift("bar"), 1);
buffer.insert(write);
Assert.assertEquals(TernaryTruth.TRUE, buffer.verifyFast(write));
}
@Test
public void testVerifyFastFalseRemoved() {
Buffer buffer = (Buffer) store;
Write write = Write.add("foo", Convert.javaToThrift("bar"), 1);
buffer.insert(write);
buffer.insert(write.inverse());
Assert.assertEquals(TernaryTruth.FALSE, buffer.verifyFast(write));
}
@Test
public void testVerifyFastFalseNeverAdded() {
Buffer buffer = (Buffer) store;
Write write = Write.add("foo", Convert.javaToThrift("bar"), 1);
Assert.assertEquals(TernaryTruth.FALSE, buffer.verifyFast(write));
}
@Test
public void testVerifyFastUnsure() {
Buffer buffer = (Buffer) store;
Write write = Write.add("foo", Convert.javaToThrift("bar"), 1);
buffer.insert(write);
while (!buffer.canTransport()) {
buffer.insert(TestData.getWriteAdd());
}
buffer.transport(MOCK_DESTINATION);
Assert.assertEquals(TernaryTruth.UNSURE, buffer.verifyFast(write));
}
@Test
public void testOnDiskIteratorEmptyDirectory() {
Buffer buffer = (Buffer) store;
Buffer.onDiskIterator(buffer.getBackingStore() + "/foo").hasNext();
Assert.assertTrue(true); // lack of exception means test passes
}
@Test
public void testPageExpansion() {
// NOTE: This test is designed to ensure that buffer pages can
// automatically expand to accommodate a write that is larger than
// BUFFER_PAGE_SIZE
int oldBufferPageSize = GlobalState.BUFFER_PAGE_SIZE;
try {
GlobalState.BUFFER_PAGE_SIZE = 4;
Buffer buffer = getStore();
buffer.start();
buffer.insert(Write.add("foo", Convert.javaToThrift(4), 1));
Assert.assertTrue(buffer.contains(1));
}
finally {
GlobalState.BUFFER_PAGE_SIZE = oldBufferPageSize;
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.storage.earthobservation;
import java.io.BufferedReader;
import java.io.IOException;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.time.OffsetTime;
import java.time.format.DateTimeParseException;
import java.time.temporal.ChronoField;
import javax.measure.unit.Unit;
import javax.measure.quantity.Length;
import javax.measure.unit.SI;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.acquisition.AcquisitionInformation;
import org.opengis.metadata.acquisition.OperationType;
import org.opengis.metadata.citation.DateType;
import org.opengis.metadata.content.AttributeGroup;
import org.opengis.metadata.content.CoverageContentType;
import org.opengis.metadata.content.ImageDescription;
import org.opengis.metadata.distribution.Distribution;
import org.opengis.metadata.extent.Extent;
import org.opengis.metadata.identification.Identification;
import org.opengis.metadata.identification.Progress;
import org.opengis.metadata.maintenance.ScopeCode;
import org.apache.sis.metadata.iso.DefaultIdentifier;
import org.apache.sis.metadata.iso.DefaultMetadata;
import org.apache.sis.metadata.iso.acquisition.DefaultAcquisitionInformation;
import org.apache.sis.metadata.iso.acquisition.DefaultEvent;
import org.apache.sis.metadata.iso.acquisition.DefaultInstrument;
import org.apache.sis.metadata.iso.acquisition.DefaultOperation;
import org.apache.sis.metadata.iso.acquisition.DefaultPlatform;
import org.apache.sis.metadata.iso.citation.Citations;
import org.apache.sis.metadata.iso.citation.DefaultCitation;
import org.apache.sis.metadata.iso.citation.DefaultCitationDate;
import org.apache.sis.metadata.iso.content.DefaultAttributeGroup;
import org.apache.sis.metadata.iso.content.DefaultBand;
import org.apache.sis.metadata.iso.content.DefaultImageDescription;
import org.apache.sis.metadata.iso.distribution.DefaultFormat;
import org.apache.sis.metadata.iso.distribution.DefaultDistribution;
import org.apache.sis.metadata.iso.extent.DefaultExtent;
import org.apache.sis.metadata.iso.extent.DefaultGeographicBoundingBox;
import org.apache.sis.metadata.iso.extent.DefaultTemporalExtent;
import org.apache.sis.metadata.iso.identification.DefaultDataIdentification;
import org.apache.sis.metadata.iso.identification.DefaultAggregateInformation;
import org.apache.sis.storage.DataStoreException;
import org.apache.sis.util.iso.DefaultInternationalString;
import org.apache.sis.util.logging.WarningListeners;
import static java.util.Collections.singleton;
import org.apache.sis.metadata.iso.citation.AbstractParty;
import org.apache.sis.metadata.iso.citation.DefaultResponsibility;
import org.apache.sis.metadata.iso.identification.DefaultKeywords;
import static org.apache.sis.storage.earthobservation.LandsatKeys.*;
/**
* Parses Landsat metadata as an {@linkplain DefaultMetadata ISO-19115 Metadata}
* object.
*
* @author Thi Phuong Hao Nguyen (VNSC)
* @author Remi Marechal (Geomatys)
* @since 0.8
* @version 0.8
* @module
*/
public class LandsatReaderVNSC {
/**
* The description of all bands that can be included in a Landsat coverage.
* This description is hard-coded and shared by all metadata instances.
*/
private static final AttributeGroup BANDS;
static {
final double[] wavelengths = {433, 482, 562, 655, 865, 1610, 2200, 590, 1375, 10800, 12000};
final String[] nameband = {
"Coastal Aerosol", // 433 nm
"Blue", // 482 nm
"Green", // 562 nm
"Red", // 655 nm
"Near-Infrared", // 865 nm
"Short Wavelength Infrared (SWIR) 1", // 1610 nm
"Short Wavelength Infrared (SWIR) 2", // 2200 nm
"Panchromatic", // 590 nm
"Cirrus", // 1375 nm
"Thermal Infrared Sensor (TIRS) 1", // 10800 nm
"Thermal Infrared Sensor (TIRS) 2" // 12000 nm
};
final DefaultBand[] bands = new DefaultBand[wavelengths.length];
final Unit<Length> nm = SI.MetricPrefix.NANO(SI.METRE);
for (int i = 0; i < bands.length; i++) {
final DefaultBand band = new DefaultBand();
band.setDescription(new DefaultInternationalString(nameband[i]));
band.setPeakResponse(wavelengths[i]);
band.setBoundUnits(nm);
bands[i] = band;
}
final DefaultAttributeGroup attributes = new DefaultAttributeGroup(CoverageContentType.PHYSICAL_MEASUREMENT, null);
attributes.setAttributes(Arrays.asList(bands));
attributes.freeze();
BANDS = attributes;
}
/**
* All properties found in the Landsat metadata file, except {@code GROUP} and {@code END_GROUP}. Example:
*
* {@preformat text
*   DATE_ACQUIRED         = 2014-03-12
*   SCENE_CENTER_TIME     = 03:02:01.5339408Z
*   CORNER_UL_LAT_PRODUCT = 12.61111
*   CORNER_UL_LON_PRODUCT = 108.33624
*   CORNER_UR_LAT_PRODUCT = 12.62381
*   CORNER_UR_LON_PRODUCT = 110.44017
* }
*/
private final Map<String, String> properties;
/**
* Where to send the warnings.
*
* @todo Set a reference given by the data store.
*/
private WarningListeners<?> listeners;
/**
* Creates a new metadata parser from the given characters reader.
*
* @param reader a reader opened on the Landsat file. It is the caller's
* responsibility to close this reader.
* @throws IOException if an I/O error occurred while reading the given
* stream.
* @throws DataStoreException if the content is not a Landsat file.
*/
public LandsatReaderVNSC(final BufferedReader reader) throws IOException, DataStoreException {
properties = new HashMap<>();
String line;
while ((line = reader.readLine()) != null) {
line = line.trim();
if (!line.isEmpty() && line.charAt(0) != '#') {
/*
* Landsat metadata ends with the END keyword. If we find that keyword, stop reading.
* All remaining lines (if any) will be ignored.
*/
if (line.equals("END")) {
break;
}
/*
* Separate the line into its key and value. For example in CORNER_UL_LAT_PRODUCT = 12.61111,
* the key will be CORNER_UL_LAT_PRODUCT and the value will be 12.61111.
*/
int separator = line.indexOf('=');
if (separator < 0) {
throw new DataStoreException("Not a key-value pair.");
}
String key = line.substring(0, separator).trim().toUpperCase(Locale.US);
if (!key.equals("GROUP") && !key.equals("END_GROUP")) {
String value = line.substring(separator + 1).trim();
if (key.isEmpty()) {
throw new DataStoreException("Key shall not be empty.");
}
/*
* In a Landsat file, String values are between quotes. Example: STATION_ID = "LGN"
* If such quotes are found, remove them.
*/
int length = value.length();
if (length >= 2 && value.charAt(0) == '"' && value.charAt(length - 1) == '"') {
value = value.substring(1, length - 1).trim();
length = value.length();
}
/*
* Store only non-empty values. If a different value was already specified for the same key,
* this is considered as an error.
*/
if (length != 0) {
String previous = properties.put(key, value);
if (previous != null && !value.equals(previous)) {
throw new DataStoreException("Duplicated values for \"" + key + "\".");
}
}
}
}
}
}
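// Illustrative example (the metadata lines below are samples of the Landsat MTL format, not values
// read from any particular file): given
//
//     GROUP = L1_METADATA_FILE
//       SPACECRAFT_ID = "LANDSAT_8"
//       CLOUD_COVER = 0.25
//     END_GROUP = L1_METADATA_FILE
//     END
//
// the constructor above skips the GROUP/END_GROUP lines, strips the quotes around "LANDSAT_8",
// stops at END, and leaves the properties map as {SPACECRAFT_ID=LANDSAT_8, CLOUD_COVER=0.25}.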
/**
* Returns the property value associated to the given key, or {@code null}
* if none.
*
* @param key the key for which to get the property value.
* @return the property value associated to the given key, or {@code null} if
* none.
*/
private String getValue(String key) {
return properties.get(key);
}
/**
* Returns the floating-point value associated to the given key, or
* {@code NaN} if none.
*
* @param key the key for which to get the floating-point value.
* @return the floating-point value associated to the given key, or
* {@link Double#NaN} if none.
* @throws NumberFormatException if the property associated to the given key
* can not be parsed as a floating-point number.
*/
private double getNumericValue(String key) throws NumberFormatException {
String value = getValue(key);
return (value != null) ? Double.parseDouble(value) : Double.NaN;
}
/**
* Returns the minimal or maximal value associated to the given two keys, or
* {@code NaN} if none.
*
* @param key1 the key for which to get the first floating-point value.
* @param key2 the key for which to get the second floating-point value.
* @param max {@code true} for the maximal value, or {@code false} for the
* minimal value.
* @return the minimal (if {@code max} is false) or maximal (if {@code max}
* is true) floating-point value associated to the given keys, or
* {@link Double#NaN} if none.
* @throws NumberFormatException if the property associated to one of the
* given keys can not be parsed as a floating-point number.
*/
private double getExtremumValue(String key1, String key2, boolean max) throws NumberFormatException {
double value1 = getNumericValue(key1);
double value2 = getNumericValue(key2);
if (max ? (value2 > value1) : (value2 < value1)) {
return value2;
} else {
return value1;
}
}
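// Illustrative example (values taken from the class javadoc above): with CORNER_UL_LON_PRODUCT = 108.33624
// and CORNER_LL_LON_PRODUCT absent (NaN), getExtremumValue(CORNER_UL_LON_PRODUCT, CORNER_LL_LON_PRODUCT, false)
// returns 108.33624, because the NaN comparison evaluates to false and value1 is returned; when both values
// are present, the smaller (max == false) or larger (max == true) of the two is returned.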
/**
* Returns the date associated to the given key, or {@code null} if none.
* The date is expected to be formatted in ISO 8601 format.
*
* @param key the key for which to get the date value.
* @return the date associated to the given key, or {@code null} if none.
* @throws DateTimeParseException if the date can not be parsed.
*/
private Date getDate(final String key) throws DateTimeParseException {
final String value = getValue(key);
if (value == null) {
return null;
}
final OffsetDateTime time = OffsetDateTime.parse(value);
return new Date(time.toEpochSecond() * 1000 + time.getNano() / 1000000);
}
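// Illustrative example (hypothetical value): a FILE_DATE entry such as 2014-03-12T06:06:35Z is parsed
// with OffsetDateTime.parse(...) and converted to a java.util.Date with millisecond precision;
// getDate(FILE_DATE) returns null when the key is absent.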
/**
* Returns the date and time associated to the given key, or {@code null} if
* none. The date and time are expected to be in two separated fields, with
* each field formatted in ISO 8601 format.
*
* @param dateKey the key for which to get the date value.
* @param timeKey the key for which to get the time value.
* @return the date and time associated to the given keys, or {@code null}
* if none.
* @throws DateTimeParseException if the date can not be parsed.
*/
private Date getDate(final String dateKey, final String timeKey) throws DateTimeParseException {
String value = getValue(dateKey);
if (value == null) {
return null;
}
final LocalDate date = LocalDate.parse(value);
value = getValue(timeKey);
final long millis;
if (value == null) {
// LocalDate does not support ChronoField.INSTANT_SECONDS; derive midnight UTC from the epoch day instead.
millis = date.getLong(ChronoField.EPOCH_DAY) * (24 * 60 * 60 * 1000L);
} else {
final OffsetDateTime time = date.atTime(OffsetTime.parse(value));
millis = time.toEpochSecond() * 1000 + time.getNano() / 1000000;
}
return new Date(millis);
}
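// Illustrative example: with DATE_ACQUIRED = 2014-03-12 and SCENE_CENTER_TIME = 03:02:01.5339408Z
// (the sample values in the class javadoc above), this method combines LocalDate.parse("2014-03-12")
// with OffsetTime.parse("03:02:01.5339408Z") and returns the corresponding java.util.Date, truncated
// to millisecond precision; if the time key is absent, midnight UTC of the given date is used.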
/**
* Gets information about an image's suitability for use.
*
* @throws DataStoreException if a property value can not be parsed as a
* number or a date.
*/
private ImageDescription createImageDescription() throws DataStoreException {
final DefaultImageDescription content = new DefaultImageDescription();
try {
double value;
if (0 <= (value = getNumericValue(CLOUD_COVER))) {
content.setCloudCoverPercentage(value);
}
if (!Double.isNaN(value = getNumericValue(SUN_AZIMUTH))) {
content.setIlluminationAzimuthAngle(value);
}
if (!Double.isNaN(value = getNumericValue(SUN_ELEVATION))) {
content.setIlluminationElevationAngle(value);
}
} catch (NumberFormatException e) {
throw new DataStoreException("Can not read content information.", e);
}
content.setAttributeGroups(singleton(BANDS));
return content;
}
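// Note: the "0 <= value" check above skips NaN and negative CLOUD_COVER values (Landsat metadata
// appears to use negative values when cloud cover was not computed), while SUN_AZIMUTH and
// SUN_ELEVATION are copied whenever they are present and numeric.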
/**
* Gets the geographic and temporal extent for identification info, or
* {@code null} if none. This method takes the data acquisition time in
* argument in order to avoid computing it twice.
*
* @param sceneTime the data acquisition time, or {@code null} if none.
* @return the data extent in Identification info, or {@code null} if none.
* @throws DataStoreException if a property value can not be parsed as a
* number or a date.
*/
private Extent createExtent(final Date sceneTime) throws DataStoreException {
final DefaultGeographicBoundingBox box;
try {
box = new DefaultGeographicBoundingBox(
getExtremumValue(CORNER_UL_LON_PRODUCT, CORNER_LL_LON_PRODUCT, false), // westBoundLongitude
getExtremumValue(CORNER_UR_LON_PRODUCT, CORNER_LR_LON_PRODUCT, true), // eastBoundLongitude
getExtremumValue(CORNER_LL_LAT_PRODUCT, CORNER_LR_LAT_PRODUCT, false), // southBoundLatitude
getExtremumValue(CORNER_UL_LAT_PRODUCT, CORNER_UR_LAT_PRODUCT, true)); // northBoundLatitude
} catch (NumberFormatException e) {
throw new DataStoreException("Can not read the geographic bounding box.", e);
}
final DefaultExtent extent = new DefaultExtent();
final boolean isEmpty = box.isEmpty();
if (!isEmpty) {
extent.setGeographicElements(singleton(box));
}
if (sceneTime != null) {
try {
final DefaultTemporalExtent t = new DefaultTemporalExtent();
t.setBounds(sceneTime, sceneTime);
extent.setTemporalElements(singleton(t));
} catch (UnsupportedOperationException e) {
// May happen if the temporal module (which is optional) is not on the classpath.
warning(e);
if (isEmpty) {
return null;
}
}
}
return extent;
}
/**
* Gets the acquisition information, or {@code null} if none. This method
* takes the data acquisition time in argument in order to avoid
* computing it twice.
*
* @param sceneTime the data acquisition time, or {@code null} if none.
* @return the data for the Acquisition Information, or {@code null} if
* none.
*/
private AcquisitionInformation createAcquisitionInformation(final Date sceneTime) {
final DefaultAcquisitionInformation acquisition = new DefaultAcquisitionInformation();
final DefaultPlatform platform = new DefaultPlatform();
String value = getValue(SPACECRAFT_ID);
boolean isEmpty = true;
if (value != null) {
platform.setIdentifier(new DefaultIdentifier(value));
isEmpty = false;
}
value = getValue(SENSOR_ID);
if (value != null) {
final DefaultInstrument instrument = new DefaultInstrument();
instrument.setIdentifier(new DefaultIdentifier(value));
platform.setInstruments(singleton(instrument));
isEmpty = false;
}
if (!isEmpty) {
acquisition.setPlatforms(singleton(platform));
}
if (sceneTime != null) {
final DefaultEvent event = new DefaultEvent();
event.setTime(sceneTime);
final DefaultOperation op = new DefaultOperation();
op.setSignificantEvents(singleton(event));
op.setType(OperationType.REAL);
op.setStatus(Progress.COMPLETED);
acquisition.setOperations(singleton(op));
isEmpty = false;
}
return isEmpty ? null : acquisition;
}
/**
* Gets basic information about the distributor of, and options for obtaining,
* the resource.
*
* @return the data distributor information, or {@code null} if none.
*/
private Distribution createDistribution() {
DefaultDistribution distribution = new DefaultDistribution();
DefaultFormat format = new DefaultFormat();
String value = getValue(OUTPUT_FORMAT);
if (value != null) {
format.setName(new DefaultInternationalString(value));
}
distribution.setDistributionFormats(singleton(format));
return distribution;
}
/**
* Gets basic information required to uniquely identify the data, or
* {@code null} if none. This method takes the metadata and data
* acquisition times in argument in order to avoid computing them twice.
*
* @param metadataTime the metadata file creation time, or {@code null} if
* none.
* @param sceneTime the data acquisition time, or {@code null} if none.
* @return the data identification information, or {@code null} if none.
* @throws DataStoreException if a property value can not be parsed as a
* number or a date.
*/
private Identification createIdentification(final Date metadataTime, final Date sceneTime) throws DataStoreException {
final DefaultDataIdentification identification = new DefaultDataIdentification();
final DefaultCitation citation = new DefaultCitation();
boolean isEmpty = true;
if (metadataTime != null) {
citation.setDates(singleton(new DefaultCitationDate(metadataTime, DateType.PUBLICATION)));
isEmpty = false;
}
String value = getValue(LANDSAT_SCENE_ID);
if (value != null) {
citation.setTitle(new DefaultInternationalString(value));
isEmpty = false;
}
if (!isEmpty) {
identification.setCitation(citation);
}
final Extent extent = createExtent(sceneTime);
if (extent != null) {
identification.setExtents(singleton(extent));
isEmpty = false;
}
value = getValue(ORIGIN);
if (value != null) {
DefaultResponsibility responsibility = new DefaultResponsibility();
AbstractParty party = new AbstractParty();
party.setName(new DefaultInternationalString(value));
responsibility.getParties().add(party);
citation.getCitedResponsibleParties().add(responsibility);
isEmpty = false;
}
value = getValue(ORIGIN);
if (value != null) {
DefaultCitation citation1 = new DefaultCitation();
DefaultAggregateInformation aggregateInformation = new DefaultAggregateInformation();
citation1.setTitle(new DefaultInternationalString(value));
aggregateInformation.setAggregateDataSetName(citation1);
identification.setAggregationInfo(singleton(aggregateInformation));
isEmpty = false;
}
value = getValue(SPACECRAFT_ID);
if (value != null) {
final DefaultKeywords keyword = new DefaultKeywords(value);
identification.setDescriptiveKeywords(singleton(keyword));
isEmpty = false;
}
return isEmpty ? null : identification;
}
/**
* Returns the metadata about the resources described in the Landsat file.
*
* @return the metadata about Landsat resources.
* @throws DataStoreException if a property value can not be parsed as a
* number or a date.
*/
public Metadata read() throws DataStoreException {
final DefaultMetadata metadata = new DefaultMetadata();
metadata.setMetadataStandards(Citations.ISO_19115);
final Date metadataTime = getDate(FILE_DATE);
if (metadataTime != null) {
metadata.setDateInfo(singleton(new DefaultCitationDate(metadataTime, DateType.CREATION)));
}
metadata.setLanguage(Locale.ENGLISH);
metadata.setFileIdentifier(getValue(LANDSAT_SCENE_ID));
final Distribution metadataDistribution = createDistribution();
metadata.setDistributionInfo(singleton(metadataDistribution));
final Date sceneTime = getDate(DATE_ACQUIRED, SCENE_CENTER_TIME);
final Identification identification = createIdentification(metadataTime, sceneTime);
if (identification != null) {
metadata.setIdentificationInfo(singleton(identification));
}
final ImageDescription content = createImageDescription();
if (content != null) {
metadata.setContentInfo(singleton(content));
}
final AcquisitionInformation acquisition = createAcquisitionInformation(sceneTime);
if (acquisition != null) {
metadata.setAcquisitionInformation(singleton(acquisition));
}
if (getValue(DATA_TYPE) != null) {
metadata.setHierarchyLevels(singleton(ScopeCode.valueOf(getValue(DATA_TYPE))));
}
return metadata;
}
/**
* Invoked when a non-fatal exception occurred while reading metadata. This
* method sends a record to the registered listeners if any, or logs the
* record otherwise.
*/
private void warning(final Exception e) {
if (listeners != null) {
listeners.warning(null, e);
}
}
}
|
|
// Copyright (C) 2008 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.caja.lang.css;
import com.google.caja.SomethingWidgyHappenedError;
import com.google.caja.config.AllowedFileResolver;
import com.google.caja.config.ConfigUtil;
import com.google.caja.config.ImportResolver;
import com.google.caja.lexer.FilePosition;
import com.google.caja.lexer.InputSource;
import com.google.caja.lexer.ParseException;
import com.google.caja.lexer.TokenConsumer;
import com.google.caja.parser.ParseTreeNode;
import com.google.caja.parser.css.CssPropertySignature;
import com.google.caja.parser.js.ArrayConstructor;
import com.google.caja.parser.js.BooleanLiteral;
import com.google.caja.parser.js.Declaration;
import com.google.caja.parser.js.Expression;
import com.google.caja.parser.js.Identifier;
import com.google.caja.parser.js.IntegerLiteral;
import com.google.caja.parser.js.MultiDeclaration;
import com.google.caja.parser.js.ObjectConstructor;
import com.google.caja.parser.js.Statement;
import com.google.caja.parser.js.StringLiteral;
import com.google.caja.parser.js.ValueProperty;
import com.google.caja.parser.quasiliteral.QuasiBuilder;
import com.google.caja.render.JsMinimalPrinter;
import com.google.caja.reporting.EchoingMessageQueue;
import com.google.caja.reporting.MessageContext;
import com.google.caja.reporting.MessageQueue;
import com.google.caja.reporting.RenderContext;
import com.google.caja.reporting.SimpleMessageQueue;
import com.google.caja.tools.BuildCommand;
import com.google.caja.util.Bag;
import com.google.caja.util.Charsets;
import com.google.caja.util.Name;
import com.google.caja.util.Pair;
import com.google.caja.util.Strings;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.Writer;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
/**
* Operates on CSS property signatures to come up with a schema that can be
* used to validate properties.
*
* <p>
* This class produces a javascript file like<pre>
* var CSS_PROP_BIT_x = ...;
* // Sets of allowed literal tokens.
* var CSS_LIT_GROUP = [["auto",...],...];
* var CSS_REGEX = [/^...$/];
* var cssSchema = {
* "float": {
* // Describe the kinds of tokens that can appear in the named
* // property's value and any additional restrictions.
* cssPropBits: CSS_PROP_BIT_x | CSS_PROP_BIT_y | ...,
* // Groups of literal values allowed including keywords and specific
* // numeric values like font-weight:300
* cssLitGroup: [CSS_LIT_GROUP[1],CSS_LIT_GROUP[3],CSS_LIT_GROUP[16]],
* // Schema keys for functions that are allowed (non-transitively).
* cssFns: []
* },
* ...
* // Functions are top-level constructs that have their own filters which
* // can be applied to their actuals.
* "rgba()": { ... },
* ...
* };
* </pre>
*
* <h3>Program Flow</h3>
* <p>
* This class examines a schema and builds a list of all allowed CSS properties and functions.
* It then tries to deduce for each property value and function the set of
* keywords/literal token values, how to interpret quoted strings, and what to
* do with loose identifiers that do not match a known keyword.
* <p>
* Once it has collections of keywords/literal-tokens, it tries to group
* commonly co-occurring literal-tokens together to reduce download size.
* Finally, it identifies patterns like {@code border-top} and
* {@code border-bottom} which have identical results.
* <p>
* Lastly, it builds a javascript parse tree that assigns the {@code css}
* namespace to an object whose keys are CSS property names, and whose
* values are data maps similar to the example code above.
*
* <p>
* "sanitize-css.js" uses this map extensively to sanitize & normalize CSS
* properties, rewriting URIs as needed.
*
* @author mikesamuel@gmail.com
*/
public class CssPropertyPatterns {
private final CssSchema schema;
public CssPropertyPatterns(CssSchema schema) {
this.schema = schema;
}
static final class CssPropertyData {
final String key;
final CssPropertySignature sig;
final EnumSet<CssPropBit> properties;
final Set<String> literals;
final Set<CssPropertySignature.CallSignature> fns;
CssPropertyData(String key, CssPropertySignature sig) {
assert key.equals(Strings.lower(key)) : key;
this.key = key;
this.sig = sig;
this.properties = EnumSet.noneOf(CssPropBit.class);
this.literals = Sets.newHashSet();
this.fns = Sets.newTreeSet(SignatureComparator.SINGLETON);
}
}
/**
* Generates a data map for the given signature.
*/
public CssPropertyData cssPropertyToData(
String key, CssPropertySignature sig) {
CssPropertyData data = new CssPropertyData(key, sig);
new Inspector(data).inspect();
return data;
}
private static final Set<String> KNOWN_VENDOR_PREFIXES = ImmutableSet.of(
"apple",
"css",
"epub",
"khtml",
"moz",
"ms",
"mso",
"o",
"rim",
"wap",
"webkit",
"xv"
);
public static String withoutVendorPrefix(String cssIdentifier) {
if (cssIdentifier.startsWith("-")) {
int dash = cssIdentifier.indexOf('-', 1);
if (dash >= 0) {
String possiblePrefix = cssIdentifier.substring(1, dash);
if (KNOWN_VENDOR_PREFIXES.contains(possiblePrefix)) {
return cssIdentifier.substring(dash + 1);
}
}
}
return cssIdentifier;
}
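// Illustrative examples (not taken from the schema files):
//   withoutVendorPrefix("-moz-border-radius") returns "border-radius"  ("moz" is a known prefix)
//   withoutVendorPrefix("-foo-bar")           returns "-foo-bar"       (unknown prefix, unchanged)
//   withoutVendorPrefix("color")              returns "color"          (no leading dash)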
public static boolean hasVendorPrefix(String cssIdentifier) {
return !cssIdentifier.equals(withoutVendorPrefix(cssIdentifier));
}
/**
* Walks a property signature to figure out what tokens can comprise its
* value and how non-symbolic tokens like quoted strings, and non-keyword
* identifiers are used.
*/
private class Inspector {
/** Modified in-place as the inspector encounters symbols. */
final CssPropertyData data;
/** Avoid infinitely recursing on symbol cycles. */
private final Bag<String> refsUsed;
private Inspector(CssPropertyData data) {
this.data = data;
this.refsUsed = Bag.newHashBag();
}
void inspect() {
this.data.literals.clear();
this.data.properties.clear();
this.data.fns.clear();
inspectSig(data.sig);
}
private void inspectSig(CssPropertySignature sig) {
// Dispatch to a set of handlers that either append balanced content to
// out, or append cruft and return null.
if (sig instanceof CssPropertySignature.LiteralSignature) {
inspectLit((CssPropertySignature.LiteralSignature) sig);
} else if (sig instanceof CssPropertySignature.RepeatedSignature) {
inspectRep((CssPropertySignature.RepeatedSignature) sig);
} else if (sig instanceof CssPropertySignature.PropertyRefSignature) {
inspectRef((CssPropertySignature.PropertyRefSignature) sig);
} else if (sig instanceof CssPropertySignature.SeriesSignature) {
inspectSeries((CssPropertySignature.SeriesSignature) sig);
} else if (sig instanceof CssPropertySignature.SymbolSignature) {
inspectSymbol((CssPropertySignature.SymbolSignature) sig);
} else if (sig instanceof CssPropertySignature.SetSignature
|| sig instanceof CssPropertySignature.ExclusiveSetSignature) {
inspectSet(sig);
} else if (sig instanceof CssPropertySignature.CallSignature) {
inspectCall((CssPropertySignature.CallSignature) sig);
} else if (sig instanceof CssPropertySignature.ProgIdSignature) {
// Ignore. progid is of interest for old versions of IE and should
// probably be obsoleted.
} else {
throw new SomethingWidgyHappenedError(
sig + " : " + sig.getClass().getSimpleName());
}
}
private void inspectLit(CssPropertySignature.LiteralSignature lit) {
String litValue = lit.getValue();
// Match some trailing whitespace.
// Since some patterns can match nothing (e.g. foo*), we make sure that
// all positive matches are followed by token-breaking space.
// The pattern as a whole can then be matched against the value with one
// space added at the end.
data.literals.add(withoutVendorPrefix(litValue));
}
private void inspectRep(CssPropertySignature.RepeatedSignature sig) {
CssPropertySignature rep = sig.getRepeatedSignature();
inspectSig(rep);
}
private void inspectRef(CssPropertySignature.PropertyRefSignature sig) {
Name propertyName = sig.getPropertyName();
if (refsUsed.incr(propertyName.getCanonicalForm()) == 0) {
CssSchema.CssPropertyInfo p = schema.getCssProperty(propertyName);
if (p == null) {
throw new SomethingWidgyHappenedError(
"Unsatisfied reference " + propertyName);
}
inspectSig(p.sig);
}
}
private void inspectSeries(CssPropertySignature.SeriesSignature sig) {
for (CssPropertySignature child : sig.children()) {
inspectSig(child);
}
}
private void inspectSymbol(CssPropertySignature.SymbolSignature sig) {
Name symbolName = sig.getValue();
CssSchema.SymbolInfo s = schema.getSymbol(symbolName);
if (s != null) {
inspectSig(s.sig);
} else if (!inspectBuiltin(symbolName)) {
throw new SomethingWidgyHappenedError(
"unknown CSS symbol " + symbolName);
}
}
private void inspectSet(CssPropertySignature sig) {
for (CssPropertySignature child : sig.children()) {
inspectSig(child);
}
}
private void inspectCall(CssPropertySignature.CallSignature sig) {
data.fns.add(sig);
}
private boolean inspectBuiltin(Name name) {
String key = name.getCanonicalForm();
int colon = key.lastIndexOf(':');
boolean negative = key.lastIndexOf('-') > colon;
String baseKey = colon >= 0 ? key.substring(0, colon) : key;
CssPropBit b = BUILTIN_PROP_BITS.get(baseKey);
if (b == null) {
return false;
}
data.properties.add(b);
// The negative bit allows for some schemas to reject positioning
// outside the parents' bounding boxes, and negative offsets for clip
// regions.
if (b == CssPropBit.QUANTITY && (colon < 0 || negative)) {
// TODO: maybe tighten this condition
data.properties.add(CssPropBit.NEGATIVE_QUANTITY);
}
return true;
}
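// Illustrative note: for a builtin symbol key without a colon, e.g. "length", the QUANTITY bit is added
// together with NEGATIVE_QUANTITY (colon < 0); for a range-restricted form such as "length:0"
// (hypothetical spelling of a non-negative range) only QUANTITY is added, unless a '-' appears after
// the colon, in which case NEGATIVE_QUANTITY is added as well.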
}
private static final Map<String, CssPropBit> BUILTIN_PROP_BITS
= new ImmutableMap.Builder<String, CssPropBit>()
.put("angle", CssPropBit.QUANTITY)
.put("frequency", CssPropBit.QUANTITY)
.put("global-name", CssPropBit.GLOBAL_NAME)
.put("hex-color", CssPropBit.HASH_VALUE)
.put("integer", CssPropBit.QUANTITY)
.put("length", CssPropBit.QUANTITY)
.put("number", CssPropBit.QUANTITY)
.put("percentage", CssPropBit.QUANTITY)
.put("property-name", CssPropBit.PROPERTY_NAME)
.put("quotable-word", CssPropBit.UNRESERVED_WORD)
.put("specific-voice", CssPropBit.QSTRING)
.put("string", CssPropBit.QSTRING)
.put("time", CssPropBit.QUANTITY)
.put("unicode-range", CssPropBit.UNICODE_RANGE)
.put("unreserved-word", CssPropBit.UNRESERVED_WORD)
.put("uri", CssPropBit.URL)
.put("z-index", CssPropBit.QUANTITY)
.build();
public static void generatePatterns(CssSchema schema, Appendable out)
throws IOException {
FilePosition unk = FilePosition.UNKNOWN;
CssPropertyPatterns pp = new CssPropertyPatterns(schema);
List<CssSchema.CssPropertyInfo> props
= Lists.newArrayList(schema.getCssProperties());
Collections.sort(
props, new Comparator<CssSchema.CssPropertyInfo>() {
public int compare(CssSchema.CssPropertyInfo a,
CssSchema.CssPropertyInfo b) {
return a.name.compareTo(b.name);
}
});
List<Pair<CssSchema.CssPropertyInfo, CssPropertyData>> propData
= Lists.newArrayList();
List<Expression> stringPool = Lists.newArrayList();
List<Expression> regexPool = Lists.newArrayList();
// Inspect each property's signature in the schema.
Set<String> keys = Sets.newHashSet();
for (CssSchema.CssPropertyInfo prop : props) {
if (!schema.isPropertyAllowed(prop.name)) { continue; }
String key = prop.name.getCanonicalForm();
if (hasVendorPrefix(key)) { continue; }
CssPropertyData data = new CssPropertyData(key, prop.sig);
pp.new Inspector(data).inspect();
propData.add(Pair.pair(prop, data));
keys.add(data.key);
}
// Now, rewalk the list, and add an entry for each unique function signature
// seen, and allocate names for the functions.
Map<CssPropertySignature, CssPropertyData> fnSigToData
= Maps.newTreeMap(SignatureComparator.SINGLETON);
for (int i = 0; i < propData.size() /* Walks over fns as added */; ++i) {
for (CssPropertySignature.CallSignature fn : propData.get(i).b.fns) {
if (!fnSigToData.containsKey(fn)) {
String fnName = fn.getName();
if (fnName == null) { continue; }
String fnKey = allocateKey(fnName + "()", keys);
CssPropertyData fnData = new CssPropertyData(
fnKey, fn.getArgumentsSignature());
pp.new Inspector(fnData).inspect();
fnSigToData.put(fn, fnData);
keys.add(fnKey);
propData.add(Pair.pair((CssSchema.CssPropertyInfo) null, fnData));
}
}
}
Statement poolDecls = null;
if (!stringPool.isEmpty()) {
poolDecls = joinDeclarations(
poolDecls,
new Declaration(unk, new Identifier(unk, "s"),
new ArrayConstructor(unk, stringPool)));
}
if (!regexPool.isEmpty()) {
poolDecls = joinDeclarations(
poolDecls,
new Declaration(unk, new Identifier(unk, "c"),
new ArrayConstructor(unk, regexPool)));
}
// Given keyword sets like
// [['red','blue','green','transparent','inherit','none'],
// ['red','blue','green'],
// ['inherit','none','bold','bolder']]
// recognize that ['red','blue','green'] probably occurs frequently and
// create a partition like
// [['red','blue','green'],['bold','bolder'],['inherit','none'],
// ['transparent']]
// and then store indices into the array of partition elements with
// CSS property names so they can be unioned as needed.
List<Set<String>> literalSets = Lists.newArrayList();
for (Pair<CssSchema.CssPropertyInfo, CssPropertyData> p : propData) {
literalSets.add(p.b.literals);
}
Partitions.Partition<String> litPartition = Partitions.partition(
literalSets, String.class, null);
List<ArrayConstructor> literalSetArrs = Lists.newArrayList();
for (int[] literalIndices : litPartition.partition) {
List<StringLiteral> literalArr = Lists.newArrayList();
for (int litIndex : literalIndices) {
literalArr.add(StringLiteral.valueOf(
unk, litPartition.universe[litIndex]));
}
literalSetArrs.add(new ArrayConstructor(unk, literalArr));
}
if (!literalSetArrs.isEmpty()) {
poolDecls = joinDeclarations(
poolDecls,
new Declaration(unk, new Identifier(unk, "L"),
new ArrayConstructor(unk, literalSetArrs)));
}
List<ValueProperty> cssSchemaProps = Lists.newArrayList();
StringLiteral propbitsObjKey = new StringLiteral(unk, "cssPropBits");
StringLiteral litgroupObjKey = new StringLiteral(unk, "cssLitGroup");
StringLiteral fnsObjKey = new StringLiteral(unk, "cssFns");
// Keep track of the JS we generate so we can reuse data-objects for
// CSS properties whose filtering schemes are functionally equivalent.
Map<String, String> dataJsToKey = Maps.newHashMap();
boolean hasAliases = false;
for (int propIndex = 0, n = propData.size(); propIndex < n; ++propIndex) {
Pair<CssSchema.CssPropertyInfo, CssPropertyData> d
= propData.get(propIndex);
CssPropertyData data = d.b;
ObjectConstructor dataObj = new ObjectConstructor(unk);
int propBits = 0;
for (CssPropBit b : data.properties) {
propBits |= b.jsValue;
}
dataObj.appendChild(
new ValueProperty(propbitsObjKey, new IntegerLiteral(unk, propBits)));
List<Expression> litGroups = Lists.newArrayList();
for (int groupIndex : litPartition.unions[propIndex]) {
litGroups.add((Expression) QuasiBuilder.substV(
"L[@i]", "i", new IntegerLiteral(unk, groupIndex)));
}
if (!litGroups.isEmpty()) {
dataObj.appendChild(new ValueProperty(
litgroupObjKey, new ArrayConstructor(unk, litGroups)));
}
List<Expression> fnKeyStrs = Lists.newArrayList();
for (CssPropertySignature.CallSignature fn : data.fns) {
String fnKey = fnSigToData.get(fn).key;
fnKeyStrs.add(StringLiteral.valueOf(unk, fnKey));
}
ArrayConstructor fnKeyArray = new ArrayConstructor(unk, fnKeyStrs);
dataObj.appendChild(new ValueProperty(fnsObjKey, fnKeyArray));
String dataJs;
{
StringBuilder js = new StringBuilder();
JsMinimalPrinter tokenConsumer = new JsMinimalPrinter(js);
dataObj.render(new RenderContext(tokenConsumer));
tokenConsumer.noMoreTokens();
dataJs = js.toString();
}
String equivKey = dataJsToKey.get(dataJs);
Expression value = dataObj;
if (equivKey == null) {
dataJsToKey.put(dataJs, data.key);
} else {
value = StringLiteral.valueOf(unk, equivKey);
hasAliases = true;
}
cssSchemaProps.add(new ValueProperty(
unk, StringLiteral.valueOf(unk, data.key), value));
}
ObjectConstructor cssSchema = new ObjectConstructor(unk, cssSchemaProps);
ParseTreeNode js = QuasiBuilder.substV(
""
+ "var cssSchema = (function () {"
+ " @poolDecls?;"
+ " var schema = @cssSchema;"
+ " if (@hasAliases) {"
+ " for (var key in schema) {"
+ " if ('string' === typeof schema[key]"
+ " && Object.hasOwnProperty.call(schema, key)) {"
+ " schema[key] = schema[schema[key]];"
+ " }"
+ " }"
+ " }"
+ " return schema;"
+ "})();",
"poolDecls", poolDecls,
"cssSchema", cssSchema,
"hasAliases", new BooleanLiteral(unk, hasAliases));
TokenConsumer tc = js.makeRenderer(out, null);
js.render(new RenderContext(tc));
tc.noMoreTokens();
out.append(";\n");
}
private static Statement joinDeclarations(
@Nullable Statement decl, Declaration d) {
if (decl == null) { return d; }
if (decl instanceof Declaration) {
decl = new MultiDeclaration(
FilePosition.UNKNOWN, Arrays.asList((Declaration) decl));
}
((MultiDeclaration) decl).appendChild(d);
return decl;
}
public static class Builder implements BuildCommand {
public boolean build(List<File> inputs, List<File> deps,
Map<String, Object> options, File output)
throws IOException {
File symbolsAndPropertiesFile = null;
File functionsFile = null;
for (File input : inputs) {
if (input.getName().endsWith(".json")) {
if (symbolsAndPropertiesFile == null) {
symbolsAndPropertiesFile = input;
} else if (functionsFile == null) {
functionsFile = input;
} else {
throw new IOException("Unused input " + input);
}
}
}
if (symbolsAndPropertiesFile == null) {
throw new IOException("No JSON whitelist for CSS Symbols + Properties");
}
if (functionsFile == null) {
throw new IOException("No JSON whitelist for CSS Functions");
}
FilePosition sps = FilePosition.startOfFile(new InputSource(
symbolsAndPropertiesFile.getAbsoluteFile().toURI()));
FilePosition fns = FilePosition.startOfFile(new InputSource(
functionsFile.getAbsoluteFile().toURI()));
MessageContext mc = new MessageContext();
mc.addInputSource(sps.source());
mc.addInputSource(fns.source());
MessageQueue mq = new EchoingMessageQueue(
new PrintWriter(new OutputStreamWriter(System.err), true), mc, false);
Set<File> inputsAndDeps = Sets.newHashSet();
for (File f : inputs) { inputsAndDeps.add(f.getAbsoluteFile()); }
for (File f : deps) { inputsAndDeps.add(f.getAbsoluteFile()); }
ImportResolver resolver = new AllowedFileResolver(inputsAndDeps);
CssSchema schema;
try {
schema = new CssSchema(
ConfigUtil.loadWhiteListFromJson(
sps.source().getUri(), resolver, mq),
ConfigUtil.loadWhiteListFromJson(
fns.source().getUri(), resolver, mq));
} catch (ParseException ex) {
ex.toMessageQueue(mq);
throw (IOException) new IOException("Failed to parse schema")
.initCause(ex);
}
Writer out = new OutputStreamWriter(
new FileOutputStream(output), Charsets.UTF_8.name());
try {
String currentDate = "" + new Date();
if (currentDate.indexOf("*/") >= 0) {
throw new SomethingWidgyHappenedError("Date should not contain '*/'");
}
out.write("/* Copyright Google Inc.\n");
out.write(" * Licensed under the Apache Licence Version 2.0\n");
out.write(" * Autogenerated at " + currentDate + "\n");
out.write(" * \\@overrides window\n");
out.write(" * \\@provides cssSchema");
for (CssPropBit b : CssPropBit.values()) {
out.write(", CSS_PROP_BIT_");
out.write(Strings.upper(b.name()));
}
out.write(" */\n");
for (CssPropBit b : CssPropBit.values()) {
out.write("/**\n * @const\n * @type {number}\n */\n");
out.write("var CSS_PROP_BIT_");
out.write(Strings.upper(b.name()));
out.write(" = ");
out.write(String.valueOf(b.jsValue));
out.write(";\n");
}
generatePatterns(schema, out);
out.write("if (typeof window !== 'undefined') {\n");
out.write(" window['cssSchema'] = cssSchema;\n");
out.write("}\n");
} finally {
out.close();
}
return true;
}
}
/**
* Allocates a key that is not already in {@code allocated}, adds it to that set, and returns it.
* The result will have {@code base} as a prefix.
*/
private static final String allocateKey(String base, Set<String> allocated) {
base = Strings.lower(base);
int counter = 0;
String candidate = base;
while (!allocated.add(candidate)) {
candidate = base + "#" + counter;
++counter;
}
return candidate;
}
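// Illustrative example: if "rgba()" is already present in the allocated set, allocateKey("rgba()", allocated)
// adds and returns "rgba()#0"; a further collision would yield "rgba()#1", and so on.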
public static void main(String[] args) throws IOException {
CssSchema schema = CssSchema.getDefaultCss21Schema(
new SimpleMessageQueue());
generatePatterns(schema, System.out);
}
/**
* Compares two CSS signatures by type (concrete class), value, and
* recursively by child list.
* The ordering is suitable for use in an Ordered{Set,Map} but has no greater
* significance.
*/
private static final class SignatureComparator
implements Comparator<CssPropertySignature> {
private SignatureComparator() {}
static final SignatureComparator SINGLETON = new SignatureComparator();
@SuppressWarnings("unchecked")
public int compare(CssPropertySignature a, CssPropertySignature b) {
if (a == b) {
return 0;
}
Class<?> aClass = a.getClass();
Class<?> bClass = b.getClass();
if (aClass != bClass) {
return aClass.getName().compareTo(bClass.getName());
}
Object aValue = a.getValue();
Object bValue = b.getValue();
if (aValue != bValue) {
if (aValue == null) {
return -1;
}
if (bValue == null) {
return 1;
}
// Works for the Number and String types typically used as ParseTreeNode
// values, but is not strictly type safe.
@SuppressWarnings("rawtypes")
Comparable aValueCmp = (Comparable) aValue;
@SuppressWarnings("rawtypes")
Comparable bValueCmp = (Comparable) bValue;
return aValueCmp.compareTo(bValueCmp);
}
List<? extends CssPropertySignature> aChildren = a.children();
List<? extends CssPropertySignature> bChildren = b.children();
int size = aChildren.size();
int sizeDelta = size - bChildren.size();
if (sizeDelta != 0) {
return sizeDelta;
}
for (int i = 0; i < size; ++i) {
int childDelta = compare(aChildren.get(i), bChildren.get(i));
if (childDelta != 0) {
return childDelta;
}
}
return 0;
}
}
}
|
|
package scamell.michael.amulet;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.NumberPicker;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
public class UnitCalculatorFragment extends Fragment {
private static final int EXAMPLE_LIST_REQUEST_CODE = 1234;
private static final int FAVOURITE_LIST_REQUEST_CODE = 4444;
private final int maxQuantity = 100;
private final int minQuantity = 1;
private final int volMeasurementSpinnerMlLocation = 0;
private final int volMeasurementSpinnerPintLocation = 1;
private final int volMeasurementSpinnerLitresLocation = 2;
private final int volMeasurementSpinnerClLocation = 3;
private UnitCalculatorListener mListener;
private Spinner drinkTypeSpinner;
private EditText abvEditText, volumeEditText;
private TextView unitValueTextView;
private NumberPicker quantityNumberPicker;
private Spinner volumeMeasurementSpinner;
private EditText drinkNameEditText;
private ImageButton confirmButton;
private String drinkName;
private String mABV;
private String drinkVolume;
private String drinkVolumeType;
private int drinkVolumeTypePos;
private String drinkType;
private int drinkTypePos;
private int drinkQuantity;
private String unitsString;
private Boolean saveToDrinkDiary = false;
private Boolean firstStart;
private Boolean stopCalcUnits = false;
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
// This makes sure that the container activity has implemented
// the callback interface. If not, it throws an exception
try {
mListener = (UnitCalculatorListener) activity;
} catch (ClassCastException e) {
throw new ClassCastException(activity.toString()
+ " must implement UnitCalculatorListener");
}
}
@Override
public View onCreateView(LayoutInflater inflater, final ViewGroup container, Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_unit_calculator, container, false);
final Bundle bundle = getArguments();
firstStart = true;
setHasOptionsMenu(true);
drinkNameEditText = (EditText) rootView.findViewById(R.id.unit_calculator_drink_name_edittext);
drinkTypeSpinner = (Spinner) rootView.findViewById(R.id.unit_calculator_drink_type_spinner);
ArrayAdapter<String> spinnerArrayAdapter = new ArrayAdapter<String>(getActivity(), R.layout.spinner_unit_calculator_example_item, getResources().getStringArray(R.array.drink_type_array));
drinkTypeSpinner.setAdapter(spinnerArrayAdapter);
abvEditText = (EditText) rootView.findViewById(R.id.unit_calc_enter_abv_edittext);
volumeEditText = (EditText) rootView.findViewById(R.id.unit_calc_volume_edittext);
volumeMeasurementSpinner = (Spinner) rootView.findViewById(R.id.unit_calc_volume_measurement_spinner);
quantityNumberPicker = (NumberPicker) rootView.findViewById(R.id.numberPicker);
quantityNumberPicker.setMaxValue(maxQuantity);
quantityNumberPicker.setMinValue(minQuantity);
quantityNumberPicker.setWrapSelectorWheel(false);
unitValueTextView = (TextView) rootView.findViewById(R.id.unit_calc_unit_value);
//sets up the start button only if a boolean flag saying it is needed was included
//in the bundle used to start the fragment, otherwise it is ignored. used for starting tasks after
//getting units
confirmButton = (ImageButton) rootView.findViewById(R.id.unit_calc_start_task_button);
final CheckBox saveToDrinkDiaryCheckbox = (CheckBox) rootView.findViewById(R.id.save_to_drink_diary_checkbox);
if (bundle != null) {
if (bundle.getBoolean("task_unit_calculator")) {
confirmButton.setVisibility(View.VISIBLE);
confirmButton.setBackgroundColor(Color.TRANSPARENT);
} else if (bundle.getBoolean("drink_diary_unit_calculator")) {
confirmButton.setImageResource(R.drawable.tick);
confirmButton.setPadding(10, 10, 10, 10);
confirmButton.setVisibility(View.VISIBLE);
}
{
confirmButton.setVisibility(View.VISIBLE);
confirmButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (bundle.getBoolean("task_unit_calculator")) {
getFieldEntries();
if (checkEntries(true)) {
DrinkDiaryEntry drinkDiaryEntry = new DrinkDiaryEntry();
drinkDiaryEntry.date = DateAndTime.getDateAndTimeNowForTasks();
drinkDiaryEntry.drinkName = drinkName;
drinkDiaryEntry.drinkType = drinkType;
drinkDiaryEntry.units = unitValueTextView.getText().toString();
mListener.unitCalculationComplete(drinkDiaryEntry, saveToDrinkDiary);
if (saveToDrinkDiary) {
SharedPreferencesWrapper.saveToPrefs(getActivity(), "lastDrinkAdded", drinkName);
SharedPreferencesWrapper.saveToPrefs(getActivity(), "unitsOfLastDrinkAdded", unitsString);
}
SharedPreferencesWrapper.saveToPrefs(getActivity(), "task_session_units", unitValueTextView.getText().toString());
clearEntryFields();
removeUnitCalcEntriesFromSharedPrefs();
}
} else if (bundle.getBoolean("drink_diary_unit_calculator")) {
confirmButton.setImageResource(R.drawable.tick);
getFieldEntries();
if (checkEntries(true)) {
DrinkDiaryEntry drinkDiaryEntry = new DrinkDiaryEntry();
drinkDiaryEntry.date = DateAndTime.getDateAndTimeNowForTasks();
drinkDiaryEntry.drinkName = drinkName;
drinkDiaryEntry.drinkType = drinkType;
drinkDiaryEntry.units = unitValueTextView.getText().toString();
mListener.unitCalculationComplete(drinkDiaryEntry, true);
SharedPreferencesWrapper.saveToPrefs(getActivity(), "lastDrinkAdded", drinkName);
SharedPreferencesWrapper.saveToPrefs(getActivity(), "unitsOfLastDrinkAdded", unitsString);
clearEntryFields();
removeUnitCalcEntriesFromSharedPrefs();
}
}
}
});
}
if (bundle.getBoolean("task_unit_calculator")) {
saveToDrinkDiaryCheckbox.setVisibility(View.VISIBLE);
saveToDrinkDiaryCheckbox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if (isChecked) {
saveToDrinkDiary = true;
} else if (!isChecked) {
saveToDrinkDiary = false;
}
}
});
}
}
abvEditText.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {
if (!stopCalcUnits && !firstStart) {
calculateUnits();
}
}
@Override
public void afterTextChanged(Editable s) {
if (s.length() < 3) {
abvEditText.requestFocus();
}
}
});
volumeEditText.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {
if (!stopCalcUnits && !firstStart) {
calculateUnits();
}
}
@Override
public void afterTextChanged(Editable s) {
if (s.length() < 5) {
volumeEditText.requestFocus();
}
}
});
volumeMeasurementSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
if (!stopCalcUnits && !firstStart) {
calculateUnits();
} else {
firstStart = false;
}
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
//don't do anything until something has been changed
//otherwise it simply stays at the default of "ml"
}
});
quantityNumberPicker.setOnValueChangedListener(new NumberPicker.OnValueChangeListener() {
@Override
public void onValueChange(NumberPicker picker, int oldVal, int newVal) {
if (!stopCalcUnits && !firstStart) {
calculateUnits();
}
}
});
return rootView;
}
@Override
public void onPause() {
super.onPause();
SharedPreferencesWrapper.saveToPrefs(getActivity(), "unit_calc_drinkName", drinkNameEditText.getText().toString());
SharedPreferencesWrapper.saveToPrefs(getActivity(), "unit_calc_abv", abvEditText.getText().toString());
SharedPreferencesWrapper.saveToPrefs(getActivity(), "unit_calc_drinkVolume", volumeEditText.getText().toString());
SharedPreferencesWrapper.saveToPrefs(getActivity(), "unit_calc_volPos", volumeMeasurementSpinner.getSelectedItemPosition());
SharedPreferencesWrapper.saveToPrefs(getActivity(), "unit_calc_typePos", drinkTypeSpinner.getSelectedItemPosition());
SharedPreferencesWrapper.saveToPrefs(getActivity(), "unit_calc_quantity", quantityNumberPicker.getValue());
SharedPreferencesWrapper.saveToPrefs(getActivity(), "unit_calc_units", unitValueTextView.getText().toString());
}
@Override
public void onResume() {
super.onResume();
drinkNameEditText.setText(SharedPreferencesWrapper.getFromPrefs(getActivity(), "unit_calc_drinkName", ""));
abvEditText.setText(SharedPreferencesWrapper.getFromPrefs(getActivity(), "unit_calc_abv", ""));
volumeEditText.setText(SharedPreferencesWrapper.getFromPrefs(getActivity(), "unit_calc_drinkVolume", ""));
volumeMeasurementSpinner.setSelection(SharedPreferencesWrapper.getFromPrefs(getActivity(), "unit_calc_volPos", 0));
drinkTypeSpinner.setSelection(SharedPreferencesWrapper.getFromPrefs(getActivity(), "unit_calc_typePos", 0));
quantityNumberPicker.setValue(SharedPreferencesWrapper.getFromPrefs(getActivity(), "unit_calc_quantity", 1));
unitValueTextView.setText(SharedPreferencesWrapper.getFromPrefs(getActivity(), "unit_calc_units", "0.0"));
}
public void removeUnitCalcEntriesFromSharedPrefs() {
SharedPreferencesWrapper.removeFromPrefs(getActivity(), "unit_calc_drinkName");
SharedPreferencesWrapper.removeFromPrefs(getActivity(), "unit_calc_abv");
SharedPreferencesWrapper.removeFromPrefs(getActivity(), "unit_calc_drinkVolume");
SharedPreferencesWrapper.removeFromPrefs(getActivity(), "unit_calc_volPos");
SharedPreferencesWrapper.removeFromPrefs(getActivity(), "unit_calc_typePos");
SharedPreferencesWrapper.removeFromPrefs(getActivity(), "unit_calc_quantity");
SharedPreferencesWrapper.removeFromPrefs(getActivity(), "unit_calc_units");
}
private void calculateUnits() {
float aBV;
float volume;
Boolean allFieldsEntered = false;
getFieldEntries();
if (!firstStart) {
allFieldsEntered = checkEntries(false);
} else {
firstStart = false;
}
//if neither are empty perform the calculation (quantity is default 1)
if (allFieldsEntered) {
aBV = Float.parseFloat(mABV);
volume = Float.parseFloat(drinkVolume);
float unitTotal = UnitCalculator.UnitCalculation(drinkQuantity, drinkVolumeType, volume, aBV);
unitValueTextView.setText(getString(R.string.unit_calc_units_value, Float.toString(unitTotal)));
}
}
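//note (assumption): UnitCalculator.UnitCalculation is defined elsewhere in the app; under the standard
//UK formula (ABV% * volume in ml / 1000, multiplied by the quantity) a single 568 ml pint at 4.0% ABV
//would come out at roughly 2.3 units, but the exact result depends on that helper's implementation.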
//for overriding in activity/fragments that implement.
protected void startTaskButtonClicked() {
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
inflater.inflate(R.menu.unit_calculator_menu, menu);
super.onCreateOptionsMenu(menu, inflater);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// handle item selection
switch (item.getItemId()) {
case R.id.action_save:
getFieldEntries();
if (checkEntries(true)) {
Toast.makeText(getActivity(), "Saved to Favourites", Toast.LENGTH_SHORT).show();
FavouriteDrinkUtility.saveFavouriteDrinkToStorage(getActivity(), drinkName, drinkType, drinkVolume, drinkVolumeType, String.valueOf(drinkQuantity), unitsString, mABV, String.valueOf(drinkVolumeTypePos), String.valueOf(drinkTypePos));
}
return true;
case R.id.action_example_list:
ExampleDrinksDialogFragment exampleDialog = new ExampleDrinksDialogFragment();
exampleDialog.setTargetFragment(this, EXAMPLE_LIST_REQUEST_CODE);
exampleDialog.show(getActivity().getSupportFragmentManager(), "EXAMPLE_DIALOG");
return true;
case R.id.action_clear:
clearEntryFields();
Toast.makeText(getActivity(), "Cleared", Toast.LENGTH_SHORT).show();
return true;
case R.id.action_add_favourite:
UnitCalculatorFavouriteDrinksDialogFragment unitCalculatorFavouriteDrinksDialogFragment = new UnitCalculatorFavouriteDrinksDialogFragment();
unitCalculatorFavouriteDrinksDialogFragment.setTargetFragment(this, FAVOURITE_LIST_REQUEST_CODE);
unitCalculatorFavouriteDrinksDialogFragment.show(getActivity().getSupportFragmentManager(), "FAVOURITE_DIALOG");
return true;
default:
return super.onOptionsItemSelected(item);
}
}
private void clearEntryFields() {
drinkNameEditText.setError(null);
abvEditText.setError(null);
volumeEditText.setError(null);
stopCalcUnits = true;
drinkNameEditText.setText("");
abvEditText.setText("");
volumeEditText.setText("");
volumeMeasurementSpinner.setSelection(volMeasurementSpinnerMlLocation);
quantityNumberPicker.setValue(1);
drinkTypeSpinner.setSelection(0);
unitValueTextView.setText("0.0");
stopCalcUnits = false;
}
private boolean checkEntries(boolean savingToFavourites) {
drinkNameEditText.setError(null);
boolean cancel = false;
View focusView = null;
if (drinkVolume.isEmpty()) {
volumeEditText.setError(getString(R.string.error_field_required));
focusView = volumeEditText;
cancel = true;
} else if (!drinkVolume.matches(".*[0-9].*")) {
volumeEditText.setError(getString(R.string.dialog_error_no_units));
focusView = volumeEditText;
cancel = true;
}
if (mABV.isEmpty()) {
abvEditText.setError(getString(R.string.error_field_required));
focusView = abvEditText;
cancel = true;
} else if (!mABV.matches(".*[0-9].*")) {
abvEditText.setError(getString(R.string.dialog_error_no_drink_quantity));
focusView = abvEditText;
cancel = true;
}
if (savingToFavourites) {
if (drinkName.isEmpty()) {
drinkNameEditText.setError(getString(R.string.error_field_required));
focusView = drinkNameEditText;
cancel = true;
} else if (!drinkName.matches(".*[a-zA-Z].*|.*[0-9].*")) {
drinkNameEditText.setError(getString(R.string.dialog_error_no_drink_name));
focusView = drinkNameEditText;
cancel = true;
}
}
if (cancel) {
// There was an error; don't attempt drink diary entry and focus the first
// form field with an error.
focusView.requestFocus();
return false;
}
return true;
}
private void getFieldEntries() {
drinkName = drinkNameEditText.getText().toString();
mABV = abvEditText.getText().toString();
drinkVolume = volumeEditText.getText().toString();
drinkType = drinkTypeSpinner.getSelectedItem().toString();
drinkTypePos = drinkTypeSpinner.getSelectedItemPosition();
drinkQuantity = quantityNumberPicker.getValue();
//volume measurement will always have a selection as an array is loaded up with measurements on start
//noinspection ConstantConditions
drinkVolumeType = volumeMeasurementSpinner.getSelectedItem().toString();
drinkVolumeTypePos = volumeMeasurementSpinner.getSelectedItemPosition();
unitsString = unitValueTextView.getText().toString();
}
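// Handles results returned from the example-drinks and favourite-drinks dialog fragments.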
@Override
public void onActivityResult(int requestCode, int resultCode, Intent intent) {
if (requestCode == EXAMPLE_LIST_REQUEST_CODE) {
int drinkExample = intent.getExtras().getInt("drinkExample");
switch (drinkExample) {
case 0:
setBeerExample();
break;
case 1:
setBottledBeerExample();
break;
case 2:
setCiderExample();
break;
case 3:
setWineExample();
break;
case 4:
setChampagneExample();
break;
case 5:
setSpiritsExample();
break;
case 6:
setAlcopopExample();
break;
}
} else if (requestCode == FAVOURITE_LIST_REQUEST_CODE) {
if (intent != null) {
drinkNameEditText.setError(null);
abvEditText.setError(null);
volumeEditText.setError(null);
stopCalcUnits = true;
String favouriteName = intent.getExtras().getString("drinkName");
drinkNameEditText.setText(favouriteName);
abvEditText.setText(intent.getExtras().getString("drinkABV"));
volumeEditText.setText(intent.getExtras().getString("drinkVolume"));
volumeMeasurementSpinner.setSelection(intent.getExtras().getInt("drinkVolumeTypePos"));
quantityNumberPicker.setValue(intent.getExtras().getInt("drinkQuantity"));
Toast.makeText(getActivity(), favouriteName + " set", Toast.LENGTH_SHORT).show();
drinkTypeSpinner.setSelection(intent.getExtras().getInt("drinkTypePos"));
unitValueTextView.setText(intent.getExtras().getString("drinkUnits"));
stopCalcUnits = false;
}
}
}
public void setBeerExample() {
abvEditText.setError(null);
volumeEditText.setError(null);
stopCalcUnits = true;
abvEditText.setText(Float.toString(UnitCalculator.beerPintABV));
volumeEditText.setText("1");
volumeMeasurementSpinner.setSelection(volMeasurementSpinnerPintLocation);
quantityNumberPicker.setValue(1);
Toast.makeText(getActivity(), "Beer example set", Toast.LENGTH_SHORT).show();
calculateUnits();
stopCalcUnits = false;
drinkTypeSpinner.setSelection(0);
}
private void setBottledBeerExample() {
abvEditText.setError(null);
volumeEditText.setError(null);
stopCalcUnits = true;
abvEditText.setText(Float.toString(UnitCalculator.beerBottleABV));
volumeEditText.setText("330");
volumeMeasurementSpinner.setSelection(volMeasurementSpinnerMlLocation);
quantityNumberPicker.setValue(1);
Toast.makeText(getActivity(), "Bottled Beer example set", Toast.LENGTH_SHORT).show();
calculateUnits();
stopCalcUnits = false;
drinkTypeSpinner.setSelection(1);
}
private void setCiderExample() {
abvEditText.setError(null);
volumeEditText.setError(null);
stopCalcUnits = true;
abvEditText.setText(Float.toString(UnitCalculator.ciderABV));
volumeEditText.setText("1");
volumeMeasurementSpinner.setSelection(volMeasurementSpinnerPintLocation);
quantityNumberPicker.setValue(1);
Toast.makeText(getActivity(), "Cider example set", Toast.LENGTH_SHORT).show();
calculateUnits();
stopCalcUnits = false;
drinkTypeSpinner.setSelection(2);
}
private void setWineExample() {
abvEditText.setError(null);
volumeEditText.setError(null);
stopCalcUnits = true;
abvEditText.setText(Float.toString(UnitCalculator.wineABV));
volumeEditText.setText(Float.toString(UnitCalculator.wineGlassVolume));
volumeMeasurementSpinner.setSelection(volMeasurementSpinnerMlLocation);
quantityNumberPicker.setValue(1);
Toast.makeText(getActivity(), "Wine example set", Toast.LENGTH_SHORT).show();
calculateUnits();
stopCalcUnits = false;
drinkTypeSpinner.setSelection(3);
}
private void setChampagneExample() {
abvEditText.setError(null);
volumeEditText.setError(null);
stopCalcUnits = true;
abvEditText.setText(Float.toString(UnitCalculator.champagneABV));
volumeEditText.setText(Float.toString(UnitCalculator.champagneGlassVolume));
volumeMeasurementSpinner.setSelection(volMeasurementSpinnerMlLocation);
quantityNumberPicker.setValue(1);
Toast.makeText(getActivity(), "Champagne example set", Toast.LENGTH_SHORT).show();
calculateUnits();
stopCalcUnits = false;
drinkTypeSpinner.setSelection(4);
}
private void setSpiritsExample() {
abvEditText.setError(null);
volumeEditText.setError(null);
stopCalcUnits = true;
abvEditText.setText(Float.toString(UnitCalculator.spiritsABV));
volumeEditText.setText(Float.toString(UnitCalculator.spiritGlassVolume));
volumeMeasurementSpinner.setSelection(volMeasurementSpinnerMlLocation);
quantityNumberPicker.setValue(1);
Toast.makeText(getActivity(), "Spirits example set", Toast.LENGTH_SHORT).show();
calculateUnits();
stopCalcUnits = false;
drinkTypeSpinner.setSelection(5);
}
private void setAlcopopExample() {
abvEditText.setError(null);
volumeEditText.setError(null);
stopCalcUnits = true;
abvEditText.setText(Float.toString(UnitCalculator.alcopopABV));
volumeEditText.setText(Float.toString(UnitCalculator.alcopopBottleVolume));
volumeMeasurementSpinner.setSelection(volMeasurementSpinnerMlLocation);
quantityNumberPicker.setValue(1);
Toast.makeText(getActivity(), "Alcopop example set", Toast.LENGTH_SHORT).show();
calculateUnits();
stopCalcUnits = false;
drinkTypeSpinner.setSelection(6);
}
public interface UnitCalculatorListener {
public void unitCalculationComplete(DrinkDiaryEntry drinkDiaryEntry, Boolean save);
}
}
|
|
/*
* Copyright (C) 2012-2020 Gregory Hedlund <https://www.phon.ca>
* Copyright (C) 2012 Jason Gedge <http://www.gedge.ca>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ca.phon.opgraph.app.components;
import java.util.*;
import javax.swing.*;
import javax.swing.tree.*;
import ca.phon.opgraph.*;
import ca.phon.opgraph.extensions.*;
/**
* Tree model for {@link OpGraph} outline.
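* <p>
* A minimal usage sketch (assumes an existing {@link OpGraph} instance named {@code graph}):
* <pre>{@code
* JTree tree = new JTree();
* tree.setModel(new OpGraphTreeModel(tree, graph));
* }</pre>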
*/
public class OpGraphTreeModel extends DefaultTreeModel {
private Map<OpGraph, OpNode> compositeNodeMap = new HashMap<>();
private final JTree tree;
public OpGraphTreeModel(JTree tree, OpGraph graph) {
super(new DefaultMutableTreeNode(graph));
this.tree = tree;
setupTree((DefaultMutableTreeNode)getRoot(), graph);
graph.addGraphListener(graphListener);
}
private void setupTree(DefaultMutableTreeNode node, OpGraph graph) {
for(OpNode opnode:graph.getVertices()) {
final DefaultMutableTreeNode childNode = new DefaultMutableTreeNode(opnode);
node.add(childNode);
if(childNode.getUserObject() instanceof CompositeNode) {
final OpGraph childGraph = ((CompositeNode)childNode.getUserObject()).getGraph();
setupTree(childNode, childGraph);
}
opnode.addNodeListener(nodeListener);
}
}
public OpGraph getGraph() {
final DefaultMutableTreeNode root = (DefaultMutableTreeNode)getRoot();
return (OpGraph)root.getUserObject();
}
protected DefaultMutableTreeNode getMutableNode(OpGraph graph) {
if(graph == getGraph()) {
return (DefaultMutableTreeNode)getRoot();
} else {
return findMutableNode((DefaultMutableTreeNode)getRoot(), graph);
}
}
protected DefaultMutableTreeNode findMutableNode(DefaultMutableTreeNode parent, OpGraph graph) {
DefaultMutableTreeNode retVal = null;
for(int i = 0; i < parent.getChildCount(); i++) {
final DefaultMutableTreeNode childNode = (DefaultMutableTreeNode)parent.getChildAt(i);
if(childNode.getUserObject() instanceof CompositeNode) {
final OpGraph childGraph = ((CompositeNode)childNode.getUserObject()).getGraph();
if(childGraph == graph) {
retVal = childNode;
break;
} else {
retVal = findMutableNode(childNode, graph);
if(retVal != null) break;
}
}
}
return retVal;
}
protected DefaultMutableTreeNode getMutableNode(OpNode node) {
return findMutableNode((DefaultMutableTreeNode)getRoot(), node);
}
protected DefaultMutableTreeNode findMutableNode(DefaultMutableTreeNode parent, OpNode node) {
DefaultMutableTreeNode retVal = null;
for(int i = 0; i < parent.getChildCount(); i++) {
final DefaultMutableTreeNode childNode = (DefaultMutableTreeNode)parent.getChildAt(i);
if(childNode.getUserObject() == node) {
return childNode;
}
if(childNode.getUserObject() instanceof CompositeNode) {
retVal = findMutableNode(childNode, node);
if(retVal != null) break;
}
}
return retVal;
}
public void nodeWasRemoved(OpGraph graph, OpNode node) {
final DefaultMutableTreeNode treeNode = getMutableNode(node);
if(treeNode != null) {
super.removeNodeFromParent(treeNode);
}
}
public void nodeWasAdded(OpGraph graph, OpNode node) {
final int nodeIdx = graph.getVertices().indexOf(node);
final DefaultMutableTreeNode parentNode = getMutableNode(graph);
final DefaultMutableTreeNode childNode = new DefaultMutableTreeNode(node);
node.addNodeListener(nodeListener);
if(node instanceof CompositeNode) {
final OpGraph childGraph = ((CompositeNode)node).getGraph();
childGraph.addGraphListener(graphListener);
setupTree(childNode, childGraph);
}
parentNode.insert(childNode, nodeIdx);
if(parentNode.getChildCount() == 1) {
// notify structure has changed
super.nodeStructureChanged(parentNode);
}
super.nodesWereInserted(parentNode, new int[]{ nodeIdx });
}
public void nodeChanged(OpNode node) {
final DefaultMutableTreeNode treeNode = getMutableNode(node);
super.nodeChanged(treeNode);
}
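// Reorders the child tree nodes to match the graph's current vertex order, restoring any previously expanded paths.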
public void updateChildOrder(DefaultMutableTreeNode treeNode, OpGraph graph) {
final TreeNode[] nodePath = super.getPathToRoot(treeNode);
final TreePath treePath = new TreePath(nodePath);
final Enumeration<TreePath> expandedPaths = tree.getExpandedDescendants(treePath);
final Map<OpNode, DefaultMutableTreeNode> nodeMap = new HashMap<>();
for(int i = 0; i < treeNode.getChildCount(); i++) {
final DefaultMutableTreeNode childTreeNode = (DefaultMutableTreeNode)treeNode.getChildAt(i);
final OpNode opNode = (OpNode)childTreeNode.getUserObject();
nodeMap.put(opNode, childTreeNode);
}
treeNode.removeAllChildren();
for(OpNode opNode:graph.getVertices()) {
final DefaultMutableTreeNode childTreeNode = nodeMap.get(opNode);
treeNode.add(childTreeNode);
}
nodeStructureChanged(treeNode);
while(expandedPaths != null && expandedPaths.hasMoreElements()) {
tree.expandPath(expandedPaths.nextElement());
}
}
private final OpNodeListener nodeListener = new OpNodeListener() {
@Override
public void nodePropertyChanged(OpNode node, String propertyName, Object oldValue, Object newValue) {
if(propertyName.equals(OpNode.NAME_PROPERTY))
nodeChanged(node);
}
@Override
public void fieldAdded(OpNode node, InputField field) {
}
@Override
public void fieldRemoved(OpNode node, InputField field) {
}
@Override
public void fieldAdded(OpNode node, OutputField field) {
}
@Override
public void fieldRemoved(OpNode node, OutputField field) {
}
@Override
public void fieldRenamed(OpNode node, ContextualItem field) {
}
};
private final OpGraphListener graphListener = new OpGraphListener() {
@Override
public void nodeRemoved(OpGraph graph, OpNode node) {
nodeWasRemoved(graph, node);
}
@Override
public void nodeAdded(OpGraph graph, OpNode node) {
nodeWasAdded(graph, node);
}
@Override
public void linkRemoved(OpGraph graph, OpLink link) {
if(!graph.contains(link.getSource())
|| !graph.contains(link.getDestination())) return;
final DefaultMutableTreeNode treeNode = getMutableNode(graph);
if(treeNode != null)
updateChildOrder(treeNode, graph);
}
@Override
public void linkAdded(OpGraph graph, OpLink link) {
final DefaultMutableTreeNode treeNode = getMutableNode(graph);
if(treeNode != null)
updateChildOrder(treeNode, graph);
}
@Override
public void nodeSwapped(OpGraph graph, OpNode oldNode, OpNode newNode) {
}
};
}
|
|
package org.knowm.xchange.therock;
import static org.knowm.xchange.dto.Order.OrderType.ASK;
import static org.knowm.xchange.dto.Order.OrderType.BID;
import static org.knowm.xchange.utils.DateUtils.fromISODateString;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.knowm.xchange.currency.Currency;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;
import org.knowm.xchange.dto.Order.OrderType;
import org.knowm.xchange.dto.account.AccountInfo;
import org.knowm.xchange.dto.account.Balance;
import org.knowm.xchange.dto.account.Wallet;
import org.knowm.xchange.dto.marketdata.OrderBook;
import org.knowm.xchange.dto.marketdata.Trade;
import org.knowm.xchange.dto.marketdata.Trades;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.OpenOrders;
import org.knowm.xchange.dto.trade.UserTrade;
import org.knowm.xchange.dto.trade.UserTrades;
import org.knowm.xchange.therock.dto.account.TheRockBalance;
import org.knowm.xchange.therock.dto.marketdata.TheRockBid;
import org.knowm.xchange.therock.dto.marketdata.TheRockOrderBook;
import org.knowm.xchange.therock.dto.marketdata.TheRockTrade;
import org.knowm.xchange.therock.dto.marketdata.TheRockTrade.Side;
import org.knowm.xchange.therock.dto.marketdata.TheRockTrades;
import org.knowm.xchange.therock.dto.trade.TheRockOrder;
import org.knowm.xchange.therock.dto.trade.TheRockOrders;
import org.knowm.xchange.therock.dto.trade.TheRockUserTrade;
import org.knowm.xchange.therock.dto.trade.TheRockUserTrades;
public final class TheRockAdapters {
private TheRockAdapters() {}
public static TheRockOrder.Side adaptSide(OrderType type) {
return type == BID ? TheRockOrder.Side.buy : TheRockOrder.Side.sell;
}
public static AccountInfo adaptAccountInfo(List<TheRockBalance> trBalances, String userName) {
ArrayList<Balance> balances = new ArrayList<>(trBalances.size());
for (TheRockBalance blc : trBalances) {
Currency currency = Currency.getInstance(blc.getCurrency());
balances.add(new Balance(currency, blc.getBalance(), blc.getTradingBalance()));
}
return new AccountInfo(userName, new Wallet(balances));
}
public static OrderBook adaptOrderBook(TheRockOrderBook theRockOrderBook) {
final List<LimitOrder> asks = new ArrayList<>();
final List<LimitOrder> bids = new ArrayList<>();
for (TheRockBid theRockBid : theRockOrderBook.getAsks()) {
asks.add(
adaptBid(
theRockOrderBook.getCurrencyPair(), ASK, theRockBid, theRockOrderBook.getDate()));
}
for (TheRockBid theRockBid : theRockOrderBook.getBids()) {
bids.add(
adaptBid(
theRockOrderBook.getCurrencyPair(), BID, theRockBid, theRockOrderBook.getDate()));
}
return new OrderBook(theRockOrderBook.getDate(), asks, bids);
}
private static LimitOrder adaptBid(
CurrencyPair currencyPair, Order.OrderType orderType, TheRockBid theRockBid, Date timestamp) {
return new LimitOrder.Builder(orderType, currencyPair)
.limitPrice(theRockBid.getPrice())
.originalAmount(theRockBid.getAmount())
.timestamp(timestamp)
.build();
}
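// Converts the exchange's raw trade list into an XChange Trades object, skipping entries that are neither buys
// nor sells and tracking the highest trade id as lastTradeId.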
public static Trades adaptTrades(TheRockTrades trades, CurrencyPair currencyPair)
throws com.fasterxml.jackson.databind.exc.InvalidFormatException {
List<Trade> tradesList = new ArrayList<>(trades.getCount());
long lastTradeId = 0;
for (int i = 0; i < trades.getCount(); i++) {
TheRockTrade trade = trades.getTrades()[i];
if (trade.getSide() != Side.buy && trade.getSide() != Side.sell) {
continue; // process buys and sells only
}
long tradeId = trade.getId();
if (tradeId > lastTradeId) lastTradeId = tradeId;
tradesList.add(adaptTrade(trade, currencyPair));
}
return new Trades(tradesList, lastTradeId, Trades.TradeSortType.SortByID);
}
public static Trade adaptTrade(TheRockTrade trade, CurrencyPair currencyPair)
throws com.fasterxml.jackson.databind.exc.InvalidFormatException {
final String tradeId = String.valueOf(trade.getId());
return new Trade(
trade.getSide() == Side.sell ? OrderType.ASK : BID,
trade.getAmount(),
currencyPair,
trade.getPrice(),
trade.getDate(),
tradeId);
}
public static UserTrade adaptUserTrade(TheRockUserTrade trade, CurrencyPair currencyPair)
throws com.fasterxml.jackson.databind.exc.InvalidFormatException {
final String tradeId = String.valueOf(trade.getId());
// return new UserTrade(trade.getSide() == Side.sell ? OrderType.ASK : BID, trade.getAmount(),
// currencyPair, trade.getPrice(), trade.getDate(), tradeId);
return new UserTrade.Builder()
.id(tradeId)
.originalAmount(trade.getAmount())
.currencyPair(currencyPair)
.price(trade.getPrice())
.timestamp(trade.getDate())
.orderId(String.valueOf(trade.getOrderId()))
.type(trade.getSide() == Side.buy ? OrderType.BID : OrderType.ASK)
.feeAmount(trade.getFeeAmount())
.feeCurrency(
trade.getFeeCurrency() == null ? null : Currency.getInstance(trade.getFeeCurrency()))
.build();
}
public static UserTrades adaptUserTrades(TheRockUserTrades trades, CurrencyPair currencyPair)
throws com.fasterxml.jackson.databind.exc.InvalidFormatException {
List<UserTrade> tradesList = new ArrayList<>(trades.getCount());
long lastTradeId = 0;
for (int i = 0; i < trades.getCount(); i++) {
TheRockUserTrade trade = trades.getTrades()[i];
long tradeId = trade.getId();
if (tradeId > lastTradeId) lastTradeId = tradeId;
tradesList.add(adaptUserTrade(trade, currencyPair));
}
return new UserTrades(tradesList, lastTradeId, Trades.TradeSortType.SortByID);
}
public static LimitOrder adaptOrder(TheRockOrder order) {
Date timestamp;
try {
timestamp = order.getDate() == null ? null : fromISODateString(order.getDate());
} catch (Exception e) {
timestamp = null;
}
BigDecimal amount = order.getAmount();
BigDecimal unfilled = order.getAmountUnfilled();
BigDecimal cumulative = (unfilled != null && amount != null) ? amount.subtract(unfilled) : null;
return new LimitOrder(
adaptOrderType(order.getSide()),
order.getAmount(),
order.getFundId().pair,
Long.toString(order.getId()),
timestamp,
null,
order.getPrice(),
cumulative,
null,
adaptOrderStatus(order));
}
public static OrderType adaptOrderType(TheRockOrder.Side orderSide) {
return orderSide.equals(TheRockOrder.Side.buy) ? OrderType.BID : OrderType.ASK;
}
public static OpenOrders adaptOrders(TheRockOrders theRockOrders) {
List<LimitOrder> orders = new ArrayList<>(theRockOrders.getOrders().length);
for (TheRockOrder theRockOrder : theRockOrders.getOrders()) {
orders.add(adaptOrder(theRockOrder));
}
return new OpenOrders(orders);
}
/**
* Converts the status of a {@link TheRockOrder} to the corresponding XChange order status. According to the API
* documentation the available order states are: (active|conditional|executed|deleted)
*/
public static org.knowm.xchange.dto.Order.OrderStatus adaptOrderStatus(TheRockOrder order) {
if ("active".equalsIgnoreCase(order.getStatus())) {
return org.knowm.xchange.dto.Order.OrderStatus.NEW;
} else if ("conditional".equalsIgnoreCase(order.getStatus())) {
return org.knowm.xchange.dto.Order.OrderStatus.NEW;
} else if ("executed".equalsIgnoreCase(order.getStatus())) {
return org.knowm.xchange.dto.Order.OrderStatus.FILLED;
} else if ("deleted".equalsIgnoreCase(order.getStatus())) {
return org.knowm.xchange.dto.Order.OrderStatus.CANCELED;
} else return org.knowm.xchange.dto.Order.OrderStatus.UNKNOWN;
}
/*
* public static LimitOrder adaptOrder(TheRockOrder o) { return new LimitOrder(adaptOrderType(o.getSide()), o.getAmount(), o.getFundId().pair,
* Long.toString(o.getId()), null, o.getPrice()); } public static Order.OrderType adaptOrderType(TheRockOrder.Side orderSide) { return
* orderSide.equals(TheRockOrder.Side.buy) ? Order.OrderType.BID : Order.OrderType.ASK; } public static OrderBook adaptOrderBook(TheRockOrderBook
* therockOrderBook) { List<LimitOrder> asks = new ArrayList<LimitOrder>(); List<LimitOrder> bids = new ArrayList<LimitOrder>(); for
* (TheRockOrderBook.Entry obe : therockOrderBook.getData()) { if (TheRockOrder.Type.Buy.equals(obe.getType())) { bids.add(new
* LimitOrder(Order.OrderType.BID, obe.getQuantity(), obe.getCurrencyPair(), null, obe.getCreated(), obe.getPrice())); } else { asks.add(new
* LimitOrder(Order.OrderType.ASK, obe.getQuantity(), obe.getCurrencyPair(), null, obe.getCreated(), obe.getPrice())); } } Collections.sort(bids,
* BID_COMPARATOR); Collections.sort(asks, ASK_COMPARATOR); return new OrderBook(new Date(), asks, bids); } public static UserTrades
* adaptTradeHistory(TheRockUserTrade[] therockUserTrades) { List<UserTrade> trades = new ArrayList<UserTrade>(); long lastTradeId = 0; for
* (TheRockUserTrade therockUserTrade : therockUserTrades) { lastTradeId = Math.max(lastTradeId, therockUserTrade.getTradeId());
* trades.add(adaptTrade(therockUserTrade)); } return new UserTrades(trades, lastTradeId, TradeSortType.SortByID); } public static UserTrade
* adaptTrade(TheRockUserTrade therockUserTrade) { CurrencyPair currencyPair = therockUserTrade.getCurrencyPair(); return new UserTrade(
* adaptOrderType(therockUserTrade.getType()), therockUserTrade.getQuantity(), currencyPair, therockUserTrade.getPrice().abs(),
* therockUserTrade.getExecuted(), String.valueOf(therockUserTrade.getTradeId()), String.valueOf(therockUserTrade.getOrderId()),
* therockUserTrade.getFee(), therockUserTrade.getType() == TheRockOrder.Type.Buy ? currencyPair.counter.getCurrencyCode() :
* currencyPair.base.getCurrencyCode()); }
*/
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.monitor.servlets.trace;
import java.security.PrivilegedAction;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.monitor.servlets.BasicServlet;
import org.apache.accumulo.monitor.util.Table;
import org.apache.accumulo.monitor.util.celltypes.DurationType;
import org.apache.accumulo.monitor.util.celltypes.NumberType;
import org.apache.accumulo.monitor.util.celltypes.StringType;
import org.apache.accumulo.tracer.TraceFormatter;
import org.apache.accumulo.tracer.thrift.RemoteSpan;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
public class Summary extends Basic {
private static final long serialVersionUID = 1L;
public static final int DEFAULT_MINUTES = 10;
int getMinutes(HttpServletRequest req) {
return getIntParameter(req, "minutes", DEFAULT_MINUTES);
}
@Override
public String getTitle(HttpServletRequest req) {
return "Traces for the last " + getMinutes(req) + " minutes";
}
private static class Stats {
int count;
long min = Long.MAX_VALUE;
long max = Long.MIN_VALUE;
long total = 0L;
long[] histogram = new long[] {0, 0, 0, 0, 0, 0};
void addSpan(RemoteSpan span) {
count++;
long ms = span.stop - span.start;
total += ms;
min = Math.min(min, ms);
max = Math.max(max, ms);
int index = 0;
// bucket by order of magnitude; the last bucket absorbs everything longer
while (ms >= 10 && index < histogram.length - 1) {
ms /= 10;
index++;
}
histogram[index]++;
}
long average() {
return total / count;
}
}
private static class ShowTypeLink extends StringType<String> {
private static final long serialVersionUID = 1L;
int minutes;
public ShowTypeLink(int minutes) {
this.minutes = minutes;
}
@Override
public String format(Object obj) {
if (obj == null)
return "-";
String type = obj.toString();
String encodedType = BasicServlet.encode(type);
return String.format("<a href='/trace/listType?type=%s&minutes=%d'>%s</a>", encodedType, minutes, type);
}
}
private static class HistogramType extends StringType<Stats> {
private static final long serialVersionUID = 1L;
@Override
public String format(Object obj) {
Stats stat = (Stats) obj;
StringBuilder sb = new StringBuilder();
sb.append("<table>");
sb.append("<tr>");
for (long count : stat.histogram) {
if (count > 0)
sb.append(String.format("<td style='width:5em'>%d</td>", count));
else
sb.append("<td style='width:5em'>-</td>");
}
sb.append("</tr></table>");
return sb.toString();
}
@Override
public int compare(Stats o1, Stats o2) {
for (int i = 0; i < o1.histogram.length; i++) {
long diff = o1.histogram[i] - o2.histogram[i];
if (diff < 0)
return -1;
if (diff > 0)
return 1;
}
return 0;
}
}
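// Builds a scan range over the trace table's "start:<hex timestamp>" rows covering the last minutesSince minutes;
// the start key is zero-padded so both endpoints compare lexicographically.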
protected Range getRangeForTrace(long minutesSince) {
long endTime = System.currentTimeMillis();
long millisSince = minutesSince * 60 * 1000;
// Catch the overflow
if (millisSince < minutesSince) {
millisSince = endTime;
}
long startTime = endTime - millisSince;
String startHexTime = Long.toHexString(startTime), endHexTime = Long.toHexString(endTime);
while (startHexTime.length() < endHexTime.length()) {
startHexTime = "0" + startHexTime;
}
return new Range(new Text("start:" + startHexTime), new Text("start:" + endHexTime));
}
private void parseSpans(Scanner scanner, Map<String,Stats> summary) {
for (Entry<Key,Value> entry : scanner) {
RemoteSpan span = TraceFormatter.getRemoteSpan(entry);
Stats stats = summary.get(span.description);
if (stats == null) {
summary.put(span.description, stats = new Stats());
}
stats.addSpan(span);
}
}
@Override
public void pageBody(HttpServletRequest req, HttpServletResponse resp, StringBuilder sb) throws Exception {
int minutes = getMinutes(req);
Entry<Scanner,UserGroupInformation> pair = getScanner(sb);
final Scanner scanner = pair.getKey();
if (scanner == null) {
return;
}
Range range = getRangeForTrace(minutes);
scanner.setRange(range);
final Map<String,Stats> summary = new TreeMap<String,Stats>();
if (null != pair.getValue()) {
pair.getValue().doAs(new PrivilegedAction<Void>() {
@Override
public Void run() {
parseSpans(scanner, summary);
return null;
}
});
} else {
parseSpans(scanner, summary);
}
Table trace = new Table("traceSummary", "All Traces");
trace.addSortableColumn("Type", new ShowTypeLink(minutes), "Trace Type");
trace.addSortableColumn("Total", new NumberType<Integer>(), "Number of spans of this type");
trace.addSortableColumn("min", new DurationType(), "Shortest span duration");
trace.addSortableColumn("max", new DurationType(), "Longest span duration");
trace.addSortableColumn("avg", new DurationType(), "Average span duration");
trace
.addSortableColumn(
"Histogram",
new HistogramType(),
"Counts of spans of different duration. Columns start at milliseconds, and each column is ten times longer: tens of milliseconds, seconds, tens of seconds, etc.");
for (Entry<String,Stats> entry : summary.entrySet()) {
Stats stat = entry.getValue();
trace.addRow(entry.getKey(), stat.count, stat.min, stat.max, stat.average(), stat);
}
trace.generate(req, sb);
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor.aggregate.hazelcast;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import com.hazelcast.config.Config;
import com.hazelcast.config.XmlConfigBuilder;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.core.TransactionalMap;
import com.hazelcast.transaction.TransactionContext;
import com.hazelcast.transaction.TransactionOptions;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.impl.DefaultExchange;
import org.apache.camel.impl.DefaultExchangeHolder;
import org.apache.camel.spi.OptimisticLockingAggregationRepository;
import org.apache.camel.spi.RecoverableAggregationRepository;
import org.apache.camel.support.ServiceSupport;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.StringHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A Hazelcast-based AggregationRepository implementing
* {@link RecoverableAggregationRepository} and {@link OptimisticLockingAggregationRepository}.
* Defaults to thread-safe (non-optimistic) locking and a recoverable strategy.
* Hazelcast configuration is left to the end user and can be controlled through repositoryName and
* persistentRepositoryName, both of which are {@link com.hazelcast.core.IMap}&lt;String, Exchange&gt; instances.
* HazelcastAggregationRepository can also run its own Hazelcast instance, but no benefits of Hazelcast
* clustering are gained this way.
* If the {@link HazelcastAggregationRepository} uses its own local {@link HazelcastInstance} it will destroy that
* instance on {@link #doStop()}. You should control the {@link HazelcastInstance} lifecycle yourself whenever you
* instantiate {@link HazelcastAggregationRepository} with a reference to an external instance.
*
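* <p>
* A minimal configuration sketch (illustrative map names; assumes an externally managed {@link HazelcastInstance}
* and the Camel aggregator DSL):
* <pre>{@code
* HazelcastInstance hz = Hazelcast.newHazelcastInstance();
* HazelcastAggregationRepository repo =
*     new HazelcastAggregationRepository("orders", "orders-completed", hz);
* // hand the repository to an aggregator, e.g.
* // .aggregate(header("orderId"), strategy).aggregationRepository(repo)
* }</pre>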
*/
public class HazelcastAggregationRepository extends ServiceSupport
implements RecoverableAggregationRepository,
OptimisticLockingAggregationRepository {
private static final Logger LOG = LoggerFactory.getLogger(HazelcastAggregationRepository.class.getName());
private static final String COMPLETED_SUFFIX = "-completed";
private boolean optimistic;
private boolean useLocalHzInstance;
private boolean useRecovery = true;
private IMap<String, DefaultExchangeHolder> cache;
private IMap<String, DefaultExchangeHolder> persistedCache;
private HazelcastInstance hzInstance;
private String mapName;
private String persistenceMapName;
private String deadLetterChannel;
private long recoveryInterval = 5000;
private int maximumRedeliveries = 3;
private boolean allowSerializedHeaders;
/**
* Creates a new {@link HazelcastAggregationRepository} that defaults to non-optimistic locking
* with recoverable behavior and a local Hazelcast instance. The recoverable repository name defaults to
* {@code repositoryName} + "-completed".
* @param repositoryName {@link IMap} repository name;
*/
public HazelcastAggregationRepository(final String repositoryName) {
mapName = repositoryName;
persistenceMapName = String.format("%s%s", mapName, COMPLETED_SUFFIX);
optimistic = false;
useLocalHzInstance = true;
}
/**
* Creates a new {@link HazelcastAggregationRepository} that defaults to non-optimistic locking
* with recoverable behavior and a local Hazelcast instance.
* @param repositoryName {@link IMap} repository name;
* @param persistentRepositoryName {@link IMap} recoverable repository name;
*/
public HazelcastAggregationRepository(final String repositoryName, final String persistentRepositoryName) {
mapName = repositoryName;
persistenceMapName = persistentRepositoryName;
optimistic = false;
useLocalHzInstance = true;
}
/**
* Creates a new {@link HazelcastAggregationRepository} with recoverable behavior and a local Hazelcast instance.
* The recoverable repository name defaults to {@code repositoryName} + "-completed".
* @param repositoryName {@link IMap} repository name;
* @param optimistic whether to use optimistic locking manner.
*/
public HazelcastAggregationRepository(final String repositoryName, boolean optimistic) {
this(repositoryName);
this.optimistic = optimistic;
useLocalHzInstance = true;
}
/**
* Creates a new {@link HazelcastAggregationRepository} with recoverable behavior and a local Hazelcast instance.
* @param repositoryName {@link IMap} repository name;
* @param persistentRepositoryName {@link IMap} recoverable repository name;
* @param optimistic whether to use optimistic locking manner.
*/
public HazelcastAggregationRepository(final String repositoryName, final String persistentRepositoryName, boolean optimistic) {
this(repositoryName, persistentRepositoryName);
this.optimistic = optimistic;
useLocalHzInstance = true;
}
/**
* Creates a new {@link HazelcastAggregationRepository} that defaults to non-optimistic locking
* with recoverable behavior. The recoverable repository name defaults to
* {@code repositoryName} + "-completed".
* @param repositoryName {@link IMap} repository name;
* @param hzInstance externally configured {@link HazelcastInstance}.
*/
public HazelcastAggregationRepository(final String repositoryName, HazelcastInstance hzInstance) {
this(repositoryName, false);
this.hzInstance = hzInstance;
useLocalHzInstance = false;
}
/**
* Creates a new {@link HazelcastAggregationRepository} that defaults to non-optimistic locking
* with recoverable behavior.
* @param repositoryName {@link IMap} repository name;
* @param persistentRepositoryName {@link IMap} recoverable repository name;
* @param hzInstance externally configured {@link HazelcastInstance}.
*/
public HazelcastAggregationRepository(final String repositoryName, final String persistentRepositoryName, HazelcastInstance hzInstance) {
this(repositoryName, persistentRepositoryName, false);
this.hzInstance = hzInstance;
useLocalHzInstance = false;
}
/**
* Creates a new {@link HazelcastAggregationRepository} with recoverable behavior.
* The recoverable repository name defaults to {@code repositoryName} + "-completed".
* @param repositoryName {@link IMap} repository name;
* @param optimistic whether to use optimistic locking manner;
* @param hzInstance externally configured {@link HazelcastInstance}.
*/
public HazelcastAggregationRepository(final String repositoryName, boolean optimistic, HazelcastInstance hzInstance) {
this(repositoryName, optimistic);
this.hzInstance = hzInstance;
useLocalHzInstance = false;
}
/**
* Creates a new {@link HazelcastAggregationRepository} with recoverable behavior.
* @param repositoryName {@link IMap} repository name;
* @param optimistic whether to use optimistic locking manner;
* @param persistentRepositoryName {@link IMap} recoverable repository name;
* @param hzInstance externally configured {@link HazelcastInstance}.
*/
public HazelcastAggregationRepository(final String repositoryName, final String persistentRepositoryName, boolean optimistic, HazelcastInstance hzInstance) {
this(repositoryName, persistentRepositoryName, optimistic);
this.hzInstance = hzInstance;
useLocalHzInstance = false;
}
@Override
public Exchange add(CamelContext camelContext, String key, Exchange oldExchange, Exchange newExchange) throws OptimisticLockingException {
if (!optimistic) {
throw new UnsupportedOperationException();
}
LOG.trace("Adding an Exchange with ID {} for key {} in an optimistic manner.", newExchange.getExchangeId(), key);
if (oldExchange == null) {
DefaultExchangeHolder holder = DefaultExchangeHolder.marshal(newExchange, true, allowSerializedHeaders);
final DefaultExchangeHolder misbehaviorHolder = cache.putIfAbsent(key, holder);
if (misbehaviorHolder != null) {
Exchange misbehaviorEx = unmarshallExchange(camelContext, misbehaviorHolder);
LOG.error("Optimistic locking failed for exchange with key {}: IMap#putIfAbsend returned Exchange with ID {}, while it's expected no exchanges to be returned",
key, misbehaviorEx != null ? misbehaviorEx.getExchangeId() : "<null>");
throw new OptimisticLockingException();
}
} else {
DefaultExchangeHolder oldHolder = DefaultExchangeHolder.marshal(oldExchange, true, allowSerializedHeaders);
DefaultExchangeHolder newHolder = DefaultExchangeHolder.marshal(newExchange, true, allowSerializedHeaders);
if (!cache.replace(key, oldHolder, newHolder)) {
LOG.error("Optimistic locking failed for exchange with key {}: IMap#replace returned no Exchanges, while it's expected to replace one",
key);
throw new OptimisticLockingException();
}
}
LOG.trace("Added an Exchange with ID {} for key {} in optimistic manner.", newExchange.getExchangeId(), key);
return oldExchange;
}
@Override
public Exchange add(CamelContext camelContext, String key, Exchange exchange) {
if (optimistic) {
throw new UnsupportedOperationException();
}
LOG.trace("Adding an Exchange with ID {} for key {} in a thread-safe manner.", exchange.getExchangeId(), key);
Lock l = hzInstance.getLock(mapName);
try {
l.lock();
DefaultExchangeHolder newHolder = DefaultExchangeHolder.marshal(exchange, true, allowSerializedHeaders);
DefaultExchangeHolder oldHolder = cache.put(key, newHolder);
return unmarshallExchange(camelContext, oldHolder);
} finally {
LOG.trace("Added an Exchange with ID {} for key {} in a thread-safe manner.", exchange.getExchangeId(), key);
l.unlock();
}
}
@Override
public Set<String> scan(CamelContext camelContext) {
if (useRecovery) {
LOG.trace("Scanning for exchanges to recover in {} context", camelContext.getName());
Set<String> scanned = Collections.unmodifiableSet(persistedCache.keySet());
LOG.trace("Found {} keys for exchanges to recover in {} context", scanned.size(), camelContext.getName());
return scanned;
} else {
LOG.warn("What for to run recovery scans in {} context while repository {} is running in non-recoverable aggregation repository mode?!",
camelContext.getName(), mapName);
return Collections.emptySet();
}
}
@Override
public Exchange recover(CamelContext camelContext, String exchangeId) {
LOG.trace("Recovering an Exchange with ID {}.", exchangeId);
return useRecovery ? unmarshallExchange(camelContext, persistedCache.get(exchangeId)) : null;
}
@Override
public void setRecoveryInterval(long interval, TimeUnit timeUnit) {
this.recoveryInterval = timeUnit.toMillis(interval);
}
@Override
public void setRecoveryInterval(long interval) {
this.recoveryInterval = interval;
}
@Override
public long getRecoveryIntervalInMillis() {
return recoveryInterval;
}
@Override
public void setUseRecovery(boolean useRecovery) {
this.useRecovery = useRecovery;
}
@Override
public boolean isUseRecovery() {
return useRecovery;
}
@Override
public void setDeadLetterUri(String deadLetterUri) {
this.deadLetterChannel = deadLetterUri;
}
@Override
public String getDeadLetterUri() {
return deadLetterChannel;
}
@Override
public void setMaximumRedeliveries(int maximumRedeliveries) {
this.maximumRedeliveries = maximumRedeliveries;
}
@Override
public int getMaximumRedeliveries() {
return maximumRedeliveries;
}
@Override
public Exchange get(CamelContext camelContext, String key) {
return unmarshallExchange(camelContext, cache.get(key));
}
/**
* Checks if the key in question is in the repository.
*
* @param key Object - key in question
*/
public boolean containsKey(Object key) {
if (cache != null) {
return cache.containsKey(key);
} else {
return false;
}
}
public boolean isAllowSerializedHeaders() {
return allowSerializedHeaders;
}
public void setAllowSerializedHeaders(boolean allowSerializedHeaders) {
this.allowSerializedHeaders = allowSerializedHeaders;
}
/**
* Removes the {@code exchange} from the operational storage and moves it into the persistent one as a single
* transactional operation, provided the {@link HazelcastAggregationRepository} runs in recoverable mode and
* {@code optimistic} is false. Otherwise the removal is performed without transactional guarantees.
* @param camelContext the current CamelContext
* @param key the correlation key
* @param exchange the exchange to remove
*/
@Override
public void remove(CamelContext camelContext, String key, Exchange exchange) {
DefaultExchangeHolder holder = DefaultExchangeHolder.marshal(exchange, true, allowSerializedHeaders);
if (optimistic) {
LOG.trace("Removing an exchange with ID {} for key {} in an optimistic manner.", exchange.getExchangeId(), key);
if (!cache.remove(key, holder)) {
LOG.error("Optimistic locking failed for exchange with key {}: IMap#remove removed no Exchanges, while it's expected to remove one.",
key);
throw new OptimisticLockingException();
}
LOG.trace("Removed an exchange with ID {} for key {} in an optimistic manner.", exchange.getExchangeId(), key);
if (useRecovery) {
LOG.trace("Putting an exchange with ID {} for key {} into a recoverable storage in an optimistic manner.",
exchange.getExchangeId(), key);
persistedCache.put(exchange.getExchangeId(), holder);
LOG.trace("Put an exchange with ID {} for key {} into a recoverable storage in an optimistic manner.",
exchange.getExchangeId(), key);
}
} else {
if (useRecovery) {
LOG.trace("Removing an exchange with ID {} for key {} in a thread-safe manner.", exchange.getExchangeId(), key);
// The main reason to use a transaction here is fault tolerance:
// the transaction is rolled back automatically (default timeout is 2 minutes)
// if no commit occurs within the timeout, so we stay consistent even if the local node crashes.
TransactionOptions tOpts = new TransactionOptions();
tOpts.setTransactionType(TransactionOptions.TransactionType.ONE_PHASE);
TransactionContext tCtx = hzInstance.newTransactionContext(tOpts);
try {
tCtx.beginTransaction();
TransactionalMap<String, DefaultExchangeHolder> tCache = tCtx.getMap(cache.getName());
TransactionalMap<String, DefaultExchangeHolder> tPersistentCache = tCtx.getMap(persistedCache.getName());
DefaultExchangeHolder removedHolder = tCache.remove(key);
LOG.trace("Putting an exchange with ID {} for key {} into a recoverable storage in a thread-safe manner.",
exchange.getExchangeId(), key);
tPersistentCache.put(exchange.getExchangeId(), removedHolder);
tCtx.commitTransaction();
LOG.trace("Removed an exchange with ID {} for key {} in a thread-safe manner.", exchange.getExchangeId(), key);
LOG.trace("Put an exchange with ID {} for key {} into a recoverable storage in a thread-safe manner.",
exchange.getExchangeId(), key);
} catch (Throwable throwable) {
tCtx.rollbackTransaction();
final String msg = String.format("Transaction with ID %s was rolled back for remove operation with a key %s and an Exchange ID %s.",
tCtx.getTxnId(), key, exchange.getExchangeId());
LOG.warn(msg, throwable);
throw new RuntimeException(msg, throwable);
}
} else {
cache.remove(key);
}
}
}
@Override
public void confirm(CamelContext camelContext, String exchangeId) {
LOG.trace("Confirming an exchange with ID {}.", exchangeId);
if (useRecovery) {
persistedCache.remove(exchangeId);
}
}
@Override
public Set<String> getKeys() {
return Collections.unmodifiableSet(cache.keySet());
}
/**
* @return Persistent repository {@link IMap} name;
*/
public String getPersistentRepositoryName() {
return persistenceMapName;
}
@Override
protected void doStart() throws Exception {
if (maximumRedeliveries < 0) {
throw new IllegalArgumentException("Maximum redelivery retries must be zero or a positive integer.");
}
if (recoveryInterval < 0) {
throw new IllegalArgumentException("Recovery interval must be zero or a positive integer.");
}
StringHelper.notEmpty(mapName, "repositoryName");
if (useLocalHzInstance) {
Config cfg = new XmlConfigBuilder().build();
cfg.setProperty("hazelcast.version.check.enabled", "false");
hzInstance = Hazelcast.newHazelcastInstance(cfg);
} else {
ObjectHelper.notNull(hzInstance, "hzInstance");
}
cache = hzInstance.getMap(mapName);
if (useRecovery) {
persistedCache = hzInstance.getMap(persistenceMapName);
}
}
@Override
protected void doStop() throws Exception {
// shut down the locally created instance only; externally provided instances are managed by their owner
if (useLocalHzInstance) {
hzInstance.getLifecycleService().shutdown();
}
}
protected Exchange unmarshallExchange(CamelContext camelContext, DefaultExchangeHolder holder) {
Exchange exchange = null;
if (holder != null) {
exchange = new DefaultExchange(camelContext);
DefaultExchangeHolder.unmarshal(exchange, holder);
}
return exchange;
}
}
|
|
/*
* Copyright 2015 Goldman Sachs.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gs.collections.api.list;
import java.util.Comparator;
import java.util.List;
import java.util.Random;
import com.gs.collections.api.RichIterable;
import com.gs.collections.api.block.HashingStrategy;
import com.gs.collections.api.block.function.Function;
import com.gs.collections.api.block.function.Function2;
import com.gs.collections.api.block.function.primitive.BooleanFunction;
import com.gs.collections.api.block.function.primitive.ByteFunction;
import com.gs.collections.api.block.function.primitive.CharFunction;
import com.gs.collections.api.block.function.primitive.DoubleFunction;
import com.gs.collections.api.block.function.primitive.FloatFunction;
import com.gs.collections.api.block.function.primitive.IntFunction;
import com.gs.collections.api.block.function.primitive.LongFunction;
import com.gs.collections.api.block.function.primitive.ShortFunction;
import com.gs.collections.api.block.predicate.Predicate;
import com.gs.collections.api.block.predicate.Predicate2;
import com.gs.collections.api.block.procedure.Procedure;
import com.gs.collections.api.collection.MutableCollection;
import com.gs.collections.api.list.primitive.MutableBooleanList;
import com.gs.collections.api.list.primitive.MutableByteList;
import com.gs.collections.api.list.primitive.MutableCharList;
import com.gs.collections.api.list.primitive.MutableDoubleList;
import com.gs.collections.api.list.primitive.MutableFloatList;
import com.gs.collections.api.list.primitive.MutableIntList;
import com.gs.collections.api.list.primitive.MutableLongList;
import com.gs.collections.api.list.primitive.MutableShortList;
import com.gs.collections.api.multimap.list.MutableListMultimap;
import com.gs.collections.api.partition.list.PartitionMutableList;
import com.gs.collections.api.tuple.Pair;
/**
* A MutableList is an implementation of a JCF List which provides methods matching the Smalltalk Collection protocol.
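* <p>
* A minimal usage sketch (assumes the {@code Lists} factory from the gs-collections impl module is available):
* <pre>{@code
* MutableList<Integer> tens = Lists.mutable.of(1, 2, 3, 4)
*         .select(each -> each % 2 == 0)
*         .collect(each -> each * 10);   // => [20, 40]
* }</pre>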
*/
public interface MutableList<T>
extends MutableCollection<T>, List<T>, Cloneable, ListIterable<T>
{
MutableList<T> with(T element);
MutableList<T> without(T element);
MutableList<T> withAll(Iterable<? extends T> elements);
MutableList<T> withoutAll(Iterable<? extends T> elements);
MutableList<T> newEmpty();
MutableList<T> clone();
MutableList<T> tap(Procedure<? super T> procedure);
MutableList<T> select(Predicate<? super T> predicate);
<P> MutableList<T> selectWith(Predicate2<? super T, ? super P> predicate, P parameter);
MutableList<T> reject(Predicate<? super T> predicate);
<P> MutableList<T> rejectWith(Predicate2<? super T, ? super P> predicate, P parameter);
PartitionMutableList<T> partition(Predicate<? super T> predicate);
<P> PartitionMutableList<T> partitionWith(Predicate2<? super T, ? super P> predicate, P parameter);
<S> MutableList<S> selectInstancesOf(Class<S> clazz);
<V> MutableList<V> collect(Function<? super T, ? extends V> function);
MutableBooleanList collectBoolean(BooleanFunction<? super T> booleanFunction);
MutableByteList collectByte(ByteFunction<? super T> byteFunction);
MutableCharList collectChar(CharFunction<? super T> charFunction);
MutableDoubleList collectDouble(DoubleFunction<? super T> doubleFunction);
MutableFloatList collectFloat(FloatFunction<? super T> floatFunction);
MutableIntList collectInt(IntFunction<? super T> intFunction);
MutableLongList collectLong(LongFunction<? super T> longFunction);
MutableShortList collectShort(ShortFunction<? super T> shortFunction);
<P, V> MutableList<V> collectWith(Function2<? super T, ? super P, ? extends V> function, P parameter);
<V> MutableList<V> collectIf(Predicate<? super T> predicate, Function<? super T, ? extends V> function);
<V> MutableList<V> flatCollect(Function<? super T, ? extends Iterable<V>> function);
/**
* Returns a new {@code ListIterable} containing the distinct elements in this list.
*
* @since 7.0
*/
MutableList<T> distinct();
/**
* Returns a new {@code ListIterable} containing the distinct elements in this list. Takes HashingStrategy.
*
* @since 7.0
*/
MutableList<T> distinct(HashingStrategy<? super T> hashingStrategy);
/**
* Sorts the internal data structure of this list and returns the list itself as a convenience.
*/
MutableList<T> sortThis(Comparator<? super T> comparator);
/**
* Sorts the internal data structure of this list and returns the list itself as a convenience.
*/
MutableList<T> sortThis();
/**
* Sorts the internal data structure of this list based on the natural order of the attribute returned by {@code
* function}.
*/
<V extends Comparable<? super V>> MutableList<T> sortThisBy(Function<? super T, ? extends V> function);
/**
* @since 6.0
*/
MutableList<T> sortThisByInt(IntFunction<? super T> function);
/**
* @since 6.0
*/
MutableList<T> sortThisByBoolean(BooleanFunction<? super T> function);
/**
* @since 6.0
*/
MutableList<T> sortThisByChar(CharFunction<? super T> function);
/**
* @since 6.0
*/
MutableList<T> sortThisByByte(ByteFunction<? super T> function);
/**
* @since 6.0
*/
MutableList<T> sortThisByShort(ShortFunction<? super T> function);
/**
* @since 6.0
*/
MutableList<T> sortThisByFloat(FloatFunction<? super T> function);
/**
* @since 6.0
*/
MutableList<T> sortThisByLong(LongFunction<? super T> function);
/**
* @since 6.0
*/
MutableList<T> sortThisByDouble(DoubleFunction<? super T> function);
MutableList<T> subList(int fromIndex, int toIndex);
/**
* Returns an unmodifiable view of the list.
* The returned list will be <tt>Serializable</tt> if this list is <tt>Serializable</tt>.
*
* @return an unmodifiable view of this list
*/
MutableList<T> asUnmodifiable();
MutableList<T> asSynchronized();
/**
* Returns an immutable copy of this list. If the list is immutable, it returns itself.
* The returned list will be <tt>Serializable</tt> if this list is <tt>Serializable</tt>.
*/
ImmutableList<T> toImmutable();
<V> MutableListMultimap<V, T> groupBy(Function<? super T, ? extends V> function);
<V> MutableListMultimap<V, T> groupByEach(Function<? super T, ? extends Iterable<V>> function);
<S> MutableList<Pair<T, S>> zip(Iterable<S> that);
MutableList<Pair<T, Integer>> zipWithIndex();
MutableList<T> take(int count);
MutableList<T> takeWhile(Predicate<? super T> predicate);
MutableList<T> drop(int count);
MutableList<T> dropWhile(Predicate<? super T> predicate);
PartitionMutableList<T> partitionWhile(Predicate<? super T> predicate);
/**
* Returns a new MutableList in reverse order.
*/
MutableList<T> toReversed();
/**
* Mutates the current list by reversing its order and returns the current list as a result.
*/
MutableList<T> reverseThis();
MutableList<T> shuffleThis();
MutableList<T> shuffleThis(Random rnd);
}
|
|
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2016-2018 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.dimensionlookup;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.pentaho.di.core.injection.BaseMetadataInjectionTest;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaFactory;
import org.pentaho.di.core.row.value.ValueMetaString;
import org.pentaho.di.junit.rules.RestorePDIEngineEnvironment;
public class DimensionLookupMetaInjectionTest extends BaseMetadataInjectionTest<DimensionLookupMeta> {
@ClassRule public static RestorePDIEngineEnvironment env = new RestorePDIEngineEnvironment();
@Before
public void setup() {
super.setup( new DimensionLookupMeta() );
}
@Test
public void test() throws Exception {
check( "TARGET_SCHEMA", new StringGetter() {
@Override
public String get() {
return meta.getSchemaName();
}
} );
check( "TARGET_TABLE", new StringGetter() {
@Override
public String get() {
return meta.getTableName();
}
} );
check( "UPDATE_DIMENSION", new BooleanGetter() {
@Override
public boolean get() {
return meta.isUpdate();
}
} );
check( "KEY_STREAM_FIELDNAME", new StringGetter() {
@Override
public String get() {
return meta.getKeyStream()[0];
}
} );
check( "KEY_DATABASE_FIELDNAME", new StringGetter() {
@Override
public String get() {
return meta.getKeyLookup()[0];
}
} );
check( "STREAM_DATE_FIELD", new StringGetter() {
@Override
public String get() {
return meta.getDateField();
}
} );
check( "DATE_RANGE_START_FIELD", new StringGetter() {
@Override
public String get() {
return meta.getDateFrom();
}
} );
check( "DATE_RANGE_END_FIELD", new StringGetter() {
@Override
public String get() {
return meta.getDateTo();
}
} );
check( "STREAM_FIELDNAME", new StringGetter() {
@Override
public String get() {
return meta.getFieldStream()[0];
}
} );
check( "DATABASE_FIELDNAME", new StringGetter() {
@Override
public String get() {
return meta.getFieldLookup()[0];
}
} );
check( "TECHNICAL_KEY_FIELD", new StringGetter() {
@Override
public String get() {
return meta.getKeyField();
}
} );
check( "TECHNICAL_KEY_NEW_NAME", new StringGetter() {
@Override
public String get() {
return meta.getKeyRename();
}
} );
check( "VERSION_FIELD", new StringGetter() {
@Override
public String get() {
return meta.getVersionField();
}
} );
check( "TECHNICAL_KEY_SEQUENCE", new StringGetter() {
@Override
public String get() {
return meta.getSequenceName();
}
} );
check( "COMMIT_SIZE", new IntGetter() {
@Override
public int get() {
return meta.getCommitSize();
}
} );
check( "MIN_YEAR", new IntGetter() {
@Override
public int get() {
return meta.getMinYear();
}
} );
check( "MAX_YEAR", new IntGetter() {
@Override
public int get() {
return meta.getMaxYear();
}
} );
check( "TECHNICAL_KEY_CREATION", new StringGetter() {
@Override
public String get() {
return meta.getTechKeyCreation();
}
} );
check( "CACHE_SIZE", new IntGetter() {
@Override
public int get() {
return meta.getCacheSize();
}
} );
check( "USE_ALTERNATIVE_START_DATE", new BooleanGetter() {
@Override
public boolean get() {
return meta.isUsingStartDateAlternative();
}
} );
check( "ALTERNATIVE_START_COLUMN", new StringGetter() {
@Override
public String get() {
return meta.getStartDateFieldName();
}
} );
check( "PRELOAD_CACHE", new BooleanGetter() {
@Override
public boolean get() {
return meta.isPreloadingCache();
}
} );
check( "CONNECTION_NAME", new StringGetter() {
@Override
public String get() {
return "My Connection";
}
}, "My Connection" );
ValueMetaInterface mftt = new ValueMetaString( "f" );
injector.setProperty( meta, "ALTERNATIVE_START_OPTION", setValue( mftt, DimensionLookupMeta
.getStartDateAlternativeCode( 0 ) ), "f" );
Assert.assertEquals( 0, meta.getStartDateAlternative() );
String[] valueMetaNames = ValueMetaFactory.getValueMetaNames();
checkStringToInt( "TYPE_OF_RETURN_FIELD", new IntGetter() {
@Override
public int get() {
return meta.getReturnType()[0];
}
}, valueMetaNames, getTypeCodes( valueMetaNames ) );
skipPropertyTest( "ALTERNATIVE_START_OPTION" );
skipPropertyTest( "UPDATE_TYPE" );
}
}
|
|
/*
* Copyright 2000-2014 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
/*
* Copyright (c) 1999 World Wide Web Consortium
* (Massachusetts Institute of Technology, Institut National de Recherche
* en Informatique et en Automatique, Keio University).
* All Rights Reserved. http://www.w3.org/Consortium/Legal/
*
* $Id: LexicalUnitImpl.java,v 1.3 2000/02/15 02:08:19 plehegar Exp $
*/
package com.customweb.sass.internal.parser;
import java.io.Serializable;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.w3c.css.sac.LexicalUnit;
import com.customweb.sass.internal.ScssContext;
import com.customweb.sass.internal.expression.exception.IncompatibleUnitsException;
import com.customweb.sass.internal.parser.function.AbsFunctionGenerator;
import com.customweb.sass.internal.parser.function.AdjustColorFunctionGenerator;
import com.customweb.sass.internal.parser.function.AlphaFunctionGenerator;
import com.customweb.sass.internal.parser.function.CeilFunctionGenerator;
import com.customweb.sass.internal.parser.function.ColorComponentFunctionGenerator;
import com.customweb.sass.internal.parser.function.DarkenFunctionGenerator;
import com.customweb.sass.internal.parser.function.DefaultFunctionGenerator;
import com.customweb.sass.internal.parser.function.FloorFunctionGenerator;
import com.customweb.sass.internal.parser.function.GrayscaleFunctionGenerator;
import com.customweb.sass.internal.parser.function.IfFunctionGenerator;
import com.customweb.sass.internal.parser.function.LightenFunctionGenerator;
import com.customweb.sass.internal.parser.function.ListAppendFunctionGenerator;
import com.customweb.sass.internal.parser.function.ListIndexFunctionGenerator;
import com.customweb.sass.internal.parser.function.ListJoinFunctionGenerator;
import com.customweb.sass.internal.parser.function.ListLengthFunctionGenerator;
import com.customweb.sass.internal.parser.function.ListNthFunctionGenerator;
import com.customweb.sass.internal.parser.function.MinMaxFunctionGenerator;
import com.customweb.sass.internal.parser.function.MixFunctionGenerator;
import com.customweb.sass.internal.parser.function.PercentageFunctionGenerator;
import com.customweb.sass.internal.parser.function.QuoteUnquoteFunctionGenerator;
import com.customweb.sass.internal.parser.function.RGBFunctionGenerator;
import com.customweb.sass.internal.parser.function.RectFunctionGenerator;
import com.customweb.sass.internal.parser.function.RoundFunctionGenerator;
import com.customweb.sass.internal.parser.function.SCSSFunctionGenerator;
import com.customweb.sass.internal.parser.function.SaturationModificationFunctionGenerator;
import com.customweb.sass.internal.parser.function.TransparencyModificationFunctionGenerator;
import com.customweb.sass.internal.parser.function.TypeOfFunctionGenerator;
import com.customweb.sass.internal.parser.function.UnitFunctionGenerator;
import com.customweb.sass.internal.parser.function.UnitlessFunctionGenerator;
import com.customweb.sass.internal.tree.FunctionCall;
import com.customweb.sass.internal.tree.FunctionDefNode;
import com.customweb.sass.internal.tree.Node;
import com.customweb.sass.internal.tree.Node.BuildStringStrategy;
import com.customweb.sass.internal.util.ColorUtil;
import com.customweb.sass.internal.util.StringUtil;
/**
* @version $Revision: 1.3 $
* @author Philippe Le Hegaret
*
* @modified Sebastian Nyholm @ Vaadin Ltd
*/
public class LexicalUnitImpl implements LexicalUnit, SCSSLexicalUnit,
SassListItem, Serializable {
private static final long serialVersionUID = -6649833716809789399L;
public static final long PRECISION = 100000L;
private static final DecimalFormat CSS_FLOAT_FORMAT = new DecimalFormat(
"0.0####");
private short type;
private int line;
private int column;
private int i;
private float f;
private String sdimension;
private StringInterpolationSequence s;
private String fname;
private ActualArgumentList params;
private String printState;
LexicalUnitImpl(int line, int column, short type) {
this.line = line;
this.column = column - 1;
this.type = type;
}
LexicalUnitImpl(int line, int column, short type, float f) {
this(line, column, type);
this.f = f;
i = (int) f;
}
LexicalUnitImpl(int line, int column, short type, int i) {
this(line, column, type);
this.i = i;
f = i;
}
LexicalUnitImpl(int line, int column, short type, String sdimension, float f) {
this(line, column, type, f);
this.sdimension = sdimension;
}
LexicalUnitImpl(int line, int column, short type, String s) {
this(line, column, type, new StringInterpolationSequence(s));
}
LexicalUnitImpl(int line, int column, short type,
StringInterpolationSequence s) {
this(line, column, type);
this.s = s;
}
LexicalUnitImpl(short type, int line, int column, String fname,
ActualArgumentList params) {
this(line, column, type);
this.fname = fname;
this.params = params;
}
public int getLineNumber() {
return line;
}
public int getColumnNumber() {
return column;
}
@Override
public short getLexicalUnitType() {
return type;
}
private void setLexicalUnitType(short type) {
this.type = type;
}
@Override
@Deprecated
public LexicalUnitImpl getNextLexicalUnit() {
return null;
}
@Override
@Deprecated
public LexicalUnitImpl getPreviousLexicalUnit() {
return null;
}
public boolean isUnitlessNumber() {
switch (type) {
case LexicalUnitImpl.SAC_INTEGER:
case LexicalUnitImpl.SAC_REAL:
return true;
default:
return false;
}
}
public boolean isNumber() {
short type = getLexicalUnitType();
switch (type) {
case LexicalUnit.SAC_INTEGER:
case LexicalUnit.SAC_REAL:
case LexicalUnit.SAC_EM:
case SCSSLexicalUnit.SAC_LEM:
case SCSSLexicalUnit.SAC_REM:
case LexicalUnit.SAC_EX:
case LexicalUnit.SAC_PIXEL:
case LexicalUnit.SAC_INCH:
case LexicalUnit.SAC_CENTIMETER:
case LexicalUnit.SAC_MILLIMETER:
case LexicalUnit.SAC_POINT:
case LexicalUnit.SAC_PICA:
case LexicalUnit.SAC_PERCENTAGE:
case LexicalUnit.SAC_DEGREE:
case LexicalUnit.SAC_GRADIAN:
case LexicalUnit.SAC_RADIAN:
case LexicalUnit.SAC_MILLISECOND:
case LexicalUnit.SAC_SECOND:
case LexicalUnit.SAC_HERTZ:
case LexicalUnit.SAC_KILOHERTZ:
case LexicalUnit.SAC_DIMENSION:
return true;
default:
return false;
}
}
@Override
public int getIntegerValue() {
return i;
}
private void setIntegerValue(int i) {
this.i = i;
f = i;
}
@Override
public float getFloatValue() {
return f;
}
/**
* Returns the float value as a string unless the value is an integer, in
* which case the integer value is returned as a string.
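*
* <p>Illustrative only (example values, not taken from the original documentation):
*
* <pre>{@code
* LexicalUnitImpl.createNumber(0, 0, 12.0f).getFloatOrInteger(); // "12"
* LexicalUnitImpl.createNumber(0, 0, 1.25f).getFloatOrInteger(); // "1.25"
* }</pre>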
*
* @return a string representing the value, either with or without decimals
*/
public String getFloatOrInteger() {
float f = getFloatValue();
int i = (int) f;
if (i == f) {
return Integer.toString(i);
} else {
return CSS_FLOAT_FORMAT.format(f);
}
}
private void setFloatValue(float f) {
this.f = f;
i = (int) f;
}
@Override
public String getDimensionUnitText() {
switch (type) {
case SAC_INTEGER:
case SAC_REAL:
return "";
case SAC_PERCENTAGE:
return "%";
case SAC_EM:
return "em";
case SCSSLexicalUnit.SAC_LEM:
return "lem";
case SCSSLexicalUnit.SAC_REM:
return "rem";
case SAC_EX:
return "ex";
case SAC_PIXEL:
return "px";
case SAC_CENTIMETER:
return "cm";
case SAC_MILLIMETER:
return "mm";
case SAC_INCH:
return "in";
case SAC_POINT:
return "pt";
case SAC_PICA:
return "pc";
case SAC_DEGREE:
return "deg";
case SAC_RADIAN:
return "rad";
case SAC_GRADIAN:
return "grad";
case SAC_MILLISECOND:
return "ms";
case SAC_SECOND:
return "s";
case SAC_HERTZ:
return "Hz";
case SAC_KILOHERTZ:
return "kHz";
case SAC_DIMENSION:
return sdimension;
default:
throw new IllegalStateException("invalid dimension " + type);
}
}
public String getStringValue() {
return s == null ? null : s.toString();
}
private void setStringValue(String str) {
s = new StringInterpolationSequence(str);
}
@Override
public String getFunctionName() {
return fname;
}
@Override
public LexicalUnitImpl getParameters() {
// use getParameterList() instead
return null;
}
public ActualArgumentList getParameterList() {
return params;
}
@Override
public LexicalUnitImpl getSubValues() {
// should not be used, this method is only here because of an
// implemented interface
return null;
}
/**
* Returns the current state of the node tree as a string: SCSS before
* compilation and CSS after it.
*
* The result may be null.
*
* @return State as a string
*/
public String printState() {
if (printState == null) {
printState = buildString(Node.PRINT_STRATEGY);
}
return printState;
}
@Override
public String toString() {
String result = simpleAsString();
if (result == null) {
return "Lexical unit node [" + buildString(Node.TO_STRING_STRATEGY)
+ "]";
} else {
return result;
}
}
// A helper method for sass interpolation
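// (a value wrapped in matching double or single quotes has the quotes stripped; anything else is returned unchanged)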
@Override
public String unquotedString() {
String result = printState();
if (result.length() >= 2
&& ((result.charAt(0) == '"' && result
.charAt(result.length() - 1) == '"') || (result
.charAt(0) == '\'' && result
.charAt(result.length() - 1) == '\''))) {
result = result.substring(1, result.length() - 1);
}
return result;
}
public LexicalUnitImpl divide(LexicalUnitImpl denominator) {
if (denominator.getLexicalUnitType() != SAC_INTEGER
&& denominator.getLexicalUnitType() != SAC_REAL
&& getLexicalUnitType() != denominator.getLexicalUnitType()) {
throw new IncompatibleUnitsException(printState());
}
LexicalUnitImpl copy = copyWithValue(getFloatValue()
/ denominator.getFloatValue());
if (getLexicalUnitType() == denominator.getLexicalUnitType()) {
copy.setLexicalUnitType(SAC_REAL);
}
return copy;
}
public LexicalUnitImpl add(LexicalUnitImpl another) {
LexicalUnitImpl copy = copyWithValue(getFloatValue()
+ another.getFloatValue());
copy.setLexicalUnitType(checkAndGetUnit(another));
return copy;
}
public LexicalUnitImpl minus(LexicalUnitImpl another) {
LexicalUnitImpl copy = copyWithValue(getFloatValue()
- another.getFloatValue());
copy.setLexicalUnitType(checkAndGetUnit(another));
return copy;
}
public LexicalUnitImpl multiply(LexicalUnitImpl another) {
LexicalUnitImpl copy = copyWithValue(getFloatValue()
* another.getFloatValue());
copy.setLexicalUnitType(checkAndGetUnit(another));
return copy;
}
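// Determines the unit of an arithmetic result: mixing two different units throws
// IncompatibleUnitsException, a unitless operand adopts the other operand's unit,
// and otherwise this operand's type is kept.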
protected short checkAndGetUnit(LexicalUnitImpl another) {
if (getLexicalUnitType() != SAC_INTEGER
&& getLexicalUnitType() != SAC_REAL
&& another.getLexicalUnitType() != SAC_INTEGER
&& another.getLexicalUnitType() != SAC_REAL
&& getLexicalUnitType() != another.getLexicalUnitType()) {
throw new IncompatibleUnitsException(printState());
}
if (another.getLexicalUnitType() != SAC_INTEGER
&& another.getLexicalUnitType() != SAC_REAL) {
return another.getLexicalUnitType();
}
return getLexicalUnitType();
}
public LexicalUnitImpl modulo(LexicalUnitImpl another) {
if (!checkLexicalUnitType(another, getLexicalUnitType(), SAC_INTEGER,
SAC_REAL)) {
throw new IncompatibleUnitsException(printState());
}
LexicalUnitImpl copy = copy();
copy.setIntegerValue(getIntegerValue() % another.getIntegerValue());
return copy;
}
/**
* Returns a shallow copy of the {@link LexicalUnitImpl} with null as next
* lexical unit pointer. Parameters are not copied but a reference to the
* same parameter list is used.
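*
* <p>For instance, {@link #copyWithValue(float)} builds on this method: it
* copies the unit and then overwrites the numeric value, leaving this
* instance unchanged.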
*
* @return copy of this without next
*/
public LexicalUnitImpl copy() {
LexicalUnitImpl copy = new LexicalUnitImpl(line, column, type);
copy.i = i;
copy.f = f;
copy.s = s;
copy.fname = fname;
copy.sdimension = sdimension;
copy.params = params;
return copy;
}
public LexicalUnitImpl copyWithValue(float value) {
LexicalUnitImpl result = copy();
result.setFloatValue(value);
return result;
}
private void setParameterList(ActualArgumentList params) {
this.params = params;
}
public String getSdimension() {
return sdimension;
}
// here some useful function for creation
public static LexicalUnitImpl createVariable(int line, int column,
String name) {
return new LexicalUnitImpl(line, column, SCSS_VARIABLE, name);
}
public static LexicalUnitImpl createNull(int line, int column) {
return new LexicalUnitImpl(line, column, SCSS_NULL, "null");
}
public static LexicalUnitImpl createNumber(int line, int column, float v) {
int i = (int) v;
if (v == i) {
return new LexicalUnitImpl(line, column, SAC_INTEGER, i);
} else {
return new LexicalUnitImpl(line, column, SAC_REAL, v);
}
}
public static LexicalUnitImpl createInteger(int line, int column, int i) {
return new LexicalUnitImpl(line, column, SAC_INTEGER, i);
}
public static LexicalUnitImpl createPercentage(int line, int column, float v) {
LexicalUnitImpl result = new LexicalUnitImpl(line, column,
SAC_PERCENTAGE, v);
if (Math.round(v * 100 * PRECISION) == (((int) v) * 100 * PRECISION)) {
result.setIntegerValue((int) v);
}
return result;
}
static LexicalUnitImpl createEMS(int line, int column, float v) {
return new LexicalUnitImpl(line, column, SAC_EM, v);
}
static LexicalUnitImpl createLEM(int line, int column, float v) {
return new LexicalUnitImpl(line, column, SCSSLexicalUnit.SAC_LEM, v);
}
static LexicalUnitImpl createREM(int line, int column, float v) {
return new LexicalUnitImpl(line, column, SCSSLexicalUnit.SAC_REM, v);
}
static LexicalUnitImpl createEXS(int line, int column, float v) {
return new LexicalUnitImpl(line, column, SAC_EX, v);
}
public static LexicalUnitImpl createPX(int line, int column, float v) {
return new LexicalUnitImpl(line, column, SAC_PIXEL, v);
}
public static LexicalUnitImpl createCM(int line, int column, float v) {
return new LexicalUnitImpl(line, column, SAC_CENTIMETER, v);
}
static LexicalUnitImpl createMM(int line, int column, float v) {
return new LexicalUnitImpl(line, column, SAC_MILLIMETER, v);
}
static LexicalUnitImpl createIN(int line, int column, float v) {
return new LexicalUnitImpl(line, column, SAC_INCH, v);
}
static LexicalUnitImpl createPT(int line, int column, float v) {
return new LexicalUnitImpl(line, column, SAC_POINT, v);
}
static LexicalUnitImpl createPC(int line, int column, float v) {
return new LexicalUnitImpl(line, column, SAC_PICA, v);
}
public static LexicalUnitImpl createDEG(int line, int column, float v) {
return new LexicalUnitImpl(line, column, SAC_DEGREE, v);
}
static LexicalUnitImpl createRAD(int line, int column, float v) {
return new LexicalUnitImpl(line, column, SAC_RADIAN, v);
}
static LexicalUnitImpl createGRAD(int line, int column, float v) {
return new LexicalUnitImpl(line, column, SAC_GRADIAN, v);
}
static LexicalUnitImpl createMS(int line, int column, float v) {
if (v < 0) {
throw new ParseException("Time values may not be negative", line,
column);
}
return new LexicalUnitImpl(line, column, SAC_MILLISECOND, v);
}
static LexicalUnitImpl createS(int line, int column, float v) {
if (v < 0) {
throw new ParseException("Time values may not be negative", line,
column);
}
return new LexicalUnitImpl(line, column, SAC_SECOND, v);
}
static LexicalUnitImpl createHZ(int line, int column, float v) {
if (v < 0) {
throw new ParseException("Frequency values may not be negative",
line, column);
}
return new LexicalUnitImpl(line, column, SAC_HERTZ, v);
}
static LexicalUnitImpl createKHZ(int line, int column, float v) {
if (v < 0) {
throw new ParseException("Frequency values may not be negative",
line, column);
}
return new LexicalUnitImpl(line, column, SAC_KILOHERTZ, v);
}
static LexicalUnitImpl createDimen(int line, int column, float v, String s) {
return new LexicalUnitImpl(line, column, SAC_DIMENSION, s, v);
}
static LexicalUnitImpl createInherit(int line, int column) {
return new LexicalUnitImpl(line, column, SAC_INHERIT, "inherit");
}
public static LexicalUnitImpl createRawIdent(int line, int column, String s) {
return new LexicalUnitImpl(line, column, SAC_IDENT, s);
}
public static LexicalUnitImpl createIdent(int line, int column, String s) {
return createIdent(line, column, new StringInterpolationSequence(s));
}
public static LexicalUnitImpl createIdent(int line, int column,
StringInterpolationSequence s) {
if ("null".equals(s.toString())) {
return createNull(line, column);
}
return new LexicalUnitImpl(line, column, SAC_IDENT, s);
}
public static LexicalUnitImpl createString(String s) {
return new LexicalUnitImpl(0, 0, SAC_STRING_VALUE, s);
}
public static LexicalUnitImpl createString(int line, int column, String s) {
return new LexicalUnitImpl(line, column, SAC_STRING_VALUE, s);
}
static LexicalUnitImpl createURL(int line, int column, String s) {
return new LexicalUnitImpl(line, column, SAC_URI, s);
}
public static LexicalUnitImpl createAttr(int line, int column, String s) {
return new LexicalUnitImpl(line, column, SAC_ATTR, s);
}
public static LexicalUnitImpl createRGBColor(int line, int column,
ActualArgumentList params) {
return new LexicalUnitImpl(SAC_RGBCOLOR, line, column, "rgb", params);
}
public static LexicalUnitImpl createRect(int line, int column,
ActualArgumentList params) {
return new LexicalUnitImpl(SAC_RECT_FUNCTION, line, column, "rect",
params);
}
public static LexicalUnitImpl createFunction(int line, int column,
String fname, ActualArgumentList params) {
return new LexicalUnitImpl(SAC_FUNCTION, line, column, fname, params);
}
public static boolean checkLexicalUnitType(SassListItem item,
short... lexicalUnitTypes) {
if (!(item instanceof LexicalUnitImpl)) {
return false;
}
for (short s : lexicalUnitTypes) {
if (((LexicalUnitImpl) item).getLexicalUnitType() == s) {
return true;
}
}
return false;
}
public static LexicalUnitImpl createUnicodeRange(int line, int column,
SassList params) {
// @@ return new LexicalUnitImpl(line, column, previous, null,
// SAC_UNICODERANGE, params);
return null;
}
public static LexicalUnitImpl createComma(int line, int column) {
return new LexicalUnitImpl(line, column, SAC_OPERATOR_COMMA);
}
public static LexicalUnitImpl createSpace(int line, int column) {
return new LexicalUnitImpl(line, column, SAC_IDENT, " ");
}
public static LexicalUnitImpl createSlash(int line, int column) {
return new LexicalUnitImpl(line, column, SAC_OPERATOR_SLASH);
}
public static LexicalUnitImpl createAdd(int line, int column) {
return new LexicalUnitImpl(line, column, SAC_OPERATOR_PLUS);
}
public static LexicalUnitImpl createMinus(int line, int column) {
return new LexicalUnitImpl(line, column, SAC_OPERATOR_MINUS);
}
public static LexicalUnitImpl createMultiply(int line, int column) {
return new LexicalUnitImpl(line, column, SAC_OPERATOR_MULTIPLY);
}
public static LexicalUnitImpl createModulo(int line, int column) {
return new LexicalUnitImpl(line, column, SAC_OPERATOR_MOD);
}
public static LexicalUnitImpl createLeftParenthesis(int line, int column) {
return new LexicalUnitImpl(line, column, SCSS_OPERATOR_LEFT_PAREN);
}
public static LexicalUnitImpl createRightParenthesis(int line, int column) {
return new LexicalUnitImpl(line, column, SCSS_OPERATOR_RIGHT_PAREN);
}
public static LexicalUnitImpl createIdent(String s) {
return new LexicalUnitImpl(0, 0, SAC_IDENT, s);
}
public static LexicalUnitImpl createEquals(int line, int column) {
return new LexicalUnitImpl(line, column, SCSS_OPERATOR_EQUALS);
}
public static LexicalUnitImpl createNotEqual(int line, int column) {
return new LexicalUnitImpl(line, column, SCSS_OPERATOR_NOT_EQUAL);
}
public static LexicalUnitImpl createGreaterThan(int line, int column) {
return new LexicalUnitImpl(line, column, SAC_OPERATOR_GT);
}
public static LexicalUnitImpl createGreaterThanOrEqualTo(int line,
int column) {
return new LexicalUnitImpl(line, column, SAC_OPERATOR_GE);
}
public static LexicalUnitImpl createLessThan(int line, int column) {
return new LexicalUnitImpl(line, column, SAC_OPERATOR_LT);
}
public static LexicalUnitImpl createLessThanOrEqualTo(int line, int column) {
return new LexicalUnitImpl(line, column, SAC_OPERATOR_LE);
}
public static LexicalUnitImpl createAnd(int line, int column) {
return new LexicalUnitImpl(line, column, SCSS_OPERATOR_AND);
}
public static LexicalUnitImpl createOr(int line, int column) {
return new LexicalUnitImpl(line, column, SCSS_OPERATOR_OR);
}
@Override
public SassListItem replaceVariables(ScssContext context) {
LexicalUnitImpl lui = this;
// replace function parameters (if any)
lui = lui.replaceParams(context);
// replace parameters in string value
if (lui.getLexicalUnitType() == LexicalUnitImpl.SCSS_VARIABLE) {
return lui.replaceSimpleVariable(context);
} else if (containsInterpolation()) {
return lui.replaceInterpolation(context);
}
return lui;
}
private LexicalUnitImpl replaceParams(ScssContext context) {
ActualArgumentList params = getParameterList();
if (params != null) {
LexicalUnitImpl copy = copy();
copy.setParameterList(params.replaceVariables(context));
return copy;
} else {
return this;
}
}
private SassListItem replaceSimpleVariable(ScssContext context) {
if (getLexicalUnitType() == LexicalUnitImpl.SCSS_VARIABLE) {
// replace simple variable
String stringValue = getStringValue();
Variable var = context.getVariable(stringValue);
if (var != null) {
return var.getExpr().replaceVariables(context);
}
}
return this;
}
private boolean containsInterpolation() {
return s != null && s.containsInterpolation();
}
private SassListItem replaceInterpolation(ScssContext context) {
// replace interpolation
if (containsInterpolation()) {
// handle Interpolation objects
StringInterpolationSequence sis = s.replaceVariables(context);
// handle strings with interpolation
for (Variable var : context.getVariables()) {
if (!sis.containsInterpolation()) {
break;
}
String interpolation = "#{$" + var.getName() + "}";
String stringValue = sis.toString();
SassListItem expr = var.getExpr();
// strings should be unquoted
if (stringValue.equals(interpolation)
&& !checkLexicalUnitType(expr,
LexicalUnitImpl.SAC_STRING_VALUE)) {
// no more replacements needed, use data type of expr
return expr.replaceVariables(context);
} else if (stringValue.contains(interpolation)) {
String replacementString = expr.replaceVariables(context)
.unquotedString();
sis = new StringInterpolationSequence(
stringValue.replaceAll(
Pattern.quote(interpolation),
Matcher.quoteReplacement(replacementString)));
}
}
if (sis != s) {
LexicalUnitImpl copy = copy();
copy.s = sis;
return copy;
}
}
return this;
}
@Override
public SassListItem evaluateFunctionsAndExpressions(ScssContext context,
boolean evaluateArithmetics) {
if (params != null && !"calc".equals(getFunctionName())) {
SCSSFunctionGenerator generator = getGenerator(getFunctionName());
LexicalUnitImpl copy = this;
if (!"if".equals(getFunctionName())) {
copy = createFunction(line, column, fname,
params.evaluateFunctionsAndExpressions(context, true));
}
if (generator == null) {
SassListItem result = copy.replaceCustomFunctions(context);
if (result != null) {
return result;
}
}
if (generator == null) {
generator = DEFAULT_SERIALIZER;
}
return generator.compute(context, copy);
} else {
return this;
}
}
private SassListItem replaceCustomFunctions(ScssContext context) {
FunctionDefNode functionDef = context
.getFunctionDefinition(getFunctionName());
if (functionDef != null) {
return FunctionCall.evaluate(context, functionDef, this);
}
return null;
}
private static SCSSFunctionGenerator getGenerator(String funcName) {
return SERIALIZERS.get(funcName);
}
private static List<SCSSFunctionGenerator> initSerializers() {
List<SCSSFunctionGenerator> list = new LinkedList<SCSSFunctionGenerator>();
list.add(new AbsFunctionGenerator());
list.add(new AdjustColorFunctionGenerator());
list.add(new CeilFunctionGenerator());
list.add(new DarkenFunctionGenerator());
list.add(new FloorFunctionGenerator());
list.add(new GrayscaleFunctionGenerator());
list.add(new IfFunctionGenerator());
list.add(new LightenFunctionGenerator());
list.add(new ListAppendFunctionGenerator());
list.add(new ListIndexFunctionGenerator());
list.add(new ListJoinFunctionGenerator());
list.add(new ListLengthFunctionGenerator());
list.add(new ListNthFunctionGenerator());
list.add(new MinMaxFunctionGenerator());
list.add(new MixFunctionGenerator());
list.add(new PercentageFunctionGenerator());
list.add(new RectFunctionGenerator());
list.add(new RGBFunctionGenerator());
list.add(new RoundFunctionGenerator());
list.add(new SaturationModificationFunctionGenerator());
list.add(new TypeOfFunctionGenerator());
list.add(new AlphaFunctionGenerator());
list.add(new TransparencyModificationFunctionGenerator());
list.add(new ColorComponentFunctionGenerator());
list.add(new UnitFunctionGenerator());
list.add(new UnitlessFunctionGenerator());
list.add(new QuoteUnquoteFunctionGenerator());
return list;
}
private static final Map<String, SCSSFunctionGenerator> SERIALIZERS = new HashMap<String, SCSSFunctionGenerator>();
private static final SCSSFunctionGenerator DEFAULT_SERIALIZER = new DefaultFunctionGenerator();
private String simpleAsString() {
short type = getLexicalUnitType();
String text = null;
switch (type) {
case SCSS_VARIABLE:
text = "$" + s;
break;
case SCSS_NULL:
text = "";
break;
case LexicalUnit.SAC_OPERATOR_COMMA:
text = ",";
break;
case LexicalUnit.SAC_OPERATOR_PLUS:
text = "+";
break;
case LexicalUnit.SAC_OPERATOR_MINUS:
text = "-";
break;
case LexicalUnit.SAC_OPERATOR_MULTIPLY:
text = "*";
break;
case LexicalUnit.SAC_OPERATOR_SLASH:
text = "/";
break;
case LexicalUnit.SAC_OPERATOR_MOD:
text = "%";
break;
case LexicalUnit.SAC_OPERATOR_EXP:
text = "^";
break;
case LexicalUnitImpl.SCSS_OPERATOR_LEFT_PAREN:
text = "(";
break;
case LexicalUnitImpl.SCSS_OPERATOR_RIGHT_PAREN:
text = ")";
break;
case LexicalUnitImpl.SCSS_OPERATOR_EQUALS:
text = "==";
break;
case LexicalUnitImpl.SCSS_OPERATOR_NOT_EQUAL:
text = "!=";
break;
case LexicalUnit.SAC_OPERATOR_LT:
text = "<";
break;
case LexicalUnit.SAC_OPERATOR_GT:
text = ">";
break;
case LexicalUnit.SAC_OPERATOR_LE:
text = "<=";
break;
case LexicalUnit.SAC_OPERATOR_GE:
text = "=>";
break;
case LexicalUnit.SAC_OPERATOR_TILDE:
text = "~";
break;
case LexicalUnit.SAC_INHERIT:
text = "inherit";
break;
case LexicalUnit.SAC_INTEGER:
text = Integer.toString(getIntegerValue());
break;
case LexicalUnit.SAC_REAL:
text = getFloatOrInteger();
break;
case LexicalUnit.SAC_EM:
case SCSSLexicalUnit.SAC_LEM:
case SCSSLexicalUnit.SAC_REM:
case LexicalUnit.SAC_EX:
case LexicalUnit.SAC_PIXEL:
case LexicalUnit.SAC_INCH:
case LexicalUnit.SAC_CENTIMETER:
case LexicalUnit.SAC_MILLIMETER:
case LexicalUnit.SAC_POINT:
case LexicalUnit.SAC_PICA:
case LexicalUnit.SAC_PERCENTAGE:
case LexicalUnit.SAC_DEGREE:
case LexicalUnit.SAC_GRADIAN:
case LexicalUnit.SAC_RADIAN:
case LexicalUnit.SAC_MILLISECOND:
case LexicalUnit.SAC_SECOND:
case LexicalUnit.SAC_HERTZ:
case LexicalUnit.SAC_KILOHERTZ:
case LexicalUnit.SAC_DIMENSION:
text = getFloatOrInteger() + getDimensionUnitText();
break;
}
return text;
}
@Override
public String buildString(BuildStringStrategy strategy) {
short type = getLexicalUnitType();
String text = simpleAsString();
if (text == null) {
switch (type) {
case LexicalUnit.SAC_URI:
text = "url(" + getStringValue() + ")";
break;
case LexicalUnit.SAC_RGBCOLOR:
int[] rgb = getRgb();
if (rgb != null) {
text = ColorUtil.rgbToColorString(rgb);
break;
}
// else fall through to the function branch
case LexicalUnit.SAC_COUNTER_FUNCTION:
case LexicalUnit.SAC_COUNTERS_FUNCTION:
case LexicalUnit.SAC_RECT_FUNCTION:
case LexicalUnit.SAC_FUNCTION:
if (ColorUtil.isColor(this)) {
text = ColorUtil.rgbToColorString(ColorUtil
.colorToRgb(this));
break;
} else if (ColorUtil.isRgba(this) || ColorUtil.isHsla(this)) {
float alpha = params.get(params.size() - 1)
.getContainedValue().getFloatValue();
rgb = ColorUtil.colorToRgb(this);
if (alpha == 0.0f && rgb[0] == 0 && rgb[1] == 0
&& rgb[2] == 0) {
text = "transparent";
break;
} else if (alpha == 1.0f) {
text = ColorUtil.rgbToColorString(ColorUtil
.colorToRgb(this));
break;
} else if (params.size() == 2 || ColorUtil.isHsla(this)) {
String alphaText = alpha == 0.0f ? "0"
: CSS_FLOAT_FORMAT.format(alpha);
text = "rgba(" + rgb[0] + ", " + rgb[1] + ", " + rgb[2]
+ ", " + alphaText + ")";
break;
}
}
text = fname + "(" + params.buildString(strategy) + ")";
break;
case LexicalUnit.SAC_IDENT:
text = getStringValue();
break;
case LexicalUnit.SAC_STRING_VALUE:
// @@SEEME. not exact
text = "\"" + getStringValue() + "\"";
break;
case LexicalUnit.SAC_ATTR:
text = "attr(" + getStringValue() + ")";
break;
case LexicalUnit.SAC_UNICODERANGE:
text = "@@TODO";
break;
case LexicalUnit.SAC_SUB_EXPRESSION:
text = strategy.build(getParameterList());
break;
default:
text = "@unknown";
break;
}
}
return text;
}
private int[] getRgb() {
if (params.size() != 3
|| !checkLexicalUnitType(params.get(0), SAC_INTEGER)
|| !checkLexicalUnitType(params.get(1), SAC_INTEGER)
|| !checkLexicalUnitType(params.get(2), SAC_INTEGER)) {
return null;
}
int red = ((LexicalUnit) params.get(0)).getIntegerValue();
int green = ((LexicalUnit) params.get(1)).getIntegerValue();
int blue = ((LexicalUnit) params.get(2)).getIntegerValue();
return new int[] { red, green, blue };
}
static {
DecimalFormatSymbols symbols = new DecimalFormatSymbols();
symbols.setDecimalSeparator('.');
CSS_FLOAT_FORMAT.setDecimalFormatSymbols(symbols);
for (SCSSFunctionGenerator serializer : initSerializers()) {
for (String functionName : serializer.getFunctionNames()) {
SERIALIZERS.put(functionName, serializer);
}
}
}
@Override
public boolean containsArithmeticalOperator() {
return false;
}
@Override
public LexicalUnitImpl updateUrl(String prefix) {
if (getLexicalUnitType() == SAC_URI) {
String path = getStringValue().replaceAll("^\"|\"$", "")
.replaceAll("^'|'$", "");
if (!path.startsWith("/") && !path.contains(":")) {
path = prefix + path;
path = StringUtil.cleanPath(path);
}
LexicalUnitImpl copy = copy();
copy.setStringValue(path);
return copy;
} else if (containsInterpolation()) {
// s might contain URLs in its Interpolation objects
LexicalUnitImpl copy = copy();
copy.s = s.updateUrl(prefix);
return copy;
}
return this;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof LexicalUnitImpl)) {
return false;
}
LexicalUnitImpl other = (LexicalUnitImpl) o;
if (isNumber() && other.isNumber()) {
if (!isUnitlessNumber() && !other.isUnitlessNumber()) {
if (getLexicalUnitType() != other.getLexicalUnitType()) {
return false;
}
}
return getFloatValue() == other.getFloatValue()
&& getIntegerValue() == other.getIntegerValue();
} else if (getLexicalUnitType() != other.getLexicalUnitType()) {
return false;
} else {
return printState().equals(other.printState());
}
}
@Override
public int hashCode() {
return printState().hashCode();
}
@Override
public LexicalUnitImpl getContainedValue() {
return this;
}
@Override
public boolean containsVariable() {
return getLexicalUnitType() == SCSS_VARIABLE;
}
}
|
|
/*
* Created by Ihor Karpachev, Copyright (c) 2015. .
*/
package com.drawingmagic.utils;
import android.view.View;
import com.daimajia.androidanimations.library.Techniques;
import com.daimajia.androidanimations.library.YoYo;
import com.nineoldandroids.animation.Animator;
/**
* Created by ihor.karpachev on 22/05/2015.
* Project: Installer Application
* Package: com.touchip.organizer.utils
* Datascope Systems Ltd.
*/
public class AnimationUtils {
/**
* Default duration of animations
*/
private static final int SLOW = 1000, NORMAL = 500, FAST = 300, VERY_FAST = 150;
/**
* Animation techniques
*/
public enum AnimationTechniques {
FADE_IN, FADE_OUT, PULSE, DROP_OUT, LANDING, TAKING_OFF, FLASH, RUBBER_BAND, SHAKE, SWING, WOBBLE,
BOUNCE, TADA, STAND_UP, WAVE, HINGE, ROLL_IN, ROLL_OUT, BOUNCE_IN, BOUNCE_IN_DOWN, BOUNCE_IN_LEFT, BOUNCE_IN_RIGHT, BOUNCE_IN_UP,
FADE_IN_UP, FADE_IN_DOWN, FADE_IN_LEFT, FADE_IN_RIGHT, FADE_OUT_UP, FADE_OUT_DOWN, FADE_OUT_LEFT, FADE_OUT_RIGHT, FLIP_IN_X, FLIP_OUT_X,
FLIP_OUT_Y, ROTATE_IN, ROTATE_IN_DOWN_LEFT, ROTATE_IN_DOWN_RIGHT, SLIDE_IN_LEFT, SLIDE_IN_RIGHT, SLIDE_IN_UP, SLIDE_IN_DOWN, SLIDE_OUT_LEFT,
SLIDE_OUT_RIGHT, SLIDE_OUT_UP, SLIDE_OUT_DOWN, ROTATE_IN_UP_LEFT, ROTATE_IN_UP_RIGHT, ROTATE_OUT, ROTATE_OUT_DOWN_LEFT, ROTATE_OUT_DOWN_RIGHT,
ROTATE_OUT_UP_LEFT, ROTATE_OUT_UP_RIGHT, ZOOM_IN, ZOOM_IN_DOWN, ZOOM_IN_LEFT, ZOOM_IN_RIGHT, ZOOM_IN_UP, ZOOM_OUT, ZOOM_OUT_DOWN, ZOOM_OUT_LEFT, ZOOM_OUT_RIGHT, ZOOM_OUT_UP
}
/**
* Play animation on NORMAL speed
*
* @param target view to play animation on
* @param animationTechniques which techniques to use
*/
public static void animate(View target, AnimationTechniques animationTechniques) {
YoYo.with(getActualTechniques(animationTechniques)).duration(NORMAL).playOn(target);
}
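// Usage sketch (illustrative only; "someView" stands in for any android.view.View instance):
//   AnimationUtils.animate(someView, AnimationTechniques.FADE_IN);   // ~500 ms fade-in
//   AnimationUtils.animateFast(someView, AnimationTechniques.SHAKE); // ~300 ms shake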
/**
* Play animation on NORMAL speed and make view as GONE after animation is finished
*
* @param target view to play animation on
* @param animationTechniques which techniques to use
*/
public static void animateAndGone(final View target, AnimationTechniques animationTechniques) {
Animator.AnimatorListener listener = new Animator.AnimatorListener() {
@Override
public void onAnimationStart(Animator animation) {
}
@Override
public void onAnimationEnd(Animator animation) {
target.setVisibility(View.GONE);
}
@Override
public void onAnimationCancel(Animator animation) {
}
@Override
public void onAnimationRepeat(Animator animation) {
}
};
YoYo.with(getActualTechniques(animationTechniques)).withListener(listener).duration(NORMAL).playOn(target);
}
/**
* Play animation with listener
*
* @param target play on this view
* @param animationTechniques which techniques to use
* @param listener listener
*/
public static void animate(View target, AnimationTechniques animationTechniques, Animator.AnimatorListener listener) {
YoYo.with(getActualTechniques(animationTechniques)).duration(NORMAL).withListener(listener).playOn(target);
}
/**
* Play animation on FAST speed
*
* @param target view to play animation on
* @param animationTechniques which techniques to use
*/
public static void animateFast(View target, AnimationTechniques animationTechniques) {
YoYo.with(getActualTechniques(animationTechniques)).duration(FAST).playOn(target);
}
/**
* Play animation on VERY FAST speed
*
* @param target view to play animation on
* @param animationTechniques which techniques to use
*/
public static void animateVeryFast(View target, AnimationTechniques animationTechniques) {
YoYo.with(getActualTechniques(animationTechniques)).duration(VERY_FAST).playOn(target);
}
/**
* Play animation on SLOW speed
*
* @param target view to play animation on
* @param animationTechniques which techniques to use
*/
public static void animateSlow(View target, AnimationTechniques animationTechniques) {
YoYo.with(getActualTechniques(animationTechniques)).duration(SLOW).playOn(target);
}
/**
* Play animation on SLOW speed with specific listener
*
* @param target view to play animation on
* @param animationTechniques which techniques to use
* @param listener listener
*/
public static void animateSlow(View target, AnimationTechniques animationTechniques, Animator.AnimatorListener listener) {
YoYo.with(getActualTechniques(animationTechniques)).duration(SLOW).withListener(listener).playOn(target);
}
/**
* Play animation with a specific duration
*
* @param target view to play animation on
* @param animationTechniques which techniques to use
* @param duration duration of the animation in milliseconds
*/
public static void animate(View target, AnimationTechniques animationTechniques, int duration) {
YoYo.with(getActualTechniques(animationTechniques)).duration(duration).playOn(target);
}
/**
* Return animation by AnimationTechniques
*
* @param animationTechniques animation to find
* @return actual animation resource
*/
private static Techniques getActualTechniques(AnimationTechniques animationTechniques) {
Techniques techniques;
switch (animationTechniques) {
case FADE_OUT:
techniques = Techniques.FadeOut;
break;
case PULSE:
techniques = Techniques.Pulse;
break;
case DROP_OUT:
techniques = Techniques.DropOut;
break;
case LANDING:
techniques = Techniques.Landing;
break;
case TAKING_OFF:
techniques = Techniques.TakingOff;
break;
case FLASH:
techniques = Techniques.Flash;
break;
case RUBBER_BAND:
techniques = Techniques.RubberBand;
break;
case SHAKE:
techniques = Techniques.Shake;
break;
case SWING:
techniques = Techniques.Swing;
break;
case WOBBLE:
techniques = Techniques.Wobble;
break;
case BOUNCE:
techniques = Techniques.Bounce;
break;
case TADA:
techniques = Techniques.Tada;
break;
case STAND_UP:
techniques = Techniques.StandUp;
break;
case WAVE:
techniques = Techniques.Wave;
break;
case HINGE:
techniques = Techniques.Hinge;
break;
case ROLL_IN:
techniques = Techniques.RollIn;
break;
case ROLL_OUT:
techniques = Techniques.RollOut;
break;
case BOUNCE_IN:
techniques = Techniques.BounceIn;
break;
case BOUNCE_IN_DOWN:
techniques = Techniques.BounceInDown;
break;
case BOUNCE_IN_LEFT:
techniques = Techniques.BounceInLeft;
break;
case BOUNCE_IN_RIGHT:
techniques = Techniques.BounceInRight;
break;
case BOUNCE_IN_UP:
techniques = Techniques.BounceInUp;
break;
case FADE_IN_UP:
techniques = Techniques.FadeInUp;
break;
case FADE_IN_DOWN:
techniques = Techniques.FadeInDown;
break;
case FADE_IN_LEFT:
techniques = Techniques.FadeInLeft;
break;
case FADE_IN_RIGHT:
techniques = Techniques.FadeInRight;
break;
case FADE_OUT_UP:
techniques = Techniques.FadeOutUp;
break;
case FADE_OUT_DOWN:
techniques = Techniques.FadeOutDown;
break;
case FADE_OUT_LEFT:
techniques = Techniques.FadeOutLeft;
break;
case FADE_OUT_RIGHT:
techniques = Techniques.FadeOutRight;
break;
case FLIP_IN_X:
techniques = Techniques.FlipInX;
break;
case FLIP_OUT_X:
techniques = Techniques.FlipOutX;
break;
case FLIP_OUT_Y:
techniques = Techniques.FlipOutY;
break;
case ROTATE_IN:
techniques = Techniques.RotateIn;
break;
case ROTATE_IN_DOWN_LEFT:
techniques = Techniques.RotateInDownLeft;
break;
case ROTATE_IN_DOWN_RIGHT:
techniques = Techniques.RotateInDownRight;
break;
case SLIDE_IN_LEFT:
techniques = Techniques.SlideInLeft;
break;
case SLIDE_IN_RIGHT:
techniques = Techniques.SlideInRight;
break;
case SLIDE_IN_UP:
techniques = Techniques.SlideInUp;
break;
case SLIDE_IN_DOWN:
techniques = Techniques.SlideInDown;
break;
case SLIDE_OUT_LEFT:
techniques = Techniques.SlideOutLeft;
break;
case SLIDE_OUT_RIGHT:
techniques = Techniques.SlideOutRight;
break;
case SLIDE_OUT_UP:
techniques = Techniques.SlideOutUp;
break;
case SLIDE_OUT_DOWN:
techniques = Techniques.SlideOutDown;
break;
case ROTATE_IN_UP_LEFT:
techniques = Techniques.RotateInUpLeft;
break;
case ROTATE_IN_UP_RIGHT:
techniques = Techniques.RotateInUpRight;
break;
case ROTATE_OUT:
techniques = Techniques.RotateOut;
break;
case ROTATE_OUT_DOWN_LEFT:
techniques = Techniques.RotateOutDownLeft;
break;
case ROTATE_OUT_DOWN_RIGHT:
techniques = Techniques.RotateOutDownRight;
break;
case ROTATE_OUT_UP_LEFT:
techniques = Techniques.RotateOutUpLeft;
break;
case ROTATE_OUT_UP_RIGHT:
techniques = Techniques.RotateOutUpRight;
break;
case ZOOM_IN:
techniques = Techniques.ZoomIn;
break;
case ZOOM_IN_DOWN:
techniques = Techniques.ZoomInDown;
break;
case ZOOM_IN_LEFT:
techniques = Techniques.ZoomInLeft;
break;
case ZOOM_IN_RIGHT:
techniques = Techniques.ZoomInRight;
break;
case ZOOM_IN_UP:
techniques = Techniques.ZoomInUp;
break;
case ZOOM_OUT:
techniques = Techniques.ZoomOut;
break;
case ZOOM_OUT_DOWN:
techniques = Techniques.ZoomOutDown;
break;
case ZOOM_OUT_LEFT:
techniques = Techniques.ZoomOutLeft;
break;
case ZOOM_OUT_RIGHT:
techniques = Techniques.ZoomOutRight;
break;
case ZOOM_OUT_UP:
techniques = Techniques.ZoomOutUp;
break;
default:
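// FADE_IN and any technique without an explicit case fall back to a plain fade-in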
techniques = Techniques.FadeIn;
}
return techniques;
}
}
|
|
/**
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.model;
import java.util.Date;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToOne;
import javax.persistence.SequenceGenerator;
import javax.validation.constraints.NotNull;
import org.hibernate.annotations.ForeignKey;
/**
* Represents a released version of a product. For example, a Beta, GA, or SP release. Each release is associated with a product
* version (many releases for one version), and each release is associated with a single milestone (one to one). For example,
* product version 1.0 could have three milestones (1.0.0.Build1, 1.0.0.Build2, and 1.0.0.Build3) and two releases (1.0.0.Beta1
* which was promoted from 1.0.0.Build1 and 1.0.0.GA which was promoted from 1.0.0.Build3).
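*
* <p>Illustrative construction via the {@link Builder} (version string, support
* level, and date are placeholder values, not fixtures from this codebase):
*
* <pre>{@code
* ProductRelease release = ProductRelease.Builder.newBuilder()
*         .version("1.0.0.GA")
*         .supportLevel(ProductRelease.SupportLevel.SUPPORTED)
*         .releaseDate(new Date())
*         .build();
* }</pre>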
*/
@Entity
public class ProductRelease implements GenericEntity<Integer> {
private static final long serialVersionUID = 6314079319551264379L;
public static final String SEQUENCE_NAME = "product_release_id_seq";
@Id
@SequenceGenerator(name = SEQUENCE_NAME, sequenceName = SEQUENCE_NAME, allocationSize = 1)
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = SEQUENCE_NAME)
private Integer id;
@NotNull
private String version;
@NotNull
@ManyToOne(cascade = { CascadeType.REFRESH })
@ForeignKey(name = "fk_productrelease_productversion")
private ProductVersion productVersion;
@Enumerated(EnumType.STRING)
private SupportLevel supportLevel;
private Date releaseDate;
private String downloadUrl;
@NotNull
@OneToOne(cascade = { CascadeType.REFRESH })
@ForeignKey(name = "fk_productrelease_milestone")
private ProductMilestone productMilestone;
public ProductRelease() {
}
public ProductRelease(ProductVersion productVersion, String version) {
this.productVersion = productVersion;
this.version = version;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
/**
* The product version entity associated with this release
*
* @return the product version entity
*/
public ProductVersion getProductVersion() {
return productVersion;
}
public void setProductVersion(ProductVersion productVersion) {
this.productVersion = productVersion;
}
/**
* The current support level of this product release.
*
* @return The support level enum
*/
public SupportLevel getSupportLevel() {
return supportLevel;
}
public void setSupportLevel(SupportLevel supportLevel) {
this.supportLevel = supportLevel;
}
/**
* The date of this release
*
* @return The date representing the release date
*/
public Date getReleaseDate() {
return releaseDate;
}
public void setReleaseDate(Date releaseDate) {
this.releaseDate = releaseDate;
}
/**
* URL which can be used to download the product distribution
*
* @return the URL of the product distribution
*/
public String getDownloadUrl() {
return downloadUrl;
}
public void setDownloadUrl(String downloadUrl) {
this.downloadUrl = downloadUrl;
}
public ProductMilestone getProductMilestone() {
return productMilestone;
}
public void setProductMilestone(ProductMilestone productMilestone) {
this.productMilestone = productMilestone;
}
@Override
public String toString() {
return "ProductRelease [id=" + id + ", version=" + version + "]";
}
/**
* Contains the various possible support levels, such as UNRELEASED, SUPPORTED, EOL, etc.
*
*/
public enum SupportLevel {
UNRELEASED, EARLYACCESS, SUPPORTED, EXTENDED_SUPPORT, EOL
}
public static class Builder {
private Integer id;
private String version;
private ProductVersion productVersion;
private ProductMilestone productMilestone;
private SupportLevel supportLevel;
private Date releaseDate;
private String downloadUrl;
private Builder() {
}
public static Builder newBuilder() {
return new Builder();
}
public ProductRelease build() {
ProductRelease productRelease = new ProductRelease();
productRelease.setId(id);
productRelease.setVersion(version);
productRelease.setSupportLevel(supportLevel);
productRelease.setReleaseDate(releaseDate);
productRelease.setDownloadUrl(downloadUrl);
if (productVersion != null) {
productVersion.addProductRelease(productRelease);
}
productRelease.setProductVersion(productVersion);
if (productMilestone != null) {
productMilestone.setProductRelease(productRelease);
}
productRelease.setProductMilestone(productMilestone);
return productRelease;
}
public Builder id(Integer id) {
this.id = id;
return this;
}
public Builder version(String version) {
this.version = version;
return this;
}
public Builder supportLevel(SupportLevel supportLevel) {
this.supportLevel = supportLevel;
return this;
}
public Builder releaseDate(Date releaseDate) {
this.releaseDate = releaseDate;
return this;
}
public Builder downloadUrl(String downloadUrl) {
this.downloadUrl = downloadUrl;
return this;
}
public Builder productVersion(ProductVersion productVersion) {
this.productVersion = productVersion;
return this;
}
public Builder productMilestone(ProductMilestone productMilestone) {
this.productMilestone = productMilestone;
return this;
}
}
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.RecordSink;
import com.facebook.presto.spi.type.Type;
import com.google.common.collect.ImmutableList;
import io.airlift.slice.Slice;
import org.joda.time.DateTimeZone;
import org.joda.time.chrono.ISOChronology;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.SQLNonTransientException;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static com.facebook.presto.plugin.jdbc.JdbcErrorCode.JDBC_ERROR;
import static com.facebook.presto.plugin.jdbc.JdbcErrorCode.JDBC_NON_TRANSIENT_ERROR;
import static com.facebook.presto.spi.type.DateType.DATE;
import static com.google.common.base.Preconditions.checkState;
import static java.nio.charset.StandardCharsets.UTF_8;
public class JdbcRecordSink
implements RecordSink
{
private final Connection connection;
private final PreparedStatement statement;
private final int fieldCount;
private final List<Type> columnTypes;
private int field = -1;
private int batchSize;
public JdbcRecordSink(JdbcOutputTableHandle handle, JdbcClient jdbcClient)
{
try {
connection = jdbcClient.getConnection(handle);
connection.setAutoCommit(false);
}
catch (SQLException e) {
throw new PrestoException(JDBC_ERROR, e);
}
try {
statement = connection.prepareStatement(jdbcClient.buildInsertSql(handle));
}
catch (SQLException e) {
throw new PrestoException(JDBC_ERROR, e);
}
fieldCount = handle.getColumnNames().size();
columnTypes = handle.getColumnTypes();
}
@Override
public void beginRecord()
{
checkState(field == -1, "already in record");
field = 0;
}
@Override
public void finishRecord()
{
checkState(field != -1, "not in record");
checkState(field == fieldCount, "not all fields set");
field = -1;
try {
statement.addBatch();
batchSize++;
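// flush every 1000 buffered rows: execute the JDBC batch and commit the transaction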
if (batchSize >= 1000) {
statement.executeBatch();
connection.commit();
connection.setAutoCommit(false);
batchSize = 0;
}
}
catch (SQLException e) {
throw new PrestoException(JDBC_ERROR, e);
}
}
@Override
public void appendNull()
{
try {
statement.setObject(next(), null);
}
catch (SQLException e) {
throw new PrestoException(JDBC_ERROR, e);
}
}
@Override
public void appendBoolean(boolean value)
{
try {
statement.setBoolean(next(), value);
}
catch (SQLException e) {
throw new PrestoException(JDBC_ERROR, e);
}
}
@Override
public void appendLong(long value)
{
try {
if (DATE.equals(columnTypes.get(field))) {
// convert to midnight in default time zone
long utcMillis = TimeUnit.DAYS.toMillis(value);
long localMillis = ISOChronology.getInstanceUTC().getZone().getMillisKeepLocal(DateTimeZone.getDefault(), utcMillis);
statement.setDate(next(), new Date(localMillis));
}
else {
statement.setLong(next(), value);
}
}
catch (SQLException e) {
throw new PrestoException(JDBC_ERROR, e);
}
}
@Override
public void appendDouble(double value)
{
try {
statement.setDouble(next(), value);
}
catch (SQLException e) {
throw new PrestoException(JDBC_ERROR, e);
}
}
@Override
public void appendBigDecimal(BigDecimal value)
{
try {
statement.setBigDecimal(next(), value);
}
catch (SQLException e) {
throw new PrestoException(JDBC_ERROR, e);
}
}
@Override
public void appendString(byte[] value)
{
try {
statement.setString(next(), new String(value, UTF_8));
}
catch (SQLException e) {
throw new PrestoException(JDBC_ERROR, e);
}
}
@Override
public void appendObject(Object value)
{
throw new UnsupportedOperationException();
}
@Override
public Collection<Slice> commit()
{
// commit and close
try (Connection connection = this.connection) {
if (batchSize > 0) {
statement.executeBatch();
connection.commit();
}
}
catch (SQLNonTransientException e) {
throw new PrestoException(JDBC_NON_TRANSIENT_ERROR, e);
}
catch (SQLException e) {
throw new PrestoException(JDBC_ERROR, e);
}
// the committer does not need any additional info
return ImmutableList.of();
}
@SuppressWarnings("UnusedDeclaration")
@Override
public void rollback()
{
// rollback and close
try (Connection connection = this.connection;
PreparedStatement statement = this.statement) {
connection.rollback();
}
catch (SQLException e) {
throw new PrestoException(JDBC_ERROR, e);
}
}
@Override
public List<Type> getColumnTypes()
{
return columnTypes;
}
private int next()
{
checkState(field != -1, "not in record");
checkState(field < fieldCount, "all fields already set");
field++;
return field;
}
}
|
|
/*
* Copyright (c) 2008, Damian Carrillo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted
* provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
* * Neither the name of the copyright holder's organization nor the names of its contributors
* may be used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
* IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package co.cdev.agave;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.regex.Matcher;
/**
* A {@code URIPattern} is the object that indicates which handler should be
* invoked according to the requested URI. A {@code URIPattern} is similar in
* nature to the string part of the URI except for having wildcards and
* replacement variables.
*
* Replacement variables look like {@code ${var}} and are supplied as arguments
* to annotated handler methods. For matching purposes, a replacement variable
* behaves like a single wildcard.
*
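* <p>Illustrative patterns (hypothetical, not taken from the framework
* documentation): {@code /users/${id}} captures a single path segment as the
* {@code id} variable, while {@code /static/**} matches any depth below
* {@code /static}.
*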
* @author <a href="mailto:damiancarrillo@gmail.com">Damian Carrillo</a>
*/
public class URIPatternImpl implements URIPattern {
private static final long serialVersionUID = 1L;
private String pattern;
private String[] parts;
protected URIPatternImpl() {
}
public URIPatternImpl(String pattern) {
if (!pattern.startsWith("/")) {
throw new IllegalArgumentException(
"The supplied pattern must begin with a forward slash (\"/\") "
+ "where the root is relative to the context path.");
}
if (pattern.contains("**/${")) {
throw new IllegalArgumentException(
"The supplied pattern is nondeterministic. There is no way of "
+ "knowing when to stop matching with this type of pattern: /**/${var}/");
}
this.pattern = normalizePattern(pattern);
if (pattern.length() > 1) {
this.parts = pattern.substring(1).split("/");
}
}
@Override
public String[] getParts() {
return parts;
}
protected String normalizePattern(String pattern) {
URI uri;
try {
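// temporarily mask "${" and "}" so they survive URI parsing and normalization; they are restored below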
pattern = pattern.replace("${", "~~agave~~start~~delim~~");
pattern = pattern.replace("}", "~~agave~~end~~delim~~");
uri = new URI(pattern);
} catch (URISyntaxException ex) {
throw new IllegalArgumentException("Malformed URI pattern: " + pattern, ex);
}
String normalizedUri = stripTrailingSlash(uri.normalize().toString());
normalizedUri = normalizedUri.replace("~~agave~~start~~delim~~", "${");
normalizedUri = normalizedUri.replace("~~agave~~end~~delim~~", "}");
normalizedUri = condenseWildcards(normalizedUri);
return normalizedUri;
}
/**
* Condenses multiple successive wildcards into the most generic wildcard
*/
private String condenseWildcards(String pattern) {
while (pattern.contains("**/**") || pattern.contains("**/*")
|| pattern.contains("*/**")) {
pattern = pattern.replace("**/**", "**");
pattern = pattern.replace("**/*", "**");
pattern = pattern.replace("*/**", "**");
}
return pattern;
}
/**
* Normalizes the URI string so that .. and . are properly handled and
* condensed.
*
* @param uriStr
* the URI string to normalize
* @return the normalized URI string
*/
@Override
public String normalizeURI(String uriStr) {
URI uri;
try {
uri = new URI(uriStr);
} catch (URISyntaxException ex) {
throw new IllegalArgumentException("Malformed URI: " + uriStr, ex);
}
return stripTrailingSlash(uri.normalize().toString());
}
private String stripTrailingSlash(String input) {
if (!input.equals(FORWARD_SLASH) && input.endsWith(FORWARD_SLASH)) {
return input.substring(0, input.length() - 1);
}
return input;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (!(obj instanceof URIPattern))
return false;
URIPattern that = (URIPattern) obj;
return pattern.equalsIgnoreCase(that.toString());
}
@Override
public int hashCode() {
return pattern.hashCode();
}
/**
* Compares two {@code URIPattern}s for greater specificity. A more
* specific {@code URIPattern} should always be sorted before a more generic
* one.
*
* @param that
* the {@code URIPattern} to compare against
* @return -1 if this {@code URIPattern} is more specific, 0 if they are
* equal in specificity and 1 if that {@code URIPattern} is more
* specific
*/
@Override
public int compareTo(URIPattern that) {
if (this.equals(that)) {
return 0;
}
Integer value = null;
String[] thisTokens = pattern.split(FORWARD_SLASH);
String[] thatTokens = that.toString().split(FORWARD_SLASH);
for (int i = 0; i < thisTokens.length && i < thatTokens.length; i++) {
if ("**".equals(thisTokens[i]) && !"**".equals(thatTokens[i])) {
value = 1;
break;
} else if (!"**".equals(thisTokens[i]) && "**".equals(thatTokens[i])) {
value = -1;
break;
} else if ("*".equals(thisTokens[i]) && !"*".equals(thatTokens[i])) {
value = 1;
break;
} else if (!"*".equals(thisTokens[i]) && "*".equals(thatTokens[i])) {
value = -1;
break;
}
}
if (value == null) {
if (thisTokens.length > thatTokens.length) {
value = -1;
} else if (thisTokens.length < thatTokens.length) {
value = 1;
} else {
int length = (thisTokens.length > thatTokens.length) ? thisTokens.length : thatTokens.length;
for (int i = 0; i < length; i++) {
Matcher thisTokenMatcher = REPLACEMENT_PATTERN.matcher(thisTokens[i]);
Matcher thatTokenMatcher = REPLACEMENT_PATTERN.matcher(thatTokens[i]);
if (thisTokenMatcher.matches() && thatTokenMatcher.matches()) {
value = 0;
continue; // just ignore this case - treat all replacement params equal
} else if (!thisTokenMatcher.matches() && thatTokenMatcher.matches()) {
value = -1;
break;
} else if (thisTokenMatcher.matches() && !thatTokenMatcher.matches()) {
value = 1;
break;
}
value = thisTokens[i].compareToIgnoreCase(thatTokens[i]);
if (value != 0) {
break;
}
}
}
}
return value;
}
@Override
public String toString() {
return pattern;
}
// Serialization
private Object writeReplace() {
return new SerializationProxy(this);
}
private void readObject(ObjectInputStream in) throws InvalidObjectException {
throw new InvalidObjectException("Expected SerializationProxy");
}
private static class SerializationProxy implements Serializable {
private static final long serialVersionUID = 1L;
private final String pattern;
SerializationProxy(URIPatternImpl uriPattern) {
this.pattern = uriPattern.pattern;
}
private Object readResolve() {
return new URIPatternImpl(pattern);
}
}
}
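// --- Illustrative usage sketch (added for clarity; not part of the original sources) ---
// A minimal example of how URIPatternImpl might be exercised, assuming the
// URIPattern interface and its FORWARD_SLASH / REPLACEMENT_PATTERN constants are
// on the classpath. The class name and patterns below are hypothetical.
class URIPatternImplUsageExample {
    public static void main(String[] args) {
        URIPatternImpl exact = new URIPatternImpl("/users/list");
        URIPatternImpl templated = new URIPatternImpl("/users/${id}");
        URIPatternImpl wildcard = new URIPatternImpl("/users/**");

        // compareTo() sorts more specific patterns first: a literal segment
        // outranks a replacement variable, which outranks a wildcard.
        System.out.println(exact.compareTo(templated));    // negative
        System.out.println(templated.compareTo(wildcard)); // negative

        // normalizeURI() collapses "." and ".." segments and strips a trailing slash.
        System.out.println(exact.normalizeURI("/users/./list/")); // "/users/list"
    }
}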
|
|
package com.nullprogram.chess.ai;
import com.nullprogram.chess.Board;
import com.nullprogram.chess.Game;
import com.nullprogram.chess.Move;
import com.nullprogram.chess.MoveList;
import com.nullprogram.chess.Piece;
import com.nullprogram.chess.Player;
import com.nullprogram.chess.Position;
import com.nullprogram.chess.pieces.Archbishop;
import com.nullprogram.chess.pieces.Bishop;
import com.nullprogram.chess.pieces.Chancellor;
import com.nullprogram.chess.pieces.King;
import com.nullprogram.chess.pieces.Knight;
import com.nullprogram.chess.pieces.Pawn;
import com.nullprogram.chess.pieces.Queen;
import com.nullprogram.chess.pieces.Rook;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.Executors;
import java.util.logging.Logger;
/**
* Minimax Chess AI player.
*
* This employs a plain minimax search (negamax with alpha-beta pruning) over
* the game tree. Boards are scored by material, king safety, and mobility,
* each weighted by the configured coefficients.
*/
public class Minimax implements Player {
/** This class's Logger. */
private static final Logger LOG =
Logger.getLogger("com.nullprogram.chess.ai.Minimax");
/** The number of threads to use. */
private static final int NTHREADS =
Runtime.getRuntime().availableProcessors();
/** Local friendly game controller. */
private final Game game;
/** Side this AI plays. */
private Piece.Side side = null;
/** Best move, the selected move. */
private volatile Move bestMove;
/** Thread manager. */
private final Executor executor = Executors.newFixedThreadPool(NTHREADS);
/** Values of each piece. */
private Map<Class, Double> values;
/** Divisor for milliseconds. */
static final double MILLI = 1000.0;
/** Maximum depth (configured). */
private int maxDepth;
/** Material score weight (configured). */
private double wMaterial;
/** King safety score weight (configured). */
private double wSafety;
/** Mobility score weight (configured). */
private double wMobility;
/**
* Create the default Minimax.
*
* @param active the game this AI is being seated at
*/
public Minimax(final Game active) {
this(active, "default");
}
/**
* Create a new AI from a given properties name.
*
* @param active the game this AI is being seated at
* @param name name of configuration to use
*/
public Minimax(final Game active, final String name) {
this(active, getConfig(name));
}
/**
* Create a new AI for the given board.
*
* @param active the game this AI is being seated at
* @param props properties for this player
*/
public Minimax(final Game active, final Properties props) {
game = active;
values = new HashMap<Class, Double>();
Properties config = props;
/* Piece values */
values.put((new Pawn(side)).getClass(),
Double.parseDouble(config.getProperty("Pawn")));
values.put((new Knight(side)).getClass(),
Double.parseDouble(config.getProperty("Knight")));
values.put((new Bishop(side)).getClass(),
Double.parseDouble(config.getProperty("Bishop")));
values.put((new Rook(side)).getClass(),
Double.parseDouble(config.getProperty("Rook")));
values.put((new Queen(side)).getClass(),
Double.parseDouble(config.getProperty("Queen")));
values.put((new King(side)).getClass(),
Double.parseDouble(config.getProperty("King")));
values.put((new Chancellor(side)).getClass(),
Double.parseDouble(config.getProperty("Chancellor")));
values.put((new Archbishop(side)).getClass(),
Double.parseDouble(config.getProperty("Archbishop")));
maxDepth = (int) Double.parseDouble(config.getProperty("depth"));
wMaterial = Double.parseDouble(config.getProperty("material"));
wSafety = Double.parseDouble(config.getProperty("safety"));
wMobility = Double.parseDouble(config.getProperty("mobility"));
}
/**
* Get the configuration.
*
* @param name name of the configuration to load
* @return the configuration
*/
public static Properties getConfig(final String name) {
Properties props;
if ("default".equals(name)) {
props = new Properties();
} else {
props = new Properties(getConfig("default"));
}
String filename = name + ".properties";
InputStream in = Minimax.class.getResourceAsStream(filename);
try {
props.load(in);
} catch (IOException e) {
LOG.warning("Failed to load AI config: " + name + ": " + e);
} finally {
try {
in.close();
} catch (IOException e) {
LOG.info("failed to close stream: " + e.getMessage());
}
}
return props;
}
@Override
public final Move takeTurn(final Board board,
final Piece.Side currentSide) {
side = currentSide;
/* Gather up every move. */
MoveList moves = board.allMoves(side, true);
moves.shuffle();
/* Initialize the shared structures. */
if (game != null) {
game.setProgress(0);
game.setStatus("Thinking ...");
}
long startTime = System.currentTimeMillis();
/* Spin off threads to evaluate each move's tree. */
CompletionService<Move> service =
new ExecutorCompletionService<Move>(executor);
int submitted = 0;
bestMove = null;
for (final Move move : moves) {
final Board callboard = board.copy();
service.submit(new Callable<Move>() {
public Move call() {
callboard.move(move);
double beta = Double.POSITIVE_INFINITY;
if (bestMove != null) {
beta = -bestMove.getScore();
}
double v = search(callboard, maxDepth - 1,
Piece.opposite(side),
Double.NEGATIVE_INFINITY, beta);
move.setScore(-v);
return move;
}
});
submitted++;
}
/* Gather up results and pick the best move. */
for (int i = 0; i < submitted; i++) {
try {
Move m = service.take().get();
if (bestMove == null || m.getScore() > bestMove.getScore()) {
bestMove = m;
}
} catch (ExecutionException e) {
LOG.warning("move went unevaluated: " + e.getMessage());
} catch (InterruptedException e) {
LOG.warning("move went unevaluated: " + e.getMessage());
}
if (game != null) {
game.setProgress(i / (1.0f * (submitted - 1)));
}
}
long time = (System.currentTimeMillis() - startTime);
LOG.info("AI took " + (time / MILLI) + " seconds (" +
NTHREADS + " threads, " + maxDepth + " plies)");
return bestMove;
}
/**
* Recursive move searching.
*
* @param b board to search
* @param depth current depth
* @param s side for current move
* @param alpha lower bound to check
* @param beta upper bound to check
* @return best valuation found at lowest depth
*/
private double search(final Board b, final int depth, final Piece.Side s,
final double alpha, final double beta) {
if (depth == 0) {
double v = valuate(b);
return (s != side) ? -v : v;
}
Piece.Side opps = Piece.opposite(s); // opposite side
double best = alpha;
MoveList list = b.allMoves(s, true);
for (Move move : list) {
b.move(move);
best = Math.max(best, -search(b, depth - 1, opps, -beta, -best));
b.undo();
/* alpha-beta prune */
if (beta <= best) {
return best;
}
}
return best;
}
/**
* Determine value of this board.
*
* @param b board to be valuated
* @return valuation of this board
*/
private double valuate(final Board b) {
double material = materialValue(b);
double kingSafety = kingInsafetyValue(b);
double mobility = mobilityValue(b);
return material * wMaterial +
kingSafety * wSafety +
mobility * wMobility;
}
/**
* Add up the material value of the board only.
*
* @param b board to be evaluated
* @return material value of the board
*/
private double materialValue(final Board b) {
double value = 0;
for (int y = 0; y < b.getHeight(); y++) {
for (int x = 0; x < b.getWidth(); x++) {
Position pos = new Position(x, y);
Piece p = b.getPiece(pos);
if (p != null) {
value += values.get(p.getClass()) * p.getSide().value();
}
}
}
return value * side.value();
}
/**
* Determine the safety of each king. Higher is worse.
*
* @param b board to be evaluated
* @return king insafety score
*/
private double kingInsafetyValue(final Board b) {
return kingInsafetyValue(b, Piece.opposite(side)) -
kingInsafetyValue(b, side);
}
/**
* Helper function: determine safety of a single king.
*
* @param b board to be evaluated
* @param s side of king to be checked
* @return king insafety score
*/
private double kingInsafetyValue(final Board b, final Piece.Side s) {
/* Trace lines away from the king and count the spaces. */
Position king = b.findKing(s);
if (king == null) {
/* Weird, but may happen during evaluation. */
return Double.POSITIVE_INFINITY;
}
MoveList list = new MoveList(b, false);
/* Take advantage of the Rook and Bishop code. */
Rook.getMoves(b.getPiece(king), list);
Bishop.getMoves(b.getPiece(king), list);
return list.size();
}
/**
* Mobility score for this board.
*
* @param b board to be evaluated
* @return score for this board
*/
private double mobilityValue(final Board b) {
return b.allMoves(side, false).size() -
b.allMoves(Piece.opposite(side), false).size();
}
}
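// --- Illustrative sketch (added for clarity; not part of the original sources) ---
// search() above is a negamax formulation of minimax with alpha-beta pruning.
// The self-contained toy below shows the same recursion over a hand-built game
// tree, independent of the chess classes; all names here are hypothetical.
class ToyNegamaxExample {

    /** A tree node: either a leaf with a score or an internal node with children. */
    static final class Node {
        final double score;     // score for the player to move (meaningful for leaves)
        final Node[] children;  // empty for leaves

        Node(double score) { this.score = score; this.children = new Node[0]; }
        Node(Node... children) { this.score = 0; this.children = children; }
    }

    /** Negamax with alpha-beta pruning; mirrors the shape of Minimax.search(). */
    static double negamax(Node node, double alpha, double beta) {
        if (node.children.length == 0) {
            return node.score;
        }
        double best = alpha;
        for (Node child : node.children) {
            best = Math.max(best, -negamax(child, -beta, -best));
            if (beta <= best) {
                return best; // prune: the opponent will never allow this line
            }
        }
        return best;
    }

    public static void main(String[] args) {
        Node root = new Node(
                new Node(new Node(3), new Node(5)),
                new Node(new Node(-4), new Node(9)));
        // Prints 3.0: the first subtree guarantees 3 for the player to move at
        // the root, and the second subtree is pruned after its first leaf.
        System.out.println(negamax(root, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY));
    }
}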
|
|
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2012 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.junit.Test;
import org.owasp.dependencycheck.BaseTest;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
import org.owasp.dependencycheck.utils.Settings;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* @author Jeremy Long
*/
public class JarAnalyzerTest extends BaseTest {
/**
* Test of the analyze method, of class JarAnalyzer.
*
* @throws Exception is thrown when an exception occurs.
*/
@Test
public void testAnalyze() throws Exception {
//File file = new File(this.getClass().getClassLoader().getResource("struts2-core-2.1.2.jar").getPath());
File file = BaseTest.getResourceAsFile(this, "struts2-core-2.1.2.jar");
Dependency result = new Dependency(file);
JarAnalyzer instance = new JarAnalyzer();
instance.initializeFileTypeAnalyzer();
instance.analyze(result, null);
assertTrue(result.getVendorEvidence().toString().toLowerCase().contains("apache"));
assertTrue(result.getVendorEvidence().getWeighting().contains("apache"));
file = BaseTest.getResourceAsFile(this, "dwr.jar");
result = new Dependency(file);
instance.analyze(result, null);
boolean found = false;
for (Evidence e : result.getVendorEvidence()) {
if (e.getName().equals("url")) {
assertEquals("Project url was not as expected in dwr.jar", e.getValue(), "http://getahead.ltd.uk/dwr");
found = true;
break;
}
}
assertTrue("Project url was not found in dwr.jar", found);
//file = new File(this.getClass().getClassLoader().getResource("org.mortbay.jetty.jar").getPath());
file = BaseTest.getResourceAsFile(this, "org.mortbay.jetty.jar");
result = new Dependency(file);
instance.analyze(result, null);
found = false;
for (Evidence e : result.getProductEvidence()) {
if (e.getName().equalsIgnoreCase("package-title")
&& e.getValue().equalsIgnoreCase("org.mortbay.http")) {
found = true;
break;
}
}
assertTrue("package-title of org.mortbay.http not found in org.mortbay.jetty.jar", found);
found = false;
for (Evidence e : result.getVendorEvidence()) {
if (e.getName().equalsIgnoreCase("implementation-url")
&& e.getValue().equalsIgnoreCase("http://jetty.mortbay.org")) {
found = true;
break;
}
}
assertTrue("implementation-url of http://jetty.mortbay.org not found in org.mortbay.jetty.jar", found);
found = false;
for (Evidence e : result.getVersionEvidence()) {
if (e.getName().equalsIgnoreCase("Implementation-Version")
&& e.getValue().equalsIgnoreCase("4.2.27")) {
found = true;
break;
}
}
assertTrue("implementation-version of 4.2.27 not found in org.mortbay.jetty.jar", found);
//file = new File(this.getClass().getClassLoader().getResource("org.mortbay.jmx.jar").getPath());
file = BaseTest.getResourceAsFile(this, "org.mortbay.jmx.jar");
result = new Dependency(file);
instance.analyze(result, null);
assertEquals("org.mortbar.jmx.jar has version evidence?", result.getVersionEvidence().size(), 0);
}
/**
* Test of supported file extension acceptance, of class JarAnalyzer.
*/
@Test
public void testAcceptSupportedExtensions() throws Exception {
JarAnalyzer instance = new JarAnalyzer();
instance.initialize();
instance.setEnabled(true);
String[] files = {"test.jar", "test.war"};
for (String name : files) {
assertTrue(name, instance.accept(new File(name)));
}
}
/**
* Test of getName method, of class JarAnalyzer.
*/
@Test
public void testGetName() {
JarAnalyzer instance = new JarAnalyzer();
String expResult = "Jar Analyzer";
String result = instance.getName();
assertEquals(expResult, result);
}
@Test
public void testParseManifest() throws Exception {
File file = BaseTest.getResourceAsFile(this, "xalan-2.7.0.jar");
Dependency result = new Dependency(file);
JarAnalyzer instance = new JarAnalyzer();
List<JarAnalyzer.ClassNameInformation> cni = new ArrayList<>();
instance.parseManifest(result, cni);
assertTrue(result.getVersionEvidence().getEvidence("manifest: org/apache/xalan/").size() > 0);
}
/**
* Test of getAnalysisPhase method, of class JarAnalyzer.
*/
@Test
public void testGetAnalysisPhase() {
JarAnalyzer instance = new JarAnalyzer();
AnalysisPhase expResult = AnalysisPhase.INFORMATION_COLLECTION;
AnalysisPhase result = instance.getAnalysisPhase();
assertEquals(expResult, result);
}
/**
* Test of getAnalyzerEnabledSettingKey method, of class JarAnalyzer.
*/
@Test
public void testGetAnalyzerEnabledSettingKey() {
JarAnalyzer instance = new JarAnalyzer();
String expResult = Settings.KEYS.ANALYZER_JAR_ENABLED;
String result = instance.getAnalyzerEnabledSettingKey();
assertEquals(expResult, result);
}
@Test
public void testClassInformation() {
JarAnalyzer.ClassNameInformation instance = new JarAnalyzer.ClassNameInformation("org/owasp/dependencycheck/analyzer/JarAnalyzer");
assertEquals("org/owasp/dependencycheck/analyzer/JarAnalyzer", instance.getName());
List<String> expected = Arrays.asList("owasp", "dependencycheck", "analyzer", "jaranalyzer");
List<String> results = instance.getPackageStructure();
assertEquals(expected, results);
}
@Test
public void testAnalyzeDependency_SkipsMacOSMetaDataFile() throws Exception {
JarAnalyzer instance = new JarAnalyzer();
Dependency macOSMetaDataFile = new Dependency();
macOSMetaDataFile
.setActualFilePath(FileUtils.getFile("src", "test", "resources", "._avro-ipc-1.5.0.jar").getAbsolutePath());
macOSMetaDataFile.setFileName("._avro-ipc-1.5.0.jar");
Dependency actualJarFile = new Dependency();
actualJarFile.setActualFilePath(BaseTest.getResourceAsFile(this, "avro-ipc-1.5.0.jar").getAbsolutePath());
actualJarFile.setFileName("avro-ipc-1.5.0.jar");
Engine engine = new Engine();
engine.setDependencies(Arrays.asList(macOSMetaDataFile, actualJarFile));
instance.analyzeDependency(macOSMetaDataFile, engine);
}
@Test
public void testAnalyseDependency_SkipsNonZipFile() throws Exception {
JarAnalyzer instance = new JarAnalyzer();
Dependency textFileWithJarExtension = new Dependency();
textFileWithJarExtension
.setActualFilePath(BaseTest.getResourceAsFile(this, "textFileWithJarExtension.jar").getAbsolutePath());
textFileWithJarExtension.setFileName("textFileWithJarExtension.jar");
Engine engine = new Engine();
engine.setDependencies(Collections.singletonList(textFileWithJarExtension));
instance.analyzeDependency(textFileWithJarExtension, engine);
}
}
|
|
/*
* Copyright (C) 2014 Sebastien Diot.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.blockwithme.util.xtend;
import java.nio.Buffer;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.Dictionary;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import org.eclipse.xtext.xbase.lib.Inline;
import org.eclipse.xtext.xbase.lib.Pure;
import com.blockwithme.util.shared.GwtIncompatible;
/**
* Xtend Extension related to "scalar" values.
*
* The goal is to allow to easily get the "scale/size/length" of a value.
*
* @author monster
*/
public class ScalarExtension extends JavaUtilLoggingExtension {
/** Represents the boolean true value. */
public static final int TRUE = 1;
/** Represents the boolean false value. */
public static final int FALSE = 0;
/** Converts a boolean to an int (true == 1, false == 0). */
@Pure
@Inline("($1 ? 1 : 0)")
public static int operator_plus(final boolean value) {
return value ? TRUE : FALSE;
}
/** Converts a AtomicBoolean to an int (true == 1, null/false == 0). */
@Pure
@Inline("((($1 == null) || !$1.get()) ? 0 : 1)")
public static int operator_plus(final AtomicBoolean value) {
return ((value == null) || !value.get()) ? FALSE : TRUE;
}
/** Converts any Long to a long (null == 0). */
@Pure
@Inline("(($1 == null) ? 0 : $1.longValue())")
public static long operator_plus(final Long value) {
return (value == null) ? 0 : value.longValue();
}
/** Converts any Number to an int (null == 0). */
@Pure
@Inline("(($1 == null) ? 0 : $1.intValue())")
public static int operator_plus(final Number value) {
return (value == null) ? 0 : value.intValue();
}
/** Converts a Character to an int (null == 0). */
@Pure
@Inline("(($1 == null) ? 0 : $1.charValue())")
public static int operator_plus(final Character value) {
return (value == null) ? 0 : value.charValue();
}
/** Converts a Boolean to an int (true == 1, null/false == 0). */
@Pure
@Inline("((($1 == null) || !$1.booleanValue()) ? 0 : 1)")
public static int operator_plus(final Boolean value) {
return ((value == null) || !value.booleanValue()) ? FALSE : TRUE;
}
/** Converts any Date to a long (null == 0). */
@Pure
@Inline("(($1 == null) ? 0 : $1.getTime())")
public static long operator_plus(final Date value) {
return (value == null) ? 0 : value.getTime();
}
/** Converts any Calendar to a long (null == 0). */
@Pure
@Inline("(($1 == null) ? 0 : $1.getTimeInMillis())")
public static long operator_plus(final Calendar value) {
return (value == null) ? 0 : value.getTimeInMillis();
}
/** Converts a boolean[] to an int (null == 0, otherwise length). */
@Pure
@Inline("(($1 == null) ? 0 : $1.length)")
public static int operator_plus(final boolean[] value) {
return (value == null) ? 0 : value.length;
}
/** Converts a byte[] to an int (null == 0, otherwise length). */
@Pure
@Inline("(($1 == null) ? 0 : $1.length)")
public static int operator_plus(final byte[] value) {
return (value == null) ? 0 : value.length;
}
/** Converts a char[] to an int (null == 0, otherwise length). */
@Pure
@Inline("(($1 == null) ? 0 : $1.length)")
public static int operator_plus(final char[] value) {
return (value == null) ? 0 : value.length;
}
/** Converts a short[] to an int (null == 0, otherwise length). */
@Pure
@Inline("(($1 == null) ? 0 : $1.length)")
public static int operator_plus(final short[] value) {
return (value == null) ? 0 : value.length;
}
/** Converts an int[] to an int (null == 0, otherwise length). */
@Pure
@Inline("(($1 == null) ? 0 : $1.length)")
public static int operator_plus(final int[] value) {
return (value == null) ? 0 : value.length;
}
/** Converts a float[] to an int (null == 0, otherwise length). */
@Pure
@Inline("(($1 == null) ? 0 : $1.length)")
public static int operator_plus(final float[] value) {
return (value == null) ? 0 : value.length;
}
/** Converts a long[] to an int (null == 0, otherwise length). */
@Pure
@Inline("(($1 == null) ? 0 : $1.length)")
public static int operator_plus(final long[] value) {
return (value == null) ? 0 : value.length;
}
/** Converts a double[] to an int (null == 0, otherwise length). */
@Pure
@Inline("(($1 == null) ? 0 : $1.length)")
public static int operator_plus(final double[] value) {
return (value == null) ? 0 : value.length;
}
/** Converts a Object[] to an int (null == 0, otherwise length). */
@Pure
@Inline("(($1 == null) ? 0 : $1.length)")
public static int operator_plus(final Object[] value) {
return (value == null) ? 0 : value.length;
}
/** Converts a Collection<?> to an int (null == 0, otherwise size). */
@Pure
@Inline("(($1 == null) ? 0 : $1.size())")
public static int operator_plus(final Collection<?> value) {
return (value == null) ? 0 : value.size();
}
/** Converts a Map<?,?> to an int (null == 0, otherwise size). */
@Pure
@Inline("(($1 == null) ? 0 : $1.size())")
public static int operator_plus(final Map<?, ?> value) {
return (value == null) ? 0 : value.size();
}
/** Converts a CharSequence to an int (null == 0, otherwise length). */
@Pure
@Inline("(($1 == null) ? 0 : $1.length())")
public static int operator_plus(final CharSequence value) {
return (value == null) ? 0 : value.length();
}
/** Converts a Enum<?> to an int (null == 0, otherwise ordinal). */
@Pure
@Inline("(($1 == null) ? 0 : $1.ordinal())")
public static int operator_plus(final Enum<?> value) {
return (value == null) ? 0 : value.ordinal();
}
/** Converts a Buffer to an int (null == 0, otherwise remaining). */
@GwtIncompatible
@Pure
@Inline("(($1 == null) ? 0 : $1.remaining())")
public static int operator_plus(final Buffer value) {
return (value == null) ? 0 : value.remaining();
}
/** Converts a Dictionary to an int (null == 0, otherwise size). */
@GwtIncompatible
@Pure
@Inline("(($1 == null) ? 0 : $1.size())")
public static int operator_plus(final Dictionary<?, ?> value) {
return (value == null) ? 0 : value.size();
}
/** Converts a String to a boolean (null == false, otherwise Boolean.parseBoolean(value)). */
@Pure
@Inline("(($1 == null) ? false : Boolean.parseBoolean($1.toString()))")
public static boolean booleanValue(final CharSequence value) {
return (value == null) ? false : Boolean.parseBoolean(value.toString());
}
/** Converts a String to a byte (null == 0, otherwise Byte.parseByte(value)). */
@Pure
@Inline("(($1 == null) ? (byte) 0 : Byte.parseByte($1.toString()))")
public static byte byteValue(final CharSequence value) {
return (value == null) ? (byte) 0 : Byte.parseByte(value.toString());
}
/** Converts a String to a char (null == 0, otherwise value.charAt(0)). */
@Pure
public static char charValue(final CharSequence value) {
if (value == null) {
return (char) 0;
}
if (value.length() == 1) {
return value.charAt(0);
}
throw new NumberFormatException("Bad char: '" + value + "'");
}
/** Converts a String to a short (null == 0, otherwise Short.parseShort(value)). */
@Pure
@Inline("(($1 == null) ? (short) 0 : Short.parseShort($1.toString()))")
public static short shortValue(final CharSequence value) {
return (value == null) ? (short) 0 : Short.parseShort(value.toString());
}
/** Converts a String to an int (null == 0, otherwise Integer.parseInt(value)). */
@Pure
@Inline("(($1 == null) ? 0 : Integer.parseInt($1.toString()))")
public static int intValue(final CharSequence value) {
return (value == null) ? 0 : Integer.parseInt(value.toString());
}
/** Converts a String to a float (null == 0, otherwise Float.parseFloat(value)). */
@Pure
@Inline("(($1 == null) ? 0 : Float.parseFloat($1.toString()))")
public static float floatValue(final CharSequence value) {
return (value == null) ? 0 : Float.parseFloat(value.toString());
}
/** Converts a String to a long (null == 0, otherwise Long.parseLong(value)). */
@Pure
@Inline("(($1 == null) ? 0 : Long.parseLong($1.toString()))")
public static long longValue(final CharSequence value) {
return (value == null) ? 0 : Long.parseLong(value.toString());
}
/** Converts a String to a double (null == 0, otherwise Double.parseDouble(value)). */
@Pure
@Inline("(($1 == null) ? 0 : Double.parseDouble($1.toString()))")
public static double doubleValue(final CharSequence value) {
return (value == null) ? 0 : Double.parseDouble(value.toString());
}
}
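// --- Illustrative usage sketch (added for clarity; not part of the original sources) ---
// A hypothetical caller of the static conversions above, invoked from plain Java
// (in Xtend these would appear as operators). Assumes ScalarExtension and its
// superclass are on the classpath.
class ScalarExtensionUsageExample {
    public static void main(String[] args) {
        System.out.println(ScalarExtension.operator_plus(new int[] {1, 2, 3}));          // 3 (length)
        System.out.println(ScalarExtension.operator_plus(java.util.Arrays.asList("a"))); // 1 (size)
        System.out.println(ScalarExtension.operator_plus((CharSequence) null));          // 0 (null-safe)
        System.out.println(ScalarExtension.operator_plus(Boolean.TRUE));                 // 1 (true)
        System.out.println(ScalarExtension.intValue("42"));                              // 42 (parsed)
    }
}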
|
|
package net.exkazuu.tree;
import java.util.Collections;
import java.util.Iterator;
import java.util.NoSuchElementException;
import com.google.common.base.Preconditions;
/**
* Represents a tree node, used in place of an XElement.
*/
public class NodeBase<TNode extends NodeBase<TNode, TValue>, TValue> {
/**
* Gets the parent node.
*/
protected TNode _parent;
/**
* Gets the previous node.
*/
protected TNode _cyclicPrev;
/**
* Gets the next node.
*/
protected TNode _cyclicNext;
/**
* Gets and sets the value.
*/
protected TValue _value;
/**
* Gets the first child node.
*/
protected TNode _firstChild;
/**
* Initializes a new instance of the Node class with a default value.
*/
protected NodeBase() {
_cyclicPrev = thisNode();
_cyclicNext = thisNode();
}
/**
* Initializes a new instance of the Node class with the specified value.
*/
protected NodeBase(TValue value) {
_cyclicPrev = thisNode();
_cyclicNext = thisNode();
_value = value;
}
/**
* Returns this instance cast to {@code TNode}, for convenience.
*/
@SuppressWarnings("unchecked")
protected final TNode thisNode() {
return (TNode) this;
}
/**
* Gets the parent node.
*/
public final TNode parent() {
return _parent;
}
/**
* Gets the previous node.
*/
public final TNode cyclicPrev() {
return _cyclicPrev;
}
/**
* Gets the next node.
*/
public final TNode cyclicNext() {
return _cyclicNext;
}
/**
* Gets the first child node.
*/
public final TNode firstChild() {
return _firstChild;
}
/**
* Gets the last child node.
*/
public final TNode lastChild() {
return _firstChild != null ? _firstChild._cyclicPrev : null;
}
/**
* Gets the previous node or null.
*/
public final TNode prev() {
return _cyclicPrev != lastSibling() ? _cyclicPrev : null;
}
/**
* Gets the next node or null.
*/
public final TNode next() {
return _cyclicNext != firstSibling() ? _cyclicNext : null;
}
/**
* Gets the first sibling node or the current node.
*/
public final TNode firstSibling() {
return _parent != null ? _parent._firstChild : thisNode();
}
/**
* Gets the last sibling node or the current node.
*/
public final TNode lastSibling() {
return _parent != null ? _parent._firstChild._cyclicPrev : thisNode();
}
public final Iterable<TNode> ancestorsAndSelf() {
final TNode thisNode = thisNode();
return new Iterable<TNode>() {
@Override
public Iterator<TNode> iterator() {
return new Iterator<TNode>() {
private TNode _node = thisNode;
@Override
public boolean hasNext() {
return _node != null;
}
@Override
public TNode next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
TNode previousNode = _node;
_node = previousNode._parent;
return previousNode;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
public final Iterable<TNode> children() {
final TNode head = _firstChild;
if (head == null) {
return Collections.emptyList();
}
return new Iterable<TNode>() {
@Override
public Iterator<TNode> iterator() {
return new Iterator<TNode>() {
private TNode _node = head;
private boolean _hasNext = true;
@Override
public boolean hasNext() {
return _hasNext;
}
@Override
public TNode next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
TNode previousNode = _node;
_node = previousNode._cyclicNext;
_hasNext = _node != head;
return previousNode;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
public final Iterable<TNode> nextsFromSelf() {
final TNode node = _cyclicNext;
final TNode terminal = firstSibling();
return new Iterable<TNode>() {
@Override
public Iterator<TNode> iterator() {
return new Iterator<TNode>() {
private TNode _node = node;
@Override
public boolean hasNext() {
return _node != terminal;
}
@Override
public TNode next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
TNode previousNode = _node;
_node = previousNode._cyclicNext;
return previousNode;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
public final Iterable<TNode> nextsFromLast() {
final TNode node = lastSibling();
final TNode terminal = thisNode();
return new Iterable<TNode>() {
@Override
public Iterator<TNode> iterator() {
return new Iterator<TNode>() {
private TNode _node = node;
@Override
public boolean hasNext() {
return _node != terminal;
}
@Override
public TNode next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
TNode previousNode = _node;
_node = previousNode._cyclicPrev;
return previousNode;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
public final Iterable<TNode> prevsFromFirst() {
final TNode node = firstSibling();
final TNode terminal = thisNode();
return new Iterable<TNode>() {
@Override
public Iterator<TNode> iterator() {
return new Iterator<TNode>() {
private TNode _node = node;
@Override
public boolean hasNext() {
return _node != terminal;
}
@Override
public TNode next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
TNode previousNode = _node;
_node = previousNode._cyclicNext;
return previousNode;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
public final Iterable<TNode> prevsFromSelf() {
final TNode node = _cyclicPrev;
final TNode terminal = lastSibling();
return new Iterable<TNode>() {
@Override
public Iterator<TNode> iterator() {
return new Iterator<TNode>() {
private TNode _node = node;
@Override
public boolean hasNext() {
return _node != terminal;
}
@Override
public TNode next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
TNode previousNode = _node;
_node = previousNode._cyclicPrev;
return previousNode;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
public final Iterable<TNode> descendants() {
final TNode start = thisNode();
if (start._firstChild == null) {
return Collections.emptyList();
}
return new Iterable<TNode>() {
@Override
public Iterator<TNode> iterator() {
return new Iterator<TNode>() {
private TNode _next = start._firstChild;
@Override
public boolean hasNext() {
return _next != null;
}
@Override
public TNode next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
TNode node = _next;
_next = privateNext(node);
return node;
}
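// Depth-first successor: descend into the first child when one exists;
// otherwise climb back toward the start node until a node with a next
// sibling is found, and continue from that sibling.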
private TNode privateNext(TNode cursor) {
if (cursor._firstChild != null) {
cursor = cursor._firstChild;
return cursor;
}
while (cursor.next() == null) {
cursor = cursor._parent;
if (cursor == start) {
return null;
}
}
cursor = cursor._cyclicNext;
return cursor;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
public final Iterable<TNode> siblings() {
TNode head = firstSibling();
if (head == null || head._cyclicNext == head) {
return Collections.emptyList();
}
final TNode thisNode = thisNode();
final TNode terminal = head != thisNode ? head : head._cyclicNext;
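// Iterate every sibling except this node: start from a sibling other than
// this one and skip this node while cycling around the sibling ring.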
return new Iterable<TNode>() {
@Override
public Iterator<TNode> iterator() {
return new Iterator<TNode>() {
private TNode _node = terminal;
private boolean _hasNext = true;
@Override
public boolean hasNext() {
return _hasNext;
}
@Override
public TNode next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
TNode previousNode = _node;
_node = previousNode._cyclicNext;
if (_node == thisNode) {
_node = _node._cyclicNext;
}
_hasNext = _node != terminal;
return previousNode;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
public final Iterable<TNode> siblingsAndSelf() {
final TNode head = firstSibling();
if (head == null) {
return Collections.emptyList();
}
return new Iterable<TNode>() {
@Override
public Iterator<TNode> iterator() {
return new Iterator<TNode>() {
private TNode _node = head;
private boolean _hasNext = true;
@Override
public boolean hasNext() {
return _hasNext;
}
@Override
public TNode next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
TNode previousNode = _node;
_node = previousNode._cyclicNext;
_hasNext = _node != head;
return previousNode;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
public final Iterable<TNode> ancestorsWithSingleChild() {
final TNode node = thisNode();
return new Iterable<TNode>() {
@Override
public Iterator<TNode> iterator() {
return new Iterator<TNode>() {
private TNode _node = node;
@Override
public boolean hasNext() {
return _node != null && _node == _node._cyclicNext;
}
@Override
public TNode next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
_node = _node._parent;
return _node;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
public final Iterable<TNode> descendantsOfSingle() {
final TNode node = _firstChild;
return new Iterable<TNode>() {
@Override
public Iterator<TNode> iterator() {
return new Iterator<TNode>() {
private TNode _node = node;
@Override
public boolean hasNext() {
return _node != null && _node == _node._cyclicNext;
}
@Override
public TNode next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
TNode previousNode = _node;
_node = _node._firstChild;
return previousNode;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
public final Iterable<TNode> descendantsOfFirstChild() {
final TNode node = _firstChild;
return new Iterable<TNode>() {
@Override
public Iterator<TNode> iterator() {
return new Iterator<TNode>() {
private TNode _node = node;
@Override
public boolean hasNext() {
return _node != null;
}
@Override
public TNode next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
TNode previousNode = _node;
_node = _node._firstChild;
return previousNode;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
public final Iterable<TNode> descendants(final int inclusiveDepth) {
final TNode start = thisNode();
if (start._firstChild == null || inclusiveDepth <= 0) {
return Collections.emptyList();
}
return new Iterable<TNode>() {
@Override
public Iterator<TNode> iterator() {
return new Iterator<TNode>() {
private TNode _next = start._firstChild;
private int _inclusiveDepth = inclusiveDepth - 1;
@Override
public boolean hasNext() {
return _next != null;
}
@Override
public TNode next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
TNode node = _next;
_next = privateNext(node);
return node;
}
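// Depth-limited depth-first successor: descend into the first child only while
// _inclusiveDepth permits, decrementing on the way down and incrementing again
// when climbing back toward the start node.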
private TNode privateNext(TNode cursor) {
if (cursor._firstChild != null && _inclusiveDepth > 0) {
cursor = cursor._firstChild;
_inclusiveDepth--;
return cursor;
}
while (cursor.next() == null) {
cursor = cursor._parent;
_inclusiveDepth++;
if (cursor == start) {
return null;
}
}
cursor = cursor._cyclicNext;
return cursor;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
public final TNode addPrevious(TNode node) {
Preconditions.checkNotNull(node);
Preconditions.checkNotNull(_parent);
Preconditions.checkArgument(node._parent == null);
if (_parent._firstChild == this) {
_parent._firstChild = node;
}
return addPreviousIgnoringFirstChild(node);
}
public final TNode addNext(TNode node) {
Preconditions.checkNotNull(node);
Preconditions.checkNotNull(_parent);
Preconditions.checkArgument(node._parent == null);
return _cyclicNext.addPreviousIgnoringFirstChild(node);
}
public final TNode addFirst(TNode node) {
Preconditions.checkNotNull(node);
Preconditions.checkArgument(node._parent == null);
return addFirstPrivate(node);
}
private final TNode addFirstPrivate(TNode node) {
addLastPrivate(node);
_firstChild = node;
return node;
}
protected final TNode addPreviousIgnoringFirstChild(TNode node) {
node._parent = _parent;
node._cyclicNext = thisNode();
node._cyclicPrev = _cyclicPrev;
_cyclicPrev._cyclicNext = node;
_cyclicPrev = node;
return node;
}
public final TNode addLast(TNode node) {
Preconditions.checkNotNull(node);
Preconditions.checkArgument(node._parent == null);
return addLastPrivate(node);
}
private final TNode addLastPrivate(TNode node) {
TNode second = _firstChild;
if (second == null) {
node._parent = thisNode();
node._cyclicNext = node;
node._cyclicPrev = node;
_firstChild = node;
} else {
second.addPreviousIgnoringFirstChild(node);
}
return node;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
toStringPrivate(thisNode(), 0, builder);
return builder.toString();
}
private final static <TNode extends NodeBase<TNode, T>, T> void toStringPrivate(TNode node,
int depth, StringBuilder builder) {
if (node == null) {
return;
}
for (int i = 0; i < depth; i++) {
builder.append(" ");
}
builder.append(node._value != null ? node._value.toString() : "");
builder.append(System.lineSeparator());
for (TNode child : node.children()) {
toStringPrivate(child, depth + 1, builder);
}
}
}
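// --- Illustrative usage sketch (added for clarity; not part of the original sources) ---
// A hypothetical concrete node type and a tiny tree, showing how the cyclic
// sibling links stay hidden behind addLast()/addPrevious()/children().
class NodeBaseUsageExample {

    /** Minimal concrete node carrying a String value. */
    static final class StringNode extends NodeBase<StringNode, String> {
        StringNode(String value) { super(value); }
    }

    public static void main(String[] args) {
        StringNode root = new StringNode("root");
        StringNode a = root.addLast(new StringNode("a"));
        StringNode b = root.addLast(new StringNode("b"));
        a.addLast(new StringNode("a1"));
        b.addPrevious(new StringNode("between a and b"));

        // Indented dump of the whole tree via toString().
        System.out.print(root);

        // Children of the root in order: a, "between a and b", b.
        for (StringNode child : root.children()) {
            System.out.println("child: " + child._value);
        }
    }
}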
|
|
/*-
* Copyright (c) 2010, 2020 Oracle and/or its affiliates. All rights reserved.
*
* See the file LICENSE for license information.
*
*/
package repmgrtests;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.Socket;
import org.junit.Before;
import org.junit.Test;
import com.sleepycat.db.BtreeStats;
import com.sleepycat.db.Database;
import com.sleepycat.db.DatabaseConfig;
import com.sleepycat.db.DatabaseEntry;
import com.sleepycat.db.DatabaseType;
import com.sleepycat.db.Environment;
import com.sleepycat.db.EnvironmentConfig;
import com.sleepycat.db.ReplicationManagerAckPolicy;
import com.sleepycat.db.ReplicationManagerSiteConfig;
import com.sleepycat.db.ReplicationManagerStartPolicy;
import com.sleepycat.db.ReplicationTimeoutType;
import com.sleepycat.db.VerboseConfig;
/**
* Get a connection hopelessly clogged, and then kill the connection.
* Verify that the blocked thread is immediately freed.
*/
public class TestDrainAbandon {
private static final String TEST_DIR_NAME = "TESTDIR";
private File testdir;
private byte[] data;
private int masterPort;
private int clientPort;
private int client2Port;
private int client3Port;
private int mgrPort;
@Before public void setUp() throws Exception {
testdir = new File(TEST_DIR_NAME);
Util.rm_rf(testdir);
testdir.mkdir();
String alphabet = "abcdefghijklmnopqrstuvwxyz";
ByteArrayOutputStream baos = new ByteArrayOutputStream();
OutputStreamWriter w = new OutputStreamWriter(baos);
while (baos.size() < 1000) // arbitrary min. size
w.write(alphabet);
w.close();
data = baos.toByteArray();
if (Boolean.getBoolean("MANUAL_FIDDLER_START")) {
masterPort = 6000;
clientPort = 6001;
client2Port = 6002;
client3Port = 6003;
mgrPort = 8000;
} else {
String mgrPortNum = System.getenv("DB_TEST_FAKE_PORT");
assertNotNull("required DB_TEST_FAKE_PORT environment variable not found",
mgrPortNum);
mgrPort = Integer.parseInt(mgrPortNum);
PortsConfig p = new PortsConfig(4);
masterPort = p.getRealPort(0);
clientPort = p.getRealPort(1);
client2Port = p.getRealPort(2);
client3Port = p.getRealPort(3);
Util.startFiddler(p, getClass().getName(), mgrPort);
}
}
@Test public void testDraining() throws Exception {
EnvironmentConfig masterConfig = makeBasicConfig();
masterConfig.setReplicationLimit(100000000);
ReplicationManagerSiteConfig site =
new ReplicationManagerSiteConfig("localhost", masterPort);
site.setLocalSite(true);
site.setLegacy(true);
masterConfig.addReplicationManagerSite(site);
site = new ReplicationManagerSiteConfig("localhost", clientPort);
site.setLegacy(true);
masterConfig.addReplicationManagerSite(site);
site = new ReplicationManagerSiteConfig("localhost", client2Port);
site.setLegacy(true);
masterConfig.addReplicationManagerSite(site);
site = new ReplicationManagerSiteConfig("localhost", client3Port);
site.setLegacy(true);
masterConfig.addReplicationManagerSite(site);
Environment master = new Environment(mkdir("master"), masterConfig);
setTimeouts(master);
// Prevent connection retries, so that all connections
// originate from clients
master.setReplicationTimeout(ReplicationTimeoutType.CONNECTION_RETRY,
Integer.MAX_VALUE);
master.replicationManagerStart(2, ReplicationManagerStartPolicy.REP_MASTER);
DatabaseConfig dc = new DatabaseConfig();
dc.setTransactional(true);
dc.setAllowCreate(true);
dc.setType(DatabaseType.BTREE);
dc.setPageSize(4096);
Database db = master.openDatabase(null, "test.db", null, dc);
DatabaseEntry key = new DatabaseEntry();
DatabaseEntry value = new DatabaseEntry();
value.setData(data);
for (int i=0;
((BtreeStats)db.getStats(null, null)).getPageCount() < 500;
i++)
{
String k = "The record number is: " + i;
key.setData(k.getBytes());
db.put(null, key, value);
}
// tell fiddler to stop reading once it sees a PAGE message
Socket s = new Socket("localhost", mgrPort);
OutputStreamWriter w = new OutputStreamWriter(s.getOutputStream());
String path1 = "{" + masterPort + "," + clientPort + "}"; // looks like {6000,6001}
w.write("{init," + path1 + ",page_clog}\r\n");
w.flush();
BufferedReader br = new BufferedReader(new InputStreamReader(s.getInputStream()));
br.readLine();
assertEquals("ok", br.readLine());
// create client
//
EnvironmentConfig ec = makeBasicConfig();
site = new ReplicationManagerSiteConfig("localhost", clientPort);
site.setLocalSite(true);
site.setLegacy(true);
ec.addReplicationManagerSite(site);
site = new ReplicationManagerSiteConfig("localhost", masterPort);
site.setLegacy(true);
ec.addReplicationManagerSite(site);
site = new ReplicationManagerSiteConfig("localhost", client2Port);
site.setLegacy(true);
ec.addReplicationManagerSite(site);
site = new ReplicationManagerSiteConfig("localhost", client3Port);
site.setLegacy(true);
ec.addReplicationManagerSite(site);
Environment client = new Environment(mkdir("client"), ec);
setTimeouts(client);
client.replicationManagerStart(1, ReplicationManagerStartPolicy.REP_CLIENT);
// wait til it gets stuck
Thread.sleep(5000); // FIXME
// Do the same for another client, because the master has 2
// msg processing threads. (It's no longer possible to
// configure just 1.)
String path2 = "{" + masterPort + "," + client2Port + "}";
w.write("{init," + path2 + ",page_clog}\r\n");
w.flush();
br = new BufferedReader(new InputStreamReader(s.getInputStream()));
br.readLine();
assertEquals("ok", br.readLine());
ec = makeBasicConfig();
site = new ReplicationManagerSiteConfig("localhost", client2Port);
site.setLocalSite(true);
site.setLegacy(true);
ec.addReplicationManagerSite(site);
site = new ReplicationManagerSiteConfig("localhost", masterPort);
site.setLegacy(true);
ec.addReplicationManagerSite(site);
site = new ReplicationManagerSiteConfig("localhost", clientPort);
site.setLegacy(true);
ec.addReplicationManagerSite(site);
site = new ReplicationManagerSiteConfig("localhost", client3Port);
site.setLegacy(true);
ec.addReplicationManagerSite(site);
Environment client2 = new Environment(mkdir("client2"), ec);
setTimeouts(client2);
client2.replicationManagerStart(1, ReplicationManagerStartPolicy.REP_CLIENT);
// wait til it gets stuck
Thread.sleep(5000);
// With the connection stuck, the master cannot write out log
// records for new "live" transactions. Knowing we didn't
// write the record, we should not bother waiting for an ack
// that cannot possibly arrive; so we should simply return
// quickly. The duration should be very quick, but anything
// less than the ack timeout indicates correct behavior (in
// case this test runs on a slow, overloaded system).
//
long startTime = System.currentTimeMillis();
key.setData("one extra record".getBytes());
db.put(null, key, value);
long duration = System.currentTimeMillis() - startTime;
assertTrue("txn duration: " + duration, duration < 29000);
System.out.println("txn duration: " + duration);
db.close();
// Tell fiddler to close the connections. That should trigger
// us to abandon the timeout. Then create another client and
// see that it can complete its internal init quickly. Since
// we have limited threads at the master, this demonstrates
// that they were abandoned.
//
path1 = "{" + clientPort + "," + masterPort + "}"; // looks like {6001,6000}
w.write("{" + path1 + ",shutdown}\r\n");
w.flush();
assertEquals("ok", br.readLine());
path2 = "{" + client2Port + "," + masterPort + "}"; // looks like {6001,6000}
w.write("{" + path2 + ",shutdown}\r\n");
w.flush();
assertEquals("ok", br.readLine());
ec = makeBasicConfig();
site = new ReplicationManagerSiteConfig("localhost", client3Port);
site.setLocalSite(true);
site.setLegacy(true);
ec.addReplicationManagerSite(site);
site = new ReplicationManagerSiteConfig("localhost", masterPort);
site.setLegacy(true);
ec.addReplicationManagerSite(site);
site = new ReplicationManagerSiteConfig("localhost", clientPort);
site.setLegacy(true);
ec.addReplicationManagerSite(site);
site = new ReplicationManagerSiteConfig("localhost", client2Port);
site.setLegacy(true);
ec.addReplicationManagerSite(site);
EventHandler clientMonitor = new EventHandler();
ec.setEventHandler(clientMonitor);
Environment client3 = new Environment(mkdir("client3"), ec);
setTimeouts(client3);
startTime = System.currentTimeMillis();
client3.replicationManagerStart(2, ReplicationManagerStartPolicy.REP_CLIENT);
clientMonitor.await();
duration = System.currentTimeMillis() - startTime;
assertTrue("sync duration: " + duration, duration < 20000); // 20 seconds should be plenty
client3.close();
master.close();
w.write("shutdown\r\n");
w.flush();
assertEquals("ok", br.readLine());
s.close();
}
public static EnvironmentConfig makeBasicConfig() {
EnvironmentConfig ec = new EnvironmentConfig();
ec.setAllowCreate(true);
ec.setInitializeCache(true);
ec.setInitializeLocking(true);
ec.setInitializeLogging(true);
ec.setInitializeReplication(true);
ec.setTransactional(true);
ec.setThreaded(true);
ec.setReplicationInMemory(true);
ec.setCacheSize(256 * 1024 * 1024);
if (Boolean.getBoolean("VERB_REPLICATION"))
ec.setVerbose(VerboseConfig.REPLICATION, true);
return (ec);
}
private void setTimeouts(Environment e) throws Exception {
e.setReplicationTimeout(ReplicationTimeoutType.ACK_TIMEOUT,
30000000);
}
public File mkdir(String dname) {
File f = new File(testdir, dname);
f.mkdir();
return f;
}
}
|
|
/*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.transaction;
import javax.sql.DataSource;
import org.junit.jupiter.api.Test;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerAutoConfiguration;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.ReactiveTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.reactive.TransactionalOperator;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import org.springframework.transaction.support.TransactionTemplate;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
/**
* Tests for {@link TransactionAutoConfiguration}.
*
* @author Stephane Nicoll
* @author Phillip Webb
*/
class TransactionAutoConfigurationTests {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(TransactionAutoConfiguration.class));
@Test
void whenThereIsNoPlatformTransactionManagerNoTransactionTemplateIsAutoConfigured() {
this.contextRunner.run((context) -> assertThat(context).doesNotHaveBean(TransactionTemplate.class));
}
@Test
void whenThereIsASinglePlatformTransactionManagerATransactionTemplateIsAutoConfigured() {
this.contextRunner.withUserConfiguration(SinglePlatformTransactionManagerConfiguration.class).run((context) -> {
PlatformTransactionManager transactionManager = context.getBean(PlatformTransactionManager.class);
TransactionTemplate transactionTemplate = context.getBean(TransactionTemplate.class);
assertThat(transactionTemplate.getTransactionManager()).isSameAs(transactionManager);
});
}
@Test
void whenThereIsASingleReactiveTransactionManagerATransactionalOperatorIsAutoConfigured() {
this.contextRunner.withUserConfiguration(SingleReactiveTransactionManagerConfiguration.class).run((context) -> {
ReactiveTransactionManager transactionManager = context.getBean(ReactiveTransactionManager.class);
TransactionalOperator transactionalOperator = context.getBean(TransactionalOperator.class);
assertThat(transactionalOperator).extracting("transactionManager").isSameAs(transactionManager);
});
}
@Test
void whenThereAreBothReactiveAndPlatformTransactionManagersATemplateAndAnOperatorAreAutoConfigured() {
this.contextRunner
.withConfiguration(AutoConfigurations.of(DataSourceAutoConfiguration.class,
DataSourceTransactionManagerAutoConfiguration.class))
.withUserConfiguration(SinglePlatformTransactionManagerConfiguration.class,
SingleReactiveTransactionManagerConfiguration.class)
.run((context) -> {
PlatformTransactionManager platformTransactionManager = context
.getBean(PlatformTransactionManager.class);
TransactionTemplate transactionTemplate = context.getBean(TransactionTemplate.class);
assertThat(transactionTemplate.getTransactionManager()).isSameAs(platformTransactionManager);
ReactiveTransactionManager reactiveTransactionManager = context
.getBean(ReactiveTransactionManager.class);
TransactionalOperator transactionalOperator = context.getBean(TransactionalOperator.class);
assertThat(transactionalOperator).extracting("transactionManager")
.isSameAs(reactiveTransactionManager);
});
}
@Test
void whenThereAreSeveralPlatformTransactionManagersNoTransactionTemplateIsAutoConfigured() {
this.contextRunner.withUserConfiguration(SeveralPlatformTransactionManagersConfiguration.class)
.run((context) -> assertThat(context).doesNotHaveBean(TransactionTemplate.class));
}
@Test
void whenThereAreSeveralReactiveTransactionManagersNoTransactionOperatorIsAutoConfigured() {
this.contextRunner.withUserConfiguration(SeveralReactiveTransactionManagersConfiguration.class)
.run((context) -> assertThat(context).doesNotHaveBean(TransactionalOperator.class));
}
@Test
void whenAUserProvidesATransactionTemplateTheAutoConfiguredTemplateBacksOff() {
this.contextRunner.withUserConfiguration(CustomPlatformTransactionManagerConfiguration.class).run((context) -> {
assertThat(context).hasSingleBean(TransactionTemplate.class);
assertThat(context.getBean("transactionTemplateFoo")).isInstanceOf(TransactionTemplate.class);
});
}
@Test
void whenAUserProvidesATransactionalOperatorTheAutoConfiguredOperatorBacksOff() {
this.contextRunner.withUserConfiguration(SingleReactiveTransactionManagerConfiguration.class,
CustomTransactionalOperatorConfiguration.class).run((context) -> {
assertThat(context).hasSingleBean(TransactionalOperator.class);
assertThat(context.getBean("customTransactionalOperator"))
.isInstanceOf(TransactionalOperator.class);
});
}
@Test
void platformTransactionManagerCustomizers() {
this.contextRunner.withUserConfiguration(SeveralPlatformTransactionManagersConfiguration.class)
.run((context) -> {
TransactionManagerCustomizers customizers = context.getBean(TransactionManagerCustomizers.class);
assertThat(customizers).extracting("customizers").asList().hasSize(1).first()
.isInstanceOf(TransactionProperties.class);
});
}
@Test
void transactionNotManagedWithNoTransactionManager() {
this.contextRunner.withUserConfiguration(BaseConfiguration.class).run(
(context) -> assertThat(context.getBean(TransactionalService.class).isTransactionActive()).isFalse());
}
@Test
void transactionManagerUsesCglibByDefault() {
this.contextRunner.withUserConfiguration(PlatformTransactionManagersConfiguration.class).run((context) -> {
assertThat(context.getBean(AnotherServiceImpl.class).isTransactionActive()).isTrue();
assertThat(context.getBeansOfType(TransactionalServiceImpl.class)).hasSize(1);
});
}
@Test
void transactionManagerCanBeConfiguredToJdkProxy() {
this.contextRunner.withUserConfiguration(PlatformTransactionManagersConfiguration.class)
.withPropertyValues("spring.aop.proxy-target-class=false").run((context) -> {
assertThat(context.getBean(AnotherService.class).isTransactionActive()).isTrue();
assertThat(context).doesNotHaveBean(AnotherServiceImpl.class);
assertThat(context).doesNotHaveBean(TransactionalServiceImpl.class);
});
}
@Test
void customEnableTransactionManagementTakesPrecedence() {
this.contextRunner
.withUserConfiguration(CustomTransactionManagementConfiguration.class,
PlatformTransactionManagersConfiguration.class)
.withPropertyValues("spring.aop.proxy-target-class=true").run((context) -> {
assertThat(context.getBean(AnotherService.class).isTransactionActive()).isTrue();
assertThat(context).doesNotHaveBean(AnotherServiceImpl.class);
assertThat(context).doesNotHaveBean(TransactionalServiceImpl.class);
});
}
@Configuration
static class SinglePlatformTransactionManagerConfiguration {
@Bean
PlatformTransactionManager transactionManager() {
return mock(PlatformTransactionManager.class);
}
}
@Configuration
static class SingleReactiveTransactionManagerConfiguration {
@Bean
ReactiveTransactionManager reactiveTransactionManager() {
return mock(ReactiveTransactionManager.class);
}
}
@Configuration(proxyBeanMethods = false)
static class SeveralPlatformTransactionManagersConfiguration {
@Bean
PlatformTransactionManager transactionManagerOne() {
return mock(PlatformTransactionManager.class);
}
@Bean
PlatformTransactionManager transactionManagerTwo() {
return mock(PlatformTransactionManager.class);
}
}
@Configuration(proxyBeanMethods = false)
static class SeveralReactiveTransactionManagersConfiguration {
@Bean
ReactiveTransactionManager reactiveTransactionManager1() {
return mock(ReactiveTransactionManager.class);
}
@Bean
ReactiveTransactionManager reactiveTransactionManager2() {
return mock(ReactiveTransactionManager.class);
}
}
@Configuration(proxyBeanMethods = false)
static class CustomPlatformTransactionManagerConfiguration {
@Bean
TransactionTemplate transactionTemplateFoo(PlatformTransactionManager transactionManager) {
return new TransactionTemplate(transactionManager);
}
@Bean
PlatformTransactionManager transactionManagerFoo() {
return mock(PlatformTransactionManager.class);
}
}
@Configuration(proxyBeanMethods = false)
static class CustomTransactionalOperatorConfiguration {
@Bean
TransactionalOperator customTransactionalOperator() {
return mock(TransactionalOperator.class);
}
}
@Configuration(proxyBeanMethods = false)
static class BaseConfiguration {
@Bean
TransactionalService transactionalService() {
return new TransactionalServiceImpl();
}
@Bean
AnotherServiceImpl anotherService() {
return new AnotherServiceImpl();
}
}
@Configuration(proxyBeanMethods = false)
@Import(BaseConfiguration.class)
static class PlatformTransactionManagersConfiguration {
@Bean
DataSourceTransactionManager transactionManager(DataSource dataSource) {
return new DataSourceTransactionManager(dataSource);
}
@Bean
DataSource dataSource() {
return DataSourceBuilder.create().driverClassName("org.hsqldb.jdbc.JDBCDriver").url("jdbc:hsqldb:mem:tx")
.username("sa").build();
}
}
@Configuration(proxyBeanMethods = false)
@EnableTransactionManagement(proxyTargetClass = false)
static class CustomTransactionManagementConfiguration {
}
interface TransactionalService {
@Transactional
boolean isTransactionActive();
}
static class TransactionalServiceImpl implements TransactionalService {
@Override
public boolean isTransactionActive() {
return TransactionSynchronizationManager.isActualTransactionActive();
}
}
interface AnotherService {
boolean isTransactionActive();
}
static class AnotherServiceImpl implements AnotherService {
@Override
@Transactional
public boolean isTransactionActive() {
return TransactionSynchronizationManager.isActualTransactionActive();
}
}
}
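// A minimal usage sketch (not part of the test class above; the service class and its
// bean wiring are assumptions): when a single PlatformTransactionManager bean exists,
// the auto-configured TransactionTemplate can be injected and used directly.
//
//   @Service
//   class OrderService {
//       private final TransactionTemplate transactionTemplate;
//       OrderService(TransactionTemplate transactionTemplate) {
//           this.transactionTemplate = transactionTemplate;
//       }
//       void placeOrder() {
//           transactionTemplate.executeWithoutResult(status -> {
//               // transactional work goes here
//           });
//       }
//   }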
|
|
package br.com.halyson.materialdesign.activity;
import android.content.Intent;
import android.content.res.Configuration;
import android.graphics.drawable.BitmapDrawable;
import android.os.Bundle;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Toast;
import com.heinrichreimersoftware.materialdrawer.DrawerActivity;
import com.heinrichreimersoftware.materialdrawer.DrawerView;
import com.heinrichreimersoftware.materialdrawer.structure.DrawerItem;
import com.heinrichreimersoftware.materialdrawer.structure.DrawerProfile;
import br.com.halyson.materialdesign.R;
public class StoreActivity extends DrawerActivity {
private DrawerView drawer;
private ActionBarDrawerToggle drawerToggle;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_store);
DrawerLayout drawerLayout = (DrawerLayout) findViewById(R.id.drawerLayout);
drawer = (DrawerView) findViewById(R.id.drawer);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
//drawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_LOCKED_CLOSED);
setSupportActionBar(toolbar);
drawerToggle = new ActionBarDrawerToggle(
this,
drawerLayout,
toolbar,
R.string.drawer_open,
R.string.drawer_close
) {
public void onDrawerClosed(View view) {
invalidateOptionsMenu();
}
public void onDrawerOpened(View drawerView) {
invalidateOptionsMenu();
}
};
drawerLayout.setStatusBarBackgroundColor(getResources().getColor(R.color.color_primary_dark));
drawerLayout.setDrawerListener(drawerToggle);
drawerLayout.closeDrawer(drawer);
drawer.addItem(new DrawerItem()
.setTextPrimary(getString(R.string.lorem_ipsum_short))
.setTextSecondary(getString(R.string.lorem_ipsum_long))
);
drawer.addItem(new DrawerItem()
.setImage(getResources().getDrawable(R.drawable.ic_mail))
//.setTextPrimary(getString(R.string.lorem_ipsum_short))
.setTextPrimary("Store")
//.setTextSecondary(getString(R.string.lorem_ipsum_long))
.setTextSecondary("Buy credit and more...")
);
drawer.addDivider();
drawer.addItem(new DrawerItem()
.setImage(getResources().getDrawable(R.drawable.ic_mail))
//.setTextPrimary(getString(R.string.lorem_ipsum_short))
.setTextPrimary("Settings")
//.setTextSecondary(getString(R.string.lorem_ipsum_long))
.setTextSecondary("Customise your PaiGow app!")
);
drawer.addDivider();
drawer.addItem(new DrawerItem()
.setImage(getResources().getDrawable(R.drawable.ic_mail))
//.setTextPrimary(getString(R.string.lorem_ipsum_short))
.setTextPrimary("About")
//.setTextSecondary(getString(R.string.lorem_ipsum_long))
.setTextSecondary("Additional info about us!")
);
drawer.addDivider();
drawer.selectItem(2);
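        // Note: selecting any drawer item below highlights it and then opens GameActivity;
        // the clicked position is not used to route to different screens.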
drawer.setOnItemClickListener(new DrawerItem.OnItemClickListener() {
@Override
public void onClick(DrawerItem item, long id, int position) {
drawer.selectItem(position);
//Toast.makeText(StoreActivity.this, "Clicked item #" + position, Toast.LENGTH_SHORT).show();
Intent myIntent = new Intent(StoreActivity.this, GameActivity.class);
//myIntent.putExtra("key", value); //Optional parameters
StoreActivity.this.startActivity(myIntent);
}
});
drawer.addFixedItem(new DrawerItem()
.setRoundedImage((BitmapDrawable) getResources().getDrawable(R.drawable.cat_2), DrawerItem.SMALL_AVATAR)
//.setTextPrimary(getString(R.string.lorem_ipsum_short))
.setTextPrimary("Logout")
);
/*drawer.addFixedItem(new DrawerItem()
.setImage(getResources().getDrawable(R.drawable.ic_flag))
.setTextPrimary(getString(R.string.lorem_ipsum_short))
);
drawer.setOnFixedItemClickListener(new DrawerItem.OnItemClickListener() {
@Override
public void onClick(DrawerItem item, long id, int position) {
drawer.selectFixedItem(position);
Toast.makeText(GameActivity.this, "Clicked fixed item #" + position, Toast.LENGTH_SHORT).show();
}
});*/
drawer.addProfile(new DrawerProfile()
.setId(1)
//.setRoundedAvatar((BitmapDrawable) getResources().getDrawable(R.drawable.cat_1))
                .setBackground(getResources().getDrawable(R.drawable.cat_wide_1))
.setName("PaiGow")
//.setName(getString(R.string.lorem_ipsum_short))
//.setDescription(getString(R.string.lorem_ipsum_medium))
);
/*drawer.addProfile(new DrawerProfile()
.setId(2)
.setRoundedAvatar((BitmapDrawable) getResources().getDrawable(R.drawable.cat_2))
.setBackground(getResources().getDrawable(R.drawable.cat_wide_1))
.setName(getString(R.string.lorem_ipsum_short))
);
drawer.addProfile(new DrawerProfile()
.setId(3)
.setRoundedAvatar((BitmapDrawable) getResources().getDrawable(R.drawable.cat_1))
.setBackground(getResources().getDrawable(R.drawable.cat_wide_2))
.setName(getString(R.string.lorem_ipsum_short))
.setDescription(getString(R.string.lorem_ipsum_medium))
);*/
/*drawer.setOnProfileClickListener(new DrawerProfile.OnProfileClickListener() {
@Override
public void onClick(DrawerProfile profile, long id) {
Toast.makeText(GameActivity.this, "Clicked profile *" + id, Toast.LENGTH_SHORT).show();
}
});
drawer.setOnProfileSwitchListener(new DrawerProfile.OnProfileSwitchListener() {
@Override
public void onSwitch(DrawerProfile oldProfile, long oldId, DrawerProfile newProfile, long newId) {
Toast.makeText(GameActivity.this, "Switched from profile *" + oldId + " to profile *" + newId, Toast.LENGTH_SHORT).show();
}
});*/
}
public void openDrawerFrameLayout(View view) {
//Intent intent = new Intent(this, MainActivity2.class);
// startActivity(intent);
}
public void openDrawerActivity(View view) {
//Intent intent = new Intent(this, MainActivity3.class);
//startActivity(intent);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (drawerToggle.onOptionsItemSelected(item)) {
return true;
}
/*switch (item.getItemId()) {
case R.id.action_github:
String url = "https://github.com/HeinrichReimer/material-drawer";
Intent i = new Intent(Intent.ACTION_VIEW);
i.setData(Uri.parse(url));
startActivity(i);
break;
}*/
return super.onOptionsItemSelected(item);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
drawerToggle.onConfigurationChanged(newConfig);
}
@Override
protected void onPostCreate(Bundle savedInstanceState) {
super.onPostCreate(savedInstanceState);
drawerToggle.syncState();
}
}
|
|
package com.rincon.blackbook.bfiledir;
/*
* Copyright (c) 2004-2006 Rincon Research Corporation.
* All rights reserved.
*
* Rincon Research will permit distribution and use by others subject to
* the restrictions of a licensing agreement which contains (among other things)
* the following restrictions:
*
* 1. No credit will be taken for the Work of others.
* 2. It will not be resold for a price in excess of reproduction and
* distribution costs.
* 3. Others are not restricted from copying it or using it except as
* set forward in the licensing agreement.
* 4. Commented source code of any modifications or additions will be
* made available to Rincon Research on the same terms.
* 5. This notice will remain intact and displayed prominently.
*
* Copies of the complete licensing agreement may be obtained by contacting
* Rincon Research, 101 N. Wilmot, Suite 101, Tucson, AZ 85711.
*
* There is no warranty with this product, either expressed or implied.
* Use at your own risk. Rincon Research is not liable or responsible for
* damage or loss incurred or resulting from the use or misuse of this software.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import com.rincon.blackbook.Commands;
import com.rincon.blackbook.Util;
import com.rincon.blackbook.messages.BlackbookConnectMsg;
import net.tinyos.message.Message;
import net.tinyos.message.MessageListener;
import net.tinyos.message.MoteIF;
import net.tinyos.util.Messenger;
public class BFileDir implements BFileDirCommands, MessageListener {
/** Communication with the mote */
private MoteIF comm = new MoteIF((Messenger) null);
/** Command to send */
private BlackbookConnectMsg command = new BlackbookConnectMsg();
    /** List of BFileDirEvents listeners */
private static List listeners = new ArrayList();
/** Current destination address */
private int dest = Commands.TOS_BCAST_ADDR;
/** Return value for inline commands */
private long returnAmount = 0;
/**
* Set the destination address of the next send command
* @param destination
*/
public void setDestination(int destination) {
dest = destination;
}
/**
* Constructor
*
*/
public BFileDir() {
comm.registerListener(new BlackbookConnectMsg(), this);
}
    /**
     * Send a message to the current destination address
     * @param m the message to send
     */
private synchronized void send(Message m) {
try {
comm.send(dest, m);
} catch (IOException e) {
System.err.println("Couldn't contact the mote");
}
}
    /**
     * Add a BFileDirEvents listener
     * @param listener the listener to register
     */
public void addListener(BFileDirEvents listener) {
if(!listeners.contains(listener)) {
listeners.add(listener);
}
}
    /**
     * Remove a BFileDirEvents listener
     * @param listener the listener to remove
     */
public void removeListener(BFileDirEvents listener) {
listeners.remove(listener);
}
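    /**
     * Handles replies from the mote. Length-style replies store their value in
     * returnAmount and notify() the synchronized getter blocked in wait(1000) below;
     * exists/read-next/corruption replies are forwarded to registered listeners;
     * error replies print a message and terminate the process.
     */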
public synchronized void messageReceived(int to, Message m) {
BlackbookConnectMsg inMsg = (BlackbookConnectMsg) m;
switch(inMsg.get_cmd()) {
case Commands.REPLY_BFILEDIR_TOTALFILES:
returnAmount = inMsg.get_length();
notify();
break;
case Commands.REPLY_BFILEDIR_TOTALNODES:
returnAmount = inMsg.get_length();
notify();
break;
case Commands.REPLY_BFILEDIR_EXISTS:
for(Iterator it = listeners.iterator(); it.hasNext(); ) {
((BFileDirEvents) it.next()).existsCheckDone(inMsg.get_length() == 1, inMsg.get_result() == Commands.SUCCESS);
}
break;
case Commands.REPLY_BFILEDIR_READNEXT:
for(Iterator it = listeners.iterator(); it.hasNext(); ) {
((BFileDirEvents) it.next()).nextFile(Util.dataToFilename(inMsg.get_data()), inMsg.get_result() == Commands.SUCCESS);
}
break;
case Commands.REPLY_BFILEDIR_RESERVEDLENGTH:
returnAmount = inMsg.get_length();
notify();
break;
case Commands.REPLY_BFILEDIR_DATALENGTH:
returnAmount = inMsg.get_length();
notify();
break;
case Commands.REPLY_BFILEDIR_GETFREESPACE:
returnAmount = inMsg.get_length();
notify();
break;
case Commands.REPLY_BFILEDIR_CHECKCORRUPTION:
for(Iterator it = listeners.iterator(); it.hasNext(); ) {
((BFileDirEvents) it.next()).corruptionCheckDone(inMsg.get_length() == 1, inMsg.get_result() == Commands.SUCCESS);
}
break;
case Commands.ERROR_BFILEDIR_TOTALFILES:
case Commands.ERROR_BFILEDIR_TOTALNODES:
case Commands.ERROR_BFILEDIR_EXISTS:
case Commands.ERROR_BFILEDIR_READNEXT:
case Commands.ERROR_BFILEDIR_RESERVEDLENGTH:
case Commands.ERROR_BFILEDIR_DATALENGTH:
case Commands.ERROR_BFILEDIR_CHECKCORRUPTION:
case Commands.ERROR_BFILEDIR_READFIRST:
case Commands.ERROR_BFILEDIR_GETFREESPACE:
System.err.println("Command immediately failed");
System.exit(1);
default:
}
}
public synchronized short getTotalFiles() {
command.set_cmd(Commands.CMD_BFILEDIR_TOTALFILES);
send(command);
try {
wait(1000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return (short) returnAmount;
}
public synchronized int getTotalNodes() {
command.set_cmd(Commands.CMD_BFILEDIR_TOTALNODES);
send(command);
try {
wait(1000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return (int) returnAmount;
}
public synchronized long getFreeSpace() {
command.set_cmd(Commands.CMD_BFILEDIR_GETFREESPACE);
send(command);
try {
wait(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
return returnAmount;
}
public void checkExists(String fileName) {
command.set_cmd(Commands.CMD_BFILEDIR_EXISTS);
command.set_data(Util.filenameToData(fileName));
send(command);
}
public void readFirst() {
command.set_cmd(Commands.CMD_BFILEDIR_READFIRST);
send(command);
}
public void readNext(String presentFilename) {
command.set_cmd(Commands.CMD_BFILEDIR_READNEXT);
command.set_data(Util.filenameToData(presentFilename));
send(command);
}
public synchronized long getReservedLength(String fileName) {
command.set_cmd(Commands.CMD_BFILEDIR_RESERVEDLENGTH);
command.set_data(Util.filenameToData(fileName));
send(command);
try {
wait(1000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return returnAmount;
}
public synchronized long getDataLength(String fileName) {
command.set_cmd(Commands.CMD_BFILEDIR_DATALENGTH);
command.set_data(Util.filenameToData(fileName));
send(command);
try {
wait(1000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return returnAmount;
}
public void checkCorruption(String fileName) {
command.set_cmd(Commands.CMD_BFILEDIR_CHECKCORRUPTION);
command.set_data(Util.filenameToData(fileName));
send(command);
}
}
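// Illustrative usage (listener implementation and file name are assumptions):
//   BFileDir dir = new BFileDir();
//   dir.addListener(myBFileDirEvents);
//   short totalFiles = dir.getTotalFiles(); // blocks up to ~1 second for the mote's reply
//   dir.checkExists("log.txt");             // result arrives via existsCheckDone(...)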
|
|
/*
* Copyright 2014 Red Hat, Inc.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
package io.vertx.ext.web.handler.sockjs;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.http.HttpMethod;
import io.vertx.core.http.WebSocketBase;
import io.vertx.test.core.TestUtils;
import org.junit.Test;
/**
* @author <a href="mailto:julien@julienviet.com">Julien Viet</a>
*/
public class SockJSWriteTest extends SockJSTestBase {
@Test
public void testRaw() throws Exception {
waitFor(2);
String expected = TestUtils.randomAlphaString(64);
socketHandler = () -> socket -> {
socket.write(Buffer.buffer(expected), onSuccess(v -> {
complete();
}));
};
startServers();
client.webSocket("/test/websocket", onSuccess(ws -> {
ws.handler(buffer -> {
if (buffer.toString().equals(expected)) {
complete();
}
});
}));
await();
}
@Test
public void testRawFailure() throws Exception {
String expected = TestUtils.randomAlphaString(64);
socketHandler = () -> socket -> {
socket.endHandler(v -> {
socket.write(Buffer.buffer(expected), onFailure(err -> {
testComplete();
}));
});
};
startServers();
client.webSocket("/test/websocket", onSuccess(WebSocketBase::close));
await();
}
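  // The transports exercised below rely on SockJS framing: a server write of "<payload>"
  // arrives as the JSON-array frame a["<payload>"], and the EventSource transport wraps
  // each frame as "data: <frame>\r\n\r\n", which is what the assertions check.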
@Test
public void testWebSocket() throws Exception {
waitFor(2);
String expected = TestUtils.randomAlphaString(64);
socketHandler = () -> socket -> {
socket.write(Buffer.buffer(expected), onSuccess(v -> {
complete();
}));
};
startServers();
client.webSocket("/test/400/8ne8e94a/websocket", onSuccess(ws -> {
ws.handler(buffer -> {
if (buffer.toString().equals("a[\"" + expected + "\"]")) {
complete();
}
});
}));
await();
}
@Test
public void testWebSocketFailure() throws Exception {
String expected = TestUtils.randomAlphaString(64);
socketHandler = () -> socket -> {
socket.endHandler(v -> {
socket.write(Buffer.buffer(expected), onFailure(err -> {
testComplete();
}));
});
};
startServers();
client.webSocket("/test/400/8ne8e94a/websocket", onSuccess(WebSocketBase::close));
await();
}
@Test
public void testEventSource() throws Exception {
waitFor(2);
String expected = TestUtils.randomAlphaString(64);
socketHandler = () -> socket -> {
socket.write(Buffer.buffer(expected), onSuccess(v -> {
complete();
}));
};
startServers();
client.request(HttpMethod.GET, "/test/400/8ne8e94a/eventsource")
.onComplete(onSuccess(req -> req.send(onSuccess(resp -> {
resp.handler(buffer -> {
if (buffer.toString().equals("data: a[\"" + expected + "\"]\r\n\r\n")) {
complete();
}
});
}))));
await();
}
@Test
public void testEventSourceFailure() throws Exception {
String expected = TestUtils.randomAlphaString(64);
socketHandler = () -> socket -> {
socket.endHandler(v -> {
socket.write(Buffer.buffer(expected), onFailure(err -> {
testComplete();
}));
});
};
startServers();
client.request(HttpMethod.GET, "/test/400/8ne8e94a/eventsource")
.onComplete(onSuccess(req -> req.send(onSuccess(resp -> {
req.connection().close();
}))));
await();
}
@Test
public void testXHRStreaming() throws Exception {
waitFor(2);
String expected = TestUtils.randomAlphaString(64);
socketHandler = () -> socket -> {
socket.write(Buffer.buffer(expected), onSuccess(v -> {
complete();
}));
};
startServers();
client.request(HttpMethod.POST, "/test/400/8ne8e94a/xhr_streaming")
.onComplete(onSuccess(req -> req.send(Buffer.buffer(), onSuccess(resp -> {
assertEquals(200, resp.statusCode());
resp.handler(buffer -> {
if (buffer.toString().equals("a[\"" + expected + "\"]\n")) {
complete();
}
});
}))));
await();
}
@Test
public void testXHRStreamingFailure() throws Exception {
String expected = TestUtils.randomAlphaString(64);
socketHandler = () -> socket -> {
socket.endHandler(v -> {
socket.write(Buffer.buffer(expected), onFailure(err -> {
testComplete();
}));
});
};
startServers();
client.request(HttpMethod.POST, "/test/400/8ne8e94a/xhr_streaming")
.onComplete(onSuccess(req -> req.send(onSuccess(resp -> {
req.connection().close();
}))));
await();
}
@Test
public void testXHRPolling() throws Exception {
waitFor(2);
String expected = TestUtils.randomAlphaString(64);
socketHandler = () -> socket -> {
socket.write(Buffer.buffer(expected), onSuccess(v -> {
complete();
}));
};
startServers();
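    // XHR polling delivers at most one frame per poll request, so task[0] keeps
    // re-issuing the poll until a response carries the expected a["..."] frame.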
Runnable[] task = new Runnable[1];
task[0] = () ->
client.request(HttpMethod.POST, "/test/400/8ne8e94a/xhr")
.onComplete(onSuccess(req -> req.send(Buffer.buffer(), onSuccess(resp -> {
assertEquals(200, resp.statusCode());
resp.handler(buffer -> {
if (buffer.toString().equals("a[\"" + expected + "\"]\n")) {
complete();
} else {
task[0].run();
}
});
}))));
task[0].run();
await();
}
@Test
public void testXHRPollingClose() throws Exception {
    // Takes 5 seconds, which is the heartbeat timeout
waitFor(3);
String expected = TestUtils.randomAlphaString(64);
socketHandler = () -> socket -> {
socket.write(Buffer.buffer(expected), onFailure(err -> {
complete();
}));
socket.endHandler(v -> {
socket.write(Buffer.buffer(expected), onFailure(err -> {
complete();
}));
});
socket.close();
};
startServers();
client.request(HttpMethod.POST, "/test/400/8ne8e94a/xhr")
.onComplete(onSuccess(req -> req.send(onSuccess(resp -> {
assertEquals(200, resp.statusCode());
complete();
}))));
await();
}
@Test
public void testXHRPollingShutdown() throws Exception {
    // Takes 5 seconds, which is the heartbeat timeout
waitFor(3);
String expected = TestUtils.randomAlphaString(64);
socketHandler = () -> socket -> {
socket.write(Buffer.buffer(expected), onFailure(err -> {
complete();
}));
socket.endHandler(v -> {
socket.write(Buffer.buffer(expected), onFailure(err -> {
complete();
}));
});
};
startServers();
client.request(HttpMethod.POST, "/test/400/8ne8e94a/xhr")
.onComplete(onSuccess(req -> req.send(onSuccess(resp -> {
assertEquals(200, resp.statusCode());
complete();
}))));
await();
}
}
|
|
package org.apereo.cas.support.saml;
import org.apereo.cas.services.ServicesManager;
import org.apereo.cas.support.saml.services.SamlRegisteredService;
import org.apereo.cas.support.saml.services.idp.metadata.SamlRegisteredServiceServiceProviderMetadataFacade;
import org.apereo.cas.support.saml.services.idp.metadata.cache.SamlRegisteredServiceCachingMetadataResolver;
import org.apereo.cas.util.CollectionUtils;
import lombok.SneakyThrows;
import lombok.experimental.UtilityClass;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import net.shibboleth.utilities.java.support.resolver.CriteriaSet;
import org.apache.commons.lang3.StringUtils;
import org.opensaml.core.criterion.EntityIdCriterion;
import org.opensaml.messaging.context.MessageContext;
import org.opensaml.saml.common.SAMLObject;
import org.opensaml.saml.common.messaging.context.SAMLEndpointContext;
import org.opensaml.saml.common.messaging.context.SAMLPeerEntityContext;
import org.opensaml.saml.common.xml.SAMLConstants;
import org.opensaml.saml.criterion.BindingCriterion;
import org.opensaml.saml.criterion.EntityRoleCriterion;
import org.opensaml.saml.metadata.resolver.ChainingMetadataResolver;
import org.opensaml.saml.metadata.resolver.MetadataResolver;
import org.opensaml.saml.metadata.resolver.RoleDescriptorResolver;
import org.opensaml.saml.metadata.resolver.impl.PredicateRoleDescriptorResolver;
import org.opensaml.saml.saml2.core.AuthnRequest;
import org.opensaml.saml.saml2.core.LogoutRequest;
import org.opensaml.saml.saml2.core.NameIDPolicy;
import org.opensaml.saml.saml2.core.RequestAbstractType;
import org.opensaml.saml.saml2.core.StatusResponseType;
import org.opensaml.saml.saml2.metadata.AssertionConsumerService;
import org.opensaml.saml.saml2.metadata.Endpoint;
import org.opensaml.saml.saml2.metadata.SPSSODescriptor;
import org.opensaml.saml.saml2.metadata.impl.AssertionConsumerServiceBuilder;
import java.util.Optional;
import java.util.stream.Collectors;
/**
* This is {@link SamlIdPUtils}.
*
* @author Misagh Moayyed
* @since 5.0.0
*/
@Slf4j
@UtilityClass
public class SamlIdPUtils {
/**
* Prepare peer entity saml endpoint.
*
* @param request the authn request
* @param outboundContext the outbound context
* @param adaptor the adaptor
* @param binding the binding
* @throws SamlException the saml exception
*/
public static void preparePeerEntitySamlEndpointContext(final RequestAbstractType request,
final MessageContext outboundContext,
final SamlRegisteredServiceServiceProviderMetadataFacade adaptor,
final String binding) throws SamlException {
val entityId = adaptor.getEntityId();
if (!adaptor.containsAssertionConsumerServices()) {
throw new SamlException("No assertion consumer service could be found for entity " + entityId);
}
val peerEntityContext = outboundContext.getSubcontext(SAMLPeerEntityContext.class, true);
if (peerEntityContext == null) {
throw new SamlException("SAMLPeerEntityContext could not be defined for entity " + entityId);
}
peerEntityContext.setEntityId(entityId);
val endpointContext = peerEntityContext.getSubcontext(SAMLEndpointContext.class, true);
if (endpointContext == null) {
throw new SamlException("SAMLEndpointContext could not be defined for entity " + entityId);
}
val endpoint = determineEndpointForRequest(request, adaptor, binding);
LOGGER.debug("Configured peer entity endpoint to be [{}] with binding [{}]", endpoint.getLocation(), endpoint.getBinding());
endpointContext.setEndpoint(endpoint);
}
    /**
     * Determines the endpoint to use for the request: the single logout service for
     * logout requests, otherwise the assertion consumer service.
     *
     * @param authnRequest the authn request
     * @param adaptor      the adaptor
     * @param binding      the binding
     * @return the resolved endpoint
     */
public static Endpoint determineEndpointForRequest(final RequestAbstractType authnRequest,
final SamlRegisteredServiceServiceProviderMetadataFacade adaptor,
final String binding) {
var endpoint = (Endpoint) null;
if (authnRequest instanceof LogoutRequest) {
endpoint = adaptor.getSingleLogoutService(binding);
} else {
val acsEndpointFromReq = getAssertionConsumerServiceFromRequest(authnRequest, binding);
val acsEndpointFromMetadata = adaptor.getAssertionConsumerService(binding);
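            // A signed request may dictate its own ACS endpoint; for an unsigned request the
            // ACS from the request must match one of the ACS locations in the SP metadata.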
if (acsEndpointFromReq != null) {
if (authnRequest.isSigned()) {
endpoint = acsEndpointFromReq;
} else {
if (acsEndpointFromMetadata == null
|| !adaptor.getAssertionConsumerServiceLocations(binding).contains(acsEndpointFromReq.getLocation())) {
throw new SamlException(String.format("Assertion consumer service from unsigned request [%s], does not match ACS from SP metadata [%s]",
acsEndpointFromReq.getLocation(), adaptor.getAssertionConsumerServiceLocations(binding)));
}
endpoint = acsEndpointFromReq;
}
} else {
endpoint = acsEndpointFromMetadata;
}
}
if (endpoint == null || StringUtils.isBlank(endpoint.getBinding())) {
throw new SamlException("Endpoint for "
+ authnRequest.getSchemaType().toString()
+ " is not available or does not define a binding for " + binding);
}
val location = StringUtils.isBlank(endpoint.getResponseLocation()) ? endpoint.getLocation() : endpoint.getResponseLocation();
if (StringUtils.isBlank(location)) {
throw new SamlException("Endpoint for"
+ authnRequest.getSchemaType().toString()
+ " does not define a target location for " + binding);
}
return endpoint;
}
/**
* Gets chaining metadata resolver for all saml services.
*
* @param servicesManager the services manager
* @param entityID the entity id
* @param resolver the resolver
* @return the chaining metadata resolver for all saml services
*/
@SneakyThrows
public static MetadataResolver getMetadataResolverForAllSamlServices(final ServicesManager servicesManager,
final String entityID,
final SamlRegisteredServiceCachingMetadataResolver resolver) {
val registeredServices = servicesManager.findServiceBy(SamlRegisteredService.class::isInstance);
val chainingMetadataResolver = new ChainingMetadataResolver();
val resolvers = registeredServices.stream()
.filter(SamlRegisteredService.class::isInstance)
.map(SamlRegisteredService.class::cast)
.map(s -> SamlRegisteredServiceServiceProviderMetadataFacade.get(resolver, s, entityID))
.filter(Optional::isPresent)
.map(Optional::get)
.map(SamlRegisteredServiceServiceProviderMetadataFacade::getMetadataResolver)
.collect(Collectors.toList());
LOGGER.debug("Located [{}] metadata resolvers to match against [{}]", resolvers, entityID);
chainingMetadataResolver.setResolvers(resolvers);
chainingMetadataResolver.setId(entityID);
chainingMetadataResolver.initialize();
return chainingMetadataResolver;
}
    /**
     * Gets the assertion consumer service for the given authentication request.
     *
     * @param authnRequest    the authn request
     * @param servicesManager the services manager
     * @param resolver        the resolver
     * @return the assertion consumer service
     */
public static AssertionConsumerService getAssertionConsumerServiceFor(final AuthnRequest authnRequest,
final ServicesManager servicesManager,
final SamlRegisteredServiceCachingMetadataResolver resolver) {
try {
val acs = new AssertionConsumerServiceBuilder().buildObject();
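            // If the request references an ACS by index, resolve it from the SP metadata;
            // otherwise build the ACS from the URL and binding carried in the request itself.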
if (authnRequest.getAssertionConsumerServiceIndex() != null) {
val issuer = getIssuerFromSamlRequest(authnRequest);
val samlResolver = getMetadataResolverForAllSamlServices(servicesManager, issuer, resolver);
val criteriaSet = new CriteriaSet();
criteriaSet.add(new EntityIdCriterion(issuer));
criteriaSet.add(new EntityRoleCriterion(SPSSODescriptor.DEFAULT_ELEMENT_NAME));
criteriaSet.add(new BindingCriterion(CollectionUtils.wrap(SAMLConstants.SAML2_POST_BINDING_URI)));
val it = samlResolver.resolve(criteriaSet);
it.forEach(entityDescriptor -> {
val spssoDescriptor = entityDescriptor.getSPSSODescriptor(SAMLConstants.SAML20P_NS);
val acsEndpoints = spssoDescriptor.getAssertionConsumerServices();
if (acsEndpoints.isEmpty()) {
throw new IllegalArgumentException("Metadata resolved for entity id " + issuer + " has no defined ACS endpoints");
}
val acsIndex = authnRequest.getAssertionConsumerServiceIndex();
if (acsIndex + 1 > acsEndpoints.size()) {
throw new IllegalArgumentException("AssertionConsumerService index specified in the request " + acsIndex + " is invalid "
+ "since the total endpoints available to " + issuer + " is " + acsEndpoints.size());
}
val foundAcs = acsEndpoints.get(acsIndex);
acs.setBinding(foundAcs.getBinding());
acs.setLocation(foundAcs.getLocation());
acs.setResponseLocation(foundAcs.getResponseLocation());
acs.setIndex(acsIndex);
});
} else {
acs.setBinding(authnRequest.getProtocolBinding());
acs.setLocation(authnRequest.getAssertionConsumerServiceURL());
acs.setResponseLocation(authnRequest.getAssertionConsumerServiceURL());
acs.setIndex(0);
acs.setIsDefault(Boolean.TRUE);
}
LOGGER.debug("Resolved AssertionConsumerService from the request is [{}]", acs);
if (StringUtils.isBlank(acs.getBinding())) {
throw new SamlException("AssertionConsumerService has no protocol binding defined");
}
if (StringUtils.isBlank(acs.getLocation()) && StringUtils.isBlank(acs.getResponseLocation())) {
throw new SamlException("AssertionConsumerServicAcceptableUsagePolicySubmitActione has no location or response location defined");
}
return acs;
} catch (final Exception e) {
throw new IllegalArgumentException(new SamlException(e.getMessage(), e));
}
}
/**
* Gets issuer from saml object.
*
* @param object the object
* @return the issuer from saml object
*/
public static String getIssuerFromSamlObject(final SAMLObject object) {
if (object instanceof RequestAbstractType) {
return RequestAbstractType.class.cast(object).getIssuer().getValue();
}
if (object instanceof StatusResponseType) {
return StatusResponseType.class.cast(object).getIssuer().getValue();
}
return null;
}
/**
* Gets role descriptor resolver.
*
* @param adaptor the adaptor
* @param requireValidMetadata the require valid metadata
* @return the role descriptor resolver
* @throws Exception the exception
*/
public static RoleDescriptorResolver getRoleDescriptorResolver(final SamlRegisteredServiceServiceProviderMetadataFacade adaptor,
final boolean requireValidMetadata) throws Exception {
return getRoleDescriptorResolver(adaptor.getMetadataResolver(), requireValidMetadata);
}
/**
* Gets role descriptor resolver.
*
* @param metadata the metadata
* @param requireValidMetadata the require valid metadata
* @return the role descriptor resolver
* @throws Exception the exception
*/
public static RoleDescriptorResolver getRoleDescriptorResolver(final MetadataResolver metadata,
final boolean requireValidMetadata) throws Exception {
val roleDescriptorResolver = new PredicateRoleDescriptorResolver(metadata);
roleDescriptorResolver.setSatisfyAnyPredicates(true);
roleDescriptorResolver.setUseDefaultPredicateRegistry(true);
roleDescriptorResolver.setRequireValidMetadata(requireValidMetadata);
roleDescriptorResolver.initialize();
return roleDescriptorResolver;
}
/**
* Gets name id policy.
*
* @param authnRequest the authn request
* @return the name id policy
*/
public static Optional<NameIDPolicy> getNameIDPolicy(final RequestAbstractType authnRequest) {
if (authnRequest instanceof AuthnRequest) {
return Optional.ofNullable(AuthnRequest.class.cast(authnRequest).getNameIDPolicy());
}
return Optional.empty();
}
private static AssertionConsumerService getAssertionConsumerServiceFromRequest(final RequestAbstractType authnRequest, final String binding) {
if (authnRequest instanceof AuthnRequest) {
val acsUrl = AuthnRequest.class.cast(authnRequest).getAssertionConsumerServiceURL();
if (StringUtils.isBlank(acsUrl)) {
return null;
}
LOGGER.debug("Fetched assertion consumer service url [{}] with binding [{}] from authentication request", acsUrl, binding);
val builder = new AssertionConsumerServiceBuilder();
val endpoint = builder.buildObject(AssertionConsumerService.DEFAULT_ELEMENT_NAME);
endpoint.setBinding(binding);
endpoint.setResponseLocation(acsUrl);
endpoint.setLocation(acsUrl);
return endpoint;
}
return null;
}
/**
* Gets issuer from saml request.
*
* @param request the request
* @return the issuer from saml request
*/
private static String getIssuerFromSamlRequest(final RequestAbstractType request) {
return request.getIssuer().getValue();
}
}
|
|
/*
* The Apache Software License, Version 1.1
*
* Copyright (c) 2001-2003 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "Ant" and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
package ccm.libs.org.codehaus.plexus.util;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
/**
* Condition that tests the OS type.
*
* @author Stefan Bodewig
* @author Magesh Umasankar
* @author Brian Fox
* @since 1.0
* @version $Revision$
*/
public class Os
{
// define the families for easier reference
public static final String FAMILY_DOS = "dos";
public static final String FAMILY_MAC = "mac";
public static final String FAMILY_NETWARE = "netware";
public static final String FAMILY_OS2 = "os/2";
public static final String FAMILY_TANDEM = "tandem";
public static final String FAMILY_UNIX = "unix";
public static final String FAMILY_WINDOWS = "windows";
public static final String FAMILY_WIN9X = "win9x";
public static final String FAMILY_ZOS = "z/os";
public static final String FAMILY_OS400 = "os/400";
public static final String FAMILY_OPENVMS = "openvms";
// store the valid families
private static final Set<String> validFamilies = setValidFamilies();
// get the current info
private static final String PATH_SEP = System.getProperty( "path.separator" );
public static final String OS_NAME = System.getProperty( "os.name" ).toLowerCase( Locale.US );
public static final String OS_ARCH = System.getProperty( "os.arch" ).toLowerCase( Locale.US );
public static final String OS_VERSION = System.getProperty( "os.version" ).toLowerCase( Locale.US );
// Make sure this method is called after static fields it depends on have been set!
public static final String OS_FAMILY = getOsFamily();
private String family;
private String name;
private String version;
private String arch;
/**
* Default constructor
*/
public Os()
{
}
/**
* Constructor that sets the family attribute
*
* @param family a String value
*/
public Os( String family )
{
setFamily( family );
}
/**
* Initializes the set of valid families.
*/
private static Set<String> setValidFamilies()
{
Set<String> valid = new HashSet<String>();
valid.add( FAMILY_DOS );
valid.add( FAMILY_MAC );
valid.add( FAMILY_NETWARE );
valid.add( FAMILY_OS2 );
valid.add( FAMILY_TANDEM );
valid.add( FAMILY_UNIX );
valid.add( FAMILY_WINDOWS );
valid.add( FAMILY_WIN9X );
valid.add( FAMILY_ZOS );
valid.add( FAMILY_OS400 );
valid.add( FAMILY_OPENVMS );
return valid;
}
/**
* Sets the desired OS family type
*
* @param f The OS family type desired<br />
* Possible values:<br />
* <ul>
* <li>dos</li>
* <li>mac</li>
* <li>netware</li>
* <li>os/2</li>
* <li>tandem</li>
* <li>unix</li>
* <li>windows</li>
* <li>win9x</li>
* <li>z/os</li>
* <li>os/400</li>
* <li>openvms</li>
* </ul>
*/
public void setFamily( String f )
{
family = f.toLowerCase( Locale.US );
}
/**
* Sets the desired OS name
*
* @param name The OS name
*/
public void setName( String name )
{
this.name = name.toLowerCase( Locale.US );
}
/**
* Sets the desired OS architecture
*
* @param arch The OS architecture
*/
public void setArch( String arch )
{
this.arch = arch.toLowerCase( Locale.US );
}
/**
* Sets the desired OS version
*
* @param version The OS version
*/
public void setVersion( String version )
{
this.version = version.toLowerCase( Locale.US );
}
/**
     * Determines if the current OS matches the family
     * previously set via setFamily.
*
* @see Os#setFamily(String)
*/
public boolean eval()
throws Exception
{
return isOs( family, name, arch, version );
}
/**
* Determines if the current OS matches the given OS
* family.
*
* @param family the family to check for
* @return true if the OS matches
* @since 1.0
*/
public static boolean isFamily( String family )
{
return isOs( family, null, null, null );
}
/**
* Determines if the current OS matches the given OS
* name.
*
* @param name the OS name to check for
* @return true if the OS matches
* @since 1.0
*/
public static boolean isName( String name )
{
return isOs( null, name, null, null );
}
/**
* Determines if the current OS matches the given OS
* architecture.
*
* @param arch the OS architecture to check for
* @return true if the OS matches
* @since 1.0
*/
public static boolean isArch( String arch )
{
return isOs( null, null, arch, null );
}
/**
* Determines if the current OS matches the given OS
* version.
*
* @param version the OS version to check for
* @return true if the OS matches
* @since 1.0
*/
public static boolean isVersion( String version )
{
return isOs( null, null, null, version );
}
/**
* Determines if the current OS matches the given OS
* family, name, architecture and version.
*
     * The name, architecture and version are compared to
     * the system properties os.name, os.arch and os.version
     * in a case-independent way.
*
* @param family The OS family
* @param name The OS name
* @param arch The OS architecture
* @param version The OS version
* @return true if the OS matches
* @since 1.0
*/
public static boolean isOs( String family, String name, String arch, String version )
{
boolean retValue = false;
if ( family != null || name != null || arch != null || version != null )
{
boolean isFamily = true;
boolean isName = true;
boolean isArch = true;
boolean isVersion = true;
if ( family != null )
{
if ( family.equalsIgnoreCase( FAMILY_WINDOWS ) )
{
isFamily = OS_NAME.indexOf( FAMILY_WINDOWS ) > -1;
}
else if ( family.equalsIgnoreCase( FAMILY_OS2 ) )
{
isFamily = OS_NAME.indexOf( FAMILY_OS2 ) > -1;
}
else if ( family.equalsIgnoreCase( FAMILY_NETWARE ) )
{
isFamily = OS_NAME.indexOf( FAMILY_NETWARE ) > -1;
}
else if ( family.equalsIgnoreCase( FAMILY_DOS ) )
{
isFamily = PATH_SEP.equals( ";" ) && !isFamily( FAMILY_NETWARE );
}
else if ( family.equalsIgnoreCase( FAMILY_MAC ) )
{
isFamily = OS_NAME.indexOf( FAMILY_MAC ) > -1;
}
else if ( family.equalsIgnoreCase( FAMILY_TANDEM ) )
{
isFamily = OS_NAME.indexOf( "nonstop_kernel" ) > -1;
}
else if ( family.equalsIgnoreCase( FAMILY_UNIX ) )
{
isFamily = PATH_SEP.equals( ":" ) && !isFamily( FAMILY_OPENVMS )
&& ( !isFamily( FAMILY_MAC ) || OS_NAME.endsWith( "x" ) );
}
else if ( family.equalsIgnoreCase( FAMILY_WIN9X ) )
{
isFamily = isFamily( FAMILY_WINDOWS )
&& ( OS_NAME.indexOf( "95" ) >= 0 || OS_NAME.indexOf( "98" ) >= 0
|| OS_NAME.indexOf( "me" ) >= 0 || OS_NAME.indexOf( "ce" ) >= 0 );
}
else if ( family.equalsIgnoreCase( FAMILY_ZOS ) )
{
isFamily = OS_NAME.indexOf( FAMILY_ZOS ) > -1 || OS_NAME.indexOf( "os/390" ) > -1;
}
else if ( family.equalsIgnoreCase( FAMILY_OS400 ) )
{
isFamily = OS_NAME.indexOf( FAMILY_OS400 ) > -1;
}
else if ( family.equalsIgnoreCase( FAMILY_OPENVMS ) )
{
isFamily = OS_NAME.indexOf( FAMILY_OPENVMS ) > -1;
}
else
{
isFamily = OS_NAME.indexOf( family.toLowerCase( Locale.US ) ) > -1;
}
}
if ( name != null )
{
isName = name.toLowerCase( Locale.US ).equals( OS_NAME );
}
if ( arch != null )
{
isArch = arch.toLowerCase( Locale.US ).equals( OS_ARCH );
}
if ( version != null )
{
isVersion = version.toLowerCase( Locale.US ).equals( OS_VERSION );
}
retValue = isFamily && isName && isArch && isVersion;
}
return retValue;
}
/**
* Helper method to determine the current OS family.
*
* @return name of current OS family.
* @since 1.4.2
*/
private static String getOsFamily()
{
// in case the order of static initialization is
// wrong, get the list
// safely.
Set<String> families = null;
if ( !validFamilies.isEmpty() )
{
families = validFamilies;
}
else
{
families = setValidFamilies();
}
for ( String fam : families )
{
if ( Os.isFamily( fam ) )
{
return fam;
}
}
return null;
}
/**
* Helper method to check if the given family is in the
* following list:
* <ul>
* <li>dos</li>
* <li>mac</li>
* <li>netware</li>
* <li>os/2</li>
* <li>tandem</li>
* <li>unix</li>
* <li>windows</li>
* <li>win9x</li>
* <li>z/os</li>
* <li>os/400</li>
* <li>openvms</li>
* </ul>
*
* @param theFamily the family to check.
* @return true if one of the valid families.
* @since 1.4.2
*/
public static boolean isValidFamily( String theFamily )
{
return ( validFamilies.contains( theFamily ) );
}
/**
* @return a copy of the valid families
* @since 1.4.2
*/
public static Set<String> getValidFamilies()
{
return new HashSet<String>( validFamilies );
}
}
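// Illustrative usage (not part of the class above):
//   if ( Os.isFamily( Os.FAMILY_WINDOWS ) ) { /* take the Windows-specific branch */ }
//   boolean isUnix = Os.isOs( Os.FAMILY_UNIX, null, null, null ); // family check only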
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.quicksight.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/quicksight-2018-04-01/DeleteTheme" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteThemeRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The ID of the Amazon Web Services account that contains the theme that you're deleting.
* </p>
*/
private String awsAccountId;
/**
* <p>
* An ID for the theme that you want to delete.
* </p>
*/
private String themeId;
/**
* <p>
* The version of the theme that you want to delete.
* </p>
* <p>
* <b>Note:</b> If you don't provide a version number, you're using this call to <code>DeleteTheme</code> to delete
* all versions of the theme.
* </p>
*/
private Long versionNumber;
/**
* <p>
* The ID of the Amazon Web Services account that contains the theme that you're deleting.
* </p>
*
* @param awsAccountId
* The ID of the Amazon Web Services account that contains the theme that you're deleting.
*/
public void setAwsAccountId(String awsAccountId) {
this.awsAccountId = awsAccountId;
}
/**
* <p>
* The ID of the Amazon Web Services account that contains the theme that you're deleting.
* </p>
*
* @return The ID of the Amazon Web Services account that contains the theme that you're deleting.
*/
public String getAwsAccountId() {
return this.awsAccountId;
}
/**
* <p>
* The ID of the Amazon Web Services account that contains the theme that you're deleting.
* </p>
*
* @param awsAccountId
* The ID of the Amazon Web Services account that contains the theme that you're deleting.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DeleteThemeRequest withAwsAccountId(String awsAccountId) {
setAwsAccountId(awsAccountId);
return this;
}
/**
* <p>
* An ID for the theme that you want to delete.
* </p>
*
* @param themeId
* An ID for the theme that you want to delete.
*/
public void setThemeId(String themeId) {
this.themeId = themeId;
}
/**
* <p>
* An ID for the theme that you want to delete.
* </p>
*
* @return An ID for the theme that you want to delete.
*/
public String getThemeId() {
return this.themeId;
}
/**
* <p>
* An ID for the theme that you want to delete.
* </p>
*
* @param themeId
* An ID for the theme that you want to delete.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DeleteThemeRequest withThemeId(String themeId) {
setThemeId(themeId);
return this;
}
/**
* <p>
* The version of the theme that you want to delete.
* </p>
* <p>
* <b>Note:</b> If you don't provide a version number, you're using this call to <code>DeleteTheme</code> to delete
* all versions of the theme.
* </p>
*
* @param versionNumber
* The version of the theme that you want to delete. </p>
* <p>
* <b>Note:</b> If you don't provide a version number, you're using this call to <code>DeleteTheme</code> to
* delete all versions of the theme.
*/
public void setVersionNumber(Long versionNumber) {
this.versionNumber = versionNumber;
}
/**
* <p>
* The version of the theme that you want to delete.
* </p>
* <p>
* <b>Note:</b> If you don't provide a version number, you're using this call to <code>DeleteTheme</code> to delete
* all versions of the theme.
* </p>
*
* @return The version of the theme that you want to delete. </p>
* <p>
* <b>Note:</b> If you don't provide a version number, you're using this call to <code>DeleteTheme</code> to
* delete all versions of the theme.
*/
public Long getVersionNumber() {
return this.versionNumber;
}
/**
* <p>
* The version of the theme that you want to delete.
* </p>
* <p>
* <b>Note:</b> If you don't provide a version number, you're using this call to <code>DeleteTheme</code> to delete
* all versions of the theme.
* </p>
*
* @param versionNumber
* The version of the theme that you want to delete. </p>
* <p>
* <b>Note:</b> If you don't provide a version number, you're using this call to <code>DeleteTheme</code> to
* delete all versions of the theme.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DeleteThemeRequest withVersionNumber(Long versionNumber) {
setVersionNumber(versionNumber);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getAwsAccountId() != null)
sb.append("AwsAccountId: ").append(getAwsAccountId()).append(",");
if (getThemeId() != null)
sb.append("ThemeId: ").append(getThemeId()).append(",");
if (getVersionNumber() != null)
sb.append("VersionNumber: ").append(getVersionNumber());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DeleteThemeRequest == false)
return false;
DeleteThemeRequest other = (DeleteThemeRequest) obj;
if (other.getAwsAccountId() == null ^ this.getAwsAccountId() == null)
return false;
if (other.getAwsAccountId() != null && other.getAwsAccountId().equals(this.getAwsAccountId()) == false)
return false;
if (other.getThemeId() == null ^ this.getThemeId() == null)
return false;
if (other.getThemeId() != null && other.getThemeId().equals(this.getThemeId()) == false)
return false;
if (other.getVersionNumber() == null ^ this.getVersionNumber() == null)
return false;
if (other.getVersionNumber() != null && other.getVersionNumber().equals(this.getVersionNumber()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getAwsAccountId() == null) ? 0 : getAwsAccountId().hashCode());
hashCode = prime * hashCode + ((getThemeId() == null) ? 0 : getThemeId().hashCode());
hashCode = prime * hashCode + ((getVersionNumber() == null) ? 0 : getVersionNumber().hashCode());
return hashCode;
}
@Override
public DeleteThemeRequest clone() {
return (DeleteThemeRequest) super.clone();
}
}
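// Illustrative usage (the client variable and identifiers below are assumptions, not part of this file):
//   DeleteThemeRequest request = new DeleteThemeRequest()
//           .withAwsAccountId("111122223333")
//           .withThemeId("example-theme-id")
//           .withVersionNumber(2L);
//   // A QuickSight client would then typically execute the call, e.g. quickSightClient.deleteTheme(request).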
|
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko;
import org.mozilla.gecko.db.BrowserContract;
import org.mozilla.gecko.util.GeckoAsyncTask;
import org.json.JSONArray;
import org.json.JSONException;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.os.SystemClock;
import android.util.Log;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public final class TabsAccessor {
private static final String LOGTAG = "GeckoTabsAccessor";
private static final String[] CLIENTS_AVAILABILITY_PROJECTION = new String[] {
BrowserContract.Clients.GUID
};
private static final String[] TABS_PROJECTION_COLUMNS = new String[] {
BrowserContract.Tabs.TITLE,
BrowserContract.Tabs.URL,
BrowserContract.Clients.GUID,
BrowserContract.Clients.NAME
};
// Projection column numbers
public static enum TABS_COLUMN {
TITLE,
URL,
GUID,
NAME
};
private static final String CLIENTS_SELECTION = BrowserContract.Clients.GUID + " IS NOT NULL";
private static final String TABS_SELECTION = BrowserContract.Tabs.CLIENT_GUID + " IS NOT NULL";
private static final String LOCAL_CLIENT_SELECTION = BrowserContract.Clients.GUID + " IS NULL";
private static final String LOCAL_TABS_SELECTION = BrowserContract.Tabs.CLIENT_GUID + " IS NULL";
public static class RemoteTab {
public String title;
public String url;
public String guid;
public String name;
}
public interface OnQueryTabsCompleteListener {
public void onQueryTabsComplete(List<RemoteTab> tabs);
}
public interface OnClientsAvailableListener {
public void areAvailable(boolean available);
}
// Helper method to check if there are any clients available
public static void areClientsAvailable(final Context context, final OnClientsAvailableListener listener) {
if (listener == null)
return;
(new GeckoAsyncTask<Void, Void, Boolean>(GeckoApp.mAppContext, GeckoAppShell.getHandler()) {
@Override
protected Boolean doInBackground(Void... unused) {
Uri uri = BrowserContract.Tabs.CONTENT_URI;
uri = uri.buildUpon()
.appendQueryParameter(BrowserContract.PARAM_LIMIT, "1")
.build();
Cursor cursor = context.getContentResolver().query(uri,
CLIENTS_AVAILABILITY_PROJECTION,
CLIENTS_SELECTION,
null,
null);
if (cursor == null)
return false;
try {
return cursor.moveToNext();
} finally {
cursor.close();
}
}
@Override
protected void onPostExecute(Boolean availability) {
listener.areAvailable(availability);
}
}).setPriority(GeckoAsyncTask.Priority.HIGH).execute();
}
// This method returns all tabs from all remote clients,
// ordered by most recent client first, most recent tab first
public static void getTabs(final Context context, final OnQueryTabsCompleteListener listener) {
getTabs(context, 0, listener);
}
    // This method returns a limited number of tabs from all remote clients,
// ordered by most recent client first, most recent tab first
public static void getTabs(final Context context, final int limit, final OnQueryTabsCompleteListener listener) {
// If there is no listener, no point in doing work.
if (listener == null)
return;
(new GeckoAsyncTask<Void, Void, List<RemoteTab>>(GeckoApp.mAppContext, GeckoAppShell.getHandler()) {
@Override
protected List<RemoteTab> doInBackground(Void... unused) {
Uri uri = BrowserContract.Tabs.CONTENT_URI;
if (limit > 0) {
uri = uri.buildUpon()
.appendQueryParameter(BrowserContract.PARAM_LIMIT, String.valueOf(limit))
.build();
}
Cursor cursor = context.getContentResolver().query(uri,
TABS_PROJECTION_COLUMNS,
TABS_SELECTION,
null,
null);
if (cursor == null)
return null;
RemoteTab tab;
final ArrayList<RemoteTab> tabs = new ArrayList<RemoteTab> ();
try {
while (cursor.moveToNext()) {
tab = new RemoteTab();
tab.title = cursor.getString(TABS_COLUMN.TITLE.ordinal());
tab.url = cursor.getString(TABS_COLUMN.URL.ordinal());
tab.guid = cursor.getString(TABS_COLUMN.GUID.ordinal());
tab.name = cursor.getString(TABS_COLUMN.NAME.ordinal());
tabs.add(tab);
}
} finally {
cursor.close();
}
return Collections.unmodifiableList(tabs);
}
@Override
protected void onPostExecute(List<RemoteTab> tabs) {
listener.onQueryTabsComplete(tabs);
}
}).execute();
}
// Updates the modified time of the local client with the current time.
private static void updateLocalClient(final ContentResolver cr) {
ContentValues values = new ContentValues();
values.put(BrowserContract.Clients.LAST_MODIFIED, System.currentTimeMillis());
cr.update(BrowserContract.Clients.CONTENT_URI, values, LOCAL_CLIENT_SELECTION, null);
}
// Deletes all local tabs.
private static void deleteLocalTabs(final ContentResolver cr) {
cr.delete(BrowserContract.Tabs.CONTENT_URI, LOCAL_TABS_SELECTION, null);
}
/**
* Tabs are positioned in the DB in the same order that they appear in the tabs param.
     * - URL should never be empty or null. Skip this tab if there's no URL.
     * - TITLE should always be a string, either a page title or empty.
* - LAST_USED should always be numeric.
* - FAVICON should be a URL or null.
* - HISTORY should be serialized JSON array of URLs.
* - POSITION should always be numeric.
* - CLIENT_GUID should always be null to represent the local client.
*/
private static void insertLocalTabs(final ContentResolver cr, final Iterable<Tab> tabs) {
// Reuse this for serializing individual history URLs as JSON.
JSONArray history = new JSONArray();
ArrayList<ContentValues> valuesToInsert = new ArrayList<ContentValues>();
int position = 0;
for (Tab tab : tabs) {
// Skip this tab if it has a null URL.
String url = tab.getURL();
if (url == null)
continue;
ContentValues values = new ContentValues();
values.put(BrowserContract.Tabs.URL, url);
values.put(BrowserContract.Tabs.TITLE, tab.getTitle());
values.put(BrowserContract.Tabs.LAST_USED, tab.getLastUsed());
String favicon = tab.getFaviconURL();
if (favicon != null)
values.put(BrowserContract.Tabs.FAVICON, favicon);
else
values.putNull(BrowserContract.Tabs.FAVICON);
// We don't have access to session history in Java, so for now, we'll
// just use a JSONArray that holds the most recent history item.
try {
history.put(0, tab.getURL());
values.put(BrowserContract.Tabs.HISTORY, history.toString());
} catch (JSONException e) {
Log.e(LOGTAG, "JSONException adding URL to tab history array", e);
}
values.put(BrowserContract.Tabs.POSITION, position++);
// A null client guid corresponds to the local client.
values.putNull(BrowserContract.Tabs.CLIENT_GUID);
valuesToInsert.add(values);
}
ContentValues[] valuesToInsertArray = valuesToInsert.toArray(new ContentValues[valuesToInsert.size()]);
cr.bulkInsert(BrowserContract.Tabs.CONTENT_URI, valuesToInsertArray);
}
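/*
* Worked example of the constraints documented above (illustrative, hypothetical values): a local
* tab at https://example.org titled "Example", last used at 1400000000000 and stored at position 0,
* would produce roughly this row:
*   URL = "https://example.org", TITLE = "Example", LAST_USED = 1400000000000, FAVICON = null,
*   HISTORY = "[\"https://example.org\"]", POSITION = 0, CLIENT_GUID = null (local client).
*/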
// Deletes all local tabs and replaces them with a new list of tabs.
public static synchronized void persistLocalTabs(final ContentResolver cr, final Iterable<Tab> tabs) {
Log.v(LOGTAG, "zerdatime " + SystemClock.uptimeMillis() + " - start of persistLocalTabs");
deleteLocalTabs(cr);
insertLocalTabs(cr, tabs);
updateLocalClient(cr);
Log.v(LOGTAG, "zerdatime " + SystemClock.uptimeMillis() + " - end of persistLocalTabs");
}
}
|
|
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.mapdemo;
import com.google.android.gms.maps.OnStreetViewPanoramaReadyCallback;
import com.google.android.gms.maps.StreetViewPanorama;
import com.google.android.gms.maps.SupportStreetViewPanoramaFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.StreetViewPanoramaCamera;
import com.google.android.gms.maps.model.StreetViewPanoramaLink;
import com.google.android.gms.maps.model.StreetViewPanoramaLocation;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.util.FloatMath;
import android.view.View;
import android.widget.SeekBar;
import android.widget.Toast;
/**
* This shows how to create an activity with access to all the options of a Street View
* panorama, which can be adjusted dynamically
*/
public class StreetViewPanoramaNavigationDemoActivity extends FragmentActivity {
// George St, Sydney
private static final LatLng SYDNEY = new LatLng(-33.87365, 151.20689);
// Cole St, San Fran
private static final LatLng SAN_FRAN = new LatLng(37.769263, -122.450727);
// Santorini, Greece
private static final String SANTORINI = "WddsUw1geEoAAAQIt9RnsQ";
// LatLng with no panorama
private static final LatLng INVALID = new LatLng(-45.125783, 151.276417);
/**
* The amount in degrees by which to scroll the camera
*/
private static final int PAN_BY_DEG = 30;
private static final float ZOOM_BY = 0.5f;
private StreetViewPanorama mStreetViewPanorama;
private SeekBar mCustomDurationBar;
@Override
protected void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.street_view_panorama_navigation_demo);
SupportStreetViewPanoramaFragment streetViewPanoramaFragment =
(SupportStreetViewPanoramaFragment)
getSupportFragmentManager().findFragmentById(R.id.streetviewpanorama);
streetViewPanoramaFragment.getStreetViewPanoramaAsync(
new OnStreetViewPanoramaReadyCallback() {
@Override
public void onStreetViewPanoramaReady(StreetViewPanorama panorama) {
mStreetViewPanorama = panorama;
// Only set the panorama to SYDNEY on startup (when no panoramas have been
// loaded which is when the savedInstanceState is null).
if (savedInstanceState == null) {
mStreetViewPanorama.setPosition(SYDNEY);
}
}
});
mCustomDurationBar = (SeekBar) findViewById(R.id.duration_bar);
}
/**
* When the panorama is not ready, it cannot be used. This should be called at
* all entry points that call methods on the Panorama API.
*/
private boolean checkReady() {
if (mStreetViewPanorama == null) {
Toast.makeText(this, R.string.panorama_not_ready, Toast.LENGTH_SHORT).show();
return false;
}
return true;
}
/**
* Called when the Go To San Fran button is clicked.
*/
public void onGoToSanFran(View view) {
if (!checkReady()) {
return;
}
mStreetViewPanorama.setPosition(SAN_FRAN, 30);
}
/**
* Called when the Animate To Sydney button is clicked.
*/
public void onGoToSydney(View view) {
if (!checkReady()) {
return;
}
mStreetViewPanorama.setPosition(SYDNEY);
}
/**
* Called when the Animate To Santorini button is clicked.
*/
public void onGoToSantorini(View view) {
if (!checkReady()) {
return;
}
mStreetViewPanorama.setPosition(SANTORINI);
}
/**
* Called when the Animate To Invalid button is clicked.
*/
public void onGoToInvalid(View view) {
if (!checkReady()) {
return;
}
mStreetViewPanorama.setPosition(INVALID);
}
public void onZoomIn(View view) {
if (!checkReady()) {
return;
}
mStreetViewPanorama.animateTo(
new StreetViewPanoramaCamera.Builder().zoom(
mStreetViewPanorama.getPanoramaCamera().zoom + ZOOM_BY)
.tilt(mStreetViewPanorama.getPanoramaCamera().tilt)
.bearing(mStreetViewPanorama.getPanoramaCamera().bearing)
.build(), getDuration());
}
public void onZoomOut(View view) {
if (!checkReady()) {
return;
}
mStreetViewPanorama.animateTo(
new StreetViewPanoramaCamera.Builder().zoom(
mStreetViewPanorama.getPanoramaCamera().zoom - ZOOM_BY)
.tilt(mStreetViewPanorama.getPanoramaCamera().tilt)
.bearing(mStreetViewPanorama.getPanoramaCamera().bearing)
.build(), getDuration());
}
public void onPanLeft(View view) {
if (!checkReady()) {
return;
}
mStreetViewPanorama.animateTo(
new StreetViewPanoramaCamera.Builder().zoom(
mStreetViewPanorama.getPanoramaCamera().zoom)
.tilt(mStreetViewPanorama.getPanoramaCamera().tilt)
.bearing(mStreetViewPanorama.getPanoramaCamera().bearing - PAN_BY_DEG)
.build(), getDuration());
}
public void onPanRight(View view) {
if (!checkReady()) {
return;
}
mStreetViewPanorama.animateTo(
new StreetViewPanoramaCamera.Builder().zoom(
mStreetViewPanorama.getPanoramaCamera().zoom)
.tilt(mStreetViewPanorama.getPanoramaCamera().tilt)
.bearing(mStreetViewPanorama.getPanoramaCamera().bearing + PAN_BY_DEG)
.build(), getDuration());
}
public void onPanUp(View view) {
if (!checkReady()) {
return;
}
float currentTilt = mStreetViewPanorama.getPanoramaCamera().tilt;
float newTilt = currentTilt + PAN_BY_DEG;
newTilt = (newTilt > 90) ? 90 : newTilt;
mStreetViewPanorama.animateTo(
new StreetViewPanoramaCamera.Builder()
.zoom(mStreetViewPanorama.getPanoramaCamera().zoom)
.tilt(newTilt)
.bearing(mStreetViewPanorama.getPanoramaCamera().bearing)
.build(), getDuration());
}
public void onPanDown(View view) {
if (!checkReady()) {
return;
}
float currentTilt = mStreetViewPanorama.getPanoramaCamera().tilt;
float newTilt = currentTilt - PAN_BY_DEG;
newTilt = (newTilt < -90) ? -90 : newTilt;
mStreetViewPanorama.animateTo(
new StreetViewPanoramaCamera.Builder()
.zoom(mStreetViewPanorama.getPanoramaCamera().zoom)
.tilt(newTilt)
.bearing(mStreetViewPanorama.getPanoramaCamera().bearing)
.build(), getDuration());
}
public void onRequestPosition(View view) {
if (!checkReady()){
return;
}
if (mStreetViewPanorama.getLocation() != null) {
Toast.makeText(view.getContext(), mStreetViewPanorama.getLocation().position.toString(),
Toast.LENGTH_SHORT).show();
}
}
public void onMovePosition(View view) {
StreetViewPanoramaLocation location = mStreetViewPanorama.getLocation();
StreetViewPanoramaCamera camera = mStreetViewPanorama.getPanoramaCamera();
if (location != null && location.links != null) {
StreetViewPanoramaLink link = findClosestLinkToBearing(location.links, camera.bearing);
mStreetViewPanorama.setPosition(link.panoId);
}
}
public static StreetViewPanoramaLink findClosestLinkToBearing(StreetViewPanoramaLink[] links,
float bearing) {
float minBearingDiff = 360;
StreetViewPanoramaLink closestLink = links[0];
for (StreetViewPanoramaLink link : links) {
if (minBearingDiff > findNormalizedDifference(bearing, link.bearing)) {
minBearingDiff = findNormalizedDifference(bearing, link.bearing);
closestLink = link;
}
}
return closestLink;
}
// Find the difference between angle a and b as a value between 0 and 180
public static float findNormalizedDifference(float a, float b) {
float diff = a - b;
float normalizedDiff = diff - (360.0f * FloatMath.floor(diff / 360.0f));
return (normalizedDiff < 180.0f) ? normalizedDiff : 360.0f - normalizedDiff;
}
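// Worked example (illustrative): findNormalizedDifference(10f, 350f) gives diff = -340, which the
// modulo step wraps to 20, so 20 is returned; findNormalizedDifference(350f, 10f) gives diff = 340,
// and since 340 >= 180 the result is 360 - 340 = 20. Both directions agree, as expected.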
private long getDuration() {
return mCustomDurationBar.getProgress();
}
}
|
|
package org.ethereum.net.p2p;
import org.ethereum.core.Block;
import org.ethereum.core.Transaction;
import org.ethereum.manager.WorldManager;
import org.ethereum.net.MessageQueue;
import org.ethereum.net.client.Capability;
import org.ethereum.net.eth.EthHandler;
import org.ethereum.net.eth.EthMessageCodes;
import org.ethereum.net.eth.NewBlockMessage;
import org.ethereum.net.eth.TransactionsMessage;
import org.ethereum.net.message.ReasonCode;
import org.ethereum.net.message.StaticMessages;
import org.ethereum.net.peerdiscovery.PeerInfo;
import org.ethereum.net.rlpx.HandshakeHelper;
import org.ethereum.net.server.Channel;
import org.ethereum.net.shh.ShhHandler;
import org.ethereum.net.shh.ShhMessageCodes;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import org.ethereum.net.swarm.bzz.BzzHandler;
import org.ethereum.net.swarm.bzz.BzzMessageCodes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import static org.ethereum.net.message.StaticMessages.*;
/**
* Process the basic protocol messages between every peer on the network.
*
* Peers can send/receive
* <ul>
* <li>HELLO : Announce themselves to the network</li>
* <li>DISCONNECT : Disconnect themselves from the network</li>
* <li>GET_PEERS : Request a list of other known peers</li>
* <li>PEERS : Send a list of known peers</li>
* <li>PING : Check if another peer is still alive</li>
* <li>PONG : Confirm that they themselves are still alive</li>
* </ul>
*/
@Component
@Scope("prototype")
public class P2pHandler extends SimpleChannelInboundHandler<P2pMessage> {
public final static byte VERSION = 4;
private final static Logger logger = LoggerFactory.getLogger("net");
private final Timer timer = new Timer("MessageTimer");
private MessageQueue msgQueue;
private boolean tearDown = false;
private boolean peerDiscoveryMode = false;
private HelloMessage handshakeHelloMessage = null;
private Set<PeerInfo> lastPeersSent;
@Autowired
WorldManager worldManager;
private Channel channel;
public P2pHandler() {
this.peerDiscoveryMode = false;
}
public P2pHandler(MessageQueue msgQueue, boolean peerDiscoveryMode) {
this.msgQueue = msgQueue;
this.peerDiscoveryMode = peerDiscoveryMode;
}
public void setWorldManager(WorldManager worldManager) {
this.worldManager = worldManager;
}
public void setPeerDiscoveryMode(boolean peerDiscoveryMode) {
this.peerDiscoveryMode = peerDiscoveryMode;
}
@Override
public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
logger.info("P2P protocol activated");
msgQueue.activate(ctx);
worldManager.getListener().trace("P2P protocol activated");
startTimers();
}
@Override
public void channelRead0(final ChannelHandlerContext ctx, P2pMessage msg) throws InterruptedException {
if (P2pMessageCodes.inRange(msg.getCommand().asByte()))
logger.trace("P2PHandler invoke: [{}]", msg.getCommand());
worldManager.getListener().trace(String.format("P2PHandler invoke: [%s]", msg.getCommand()));
switch (msg.getCommand()) {
case HELLO:
msgQueue.receivedMessage(msg);
setHandshake((HelloMessage) msg, ctx);
// sendGetPeers();
break;
case DISCONNECT:
msgQueue.receivedMessage(msg);
channel.getNodeStatistics().nodeDisconnectedRemote(((DisconnectMessage) msg).getReason());
break;
case PING:
msgQueue.receivedMessage(msg);
ctx.writeAndFlush(PONG_MESSAGE);
break;
case PONG:
msgQueue.receivedMessage(msg);
break;
case GET_PEERS:
msgQueue.receivedMessage(msg);
sendPeers(); // todo: implement session management for peer request
break;
case PEERS:
msgQueue.receivedMessage(msg);
processPeers(ctx, (PeersMessage) msg);
if (peerDiscoveryMode &&
!handshakeHelloMessage.getCapabilities().contains(Capability.ETH)) {
disconnect(ReasonCode.REQUESTED);
killTimers();
ctx.close().sync();
ctx.disconnect().sync();
}
break;
default:
ctx.fireChannelRead(msg);
break;
}
}
private void disconnect(ReasonCode reasonCode) {
msgQueue.sendMessage(new DisconnectMessage(reasonCode));
channel.getNodeStatistics().nodeDisconnectedLocal(reasonCode);
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
logger.info("channel inactive: ", ctx.toString());
this.killTimers();
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
logger.error("P2p handling failed", cause);
super.exceptionCaught(ctx, cause);
ctx.close();
killTimers();
}
private void processPeers(ChannelHandlerContext ctx, PeersMessage peersMessage) {
worldManager.getPeerDiscovery().addPeers(peersMessage.getPeers());
}
private void sendGetPeers() {
msgQueue.sendMessage(StaticMessages.GET_PEERS_MESSAGE);
}
private void sendPeers() {
Set<PeerInfo> peers = worldManager.getPeerDiscovery().getPeers();
if (lastPeersSent != null && peers.equals(lastPeersSent)) {
logger.info("No new peers discovered don't answer for GetPeers");
return;
}
Set<Peer> peerSet = new HashSet<>();
for (PeerInfo peer : peers) {
peerSet.add(new Peer(peer.getAddress(), peer.getPort(), peer.getPeerId()));
}
PeersMessage msg = new PeersMessage(peerSet);
lastPeersSent = peers;
msgQueue.sendMessage(msg);
}
public void setHandshake(HelloMessage msg, ChannelHandlerContext ctx) {
channel.getNodeStatistics().setClientId(msg.getClientId());
this.handshakeHelloMessage = msg;
if (msg.getP2PVersion() != VERSION) {
disconnect(ReasonCode.INCOMPATIBLE_PROTOCOL);
}
else {
List<Capability> capInCommon = HandshakeHelper.getSupportedCapabilities(msg);
channel.getMessageCodesResolver().init(capInCommon);
for (Capability capability : capInCommon) {
if (capability.getName().equals(Capability.ETH) &&
capability.getVersion() == EthHandler.VERSION) {
// Activate EthHandler for this peer
EthHandler ethHandler = channel.getEthHandler();
ethHandler.setPeerId(msg.getPeerId());
ctx.pipeline().addLast(Capability.ETH, ethHandler);
ethHandler.activate();
} else if
(capability.getName().equals(Capability.SHH) &&
capability.getVersion() == ShhHandler.VERSION) {
// Activate ShhHandler for this peer
ShhHandler shhHandler = channel.getShhHandler();
ctx.pipeline().addLast(Capability.SHH, shhHandler);
shhHandler.activate();
} else if
(capability.getName().equals(Capability.BZZ) &&
capability.getVersion() == BzzHandler.VERSION) {
// Activate BzzHandler for this peer
BzzHandler bzzHandler = channel.getBzzHandler();
ctx.pipeline().addLast(Capability.BZZ, bzzHandler);
bzzHandler.activate();
}
}
InetAddress address = ((InetSocketAddress) ctx.channel().remoteAddress()).getAddress();
int port = msg.getListenPort();
PeerInfo confirmedPeer = new PeerInfo(address, port, msg.getPeerId());
confirmedPeer.setOnline(false);
confirmedPeer.getCapabilities().addAll(msg.getCapabilities());
//todo calculate the Offsets
worldManager.getPeerDiscovery().getPeers().add(confirmedPeer);
worldManager.getListener().onHandShakePeer(msg);
}
}
/**
* Submits a transaction to the network
*
* @param tx - fresh transaction object
*/
public void sendTransaction(Transaction tx) {
TransactionsMessage msg = new TransactionsMessage(tx);
msgQueue.sendMessage(msg);
}
public void sendNewBlock(Block block) {
NewBlockMessage msg = new NewBlockMessage(block, block.getDifficulty());
msgQueue.sendMessage(msg);
}
public void sendDisconnect() {
msgQueue.disconnect();
}
public void adaptMessageIds(List<Capability> capabilities) {
Collections.sort(capabilities);
int offset = P2pMessageCodes.USER.asByte() + 1;
for (Capability capability : capabilities) {
if (capability.getName().equals(Capability.ETH)) {
EthMessageCodes.setOffset((byte)offset);
offset += EthMessageCodes.values().length;
}
if (capability.getName().equals(Capability.SHH)) {
ShhMessageCodes.setOffset((byte)offset);
offset += ShhMessageCodes.values().length;
}
if (capability.getName().equals(Capability.BZZ)) {
BzzMessageCodes.setOffset((byte) offset);
offset += BzzMessageCodes.values().length + 4;
// FIXME: for some reason Go left 4 codes between BZZ and ETH message codes
}
}
}
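/*
* Worked example (illustrative, with an assumed base value): if P2pMessageCodes.USER.asByte()
* returned 0x0f, the first capability in the sorted list would have its message codes start at
* offset 0x10, each following capability would start right after the previous one's code range,
* and the 4-code gap noted in the FIXME above would be left after the BZZ range. The 0x0f value
* is an assumption made for this example, not taken from this source.
*/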
public HelloMessage getHandshakeHelloMessage() {
return handshakeHelloMessage;
}
private void startTimers() {
// ping the remote peer periodically in the background
timer.scheduleAtFixedRate(new TimerTask() {
public void run() {
if (tearDown) cancel();
msgQueue.sendMessage(PING_MESSAGE);
}
}, 2000, 5000);
/*
timer.scheduleAtFixedRate(new TimerTask() {
public void run() {
msgQueue.sendMessage(GET_PEERS_MESSAGE);
}
}, 500, 25000);
*/
}
public void killTimers() {
timer.cancel();
timer.purge();
msgQueue.close();
}
public void setMsgQueue(MessageQueue msgQueue) {
this.msgQueue = msgQueue;
}
public void setChannel(Channel channel) {
this.channel = channel;
}
}
|
|
package me.pc.mobile.helper.v14.util;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.math.BigDecimal;
public class DimenUtil {
private static String PATH_BASE = "K:/ccc/BevaErgeTV_V2/";
/** path_large */
private static String path_large = PATH_BASE + "res/values-large/";
/** path_xlarge */
private static String path_xlarge = PATH_BASE + "res/values-xlarge/";
/** path_sw540dp */
private static String path_sw540dp = PATH_BASE + "res/values-sw540dp/";
/** path_sw600dp */
private static String path_sw600dp = PATH_BASE + "res/values-sw600dp/";
/** path_sw672dp */
private static String path_sw672dp = PATH_BASE + "res/values-sw672dp/";
/** path_sw720dp */
private static String path_sw720dp = PATH_BASE + "res/values-sw720dp/";
/** path_sw1080dp */
private static String path_sw1080dp = PATH_BASE + "res/values-sw1080dp/";
/** values_large scaled value */
private static float scale_values_large = 0.75f;
/** values_xlarge scaled value */
private static float scale_values_xlarge = 1.00f;// base case.
/** values_xxlarge scaled value */
private static float scale_values_xxlarge = 1.50f;
/** values_sw540dp scaled value */
private static float scale_values_sw540dp = 0.75f;
/** values_sw600dp scaled value */
private static float scale_values_sw600dp = 1.0f;// exception.
/** values_sw672dp scaled value */
private static float scale_values_sw672dp = 0.9f;
/** values_sw720dp scaled value */
private static float scale_values_sw720dp = 1.00f;// base case.
/** values_sw1080dp scaled value */
private static float scale_values_sw1080dp = 1.50f;
public static void main(String[] args) {
File src = new File(
"D:/Users/SilentKnight//res/values-sw1080dp/dimen.xml");
File target720 = new File(
"D:/Users/SilentKnight/res/values-sw720dp/dimen.xml");
File target540 = new File(
"D:/Users/SilentKnight/res/values-sw540dp/dimen.xml");
File targetLarge = new File(
"D:/Users/SilentKnight/res/values-large/dimen.xml");
File targetXLarge = new File(
"D:/Users/SilentKnight/res/values-xlarge/dimen.xml");
File targetXXLarge = new File(
"D:/Users/SilentKnight/res/values-xxlarge/dimen.xml");
// 720
String tmp = convertStreamToString(src.getAbsolutePath(), (float) 2 / 3);
writeFile(target720.getAbsolutePath(), tmp);
writeFile(targetXLarge.getAbsolutePath(), tmp);
// 540
tmp = convertStreamToString(src.getAbsolutePath(),
(float) 2 / 3 * 0.75f);
writeFile(target540.getAbsolutePath(), tmp);
writeFile(targetLarge.getAbsolutePath(), tmp);
// 1080
tmp = convertStreamToString(src.getAbsolutePath(), 1.0f);
writeFile(targetXXLarge.getAbsolutePath(), tmp);
// File file = new File(path_sw720dp);
// File[] files = file.listFiles();
// String temp = "";
// for (File file2 : files) {
// // write 540
// temp = convertStreamToString(file2.getAbsolutePath(),
// scale_values_sw540dp);
// writeFile(path_sw540dp + file2.getName(), temp);
// // write 600
// temp = convertStreamToString(file2.getAbsolutePath(),
// scale_values_sw600dp);
// writeFile(path_sw600dp + file2.getName(), temp);
// // write 672
// temp = convertStreamToString(file2.getAbsolutePath(),
// scale_values_sw672dp);
// writeFile(path_sw672dp + file2.getName(), temp);
// // write large
// temp = convertStreamToString(file2.getAbsolutePath(),
// scale_values_large);
// writeFile(path_large + file2.getName(), temp);
// write xlarge
// temp = convertStreamToString(file2.getAbsolutePath(),
// scale_values_sw720dp);
// writeFile(path_xlarge + file2.getName(), temp);
// // write 1080
// temp = convertStreamToString(file2.getAbsolutePath(),
// scale_values_sw1080dp);
// writeFile(path_sw1080dp + file2.getName(), temp);
// }
}
/**
* @Title: convertStreamToString
* @Description: reads a values XML file and scales every dimen (dp/sp) and string value by the factor f
* @param @param filepath path of the source XML file
* @param @param f scale factor applied to each value
* @param @return
* @return String the rewritten file content
*/
public static String convertStreamToString(String filepath, float f) {
StringBuilder sb = new StringBuilder();
try {
File file = new File(filepath);
if (!file.exists()) {
// Nothing to convert if the source file does not exist.
return sb.toString();
}
BufferedReader bf = new BufferedReader(new FileReader(filepath));
String line = null;
String endMarkSP = "sp</dimen>";
String endMarkDP = "dp</dimen>";
String endMark = "</string>";
String startmark = ">";
while ((line = bf.readLine()) != null) {
if (line.contains(endMarkSP) || line.contains(endMarkDP)
|| line.contains(endMark)) {
int end = -1;
if (line.contains(endMarkSP)) {
end = line.lastIndexOf(endMarkSP);
} else if (line.contains(endMarkDP)) {
end = line.lastIndexOf(endMarkDP);
} else if (line.contains(endMark)) {
end = line.lastIndexOf(endMark);
}
int start = line.indexOf(startmark);
String temp = line.substring(start + 1, end);
double tempValue = Double.parseDouble(temp);
double newValue = (tempValue * f);
BigDecimal bigDecimal = new BigDecimal(newValue);
newValue = bigDecimal.setScale(2, BigDecimal.ROUND_HALF_UP)
.doubleValue();
String newValueStr = subZeroAndDot(String.valueOf(newValue));
String newline = "";
if (line.contains(endMarkSP)) {
newline = line.replace(temp + "sp", newValueStr + "sp");
} else if (line.contains(endMarkDP)) {
newline = line.replace(temp + "dp", newValueStr + "dp");
} else if (line.contains(endMark)) {
newline = line.replace(temp, newValueStr);
}
sb.append(newline + "\r\n");
} else {
sb.append(line + "\r\n");
}
}
System.out.println(sb.toString());
} catch (IOException e) {
e.printStackTrace();
}
return sb.toString();
}
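/*
* Worked example (illustrative, with a hypothetical dimension name): with f = 0.75 the line
*   <dimen name="margin_large">100dp</dimen>
* is rewritten as
*   <dimen name="margin_large">75dp</dimen>
* because 100 * 0.75 = 75.0 is rounded to two decimals and the trailing ".0" is then stripped by
* subZeroAndDot (see the examples after that method below).
*/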
/**
* @Title: DeleteFolder
* @Description: deletes the file or folder at the given path (folder deletion is currently disabled)
* @param @param sPath
* @param @return
* @return boolean
*/
public static boolean DeleteFolder(String sPath) {
File file = new File(sPath);
if (!file.exists()) {
return true;
} else {
if (file.isFile()) {
return deleteFile(sPath);
} else {
// return deleteDirectory(sPath);
}
}
return false;
}
/**
* @Title: subZeroAndDot
* @Description: removes trailing zeros and a trailing decimal point from a numeric string
* @param @param s
* @param @return
* @return String
*/
public static String subZeroAndDot(String s) {
if (s.indexOf(".") > 0) {
s = s.replaceAll("0+?$", "");// delete Excess 0
s = s.replaceAll("[.]$", "");// delete . at the last index if any
}
return s;
}
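/*
* Worked examples (illustrative): subZeroAndDot("75.0") returns "75", subZeroAndDot("0.50")
* returns "0.5", and subZeroAndDot("12") is returned unchanged because it contains no decimal point.
*/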
/**
* @Title: writeFile
* @Description: write String into new file
* @param @param filepath
* @param @param st
* @return void
*/
public static void writeFile(String filepath, String st) {
try {
File file = new File(filepath);
file = new File(file.getParent());
if (!file.exists()) {
file.mkdir();
}
FileWriter fw = new FileWriter(filepath);
BufferedWriter bw = new BufferedWriter(fw);
bw.write(st);
bw.flush();
bw.close();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* @Title: deleteFile
* @Description: delete file.
* @param @param sPath
* @param @return
* @return boolean
*/
public static boolean deleteFile(String sPath) {
boolean flag = false;
File file = new File(sPath);
if (file.isFile() && file.exists()) {
file.delete();
flag = true;
}
return flag;
}
}
|
|
/**
* This program will solve a Rubik's Cube Puzzle using the
* two phase algorithm described by Herbert Kociemba at
* http://kociemba.org/cube.htm
*
* This is for private use only, not for release
*
* @author Russell Feldhausen
* @version 1.0 2008.12.04
*/
package rubiksolver;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JOptionPane;
import rubiksolverdef.*;
import java.io.*;
import java.util.LinkedList;
import javax.swing.JButton;
import javax.swing.UIManager;
import org.netbeans.lib.awtextra.AbsoluteConstraints;
import rubiksolver.RubikModel2.*;
/**
* This class is the main GUI for the Rubik Solver Program
*/
public class RubikDisplay2 extends javax.swing.JFrame implements ActionListener{
static private RubikModel2 model;
static private TwistMoveTable table;
static private FlipUDSlice flip;
static private CornerPerm perm;
static private PruningTables prune;
static private ViewCube2 view;
static private CubieCube cube;
static private JButton[][] up, down, left, right, back, front;
static private boolean canEdit;
static private LinkedList<Integer> moveList;
static private int nextMove;
/** Creates new form RubikDisplay */
public RubikDisplay2() {
try {
UIManager.setLookAndFeel( UIManager.getCrossPlatformLookAndFeelClassName() );
} catch (Exception e) {
e.printStackTrace();
}
initComponents();
File input, output;
ObjectInputStream in;
ObjectOutputStream out;
model = null;
nextMove = 0;
moveList = null;
long time1 = System.currentTimeMillis();
//Initialize each structure, loading it from a cached file if one exists
try{
input = new File("model.dat");
in = new ObjectInputStream(new FileInputStream(input));
model = (RubikModel2)in.readObject();
in.close();
System.out.println("Model read from file");
}catch (Exception e){
System.out.println("Error reading model from file");
//e.printStackTrace();
}
//if unable to read from file, will recalculate
if(model == null){
model = new RubikModel2();
try{
output = new File("model.dat");
out = new ObjectOutputStream(new FileOutputStream(output));
out.writeObject(model);
out.close();
}catch(Exception e){
System.out.println("Error writing model");
e.printStackTrace();
}
System.out.println("Model created and saved");
}
long time2 = System.currentTimeMillis();
flip = null;
try{
input = new File("flip.dat");
in = new ObjectInputStream(new FileInputStream(input));
flip = (FlipUDSlice)in.readObject();
flip.setModel(model);
in.close();
System.out.println("FlipUDSlice read from file");
}catch (Exception e){
System.out.println("Error reading FlipUDSlice from file");
//e.printStackTrace();
}
if(flip == null){
flip = new FlipUDSlice(model);
try{
output = new File("flip.dat");
out = new ObjectOutputStream(new FileOutputStream(output));
out.writeObject(flip);
out.close();
}catch(Exception e){
System.out.println("Error writing FlipUDSlice");
e.printStackTrace();
}
System.out.println("FlipUDSlice created and saved");
}
long time6 = System.currentTimeMillis();
perm = null;
try{
input = new File("perm.dat");
in = new ObjectInputStream(new FileInputStream(input));
perm = (CornerPerm)in.readObject();
perm.setModel(model);
in.close();
System.out.println("CornerPerm read from file");
}catch (Exception e){
System.out.println("Error reading CornerPerm from file");
//e.printStackTrace();
}
if(perm == null){
perm = new CornerPerm(model);
try{
output = new File("perm.dat");
out = new ObjectOutputStream(new FileOutputStream(output));
out.writeObject(perm);
out.close();
}catch(Exception e){
System.out.println("Error writing CornerPerm");
e.printStackTrace();
}
System.out.println("CornerPerm created and saved");
}
long time3 = System.currentTimeMillis();
table = null;
try{
input = new File("twist.dat");
in = new ObjectInputStream(new FileInputStream(input));
table = (TwistMoveTable)in.readObject();
in.close();
System.out.println("TwistMoveTable read from file");
}catch (Exception e){
System.out.println("Error reading TwistMoveTable from file");
//e.printStackTrace();
}
if(table == null){
table = new TwistMoveTable(model, flip, perm);
try{
output = new File("twist.dat");
out = new ObjectOutputStream(new FileOutputStream(output));
out.writeObject(table);
out.close();
}catch(Exception e){
System.out.println("Error writing TwistMoveTable");
e.printStackTrace();
}
System.out.println("TwistMoveTable created and saved");
}
long time4 = System.currentTimeMillis();
prune = null;
try{
input = new File("prune.dat");
in = new ObjectInputStream(new FileInputStream(input));
prune = (PruningTables)in.readObject();
in.close();
System.out.println("PruningTables read from file");
}catch (Exception e){
System.out.println("Error reading PruningTables from file");
//e.printStackTrace();
}
if(prune == null){
prune = new PruningTables(model, flip, table, perm);
try{
output = new File("prune.dat");
out = new ObjectOutputStream(new FileOutputStream(output));
out.writeObject(prune);
out.close();
}catch(Exception e){
System.out.println("Error writing PruningTables");
e.printStackTrace();
}
System.out.println("PruningTables created and saved");
}
long time5 = System.currentTimeMillis();
System.out.println("Model Initialization took " + (time2 - time1) + "ms");
System.out.println("FlipUDSlice Initialization took " + (time6 - time2) + "ms");
System.out.println("CornerPerm Initialization took " + (time3 - time6) + "ms");
System.out.println("Table Initialization took " + (time4 - time3) + "ms");
System.out.println("PruningTable Initialization took " + (time5 - time4) + "ms");
this.setSize(800, 600);
up = new JButton[3][3];
down = new JButton[3][3];
left = new JButton[3][3];
right = new JButton[3][3];
back = new JButton[3][3];
front = new JButton[3][3];
for(int i = 0; i < 3; i++){
for(int j = 0; j < 3; j++){
up[i][j] = new JButton();
up[i][j].addActionListener(this);
up[i][j].setPreferredSize(new Dimension(30, 30));
this.getContentPane().add(up[i][j], new AbsoluteConstraints(461 + 110 + 35 * i, 110 + 5 + 35 * j));
down[i][j] = new JButton();
down[i][j].addActionListener(this);
down[i][j].setPreferredSize(new Dimension(30, 30));
this.getContentPane().add(down[i][j], new AbsoluteConstraints(461 + 110 + 35 * i, 110 + 215 + 35 * j));
left[i][j] = new JButton();
left[i][j].addActionListener(this);
left[i][j].setPreferredSize(new Dimension(30, 30));
this.getContentPane().add(left[i][j], new AbsoluteConstraints(461 + 5 + 35 * i, 110 + 110 + 35 * j));
right[i][j] = new JButton();
right[i][j].addActionListener(this);
right[i][j].setPreferredSize(new Dimension(30, 30));
this.getContentPane().add(right[i][j], new AbsoluteConstraints(461 + 215 + 35 * i, 110 + 110 + 35 * j));
back[i][j] = new JButton();
back[i][j].addActionListener(this);
back[i][j].setPreferredSize(new Dimension(30, 30));
this.getContentPane().add(back[i][j], new AbsoluteConstraints(461 + 110 + 35 * i, 110 + 320 + 35 * j));
front[i][j] = new JButton();
front[i][j].addActionListener(this);
front[i][j].setPreferredSize(new Dimension(30, 30));
this.getContentPane().add(front[i][j], new AbsoluteConstraints(461 + 110 + 35 * i, 110 + 110 + 35 * j));
}
}
cube = new CubieCube(true);
updateC();
}
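/*
* Illustrative sketch (not part of the original program): the constructor above repeats the same
* load-from-file-or-recompute-and-save pattern for every table. A hypothetical generic helper
* such as this could express that pattern once; the method name and the Callable parameter are
* assumptions made for this example.
*/
private static <T extends Serializable> T loadOrCreate(String fileName, java.util.concurrent.Callable<T> factory) {
// Try to read a previously serialized instance from disk.
try{
ObjectInputStream in = new ObjectInputStream(new FileInputStream(fileName));
@SuppressWarnings("unchecked")
T cached = (T) in.readObject();
in.close();
System.out.println(fileName + " read from file");
return cached;
}catch (Exception e){
System.out.println("Error reading " + fileName + ", recalculating");
}
// Fall back to computing the table, then cache it for the next run.
T fresh;
try{
fresh = factory.call();
}catch(Exception e){
throw new RuntimeException("Error creating " + fileName, e);
}
try{
ObjectOutputStream out = new ObjectOutputStream(new FileOutputStream(fileName));
out.writeObject(fresh);
out.close();
System.out.println(fileName + " created and saved");
}catch(Exception e){
System.out.println("Error writing " + fileName);
e.printStackTrace();
}
return fresh;
}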
/**
* Updates the text on the top of the GUI
* @param input - text to add
*/
public void updateText(String input){
jTextArea1.setText(jTextArea1.getText() + "\n" + input);
}
public void updateView(){
CubieCube cc = cube;
int corner = cc.c.a[Corner.URF.ordinal()].c.ordinal();
int ori = cc.c.a[Corner.URF.ordinal()].o;
if(ori % 3 == 2){
ori -= 1;
}else if(ori % 3 == 1){
ori += 1;
}
if(ori >= 3){
up[2][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori % 3]));
right[0][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
front[2][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
}else{
up[2][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori]));
right[0][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
front[2][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
}
corner = cc.c.a[Corner.UFL.ordinal()].c.ordinal();
ori = cc.c.a[Corner.UFL.ordinal()].o;
if(ori % 3 == 2){
ori -= 1;
}else if(ori % 3 == 1){
ori += 1;
}
if(ori >= 3){
up[0][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori % 3]));
front[0][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
left[2][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
}else{
up[0][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori]));
front[0][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
left[2][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
}
corner = cc.c.a[Corner.ULB.ordinal()].c.ordinal();
ori = cc.c.a[Corner.ULB.ordinal()].o;
if(ori % 3 == 2){
ori -= 1;
}else if(ori % 3 == 1){
ori += 1;
}
if(ori >= 3){
up[0][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori % 3]));
left[0][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
back[0][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
}else{
up[0][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori]));
left[0][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
back[0][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
}
corner = cc.c.a[Corner.UBR.ordinal()].c.ordinal();
ori = cc.c.a[Corner.UBR.ordinal()].o;
if(ori % 3 == 2){
ori -= 1;
}else if(ori % 3 == 1){
ori += 1;
}
if(ori >= 3){
up[2][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori % 3]));
back[2][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
right[2][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
}else{
up[2][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori]));
back[2][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
right[2][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
}
corner = cc.c.a[Corner.DFR.ordinal()].c.ordinal();
ori = cc.c.a[Corner.DFR.ordinal()].o;
if(ori % 3 == 2){
ori -= 1;
}else if(ori % 3 == 1){
ori += 1;
}
if(ori >= 3){
down[2][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori % 3]));
front[2][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
right[0][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
}else{
down[2][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori]));
front[2][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
right[0][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
}
corner = cc.c.a[Corner.DLF.ordinal()].c.ordinal();
ori = cc.c.a[Corner.DLF.ordinal()].o;
if(ori % 3 == 2){
ori -= 1;
}else if(ori % 3 == 1){
ori += 1;
}
if(ori >= 3){
down[0][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori % 3]));
left[2][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
front[0][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
}else{
down[0][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori % 3]));
left[2][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
front[0][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
}
corner = cc.c.a[Corner.DBL.ordinal()].c.ordinal();
ori = cc.c.a[Corner.DBL.ordinal()].o;
if(ori % 3 == 2){
ori -= 1;
}else if(ori % 3 == 1){
ori += 1;
}
if(ori >= 3){
down[0][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori % 3]));
back[0][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
left[0][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
}else{
down[0][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori]));
back[0][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
left[0][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
}
corner = cc.c.a[Corner.DRB.ordinal()].c.ordinal();
ori = cc.c.a[Corner.DRB.ordinal()].o;
if(ori % 3 == 2){
ori -= 1;
}else if(ori % 3 == 1){
ori += 1;
}
if(ori >= 3){
down[2][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori % 3]));
right[2][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
back[2][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
}else{
down[2][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][ori]));
right[2][2].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 1) % 3]));
back[2][0].setBackground(model.ColorIndexToColor(model.CCI.a[corner][(ori + 2) % 3]));
}
int edge = cc.e.a[Edge.UR.ordinal()].e.ordinal();
ori = cc.e.a[Edge.UR.ordinal()].o;
up[2][1].setBackground(model.ColorIndexToColor(model.ECI.a[edge][ori]));
right[1][0].setBackground(model.ColorIndexToColor(model.ECI.a[edge][(ori + 1) % 2]));
edge = cc.e.a[Edge.UF.ordinal()].e.ordinal();
ori = cc.e.a[Edge.UF.ordinal()].o;
up[1][2].setBackground(model.ColorIndexToColor(model.ECI.a[edge][ori]));
front[1][0].setBackground(model.ColorIndexToColor(model.ECI.a[edge][(ori + 1) % 2]));
edge = cc.e.a[Edge.UL.ordinal()].e.ordinal();
ori = cc.e.a[Edge.UL.ordinal()].o;
up[0][1].setBackground(model.ColorIndexToColor(model.ECI.a[edge][ori]));
left[1][0].setBackground(model.ColorIndexToColor(model.ECI.a[edge][(ori + 1) % 2]));
edge = cc.e.a[Edge.UB.ordinal()].e.ordinal();
ori = cc.e.a[Edge.UB.ordinal()].o;
up[1][0].setBackground(model.ColorIndexToColor(model.ECI.a[edge][ori]));
back[1][2].setBackground(model.ColorIndexToColor(model.ECI.a[edge][(ori + 1) % 2]));
edge = cc.e.a[Edge.DR.ordinal()].e.ordinal();
ori = cc.e.a[Edge.DR.ordinal()].o;
down[2][1].setBackground(model.ColorIndexToColor(model.ECI.a[edge][ori]));
right[1][2].setBackground(model.ColorIndexToColor(model.ECI.a[edge][(ori + 1) % 2]));
edge = cc.e.a[Edge.DF.ordinal()].e.ordinal();
ori = cc.e.a[Edge.DF.ordinal()].o;
down[1][0].setBackground(model.ColorIndexToColor(model.ECI.a[edge][ori]));
front[1][2].setBackground(model.ColorIndexToColor(model.ECI.a[edge][(ori + 1) % 2]));
edge = cc.e.a[Edge.DL.ordinal()].e.ordinal();
ori = cc.e.a[Edge.DL.ordinal()].o;
down[0][1].setBackground(model.ColorIndexToColor(model.ECI.a[edge][ori]));
left[1][2].setBackground(model.ColorIndexToColor(model.ECI.a[edge][(ori + 1) % 2]));
edge = cc.e.a[Edge.DB.ordinal()].e.ordinal();
ori = cc.e.a[Edge.DB.ordinal()].o;
down[1][2].setBackground(model.ColorIndexToColor(model.ECI.a[edge][ori]));
back[1][0].setBackground(model.ColorIndexToColor(model.ECI.a[edge][(ori + 1) % 2]));
edge = cc.e.a[Edge.FR.ordinal()].e.ordinal();
ori = cc.e.a[Edge.FR.ordinal()].o;
front[2][1].setBackground(model.ColorIndexToColor(model.ECI.a[edge][ori]));
right[0][1].setBackground(model.ColorIndexToColor(model.ECI.a[edge][(ori + 1) % 2]));
edge = cc.e.a[Edge.FL.ordinal()].e.ordinal();
ori = cc.e.a[Edge.FL.ordinal()].o;
front[0][1].setBackground(model.ColorIndexToColor(model.ECI.a[edge][ori]));
left[2][1].setBackground(model.ColorIndexToColor(model.ECI.a[edge][(ori + 1) % 2]));
edge = cc.e.a[Edge.BL.ordinal()].e.ordinal();
ori = cc.e.a[Edge.BL.ordinal()].o;
back[0][1].setBackground(model.ColorIndexToColor(model.ECI.a[edge][ori]));
left[0][1].setBackground(model.ColorIndexToColor(model.ECI.a[edge][(ori + 1) % 2]));
edge = cc.e.a[Edge.BR.ordinal()].e.ordinal();
ori = cc.e.a[Edge.BR.ordinal()].o;
back[2][1].setBackground(model.ColorIndexToColor(model.ECI.a[edge][ori]));
right[2][1].setBackground(model.ColorIndexToColor(model.ECI.a[edge][(ori + 1) % 2]));
up[1][1].setBackground(Color.ORANGE);
down[1][1].setBackground(Color.RED);
left[1][1].setBackground(Color.GREEN);
right[1][1].setBackground(Color.BLUE);
back[1][1].setBackground(Color.YELLOW);
front[1][1].setBackground(Color.WHITE);
up[1][1].setEnabled(false);
down[1][1].setEnabled(false);
left[1][1].setEnabled(false);
right[1][1].setEnabled(false);
back[1][1].setEnabled(false);
front[1][1].setEnabled(false);
}
/**
* Updates the coordinates displayed on the GUI
*/
public void updateC(){
if(!canEdit){
jTextField6.setText(""+model.CornOriCoord(cube.c));
jTextField5.setText(""+model.EdgeOriCoord(cube.e));
jTextField4.setText(""+model.UDSliceCoord(cube));
int flipUD = flip.FlipUDSliceCoord(cube);
jTextField3.setText(""+(int)(flipUD/ 16) + " " + (int)(flipUD % 16));
jTextField2.setText(""+prune.Phase1PruningTable[(int)(flipUD / 16)][table.CornOriSym[model.CornOriCoord(cube.c)][flipUD % 16]]);
jTextField11.setText(""+model.CornPermCoord(cube.c));
jTextField10.setText(""+model.EdgePermCoord(cube.e));
jTextField9.setText(""+model.UDSliceSortedCoord(cube));
if(model.CornOriCoord(cube.c) == 0 && model.EdgeOriCoord(cube.e) == 0 && (int)(flipUD / 16) == 0){ //phase2
jTextField12.setText(""+model.Phase2EdgePermCoord(cube));
int CornPerm = perm.CornPermCoord(cube);
jTextField8.setText("" + (int)(perm.CornPermCoord(cube) / 16) + " " + CornPerm % 16);
jTextField7.setText("" + prune.Phase2PruningTable[(int)(CornPerm / 16)][table.P2EdgePermSym[model.Phase2EdgePermCoord(cube)][CornPerm % 16]]);
}
updateView();
}
}
/** This method is called from within the constructor to
* initialize the form.
* WARNING: Do NOT modify this code. The content of this method is
* always regenerated by the Form Editor.
*/
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
btn_input = new javax.swing.JButton();
btn_New = new javax.swing.JButton();
btn_Solve = new javax.swing.JButton();
jButton2 = new javax.swing.JButton();
jButton3 = new javax.swing.JButton();
jButton4 = new javax.swing.JButton();
jButton5 = new javax.swing.JButton();
jButton6 = new javax.swing.JButton();
jButton7 = new javax.swing.JButton();
jButton8 = new javax.swing.JButton();
jScrollPane2 = new javax.swing.JScrollPane();
jList1 = new javax.swing.JList();
jLabel1 = new javax.swing.JLabel();
jLabel2 = new javax.swing.JLabel();
jLabel3 = new javax.swing.JLabel();
jLabel4 = new javax.swing.JLabel();
jButton9 = new javax.swing.JButton();
jButton10 = new javax.swing.JButton();
jButton11 = new javax.swing.JButton();
jLabel5 = new javax.swing.JLabel();
jLabel6 = new javax.swing.JLabel();
jLabel7 = new javax.swing.JLabel();
jLabel8 = new javax.swing.JLabel();
jLabel9 = new javax.swing.JLabel();
jLabel10 = new javax.swing.JLabel();
jLabel11 = new javax.swing.JLabel();
jLabel12 = new javax.swing.JLabel();
jLabel13 = new javax.swing.JLabel();
jLabel14 = new javax.swing.JLabel();
jLabel15 = new javax.swing.JLabel();
jLabel16 = new javax.swing.JLabel();
jLabel17 = new javax.swing.JLabel();
jTextField2 = new javax.swing.JTextField();
jTextField3 = new javax.swing.JTextField();
jTextField4 = new javax.swing.JTextField();
jTextField5 = new javax.swing.JTextField();
jTextField6 = new javax.swing.JTextField();
jTextField7 = new javax.swing.JTextField();
jTextField8 = new javax.swing.JTextField();
jTextField9 = new javax.swing.JTextField();
jTextField10 = new javax.swing.JTextField();
jTextField11 = new javax.swing.JTextField();
jLabel18 = new javax.swing.JLabel();
jTextField12 = new javax.swing.JTextField();
jScrollPane1 = new javax.swing.JScrollPane();
jTextArea1 = new javax.swing.JTextArea();
btn_save = new javax.swing.JButton();
btn_Solve1 = new javax.swing.JButton();
jLabel19 = new javax.swing.JLabel();
btn_Solve2 = new javax.swing.JButton();
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
addWindowListener(new java.awt.event.WindowAdapter() {
public void windowClosing(java.awt.event.WindowEvent evt) {
formWindowClosing(evt);
}
});
getContentPane().setLayout(new org.netbeans.lib.awtextra.AbsoluteLayout());
btn_input.setFont(new java.awt.Font("Tahoma", 0, 14));
btn_input.setText("Edit Cube Layout");
btn_input.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btn_inputActionPerformed(evt);
}
});
getContentPane().add(btn_input, new org.netbeans.lib.awtextra.AbsoluteConstraints(10, 170, 160, 30));
btn_New.setFont(new java.awt.Font("Tahoma", 0, 14));
btn_New.setText("Create Solved Cube");
btn_New.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btn_NewActionPerformed(evt);
}
});
getContentPane().add(btn_New, new org.netbeans.lib.awtextra.AbsoluteConstraints(10, 140, 160, 30));
btn_Solve.setFont(new java.awt.Font("Tahoma", 0, 14));
btn_Solve.setText("Solve");
btn_Solve.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btn_SolveActionPerformed(evt);
}
});
getContentPane().add(btn_Solve, new org.netbeans.lib.awtextra.AbsoluteConstraints(190, 140, 70, 60));
jButton2.setFont(new java.awt.Font("Tahoma", 0, 14));
jButton2.setText("U");
jButton2.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton2ActionPerformed(evt);
}
});
getContentPane().add(jButton2, new org.netbeans.lib.awtextra.AbsoluteConstraints(280, 140, 50, 40));
jButton3.setFont(new java.awt.Font("Tahoma", 0, 14));
jButton3.setText("D");
jButton3.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton3ActionPerformed(evt);
}
});
getContentPane().add(jButton3, new org.netbeans.lib.awtextra.AbsoluteConstraints(280, 190, 50, 40));
jButton4.setFont(new java.awt.Font("Tahoma", 0, 14));
jButton4.setText("L");
jButton4.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton4ActionPerformed(evt);
}
});
getContentPane().add(jButton4, new org.netbeans.lib.awtextra.AbsoluteConstraints(340, 190, 50, 40));
jButton5.setFont(new java.awt.Font("Tahoma", 0, 14));
jButton5.setText("R");
jButton5.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton5ActionPerformed(evt);
}
});
getContentPane().add(jButton5, new org.netbeans.lib.awtextra.AbsoluteConstraints(340, 140, 50, 40));
jButton6.setFont(new java.awt.Font("Tahoma", 0, 14));
jButton6.setText("F");
jButton6.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton6ActionPerformed(evt);
}
});
getContentPane().add(jButton6, new org.netbeans.lib.awtextra.AbsoluteConstraints(400, 140, 50, 40));
jButton7.setFont(new java.awt.Font("Tahoma", 0, 14));
jButton7.setText("B");
jButton7.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton7ActionPerformed(evt);
}
});
getContentPane().add(jButton7, new org.netbeans.lib.awtextra.AbsoluteConstraints(400, 190, 50, 40));
jButton8.setFont(new java.awt.Font("Tahoma", 0, 12));
jButton8.setText("Sym Inverted");
jButton8.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton8ActionPerformed(evt);
}
});
getContentPane().add(jButton8, new org.netbeans.lib.awtextra.AbsoluteConstraints(340, 360, 110, -1));
jList1.setModel(new javax.swing.AbstractListModel() {
String[] strings = { "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15" };
public int getSize() { return strings.length; }
public Object getElementAt(int i) { return strings[i]; }
});
jScrollPane2.setViewportView(jList1);
getContentPane().add(jScrollPane2, new org.netbeans.lib.awtextra.AbsoluteConstraints(280, 240, 50, 290));
jLabel1.setFont(new java.awt.Font("Tahoma", 0, 14));
jLabel1.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
jLabel1.setText("Moves");
getContentPane().add(jLabel1, new org.netbeans.lib.awtextra.AbsoluteConstraints(280, 110, 170, 30));
jLabel2.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jLabel2.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
jLabel2.setText("Russ's Rubik Solver 1.0");
getContentPane().add(jLabel2, new org.netbeans.lib.awtextra.AbsoluteConstraints(0, 0, 780, 30));
jLabel3.setFont(new java.awt.Font("Tahoma", 0, 14));
jLabel3.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
jLabel3.setText("Coordinates");
getContentPane().add(jLabel3, new org.netbeans.lib.awtextra.AbsoluteConstraints(10, 240, 270, 30));
jLabel4.setFont(new java.awt.Font("Tahoma", 0, 14)); // NOI18N
jLabel4.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
jLabel4.setText("Solution");
getContentPane().add(jLabel4, new org.netbeans.lib.awtextra.AbsoluteConstraints(10, 30, 770, 20));
jButton9.setFont(new java.awt.Font("Tahoma", 0, 12));
jButton9.setText("S Then Sinv");
jButton9.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton9ActionPerformed(evt);
}
});
getContentPane().add(jButton9, new org.netbeans.lib.awtextra.AbsoluteConstraints(340, 270, 110, -1));
jButton10.setFont(new java.awt.Font("Tahoma", 0, 12));
jButton10.setText("Sinv Then S");
jButton10.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton10ActionPerformed(evt);
}
});
getContentPane().add(jButton10, new org.netbeans.lib.awtextra.AbsoluteConstraints(340, 300, 110, -1));
jButton11.setFont(new java.awt.Font("Tahoma", 0, 12));
jButton11.setText("Symmetry");
jButton11.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton11ActionPerformed(evt);
}
});
getContentPane().add(jButton11, new org.netbeans.lib.awtextra.AbsoluteConstraints(340, 330, 110, -1));
jLabel5.setFont(new java.awt.Font("Tahoma", 0, 14));
jLabel5.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
jLabel5.setText("Actions");
getContentPane().add(jLabel5, new org.netbeans.lib.awtextra.AbsoluteConstraints(10, 110, 160, 30));
getContentPane().add(jLabel6, new org.netbeans.lib.awtextra.AbsoluteConstraints(10, 260, -1, -1));
jLabel7.setFont(new java.awt.Font("Tahoma", 0, 14));
jLabel7.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
jLabel7.setText("Prune Depth");
jLabel7.setHorizontalTextPosition(javax.swing.SwingConstants.RIGHT);
getContentPane().add(jLabel7, new org.netbeans.lib.awtextra.AbsoluteConstraints(30, 350, 80, 20));
jLabel8.setFont(new java.awt.Font("Tahoma", 1, 14));
jLabel8.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
jLabel8.setText("CornOri");
jLabel8.setHorizontalTextPosition(javax.swing.SwingConstants.RIGHT);
getContentPane().add(jLabel8, new org.netbeans.lib.awtextra.AbsoluteConstraints(20, 270, 90, 20));
jLabel9.setFont(new java.awt.Font("Tahoma", 1, 14));
jLabel9.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
jLabel9.setText("CornPerm");
jLabel9.setHorizontalTextPosition(javax.swing.SwingConstants.RIGHT);
getContentPane().add(jLabel9, new org.netbeans.lib.awtextra.AbsoluteConstraints(30, 380, 80, 20));
jLabel10.setFont(new java.awt.Font("Tahoma", 1, 14));
jLabel10.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
jLabel10.setText("EdgeOri");
jLabel10.setHorizontalTextPosition(javax.swing.SwingConstants.RIGHT);
getContentPane().add(jLabel10, new org.netbeans.lib.awtextra.AbsoluteConstraints(20, 290, 90, 20));
jLabel11.setFont(new java.awt.Font("Tahoma", 0, 14));
jLabel11.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
jLabel11.setText("Prune Depth");
jLabel11.setHorizontalTextPosition(javax.swing.SwingConstants.RIGHT);
getContentPane().add(jLabel11, new org.netbeans.lib.awtextra.AbsoluteConstraints(20, 480, 90, 20));
jLabel12.setFont(new java.awt.Font("Tahoma", 1, 14));
jLabel12.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
jLabel12.setText("UDSlice");
jLabel12.setHorizontalTextPosition(javax.swing.SwingConstants.RIGHT);
getContentPane().add(jLabel12, new org.netbeans.lib.awtextra.AbsoluteConstraints(20, 310, 90, 20));
jLabel13.setFont(new java.awt.Font("Tahoma", 0, 14));
jLabel13.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
jLabel13.setText("Solution");
getContentPane().add(jLabel13, new org.netbeans.lib.awtextra.AbsoluteConstraints(340, 390, 120, 30));
jLabel14.setFont(new java.awt.Font("Tahoma", 0, 14));
jLabel14.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
jLabel14.setText("EdgePerm");
jLabel14.setHorizontalTextPosition(javax.swing.SwingConstants.RIGHT);
getContentPane().add(jLabel14, new org.netbeans.lib.awtextra.AbsoluteConstraints(40, 400, 70, 20));
jLabel15.setFont(new java.awt.Font("Tahoma", 1, 14));
jLabel15.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
jLabel15.setText("UDSliceSorted");
jLabel15.setHorizontalTextPosition(javax.swing.SwingConstants.RIGHT);
getContentPane().add(jLabel15, new org.netbeans.lib.awtextra.AbsoluteConstraints(10, 440, 100, 20));
jLabel16.setFont(new java.awt.Font("Tahoma", 0, 14));
jLabel16.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
jLabel16.setText("FlipUDSlice");
jLabel16.setHorizontalTextPosition(javax.swing.SwingConstants.RIGHT);
getContentPane().add(jLabel16, new org.netbeans.lib.awtextra.AbsoluteConstraints(30, 330, 80, 20));
jLabel17.setFont(new java.awt.Font("Tahoma", 0, 14));
jLabel17.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
jLabel17.setText("CornPermSym");
jLabel17.setHorizontalTextPosition(javax.swing.SwingConstants.RIGHT);
getContentPane().add(jLabel17, new org.netbeans.lib.awtextra.AbsoluteConstraints(20, 460, 90, 20));
jTextField2.setFont(new java.awt.Font("Tahoma", 0, 14));
getContentPane().add(jTextField2, new org.netbeans.lib.awtextra.AbsoluteConstraints(120, 350, 140, -1));
jTextField3.setFont(new java.awt.Font("Tahoma", 0, 14));
getContentPane().add(jTextField3, new org.netbeans.lib.awtextra.AbsoluteConstraints(120, 330, 140, -1));
jTextField4.setFont(new java.awt.Font("Tahoma", 0, 14));
getContentPane().add(jTextField4, new org.netbeans.lib.awtextra.AbsoluteConstraints(120, 310, 140, -1));
jTextField5.setFont(new java.awt.Font("Tahoma", 0, 14));
getContentPane().add(jTextField5, new org.netbeans.lib.awtextra.AbsoluteConstraints(120, 290, 140, -1));
jTextField6.setFont(new java.awt.Font("Tahoma", 0, 14));
getContentPane().add(jTextField6, new org.netbeans.lib.awtextra.AbsoluteConstraints(120, 270, 140, -1));
jTextField7.setFont(new java.awt.Font("Tahoma", 0, 14));
getContentPane().add(jTextField7, new org.netbeans.lib.awtextra.AbsoluteConstraints(120, 480, 140, -1));
jTextField8.setFont(new java.awt.Font("Tahoma", 0, 14));
getContentPane().add(jTextField8, new org.netbeans.lib.awtextra.AbsoluteConstraints(120, 460, 140, -1));
jTextField9.setFont(new java.awt.Font("Tahoma", 0, 14));
getContentPane().add(jTextField9, new org.netbeans.lib.awtextra.AbsoluteConstraints(120, 440, 140, -1));
jTextField10.setFont(new java.awt.Font("Tahoma", 0, 14));
getContentPane().add(jTextField10, new org.netbeans.lib.awtextra.AbsoluteConstraints(120, 400, 140, -1));
jTextField11.setFont(new java.awt.Font("Tahoma", 0, 14));
getContentPane().add(jTextField11, new org.netbeans.lib.awtextra.AbsoluteConstraints(120, 380, 140, -1));
jLabel18.setFont(new java.awt.Font("Tahoma", 1, 14));
jLabel18.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
jLabel18.setText("P2EdgePerm");
jLabel18.setHorizontalTextPosition(javax.swing.SwingConstants.RIGHT);
getContentPane().add(jLabel18, new org.netbeans.lib.awtextra.AbsoluteConstraints(0, 420, 110, 20));
jTextField12.setFont(new java.awt.Font("Tahoma", 0, 14));
getContentPane().add(jTextField12, new org.netbeans.lib.awtextra.AbsoluteConstraints(120, 420, 140, -1));
jTextArea1.setColumns(20);
jTextArea1.setRows(5);
jScrollPane1.setViewportView(jTextArea1);
getContentPane().add(jScrollPane1, new org.netbeans.lib.awtextra.AbsoluteConstraints(10, 50, 770, 50));
btn_save.setFont(new java.awt.Font("Tahoma", 0, 14));
btn_save.setText("Save");
btn_save.setEnabled(false);
btn_save.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btn_saveActionPerformed(evt);
}
});
getContentPane().add(btn_save, new org.netbeans.lib.awtextra.AbsoluteConstraints(10, 200, 160, 30));
btn_Solve1.setFont(new java.awt.Font("Tahoma", 0, 14));
btn_Solve1.setText("Step Forward");
btn_Solve1.setEnabled(false);
btn_Solve1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btn_Solve1ActionPerformed(evt);
}
});
getContentPane().add(btn_Solve1, new org.netbeans.lib.awtextra.AbsoluteConstraints(340, 420, 120, 40));
jLabel19.setFont(new java.awt.Font("Tahoma", 0, 14));
jLabel19.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
jLabel19.setText("Symmetries");
getContentPane().add(jLabel19, new org.netbeans.lib.awtextra.AbsoluteConstraints(340, 240, 110, 30));
btn_Solve2.setFont(new java.awt.Font("Tahoma", 0, 14));
btn_Solve2.setText("Step Back");
btn_Solve2.setEnabled(false);
btn_Solve2.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btn_Solve2ActionPerformed(evt);
}
});
getContentPane().add(btn_Solve2, new org.netbeans.lib.awtextra.AbsoluteConstraints(340, 470, 120, 40));
pack();
}// </editor-fold>//GEN-END:initComponents
private void formWindowClosing(java.awt.event.WindowEvent evt) {//GEN-FIRST:event_formWindowClosing
}//GEN-LAST:event_formWindowClosing
private void btn_SolveActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btn_SolveActionPerformed
if(JOptionPane.showConfirmDialog(this, "Are you sure?") == JOptionPane.YES_OPTION){
jTextArea1.setText("");
moveList = RubikSolver2.solve(cube, table, flip, prune, model, perm, this);
if(moveList != null){
btn_Solve1.setEnabled(true);
btn_Solve2.setEnabled(true);
nextMove = 0;
}else{
btn_Solve1.setEnabled(false);
btn_Solve2.setEnabled(false);
nextMove = 0;
}
}
}//GEN-LAST:event_btn_SolveActionPerformed
private void btn_NewActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btn_NewActionPerformed
cube = new CubieCube(true);
btn_Solve1.setEnabled(false);
btn_Solve2.setEnabled(false);
updateC();
}//GEN-LAST:event_btn_NewActionPerformed
private void btn_inputActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btn_inputActionPerformed
if(cube == null){
cube = new CubieCube(true);
}
canEdit = true;
btn_save.setEnabled(true);
btn_Solve1.setEnabled(false);
btn_Solve2.setEnabled(false);
btn_input.setEnabled(false);
btn_Solve.setEnabled(false);
}//GEN-LAST:event_btn_inputActionPerformed
private void jButton2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton2ActionPerformed
cube = model.DoMove(cube, TurnAxis.U);
updateC();
}//GEN-LAST:event_jButton2ActionPerformed
private void jButton3ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton3ActionPerformed
cube = model.DoMove(cube, TurnAxis.D);
updateC();
}//GEN-LAST:event_jButton3ActionPerformed
private void jButton4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton4ActionPerformed
cube = model.DoMove(cube, TurnAxis.L);
updateC();
}//GEN-LAST:event_jButton4ActionPerformed
private void jButton5ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton5ActionPerformed
cube = model.DoMove(cube, TurnAxis.R);
updateC();
}//GEN-LAST:event_jButton5ActionPerformed
private void jButton6ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton6ActionPerformed
cube = model.DoMove(cube, TurnAxis.F);
updateC();
}//GEN-LAST:event_jButton6ActionPerformed
private void jButton7ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton7ActionPerformed
cube = model.DoMove(cube, TurnAxis.B);
updateC();
}//GEN-LAST:event_jButton7ActionPerformed
private void jButton8ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton8ActionPerformed
if(jList1.getSelectedIndex() < 0) return;
cube = model.DoSym(cube, jList1.getSelectedIndices()[0], true);
updateC();
}//GEN-LAST:event_jButton8ActionPerformed
private void jButton9ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton9ActionPerformed
if(jList1.getSelectedIndex() < 0) return;
cube = model.SthenSinv(cube, jList1.getSelectedIndices()[0]);
updateC();
}//GEN-LAST:event_jButton9ActionPerformed
private void jButton10ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton10ActionPerformed
if(jList1.getSelectedIndex() < 0) return;
cube = model.SinvThenS(cube, jList1.getSelectedIndices()[0]);
updateC();
}//GEN-LAST:event_jButton10ActionPerformed
private void jButton11ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton11ActionPerformed
if(jList1.getSelectedIndex() < 0) return;
cube = model.DoSym(cube, jList1.getSelectedIndices()[0], false);
updateC();
}//GEN-LAST:event_jButton11ActionPerformed
private void btn_saveActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btn_saveActionPerformed
CubieCube cc = new CubieCube();
try{
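// Read the sticker colours from the six 3x3 facelet grids (up/down/front/back/left/right)
// and identify every corner and edge cubie; IDCorner/IDEdge are expected to throw for an
// invalid colour combination, which is caught below and reported as an invalid cube.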
cc.c.a[Corner.URF.ordinal()] = model.IDCorner(up[2][2].getBackground(), right[0][0].getBackground(), front[2][0].getBackground());
cc.c.a[Corner.UFL.ordinal()] = model.IDCorner(up[0][2].getBackground(), front[0][0].getBackground(), left[2][0].getBackground());
cc.c.a[Corner.ULB.ordinal()] = model.IDCorner(up[0][0].getBackground(), left[0][0].getBackground(), back[0][2].getBackground());
cc.c.a[Corner.UBR.ordinal()] = model.IDCorner(up[2][0].getBackground(), back[2][2].getBackground(), right[2][0].getBackground());
cc.c.a[Corner.DFR.ordinal()] = model.IDCorner(down[2][0].getBackground(), front[2][2].getBackground(), right[0][2].getBackground());
cc.c.a[Corner.DLF.ordinal()] = model.IDCorner(down[0][0].getBackground(), left[2][2].getBackground(), front[0][2].getBackground());
cc.c.a[Corner.DBL.ordinal()] = model.IDCorner(down[0][2].getBackground(), back[0][0].getBackground(), left[0][2].getBackground());
cc.c.a[Corner.DRB.ordinal()] = model.IDCorner(down[2][2].getBackground(), right[2][2].getBackground(), back[2][0].getBackground());
cc.e.a[Edge.UR.ordinal()] = model.IDEdge(up[2][1].getBackground(), right[1][0].getBackground());
cc.e.a[Edge.UF.ordinal()] = model.IDEdge(up[1][2].getBackground(), front[1][0].getBackground());
cc.e.a[Edge.UL.ordinal()] = model.IDEdge(up[0][1].getBackground(), left[1][0].getBackground());
cc.e.a[Edge.UB.ordinal()] = model.IDEdge(up[1][0].getBackground(), back[1][2].getBackground());
cc.e.a[Edge.DR.ordinal()] = model.IDEdge(down[2][1].getBackground(), right[1][2].getBackground());
cc.e.a[Edge.DF.ordinal()] = model.IDEdge(down[1][0].getBackground(), front[1][2].getBackground());
cc.e.a[Edge.DL.ordinal()] = model.IDEdge(down[0][1].getBackground(), left[1][2].getBackground());
cc.e.a[Edge.DB.ordinal()] = model.IDEdge(down[1][2].getBackground(), back[1][0].getBackground());
cc.e.a[Edge.FR.ordinal()] = model.IDEdge(front[2][1].getBackground(), right[0][1].getBackground());
cc.e.a[Edge.FL.ordinal()] = model.IDEdge(front[0][1].getBackground(), left[2][1].getBackground());
cc.e.a[Edge.BL.ordinal()] = model.IDEdge(back[0][1].getBackground(), left[0][1].getBackground());
cc.e.a[Edge.BR.ordinal()] = model.IDEdge(back[2][1].getBackground(), right[2][1].getBackground());
}catch(Exception e){
JOptionPane.showMessageDialog(this, "Cube is not valid!");
return;
}
if(model.verifyCube(cc)){
cube = cc;
canEdit = false;
updateC();
btn_Solve.setEnabled(true);
btn_input.setEnabled(true);
btn_save.setEnabled(false);
}else{
JOptionPane.showMessageDialog(this, "Cube is not valid!");
}
}//GEN-LAST:event_btn_saveActionPerformed
private void btn_Solve1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btn_Solve1ActionPerformed
if(nextMove >= 0 && nextMove < moveList.size()){
int move = moveList.get(nextMove);
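// Each entry of moveList apparently encodes a move as face * 3 + (quarterTurns - 1):
// move / 3 selects the turn axis and (move % 3) + 1 is the number of quarter turns to apply.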
for(int j = 0; j < (move % 3) + 1; j++){
cube = model.DoMove(cube, TurnAxis.values()[(int)(move / 3)]);
}
nextMove++;
updateC();
}
}//GEN-LAST:event_btn_Solve1ActionPerformed
private void btn_Solve2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btn_Solve2ActionPerformed
if(nextMove > 0 && nextMove <= moveList.size()){
nextMove--;
int move = moveList.get(nextMove);
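// Undo the move by turning the same face the remaining 4 - ((move % 3) + 1) quarter turns,
// since four quarter turns of a face restore it.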
for(int j = 4; j > (move % 3) + 1; j--){
cube = model.DoMove(cube, TurnAxis.values()[(int)(move / 3)]);
}
updateC();
}
}//GEN-LAST:event_btn_Solve2ActionPerformed
/**
* @param args the command line arguments
*/
public static void main(String args[]) {
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
new RubikDisplay2().setVisible(true);
}
});
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton btn_New;
private javax.swing.JButton btn_Solve;
private javax.swing.JButton btn_Solve1;
private javax.swing.JButton btn_Solve2;
private javax.swing.JButton btn_input;
private javax.swing.JButton btn_save;
private javax.swing.JButton jButton10;
private javax.swing.JButton jButton11;
private javax.swing.JButton jButton2;
private javax.swing.JButton jButton3;
private javax.swing.JButton jButton4;
private javax.swing.JButton jButton5;
private javax.swing.JButton jButton6;
private javax.swing.JButton jButton7;
private javax.swing.JButton jButton8;
private javax.swing.JButton jButton9;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel10;
private javax.swing.JLabel jLabel11;
private javax.swing.JLabel jLabel12;
private javax.swing.JLabel jLabel13;
private javax.swing.JLabel jLabel14;
private javax.swing.JLabel jLabel15;
private javax.swing.JLabel jLabel16;
private javax.swing.JLabel jLabel17;
private javax.swing.JLabel jLabel18;
private javax.swing.JLabel jLabel19;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JLabel jLabel5;
private javax.swing.JLabel jLabel6;
private javax.swing.JLabel jLabel7;
private javax.swing.JLabel jLabel8;
private javax.swing.JLabel jLabel9;
private javax.swing.JList jList1;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JScrollPane jScrollPane2;
private javax.swing.JTextArea jTextArea1;
private javax.swing.JTextField jTextField10;
private javax.swing.JTextField jTextField11;
private javax.swing.JTextField jTextField12;
private javax.swing.JTextField jTextField2;
private javax.swing.JTextField jTextField3;
private javax.swing.JTextField jTextField4;
private javax.swing.JTextField jTextField5;
private javax.swing.JTextField jTextField6;
private javax.swing.JTextField jTextField7;
private javax.swing.JTextField jTextField8;
private javax.swing.JTextField jTextField9;
// End of variables declaration//GEN-END:variables
public void actionPerformed(ActionEvent e) {
if(canEdit){
JButton pressed = (JButton)e.getSource();
if(pressed.getBackground() == Color.WHITE){
pressed.setBackground(Color.RED);
}else if(pressed.getBackground() == Color.RED){
pressed.setBackground(Color.ORANGE);
}else if(pressed.getBackground() == Color.ORANGE){
pressed.setBackground(Color.YELLOW);
}else if(pressed.getBackground() == Color.YELLOW){
pressed.setBackground(Color.GREEN);
}else if(pressed.getBackground() == Color.GREEN){
pressed.setBackground(Color.BLUE);
}else if(pressed.getBackground() == Color.BLUE){
pressed.setBackground(Color.WHITE);
}
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hive.ql.QTestMiniClusters.FsType;
import org.apache.hadoop.hive.ql.qoption.QTestReplaceHandler;
/**
* QOutProcessor: produces the final q.out from original q.out by postprocessing (e.g. masks)
*
*/
public class QOutProcessor {
public static final String PATH_HDFS_REGEX = "(hdfs://)([a-zA-Z0-9:/_\\-\\.=])+";
public static final String PATH_HDFS_WITH_DATE_USER_GROUP_REGEX =
"([a-z]+) ([a-z]+)([ ]+)([0-9]+) ([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}) "
+ PATH_HDFS_REGEX;
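// PATH_HDFS_WITH_DATE_USER_GROUP_REGEX matches the owner/group/size/date/path portion of a
// "dfs -ls" style output line, e.g. "hive hadoop       1234 2021-03-01 10:15 hdfs://nn:8020/warehouse/t/000000_0";
// the owner, group, date and path pieces are later replaced by the masks defined below.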
public static final String HDFS_MASK = "### HDFS PATH ###";
public static final String HDFS_DATE_MASK = "### HDFS DATE ###";
public static final String HDFS_USER_MASK = "### USER ###";
public static final String HDFS_GROUP_MASK = "### GROUP ###";
public static final String MASK_PATTERN = "#### A masked pattern was here ####";
public static final String PARTIAL_MASK_PATTERN = "#### A PARTIAL masked pattern was here ####";
private final Set<String> qMaskStatsQuerySet = new HashSet<String>();
private final Set<String> qMaskDataSizeQuerySet = new HashSet<String>();
private final Set<String> qMaskLineageQuerySet = new HashSet<String>();
private static final PatternReplacementPair MASK_STATS = new PatternReplacementPair(
Pattern.compile(" Num rows: [1-9][0-9]* Data size: [1-9][0-9]*"),
" Num rows: ###Masked### Data size: ###Masked###");
private static final PatternReplacementPair MASK_DATA_SIZE = new PatternReplacementPair(
Pattern.compile(" Data size: [1-9][0-9]*"),
" Data size: ###Masked###");
private static final PatternReplacementPair MASK_LINEAGE = new PatternReplacementPair(
Pattern.compile("POSTHOOK: Lineage: .*"),
"POSTHOOK: Lineage: ###Masked###");
private static final Pattern PATTERN_MASK_STATS = Pattern.compile("-- MASK_STATS");
private static final Pattern PATTERN_MASK_DATA_SIZE = Pattern.compile("-- MASK_DATA_SIZE");
private static final Pattern PATTERN_MASK_LINEAGE = Pattern.compile("-- MASK_LINEAGE");
private FsType fsType = FsType.LOCAL;
public static class LineProcessingResult {
private String line;
private boolean partialMaskWasMatched = false;
public LineProcessingResult(String line) {
this.line = line;
}
public String get() {
return line;
}
}
private final Pattern[] planMask = toPattern(new String[] {
".*[.][.][.] [0-9]* more.*",
"pk_-?[0-9]*_[0-9]*_[0-9]*",
"fk_-?[0-9]*_[0-9]*_[0-9]*",
"uk_-?[0-9]*_[0-9]*_[0-9]*",
"nn_-?[0-9]*_[0-9]*_[0-9]*", // not null constraint name
"dc_-?[0-9]*_[0-9]*_[0-9]*", // default constraint name
"org\\.apache\\.hadoop\\.hive\\.metastore\\.model\\.MConstraint@([0-9]|[a-z])*",
});
// Using patterns to match the whole line can take a long time, so we try to avoid it:
// for really long lines, matching a ".*some string.*" pattern may take up to 4 seconds each!
// Using String.startsWith instead of a pattern, as it is much faster
private final String[] maskIfStartsWith = new String[] {
"Deleted",
"Repair: Added partition to metastore",
"latestOffsets",
"minimumLag"
};
// Using String.contains instead of pattern, as it is much faster
private final String[] maskIfContains = new String[] {
"file:",
"pfile:",
"/tmp/",
"invalidscheme:",
"lastUpdateTime",
"lastAccessTime",
"lastModifiedTim",
"Owner",
"owner",
"CreateTime",
"LastAccessTime",
"Location",
"LOCATION '",
"transient_lastDdlTime",
"last_modified_",
"at org",
"at sun",
"at java",
"at junit",
"Caused by:",
"LOCK_QUERYID:",
"LOCK_TIME:",
"grantTime",
"job_",
"USING 'java -cp",
"DagName:",
"DagId:",
"total number of created files now is",
"hive-staging",
"at com.sun.proxy",
"at com.jolbox",
"at com.zaxxer"
};
// Using String.contains instead of pattern, as it is much faster
private final String[][] maskIfContainsMultiple = new String[][] {
{"Input:", "/data/files/"},
{"Output:", "/data/files/"}
};
private final QTestReplaceHandler replaceHandler;
public QOutProcessor(FsType fsType, QTestReplaceHandler replaceHandler) {
this.fsType = fsType;
this.replaceHandler = replaceHandler;
}
private Pattern[] toPattern(String[] patternStrs) {
Pattern[] patterns = new Pattern[patternStrs.length];
for (int i = 0; i < patternStrs.length; i++) {
patterns[i] = Pattern.compile(patternStrs[i]);
}
return patterns;
}
public void maskPatterns(String fname, String tname) throws Exception {
String line;
BufferedReader in;
BufferedWriter out;
File file = new File(fname);
File fileOrig = new File(fname + ".orig");
FileUtils.copyFile(file, fileOrig);
in = new BufferedReader(new InputStreamReader(new FileInputStream(fileOrig), "UTF-8"));
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF-8"));
boolean lastWasMasked = false;
while (null != (line = in.readLine())) {
LineProcessingResult result = processLine(line, tname);
if (result.line.equals(MASK_PATTERN)) {
// We're folding multiple masked lines into one.
if (!lastWasMasked) {
out.write(result.line);
out.write("\n");
lastWasMasked = true;
result.partialMaskWasMatched = false;
}
} else {
out.write(result.line);
out.write("\n");
lastWasMasked = false;
result.partialMaskWasMatched = false;
}
}
in.close();
out.close();
}
public LineProcessingResult processLine(String line, String tname) {
LineProcessingResult result = new LineProcessingResult(line);
Matcher matcher = null;
if (fsType == FsType.ENCRYPTED_HDFS) {
for (Pattern pattern : partialReservedPlanMask) {
matcher = pattern.matcher(result.line);
if (matcher.find()) {
result.line = PARTIAL_MASK_PATTERN + " " + matcher.group(0);
result.partialMaskWasMatched = true;
break;
}
}
}
else {
for (PatternReplacementPair prp : partialPlanMask) {
matcher = prp.pattern.matcher(result.line);
if (matcher.find()) {
result.line = result.line.replaceAll(prp.pattern.pattern(), prp.replacement);
result.partialMaskWasMatched = true;
}
}
}
if (!result.partialMaskWasMatched) {
for (Pair<Pattern, String> pair : patternsWithMaskComments) {
Pattern pattern = pair.getLeft();
String maskComment = pair.getRight();
matcher = pattern.matcher(result.line);
if (matcher.find()) {
result.line = matcher.replaceAll(maskComment);
result.partialMaskWasMatched = true;
break;
}
}
if (!result.partialMaskWasMatched && qMaskStatsQuerySet.contains(tname)) {
matcher = MASK_STATS.pattern.matcher(result.line);
if (matcher.find()) {
result.line = result.line.replaceAll(MASK_STATS.pattern.pattern(), MASK_STATS.replacement);
result.partialMaskWasMatched = true;
}
}
if (!result.partialMaskWasMatched && qMaskDataSizeQuerySet.contains(tname)) {
matcher = MASK_DATA_SIZE.pattern.matcher(result.line);
if (matcher.find()) {
result.line = result.line.replaceAll(MASK_DATA_SIZE.pattern.pattern(), MASK_DATA_SIZE.replacement);
result.partialMaskWasMatched = true;
}
}
if (!result.partialMaskWasMatched && qMaskLineageQuerySet.contains(tname)) {
matcher = MASK_LINEAGE.pattern.matcher(result.line);
if (matcher.find()) {
result.line = result.line.replaceAll(MASK_LINEAGE.pattern.pattern(), MASK_LINEAGE.replacement);
result.partialMaskWasMatched = true;
}
}
for (String prefix : maskIfStartsWith) {
if (result.line.startsWith(prefix)) {
result.line = MASK_PATTERN;
}
}
for (String word : maskIfContains) {
if (result.line.contains(word)) {
result.line = MASK_PATTERN;
}
}
for (String[] words : maskIfContainsMultiple) {
int pos = 0;
boolean containsAllInOrder = true;
for (String word : words) {
int wordPos = result.line.substring(pos).indexOf(word);
if (wordPos == -1) {
containsAllInOrder = false;
break;
} else {
pos += wordPos + word.length();
}
}
if (containsAllInOrder) {
result.line = MASK_PATTERN;
}
}
for (Pattern pattern : planMask) {
result.line = pattern.matcher(result.line).replaceAll(MASK_PATTERN);
}
}
result.line = replaceHandler.processLine(result.line);
return result;
}
private final Pattern[] partialReservedPlanMask = toPattern(new String[] {
"data/warehouse/(.*?/)+\\.hive-staging" // the directory might be db/table/partition
//TODO: add more expected test result here
});
/**
* Pattern to match and (partial) replacement text.
* For example, {"transaction":76,"bucketid":8249877}. We just want to mask 76 but a regex that
* matches just 76 will match a lot of other things.
*/
private final static class PatternReplacementPair {
private final Pattern pattern;
private final String replacement;
PatternReplacementPair(Pattern p, String r) {
pattern = p;
replacement = r;
}
}
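// Illustrative example: with the partial masks registered below, a line such as
// "Status: attempt_1234567890123_0001_1_00_000000_0 RUNNING" becomes
// "Status: attempt_#ID# RUNNING" while the rest of the line is preserved.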
private final PatternReplacementPair[] partialPlanMask;
{
ArrayList<PatternReplacementPair> ppm = new ArrayList<>();
ppm.add(new PatternReplacementPair(Pattern.compile("\\{\"writeid\":[1-9][0-9]*,\"bucketid\":"),
"{\"writeid\":### Masked writeid ###,\"bucketid\":"));
ppm.add(new PatternReplacementPair(Pattern.compile("attempt_[0-9_]+"), "attempt_#ID#"));
ppm.add(new PatternReplacementPair(Pattern.compile("vertex_[0-9_]+"), "vertex_#ID#"));
ppm.add(new PatternReplacementPair(Pattern.compile("task_[0-9_]+"), "task_#ID#"));
ppm.add(new PatternReplacementPair(Pattern.compile("for Spark session.*?:"),
"#SPARK_SESSION_ID#:"));
ppm.add(new PatternReplacementPair(Pattern.compile("rowcount = [0-9]+(\\.[0-9]+(E[0-9]+)?)?, cumulative cost = \\{.*\\}, id = [0-9]*"),
"rowcount = ###Masked###, cumulative cost = ###Masked###, id = ###Masked###"));
partialPlanMask = ppm.toArray(new PatternReplacementPair[ppm.size()]);
}
@SuppressWarnings("serial")
private ArrayList<Pair<Pattern, String>> initPatternWithMaskComments() {
return new ArrayList<Pair<Pattern, String>>() {
{
add(toPatternPair("(pblob|s3.?|swift|wasb.?).*hive-staging.*",
"### BLOBSTORE_STAGING_PATH ###"));
add(toPatternPair(PATH_HDFS_WITH_DATE_USER_GROUP_REGEX, String.format("%s %s$3$4 %s $6%s",
HDFS_USER_MASK, HDFS_GROUP_MASK, HDFS_DATE_MASK, HDFS_MASK)));
add(toPatternPair(PATH_HDFS_REGEX, String.format("$1%s", HDFS_MASK)));
}
};
}
/* This list may be modified by specific cli drivers to mask strings that change on every test */
private List<Pair<Pattern, String>> patternsWithMaskComments = initPatternWithMaskComments();
private Pair<Pattern, String> toPatternPair(String patternStr, String maskComment) {
return ImmutablePair.of(Pattern.compile(patternStr), maskComment);
}
public void addPatternWithMaskComment(String patternStr, String maskComment) {
patternsWithMaskComments.add(toPatternPair(patternStr, maskComment));
}
public void initMasks(File qf, String query) {
if (matches(PATTERN_MASK_STATS, query)) {
qMaskStatsQuerySet.add(qf.getName());
}
if (matches(PATTERN_MASK_DATA_SIZE, query)) {
qMaskDataSizeQuerySet.add(qf.getName());
}
if (matches(PATTERN_MASK_LINEAGE, query)) {
qMaskLineageQuerySet.add(qf.getName());
}
}
private boolean matches(Pattern pattern, String query) {
Matcher matcher = pattern.matcher(query);
if (matcher.find()) {
return true;
}
return false;
}
public void resetPatternwithMaskComments() {
patternsWithMaskComments = initPatternWithMaskComments();
}
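// Minimal usage sketch (illustrative, not part of the Hive test driver): register the masks
// requested by a query file via initMasks(), then rewrite the generated q.out in place with
// maskPatterns(). The file names and query text used here are hypothetical.
public static void exampleUsage(QTestReplaceHandler replaceHandler) throws Exception {
QOutProcessor processor = new QOutProcessor(FsType.LOCAL, replaceHandler);
File queryFile = new File("masking_example.q");
processor.initMasks(queryFile, "-- MASK_STATS\nselect count(*) from src;");
// maskPatterns keeps a copy of the original output as masking_example.q.out.orig
processor.maskPatterns("masking_example.q.out", queryFile.getName());
}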
}
|
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package Gemstone;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import phoenix.media;
import sage.SeriesInfo;
import sagex.api.AiringAPI;
import sagex.phoenix.metadata.ISeriesInfo;
import sagex.phoenix.vfs.IMediaFile;
import sagex.phoenix.vfs.IMediaFolder;
import sagex.phoenix.vfs.IMediaResource;
import sagex.phoenix.vfs.sage.SageMediaFile;
import sagex.phoenix.vfs.views.ViewFolder;
/**
*
* @author SBANTA
* @author JUSJOKEN
* - 10/01/2011 - added logging and changes to Category Filters
* - 04/04/2012 - updated for Gemstone
*/
public class MetadataCalls {
static private final Logger LOG = Logger.getLogger(MetadataCalls.class);
public static String PlayonDirectory = sagex.api.Configuration.GetServerProperty("PlayonPlayback/ImportDirectory", "/SageOnlineServicesEXEs\\UPnPBrowser\\PlayOn") + "\\TV\\";
public static String HuluFile = "Quicktime[H.264/50Kbps 480x368@24fps]";
public static String NetflixFile = "Quicktime[H.264/50Kbps 480x368@25fps]";
public static Integer GetSeasonNumber(Object MediaObject) {
return sagex.api.ShowAPI.GetShowSeasonNumber(MediaObject);
}
/*
public static String GetEpisodeTitle(Object MediaObject){
if(MediaObject.getClass().equals(Dividers.SageClass))
{
String Title = sagex.api.ShowAPI.GetShowEpisode(MediaObject);
if(Title.equals("")){
return sagex.api.MediaFileAPI.GetMediaFileMetadata(MediaObject, "EpisodeTitle");}
return Title;
}
else{
return MediaObject.toString();}}
*/
// Get the Season and Episode as a long number to be used in sorting
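// e.g. season 3, episode 7 becomes 3007 (season * 1000 + episode)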
public static Long GetSeasonEpisodeForSort(IMediaResource IMR) {
//ensure there is a valid SE otherwise return 0
if (IMR==null){
return Long.valueOf(0);
}
Object MediaObject = phoenix.media.GetSageMediaFile(IMR);
return (GetSeasonNumber(MediaObject).longValue() * 1000) + GetEpisodeNumber(MediaObject).longValue();
}
// Gemstone_MetadataCalls_DisplaySeasonEpisode
public static String DisplaySeasonEpisode(Object MediaObject, String Property) {
//ensure there is a valid SE otherwise return a blank string
if (MediaObject==null){
return "";
}
MediaObject = phoenix.media.GetSageMediaFile(MediaObject);
return FormatSeasonEpisode(GetSeasonNumber(MediaObject), GetEpisodeNumber(MediaObject), Property);
}
// Gemstone_MetadataCalls_FormatSeasonEpisode
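// e.g. season 1, episode 4 with Property "1x01" yields "1x04"; returns "" when either number is 0 or Property is "None"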
public static String FormatSeasonEpisode(int iSeason, int iEpisode, String Property) {
if (iSeason==0 || iEpisode==0){
return "";
}
if(Property.equals("S1E01")) {
return "S"+ iSeason + "E" + String.format("%02d", iEpisode);
} else if(Property.equals("S01E01")) {
return "S"+ String.format("%02d", iSeason) + "E" + String.format("%02d", iEpisode);
} else if(Property.equals("1x01")) {
return iSeason + "x" + String.format("%02d", iEpisode);
} else if(Property.equals("E01")) {
return "E" + String.format("%02d", iEpisode);
} else if(Property.equals("1")) {
return "" + iEpisode;
}else if(Property.equals("None")) {
return "";
} else {
return "S"+ iSeason + "E" + String.format("%02d", iEpisode);
}
}
public static String GetSeasonNumberDivider(Object MediaObject) {
return "Season " + GetSeasonNumber(MediaObject);
}
public static String GetSortTitle(Object MediaObject) {
String Title = GetMediaTitle(MediaObject);
if (Title==null || Title.equals("")) {
return "000";
}
return sagex.api.Database.StripLeadingArticles(Title.toLowerCase());
}
public static Integer GetEpisodeNumber(Object MediaObject) {
return sagex.api.ShowAPI.GetShowEpisodeNumber(MediaObject);
}
public static String GetEpisodeNumberPad(Object MediaObject) {
int en = GetEpisodeNumber(MediaObject);
return String.format("%02d", en);
}
public static String GetSeasonNumberPad(Object MediaObject) {
int en = GetSeasonNumber(MediaObject);
return String.format("%02d", en);
}
public static String GetFanartTitle(Object MediaObject){
String Title=sagex.api.MediaFileAPI.GetMediaFileMetadata(MediaObject, "MediaTitle");
if (Title==null || Title.equals("")){
Title=GetMediaTitle(MediaObject);
}
return Title;
}
//used for searching
public static String GetTitleLowerCase(Object MediaObject) {
return GetTitle(MediaObject).toLowerCase();
}
public static String GetMediaTitle(Object MediaObject) {
String Title = sagex.api.ShowAPI.GetShowTitle(MediaObject);
if (Title==null || Title.equals("")) {
Title = sagex.api.MediaFileAPI.GetMediaTitle(MediaObject);
}
if (Title==null || Title.equals("")) {
Title = sagex.api.AiringAPI.GetAiringTitle(MediaObject);
}
if (Title==null || Title.equals("")) {
return Const.UnknownName; // was "Unkown"
}
return Title;
}
public static int GetShowDuration(Object MediaObject) {
long duration = sagex.api.ShowAPI.GetShowDuration(MediaObject);
int durationint = (int) duration;
if (durationint > 0) {
durationint = durationint / 60000;
}
return durationint;
}
public static boolean IsImportedNotPlayon(Object MediaObject) {
return !IsPlayonFile(MediaObject) && IsImportedTV(MediaObject);
}
public static boolean IsPlayonFile(Object MediaObject) {
if (sagex.api.MediaFileAPI.GetMediaFileMetadata(MediaObject, "Copyright").contains("PlayOn")) {
return true;
}
return false;
}
public static String GetPlayonFileType(Object MediaObject) {
String Comment = sagex.api.MediaFileAPI.GetMediaFileMetadata(MediaObject, "Copyright");
String[] SplitString = Comment.split(",");
if (SplitString.length == 2)
{
return SplitString[1];
}
return "";
}
// public static String GetEpisodeTitle(Object MediaObject){
// if(MediaObject.getClass().equals(Dividers.SageClass))
// {
// String Title = sagex.api.ShowAPI.GetShowEpisode(MediaObject);
// if(Title.equals("")){
// return sagex.api.MediaFileAPI.GetMediaFileMetadata(MediaObject, "EpisodeTitle");}
// return Title;
// }
// else{
// return MediaObject.toString();}}
// public static String GetEpisodeTitleDivider(Object MediaObject)
// {
// String EpisodeTitle=GetEpisodeTitle(MediaObject);
// if(EpisodeTitle.equals(null)||EpisodeTitle.length()<1){
// return "Unknown";}
// return GetEpisodeTitle(MediaObject).substring(0,1).toUpperCase();
// }
public static String GetMovieReleaseYear(Object MediaObject) {
//System.out.println("No Original Air Date");
return sagex.api.ShowAPI.GetShowYear(MediaObject);
}
public static int GetMediaFileID(Object MediaObject) {
return sagex.api.MediaFileAPI.GetMediaFileID(MediaObject);
}
public static long GetMovieOriginalAirDate(Object MediaObject) {
String s1 = sagex.api.MediaFileAPI.GetMediaFileMetadata(MediaObject, "OriginalAirDate");
if (s1.length() == 0) {
//System.out.println("No Original Air Date");
return 0;
}
// return DateConverter.GetDateFromLong(Long.parseLong(s1));
return Long.parseLong(s1);
}
public static long GetOriginalAirDate(Object MediaObject) /*returns the OriginalAiringDate as a long (in java date format) gathered from the metadata
* 0 if it does not exist or catches an error
*
* @param MediaObject, a sage Airing, Show, or MediaFile
*/ {
String s1 = sagex.api.MediaFileAPI.GetMediaFileMetadata(MediaObject, "OriginalAirDate");
//System.out.println("OriginalAirDateString = '" + OrigAirDateString + "'");
if (s1.length() == 0) {
//System.out.println("No Original Air Date");
return 0;
} //System.out.println("OriginalAirDateString = '" + OrigAirDateString + "'");
else {
// LOG.debug("OriginalAiringDate="+s1);
Long l1 = Long.parseLong(s1);
// DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
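// Assumption: adding the small season/episode number to the air-date timestamp keeps episodes
// that share the same original air date in episode order when this value is used as a sort key.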
return l1 + GetSeasonEpisodeNumber(MediaObject);
}
}
public static int GetSeasonEpisodeNumber(Object MediaObject) /*
* returns the SeasonEpisode Number as an integer
* or 0 if Season Number does not exist
* 101 (s01e01)
* 201 (s02e01)
* 1010 (s10e10)
*
* @param MediaObject, a sage Airing, Show, or MediaFile Object
*/ {
int sn = GetSeasonNumber(MediaObject);
int en = GetEpisodeNumber(MediaObject);
if (sn == 0) {
return 0;
} else {
return sn * 100 + en;
}
}
public static String GetMediaType(Object MediaObject) {
return sagex.api.MediaFileAPI.GetMediaFileMetadata(MediaObject, "MediaType");
}
public static boolean IsMediaTypeTV(Object MediaObject) {
String Type = sagex.api.MediaFileAPI.GetMediaFileMetadata(MediaObject, "MediaType");
if (Type.contains("TV") || sagex.api.MediaFileAPI.IsTVFile(MediaObject)) {
return true;
} else {
return false;
}
}
public static boolean IsImportedTV(Object MediaObject) {
if (IsMediaTypeTV(MediaObject) && !sagex.api.MediaFileAPI.IsTVFile(MediaObject)) {
return true;
} else {
return false;
}
}
public static boolean IsRecordedTV(Object MediaObject) {
return sagex.api.MediaFileAPI.IsTVFile(MediaObject);
}
public static Long GetDateRecorded(Object MediaObject) {
return sagex.api.AiringAPI.GetAiringStartTime(MediaObject);
}
//return the first category ignoring the Movie and Film categories. Return unknown if none
public static String GetShowCategory(Object MediaObject) {
return GetAllShowCategories(MediaObject).get(0);
}
// public static String GetShowCategory(Object MediaObject) {
// String Cat = sagex.api.ShowAPI.GetShowCategory(MediaObject);
// LOG.debug("GetShowCategory = '" + Cat + "'");
// if (Cat.startsWith("Movie") && Cat.contains("/")) {
//
// Cat = Cat.substring(Cat.indexOf("/"));
// }
//
// if (Cat.equals("")) {
// return "unknown";
// }
// if (Cat.contains("and")) {
// return Cat.substring(0, Cat.indexOf("and") - 1);
// }
// if (Cat.contains(",")) {
//
// return Cat.substring(0, Cat.indexOf(","));
// }
// return Cat;
//
// }
// public static String[] GetShowCategories(Object MediaObject){
// return sagex.api.ShowAPI.GetShowCategory(MediaObject).split(",");}
// public static String GetTimeAdded(Object Title) {
// // Check to see if date variables have been set
// if(!DateConverter.IsDateVariableSet){
// LOG.trace("DateVariablesNot set Go ahead and set them");
// String First =sagex.api.Configuration.GetProperty(SortMethods.PropertyPrefix+"DateGroupFirstTime","10080");
// String Second = sagex.api.Configuration.GetProperty(SortMethods.PropertyPrefix+"DateGroupSecondTime","20160");
// String Third = sagex.api.Configuration.GetProperty(SortMethods.PropertyPrefix+"DateGroupThirdTime","43200");
// String Fourth = sagex.api.Configuration.GetProperty(SortMethods.PropertyPrefix+"DateGroupFourthTime","86400");
// Long LFirst = java.lang.Long.parseLong(First);
// LFirst = LFirst *60*1000;
// Long LSecond = java.lang.Long.parseLong(Second);
// LSecond = LSecond*60*1000;
// Long LThird = java.lang.Long.parseLong(Third);
// LThird = LThird*60*1000;
// Long LFourth = java.lang.Long.parseLong(Fourth);
// LFourth = LFourth *60*1000;
// Long CurrTime = System.currentTimeMillis();
// DateConverter.FirstDateGroup =CurrTime-LFirst;
// DateConverter.SecondDateGroup=CurrTime-LSecond;
// DateConverter.ThirdDateGroup=CurrTime-LThird;
// DateConverter.FourthDateGroup=CurrTime-LFourth;
// DateConverter.FifthDateGroup =CurrTime-(Long.parseLong(sagex.api.Configuration.GetProperty(SortMethods.PropertyPrefix+"DateGroupFifthTime","86401"))*60*1000);
// DateConverter.IsDateVariableSet=true;}
// LOG.trace("DateVariables Set to="+DateConverter.FirstDateGroup+":");
// LOG.trace("DateVariables Set to="+DateConverter.SecondDateGroup+":");
// LOG.trace("DateVariables Set to="+DateConverter.ThirdDateGroup+":");
// LOG.trace("DateVariables Set to="+DateConverter.FourthDateGroup+":");
// LOG.trace("DateVariables Set to="+DateConverter.FifthDateGroup+":");
//
// Long DateAdded = (Long) ClassFromString.GetDateClass("GetOriginalAirDate", Title);
// if(DateAdded>=DateConverter.FirstDateGroup){
// return sagex.api.Configuration.GetProperty(SortMethods.PropertyPrefix+"DateGroupFirstName","New");}
// else if (DateAdded>=DateConverter.SecondDateGroup){
// return sagex.api.Configuration.GetProperty(SortMethods.PropertyPrefix+"DateGroupSecondName","Last Week");}
// else if (DateAdded>=DateConverter.ThirdDateGroup){
// return sagex.api.Configuration.GetProperty(SortMethods.PropertyPrefix+"DateGroupThirdName","30 days");}
// else if (DateAdded>=DateConverter.FourthDateGroup){
// return sagex.api.Configuration.GetProperty(SortMethods.PropertyPrefix+"DateGroupFourthName","60 days");}
// else{
//
// return sagex.api.Configuration.GetProperty(SortMethods.PropertyPrefix+"DateGroupFifthName","Older");}
//
// }
// public static ArrayList<String> GetAllShowCategories(Object MediaObject) {
// String[] Cats = sagex.api.ShowAPI.GetShowCategory(MediaObject).split(",");
//
// ArrayList<String> AllCats = new ArrayList<String>();
// for (String curr : Cats) {
// if (curr.contains("and")) {
// curr = curr.trim();
// String[] andsplit = curr.split("and");
// String cat1 = andsplit[0];
// cat1 = cat1.trim();
// String cat2 = andsplit[0];
// cat1 = cat2.trim();
// System.out.print("Adding Category==" + cat1 + "!!" + cat2 + "!!");
// AllCats.add(cat1);
// AllCats.add(cat2);
// } else if (curr.equals("")) {
// AllCats.add("unknown");
// } else if (curr.startsWith(" ")) {
// AllCats.add(curr.substring(1));
// } else {
// System.out.println("Adding single categories=" + curr + "!!");
// AllCats.add(curr);
// }
// }
// return AllCats;
// }
public static String GetShowCategoriesString(Object MediaObject){
String retString = "";
for (String Cat: GetAllShowCategories(MediaObject)){
if (Cat.equals("unknown")){
//do not add the unknown category
}else if (retString.equals("")){
retString = Cat;
}else{
retString = retString + " / " + Cat;
}
}
return retString;
}
//get a list of all the categories with Movies/Film removed and unknown assigned if no category exists
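// e.g. "Comedy and Drama, Movie / Family" becomes ["Comedy", "Drama", "Family"]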
public static ArrayList<String> GetAllShowCategories(Object MediaObject) {
//LOG.debug("====== " + GetMediaTitle(MediaObject) + "========");
//LOG.debug("GetShowCategoriesString = '" + sagex.api.ShowAPI.GetShowCategoriesString(MediaObject) + "'");
String SplitChars = "[,;/]";
String[] Cats = sagex.api.ShowAPI.GetShowCategoriesString(MediaObject).split(SplitChars);
ArrayList<String> AllCats = new ArrayList<String>();
for (String curr : Cats) {
curr = curr.trim();
if (curr.contains(" and ")) {
//curr = curr.trim();
String[] andsplit = curr.split("and");
String cat1 = andsplit[0];
cat1 = cat1.trim();
String cat2 = andsplit[1];
cat2 = cat2.trim();
//LOG.debug("Adding Category ='" + cat1 + "'");
AllCats.add(cat1);
//LOG.debug("Adding Category ='" + cat2 + "'");
AllCats.add(cat2);
} else if (curr.toLowerCase().equals("movie") || curr.toLowerCase().equals("film")) {
//do not add as we want to skip these
//LOG.debug("Skipping 'movie or film' category");
} else if (curr.equals("")) {
//LOG.debug("Adding 'unknown' category");
AllCats.add("unknown");
} else {
//LOG.debug("Adding single categories = '" + curr + "'");
AllCats.add(curr);
}
}
return AllCats;
}
public static String GetGenresasString(IMediaResource MediaObject, String Separator){
String Value = "";
IMediaResource imediaresource = MediaObject;
if (phoenix.media.IsMediaType( imediaresource , "FOLDER" )){
imediaresource = ImageCache.GetChild(imediaresource, Boolean.FALSE);
}
if (imediaresource==null){
return Value;
}
List<String> ListValue = phoenix.metadata.GetGenres(imediaresource);
if (ListValue.size()>0){
for (String ListItem : ListValue){
if (ListItem.equalsIgnoreCase("movie")||ListItem.equalsIgnoreCase("film")||ListItem.equalsIgnoreCase("")){
//skip these
}else{
if (Value.equals("")){
Value = ListItem;
}else{
Value = Value + Separator + ListItem;
}
}
//LOG.debug("GetGenresasString: checked '" + ListItem + "' result '" + Value + "'");
}
}
return Value;
}
//Convenience method that converts the incoming object parameter to an IMediaResource type
public static String GetGenresasString(Object MediaObject, String Separator){
return GetGenresasString(Source.ConvertToIMR(MediaObject), Separator);
}
public static int AiredAgeInDays(Object MediaObject)
{
//divide as a long before casting so ages beyond ~24 days do not overflow an int
return (int)((sagex.api.Utility.Time() - sagex.api.AiringAPI.GetAiringStartTime(MediaObject))/(86400L*1000));
}
public static int RecordedAgeInDays(Object MediaObject)
{
return (int)((sagex.api.Utility.Time() - sagex.api.AiringAPI.GetRealWatchedStartTime(MediaObject))/(86400L*1000));
}
public static String GetTitle(IMediaResource imediaresource){
return GetTitle(imediaresource, true);
}
//return a consistent Title dependent on the media item and the type
public static String GetTitle(IMediaResource imediaresource, boolean IncludeDiscNo){
if (imediaresource==null){
LOG.debug("GetTitle: null imediaresource");
return "";
}
if (imediaresource.toString().contains("BlankItem")){
LOG.debug("GetTitle: title request for BlankItem");
return "";
}
String specialType = Source.GetSpecialType(imediaresource);
String tTitle = imediaresource.getTitle();
if ("tv".equals(specialType) || "airing".equals(specialType) || "recording".equals(specialType)){ //return the episode name
String eTitle = null;
if ("tv".equals(specialType)){
eTitle = phoenix.metadata.GetEpisodeName(imediaresource);
//LOG.debug("GetTitle: tv - eTitle from phoenix '" + eTitle + "'");
}else{ //special handling for airings (EPG) to use the show object
eTitle = sagex.api.ShowAPI.GetShowEpisode(phoenix.media.GetMediaObject(imediaresource));
//LOG.debug("GetTitle: airing - eTitle from ShowAPI.GetShowEpisode '" + eTitle + "'");
}
if (eTitle==null){
LOG.debug("GetTitle: sType '" + specialType + "' no episode title so using default Title '" + tTitle + "'");
return tTitle;
}else if (eTitle.equals("")){
LOG.debug("GetTitle: sType '" + specialType + "' no episode title so using default Title '" + tTitle + "'");
return tTitle;
}else{
LOG.debug("GetTitle: sType '" + specialType + "' episode title found '" + eTitle + "'");
return eTitle;
}
}
if (phoenix.media.IsMediaType( imediaresource , "FOLDER" )){
LOG.debug("GetTitle: sType '" + specialType + "' FOLDER found so using default Title '" + tTitle + "'");
}else{
if (IncludeDiscNo){
//see if there is a Disc or Part number to append
int Disc = phoenix.metadata.GetDiscNumber(imediaresource);
//LOG.debug("GetTitle: Disc '" + Disc + "' for tTitle '" + tTitle + "'");
if (Disc==0){
//do not append the Disc/Part
}else{
tTitle = tTitle + " (" + Disc + ")";
}
}
LOG.debug("GetTitle: sType '" + specialType + "' non tv type so using default Title '" + tTitle + "'");
}
return tTitle;
}
public static String GetTitle(Object imediaresource){
return GetTitle(Source.ConvertToIMR(imediaresource), true);
}
public static String GetTitle(Object imediaresource, boolean IncludeDiscNo){
return GetTitle(Source.ConvertToIMR(imediaresource), IncludeDiscNo);
}
//Series related metadata
public static String GetSeriesTitle(Object IMR){
IMediaResource imediaresource = Source.GetTVIMediaResource(IMR);
if (imediaresource!=null){
String tReturn = null;
String specialType = Source.GetSpecialType(imediaresource);
//special handling for Airing (EPG) items
if (specialType.equals("airing") || specialType.equals("recording")){
tReturn = sagex.api.AiringAPI.GetAiringTitle(phoenix.media.GetMediaObject(imediaresource));
//LOG.debug("GetSeriesTitle: SpecialType '" + Source.GetSpecialType(imediaresource) + "' returned '" + tReturn + "' for '" + imediaresource + "'");
}else{
tReturn = phoenix.metadata.GetMediaTitle(imediaresource);
//tReturn = phoenix.series.GetTitle(phoenix.media.GetSeriesInfo(phoenix.media.GetMediaFile(imediaresource)));
//LOG.debug("GetSeriesTitle: series.GetTitle returned '" + tReturn + "' for '" + imediaresource + "'");
}
if (tReturn==null){
LOG.debug("GetSeriesTitle: type '" + specialType + "' null found so using GetTitle instead '" + GetTitle(imediaresource) + "' for '" + imediaresource + "'");
return GetTitle(imediaresource);
}else if (tReturn.isEmpty()){
LOG.debug("GetSeriesTitle: type '" + specialType + "' empty found so using GetTitle instead '" + GetTitle(imediaresource) + "' for '" + imediaresource + "'");
return GetTitle(imediaresource);
}else{
LOG.debug("GetSeriesTitle: type '" + specialType + "' good return value found '" + tReturn + "' for '" + imediaresource + "'");
return tReturn;
}
}
return "";
}
//use for single file or folder requests
public static Boolean IsCurrentlyRecording(Object IMR){
IMediaResource imediaresource = Source.ConvertToIMR(IMR);
if (imediaresource==null){
return Boolean.FALSE;
}else{
if (phoenix.media.IsMediaType( imediaresource , "FOLDER" )){
ViewFolder Folder = (ViewFolder) imediaresource;
List Children = phoenix.media.GetAllChildren(Folder);
//see if any children are being recorded
Integer Count = sagex.api.Utility.Size(sagex.api.Utility.GetSubgroup(sagex.api.Database.GroupByMethod(Children,"Gemstone_MetadataCalls_IsFileCurrentlyRecording"),true));
//LOG.debug("IsCurrentlyRecording: FOLDER - Recording '" + Count + "' of '" + Children.size() + "' Items");
if (Count>0){
return Boolean.TRUE;
}else{
return Boolean.FALSE;
}
}else{
return sagex.api.MediaFileAPI.IsFileCurrentlyRecording(imediaresource.getMediaObject());
}
}
}
//use for single file requests
public static boolean IsFileCurrentlyRecording(Object MediaObject){
return sagex.api.MediaFileAPI.IsFileCurrentlyRecording(((IMediaFile)MediaObject).getMediaObject());
}
// public static String GetUserCategory(Object IMR){
// IMediaResource imediaresource = Source.ConvertToIMR(IMR);
// if (imediaresource!=null){
// if (phoenix.media.IsMediaType( imediaresource , "FOLDER" )){
// return "FOLDER";
// }else{
// String tCat = sagex.api.MediaFileAPI.GetMediaFileMetadata(IMR, "UserCategory");
// if (tCat==null){
// return "null found";
// }else{
// return tCat;
// }
// }
// }
// return "";
// }
public static String GetAiredYear(Object IMR){
IMediaResource imediaresource = Source.ConvertToIMR(IMR);
if (imediaresource!=null){
if (phoenix.media.IsMediaType( imediaresource , "FOLDER" )){
//get the range of years
ViewFolder Folder = (ViewFolder) imediaresource;
Object[] tList = (Object[]) sagex.api.Database.Sort(phoenix.media.GetAllChildren(Folder), false, "phoenix_metadata_GetOriginalAirDate");
String firstYear = getAiredYear((IMediaResource)tList[0]);
String lastYear = getAiredYear((IMediaResource)tList[tList.length-1]);
if (firstYear.equals(lastYear)){
return "Aired in " + firstYear;
}else{
return "Aired " + firstYear + " - " + lastYear;
}
}else{
//LOG.debug("GetAiredYear: Not a folder so returning getAiredYear(imediaresource)");
return "Aired in " + getAiredYear(imediaresource);
}
}
return "";
}
private static String getAiredYear(IMediaResource imediaresource){
//get the aired Year
Date tReturn = phoenix.metadata.GetOriginalAirDate(imediaresource);
SimpleDateFormat simpleDateformat = new SimpleDateFormat("yyyy");
return simpleDateformat.format(tReturn);
}
//Series related metadata
public static String GetRunningInfo(Object IMR){
IMediaResource imediaresource = Source.GetTVIMediaResource(IMR);
if (imediaresource!=null){
String tReturn = phoenix.series.GetFinaleDate(phoenix.media.GetSeriesInfo(phoenix.media.GetMediaFile(imediaresource)));
//LOG.debug("GetRunningInfo: GetFinaleDate returned '" + tReturn + "' for '" + imediaresource + "'");
if (tReturn==null || tReturn.isEmpty()){
return "Series continuing";
}else{
return "Series ended";
}
}
return "";
}
//Series related metadata
public static String GetNetwork(Object IMR){
IMediaResource imediaresource = Source.GetTVIMediaResource(IMR);
if (imediaresource!=null){
String tReturn = phoenix.series.GetNetwork(phoenix.media.GetSeriesInfo(phoenix.media.GetMediaFile(imediaresource)));
//LOG.debug("GetNetwork: GetGetNetwork returned '" + tReturn + "' for '" + imediaresource + "'");
if (tReturn==null || tReturn.isEmpty()){
return "";
}else{
return tReturn;
}
}
return "";
}
public static String GetRated(Object IMR){
IMediaResource imediaresource = Source.GetChildIMediaResource(IMR);
if (imediaresource!=null){
String tReturn = phoenix.metadata.GetRated(imediaresource);
//LOG.debug("GetRated: GetRated returned '" + tReturn + "' for '" + imediaresource + "'");
if (tReturn==null || tReturn.isEmpty() || tReturn.equals("null")){
return "";
}else{
return tReturn;
}
}
return "";
}
public static String GetCollectionOverview(Object IMR){
IMediaResource imediaresource = Source.ConvertToIMR(IMR);
if (imediaresource!=null){
if (phoenix.media.IsMediaType( imediaresource , "FOLDER" )){
imediaresource = ImageCache.GetChild(imediaresource, Boolean.FALSE);
}
if (imediaresource!=null){
return phoenix.metadata.GetCollectionOverview(imediaresource);
}
}
return "";
}
public static String GetDescription(Object IMR){
IMediaResource imediaresource = Source.ConvertToIMR(IMR);
if (imediaresource!=null){
if (phoenix.media.IsMediaType( imediaresource , "FOLDER" )){
imediaresource = ImageCache.GetChild(imediaresource, Boolean.FALSE);
}
if (imediaresource!=null){
return phoenix.metadata.GetDescription(imediaresource);
}
}
return "";
}
//Series related metadata
public static String GetSeriesDescription(Object IMR){
IMediaResource imediaresource = Source.GetTVIMediaResource(IMR);
LOG.debug("GetSeriesDescription: converted passed in to IMR = '" + imediaresource + "'");
String tReturn = "";
if (imediaresource==null) {
LOG.debug("GetSeriesDescription: null passed in so returning empty string");
return "";
}
//we need a SageMediaFile to deal with the Series Info so get one
IMediaFile iMF = media.GetMediaFile(imediaresource);
if (iMF==null){
LOG.debug("GetSeriesDescription: could not convert IMR to a MediaFile so returning empty string. IMR = '" + imediaresource + "'");
return "";
}
ISeriesInfo iSeriesInfo = media.GetSeriesInfo(iMF);
if (iSeriesInfo==null){
//LOG.debug("GetSeriesDescription: could not get an ISeriesInfo so returning empty string. iMF = '" + iMF + "'");
//return "";
//TODO:: temp solution to get a SeriesInfo until V9 core is corrected
//first get AllSeriesInfo
Object[] AllSeriesInfo = sagex.api.SeriesInfoAPI.GetAllSeriesInfo();
if (AllSeriesInfo==null){
LOG.debug("GetSeriesDescription (ALT): could not get AllSeriesInfo");
return "";
}
//Group the Series objects by SeriesTitle
Map myMap = sagex.api.Database.GroupByMethod(AllSeriesInfo,"GetSeriesTitle");
if (myMap==null){
LOG.debug("GetSeriesDescription (ALT): could not get myMap from AllSeriesInfo '" + AllSeriesInfo + "'");
return "";
}
//need the Title of the Airing
String aTitle = sagex.api.AiringAPI.GetAiringTitle(phoenix.media.GetSageMediaFile(iMF));
if (aTitle==null){
LOG.debug("GetSeriesDescription (ALT): could not get aTitle from iMF '" + iMF + "'");
return "";
}
//now get the specific SeriesTitle subgroup
Object mySub = sagex.api.Utility.GetSubgroup(myMap,aTitle);
if (mySub==null){
LOG.debug("GetSeriesDescription (ALT): could not get mySub from myMap '" + myMap + "' for Title '" + aTitle + "'");
return "";
}
Object mySeries = sagex.api.Utility.GetElement(mySub,0);
if(mySeries==null){
LOG.debug("GetSeriesDescription (ALT): could not get a SeriesInfo from mySub '" + mySub + "'");
return "";
}
LOG.debug("GetSeriesDescription (ALT): using SeriesInfo as Object '" + mySeries + "'");
SeriesInfo sageSeries = (SeriesInfo) mySeries;
LOG.debug("GetSeriesDescription (ALT): using SeriesInfo as SeriesInfo '" + sageSeries + "' id: '" + sageSeries.getID() + "' SeriesID " + sageSeries.getSeriesID() + "' Title '" + sageSeries.getTitle() + "'");
//now return the SeriesDescription
String myDesc = sagex.api.SeriesInfoAPI.GetSeriesDescription(mySeries);
LOG.debug("GetSeriesDescription (ALT): returning '" + myDesc + "'");
return myDesc;
//CurrentSeries=GetElement(GetSubgroup(GroupByMethod(GetAllSeriesInfo(),"GetSeriesTitle"),GetAiringTitle(Gemstone_MetadataCalls_GetAiring(FocusedItem))),0)
}
//We have a SeriesInfo so now get the description
LOG.debug("GetSeriesDescription: returning '" + phoenix.series.GetDescription(iSeriesInfo) + "'");
return phoenix.series.GetDescription(iSeriesInfo);
//tReturn = sagex.api.SeriesInfoAPI.GetSeriesDescription(sagex.api.ShowAPI.GetShowSeriesInfo(sagex.api.MediaFileAPI.GetMediaFileAiring(sageMF)));
//String tReturn = phoenix.series.GetDescription(phoenix.media.GetSeriesInfo(phoenix.media.GetMediaFile(imediaresource)));
}
// public static Object GetAiring(Object IMR){
// IMediaResource imediaresource = Source.GetTVIMediaResource(IMR);
// Object tReturn = null;
// if (imediaresource!=null){
// tReturn = sagex.api.MediaFileAPI.GetMediaFileAiring(phoenix.media.GetSageMediaFile(imediaresource));
// }
// LOG.debug("*****GetAiring returned '" + tReturn + "' for '" + imediaresource + "'");
// return tReturn;
// }
//Series related metadata
public static Boolean IsHDTV(Object IMR){
IMediaResource imediaresource = Source.GetTVIMediaResource(IMR);
if (imediaresource!=null){
Boolean tReturn = phoenix.metadata.IsHDTV(imediaresource);
//LOG.debug("IsHDTV: IsHDTV returned '" + tReturn + "' for '" + imediaresource + "'");
return tReturn;
}
return Boolean.FALSE;
}
public static Boolean IsWatched(Object IMR){
IMediaResource imediaresource = Source.ConvertToIMR(IMR);
if (imediaresource!=null){
if (phoenix.media.IsMediaType( imediaresource , "FOLDER" )){
//see if ALL the Children are watched by seeing if we find any unwatched ones
ViewFolder Folder = (ViewFolder) imediaresource;
List Children = phoenix.media.GetAllChildren(Folder);
//Object[] tList = (Object[]) sagex.api.Database.Sort(Children, true, "phoenix_metadata_GetOriginalAirDate");
//Get all the Watched items
Integer WatchedCount = sagex.api.Utility.Size(sagex.api.Utility.GetSubgroup(sagex.api.Database.GroupByMethod(Children,"phoenix_media_IsWatched"),true));
//LOG.debug("IsWatched: FOLDER - Watched '" + WatchedCount + "' of '" + Children.size() + "' Items");
if (Children.size()==WatchedCount){
return Boolean.TRUE;
}
}else{
return imediaresource.isWatched();
}
}
return Boolean.FALSE;
}
public static void SetWatched(Object IMR){
ChangeWatched(IMR, Boolean.TRUE);
}
public static void ClearWatched(Object IMR){
ChangeWatched(IMR, Boolean.FALSE);
}
public static void ChangeWatched(Object IMR){
ChangeWatched(IMR, !IsWatched(IMR));
}
public static void ChangeWatched(Object IMR, Boolean Value){
IMediaResource imediaresource = Source.ConvertToIMR(IMR);
if (imediaresource!=null){
if (phoenix.media.IsMediaType( imediaresource , "FOLDER" )){
//apply the watched value to ALL the children of the FOLDER
ViewFolder Folder = (ViewFolder) imediaresource;
for (Object child:phoenix.media.GetAllChildren(Folder)){
if (child instanceof IMediaResource){
IMediaResource iChild = (IMediaResource) child;
iChild.setWatched(Value);
}
}
}else{
//LOG.info("***BEFORE***SETWATCHED*** getLastWatchedTimeStamp = '" + GetLastWatchedTimeStamp(imediaresource) + "' W = '" + imediaresource.isWatched() + "'");
imediaresource.setWatched(Value);
//LOG.info("***AFTER ***SETWATCHED*** getLastWatchedTimeStamp = '" + GetLastWatchedTimeStamp(imediaresource) + "' W = '" + imediaresource.isWatched() + "'");
}
}
}
public static long GetLastWatchedTimeStamp(IMediaResource o) {
if (o instanceof IMediaFile) {
Object theairing = o.getMediaObject();
Object thisAiring = phoenix.media.GetMediaObject(theairing);
//LOG.debug("IMediaFile found - realwatched from thisAiring '" + AiringAPI.GetRealWatchedStartTime(thisAiring) + "' for S" + phoenix.metadata.GetSeasonNumber(thisAiring) + "E" + phoenix.metadata.GetEpisodeNumber(thisAiring));
return AiringAPI.GetRealWatchedStartTime(thisAiring);
}
if (o instanceof IMediaFolder) {
//LOG.debug("IMediaFolder found - searching");
return searchFolderForLastWatchedTimeStamp((IMediaFolder) o);
}
// If it's not a File or Folder just return a 0
//LOG.debug("Neither found - class is '" + o.getClass() + "'");
return 0;
}
private static long searchFolderForLastWatchedTimeStamp(final IMediaFolder folder) {
List<IMediaResource> children = folder.getChildren();
long returnValue = 0;
for (IMediaResource r : children) {
long candidate = GetLastWatchedTimeStamp(r);
if (candidate > returnValue) {
// this item is more recent than the previously saved item
returnValue = candidate;
}
}
return returnValue;
}
public static Boolean IsArchived(Object IMR){
IMediaResource imediaresource = Source.ConvertToIMR(IMR);
if (imediaresource!=null){
if (phoenix.media.IsMediaType( imediaresource , "FOLDER" )){
//see if ALL the Children are archived by seeing if we find any unarchived ones
ViewFolder Folder = (ViewFolder) imediaresource;
List Children = phoenix.media.GetAllChildren(Folder);
//Get all the Archived items
Integer ArchivedCount = sagex.api.Utility.Size(sagex.api.Utility.GetSubgroup(sagex.api.Database.GroupByMethod(Children,"phoenix_media_isLibraryFile"),true));
if (Children.size()==ArchivedCount){
return Boolean.TRUE;
}
}else{
return imediaresource.isLibraryFile();
}
}
return Boolean.FALSE;
}
public static void SetArchived(Object IMR){
ChangeArchived(IMR, Boolean.TRUE);
}
public static void ClearArchived(Object IMR){
ChangeArchived(IMR, Boolean.FALSE);
}
public static void ChangeArchived(Object IMR){
ChangeArchived(IMR, !IsArchived(IMR));
}
public static void ChangeArchived(Object IMR, Boolean Value){
IMediaResource imediaresource = Source.ConvertToIMR(IMR);
if (imediaresource!=null){
if (phoenix.media.IsMediaType( imediaresource , "FOLDER" )){
//apply the archived (library file) value to ALL the children of the FOLDER
ViewFolder Folder = (ViewFolder) imediaresource;
for (Object child:phoenix.media.GetAllChildren(Folder)){
if (child instanceof IMediaResource){
IMediaResource iChild = (IMediaResource) child;
iChild.setLibraryFile(Value);
}
}
}else{
imediaresource.setLibraryFile(Value);
}
}
}
}
|
|
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver12;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFOxmInPortVer12 implements OFOxmInPort {
private static final Logger logger = LoggerFactory.getLogger(OFOxmInPortVer12.class);
// version: 1.2
final static byte WIRE_VERSION = 3;
final static int LENGTH = 8;
private final static OFPort DEFAULT_VALUE = OFPort.ANY;
// OF message fields
private final OFPort value;
//
// Immutable default instance
final static OFOxmInPortVer12 DEFAULT = new OFOxmInPortVer12(
DEFAULT_VALUE
);
// package private constructor - used by readers, builders, and factory
OFOxmInPortVer12(OFPort value) {
if(value == null) {
throw new NullPointerException("OFOxmInPortVer12: property value cannot be null");
}
this.value = value;
}
// Accessors for OF message fields
@Override
public long getTypeLen() {
return 0x80000004L;
}
@Override
public OFPort getValue() {
return value;
}
@Override
public MatchField<OFPort> getMatchField() {
return MatchField.IN_PORT;
}
@Override
public boolean isMasked() {
return false;
}
public OFOxm<OFPort> getCanonical() {
// exact match OXM is always canonical
return this;
}
@Override
public OFPort getMask()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property mask not supported in version 1.2");
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_12;
}
public OFOxmInPort.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFOxmInPort.Builder {
final OFOxmInPortVer12 parentMessage;
// OF message fields
private boolean valueSet;
private OFPort value;
BuilderWithParent(OFOxmInPortVer12 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public long getTypeLen() {
return 0x80000004L;
}
@Override
public OFPort getValue() {
return value;
}
@Override
public OFOxmInPort.Builder setValue(OFPort value) {
this.value = value;
this.valueSet = true;
return this;
}
@Override
public MatchField<OFPort> getMatchField() {
return MatchField.IN_PORT;
}
@Override
public boolean isMasked() {
return false;
}
@Override
public OFOxm<OFPort> getCanonical()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property canonical not supported in version 1.2");
}
@Override
public OFPort getMask()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property mask not supported in version 1.2");
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_12;
}
@Override
public OFOxmInPort build() {
OFPort value = this.valueSet ? this.value : parentMessage.value;
if(value == null)
throw new NullPointerException("Property value must not be null");
//
return new OFOxmInPortVer12(
value
);
}
}
static class Builder implements OFOxmInPort.Builder {
// OF message fields
private boolean valueSet;
private OFPort value;
@Override
public long getTypeLen() {
return 0x80000004L;
}
@Override
public OFPort getValue() {
return value;
}
@Override
public OFOxmInPort.Builder setValue(OFPort value) {
this.value = value;
this.valueSet = true;
return this;
}
@Override
public MatchField<OFPort> getMatchField() {
return MatchField.IN_PORT;
}
@Override
public boolean isMasked() {
return false;
}
@Override
public OFOxm<OFPort> getCanonical()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property canonical not supported in version 1.2");
}
@Override
public OFPort getMask()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property mask not supported in version 1.2");
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_12;
}
//
@Override
public OFOxmInPort build() {
OFPort value = this.valueSet ? this.value : DEFAULT_VALUE;
if(value == null)
throw new NullPointerException("Property value must not be null");
return new OFOxmInPortVer12(
value
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFOxmInPort> {
@Override
public OFOxmInPort readFrom(ChannelBuffer bb) throws OFParseError {
// fixed value property typeLen == 0x80000004L
int typeLen = bb.readInt();
if(typeLen != (int) 0x80000004)
throw new OFParseError("Wrong typeLen: Expected=0x80000004L(0x80000004L), got="+typeLen);
OFPort value = OFPort.read4Bytes(bb);
OFOxmInPortVer12 oxmInPortVer12 = new OFOxmInPortVer12(
value
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", oxmInPortVer12);
return oxmInPortVer12;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFOxmInPortVer12Funnel FUNNEL = new OFOxmInPortVer12Funnel();
static class OFOxmInPortVer12Funnel implements Funnel<OFOxmInPortVer12> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFOxmInPortVer12 message, PrimitiveSink sink) {
// fixed value property typeLen = 0x80000004L
sink.putInt((int) 0x80000004);
message.value.putTo(sink);
}
}
public void writeTo(ChannelBuffer bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
static class Writer implements OFMessageWriter<OFOxmInPortVer12> {
@Override
public void write(ChannelBuffer bb, OFOxmInPortVer12 message) {
// fixed value property typeLen = 0x80000004L
bb.writeInt((int) 0x80000004);
message.value.write4Bytes(bb);
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFOxmInPortVer12(");
b.append("value=").append(value);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFOxmInPortVer12 other = (OFOxmInPortVer12) obj;
if (value == null) {
if (other.value != null)
return false;
} else if (!value.equals(other.value))
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.fluo.recipes.core.types;
import java.nio.ByteBuffer;
import org.apache.fluo.api.client.Snapshot;
import org.apache.fluo.api.client.Transaction;
import org.apache.fluo.api.client.TransactionBase;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.Column;
/**
* A simple convenience layer for Fluo. This layer attempts to make the following common operations
* easier.
*
* <UL>
* <LI>Working with different types.
* <LI>Supplying default values
* <LI>Dealing with null return types.
* <LI>Working with row/column and column maps
* </UL>
*
* <p>
 * This layer was intentionally loosely coupled with the basic API. This allows other convenience
 * layers for Fluo to build directly on the basic API without having to consider the particulars of
 * this layer. It's also expected that integrations with other languages may only use the basic API.
* </p>
*
* <h3>Using</h3>
*
* <p>
 * A TypeLayer is created with a certain encoder that is used for converting from bytes to
 * primitives and vice versa. In order to ensure that all of your code uses the same encoder, it's
 * probably best to centralize the choice of an encoder within your project. There are many ways to
 * do this; below is an example of one way to centralize and use it.
* </p>
*
* <pre>
* <code>
*
* public class MyTypeLayer extends TypeLayer {
* public MyTypeLayer() {
* super(new MyEncoder());
* }
* }
*
* public class MyObserver extends TypedObserver {
* MyObserver(){
* super(new MyTypeLayer());
* }
*
* public abstract void process(TypedTransaction tx, Bytes row, Column col){
* //do something w/ typed transaction
* }
* }
*
* public class MyUtil {
* //A little util to print out some stuff
* public void printStuff(Snapshot snap, byte[] row){
 * TypedSnapshot tsnap = new MyTypeLayer().wrap(snap);
*
* System.out.println(tsnap.get().row(row).fam("b90000").qual(137).toString("NP"));
* }
* }
* </code>
* </pre>
*
* <h3>Working with different types</h3>
*
* <p>
* The following example code shows using the basic fluo API with different types.
* </p>
*
* <pre>
* <code>
*
* void process(Transaction tx, byte[] row, byte[] cf, int cq, long val){
* tx.set(Bytes.of(row), new Column(Bytes.of(cf), Bytes.of(Integer.toString(cq))),
 *       Bytes.of(Long.toString(val)));
* }
* </code>
* </pre>
*
* <p>
* Alternatively, the same thing can be written using a {@link TypedTransactionBase} in the
* following way. Because row(), fam(), qual(), and set() each take many different types, this
* enables many different permutations that would not be achievable with overloading.
* </p>
*
* <pre>
* <code>
*
* void process(TypedTransaction tx, byte[] r, byte[] cf, int cq, long v){
* tx.mutate().row(r).fam(cf).qual(cq).set(v);
* }
* </code>
* </pre>
*
* <h3>Default values</h3>
*
* <p>
* The following example code shows using the basic fluo API to read a value and default to zero if
* it does not exist.
* </p>
*
* <pre>
* <code>
*
* void add(Transaction tx, byte[] row, Column col, long amount){
*
* long balance = 0;
* Bytes bval = tx.get(Bytes.of(row), col);
* if(bval != null)
* balance = Long.parseLong(bval.toString());
*
* balance += amount;
*
 * tx.set(Bytes.of(row), col, Bytes.of(Long.toString(balance)));
*
* }
* </code>
* </pre>
*
* <p>
* Alternatively, the same thing can be written using a {@link TypedTransactionBase} in the
* following way. This code avoids the null check by supplying a default value of zero.
* </p>
*
* <pre>
* <code>
*
* void add(TypedTransaction tx, byte[] r, Column c, long amount){
* long balance = tx.get().row(r).col(c).toLong(0);
* balance += amount;
* tx.mutate().row(r).col(c).set(balance);
* }
* </code>
* </pre>
*
* <p>
* For this particular case, shorter code can be written by using the increment method.
* </p>
*
* <pre>
* <code>
*
* void add(TypedTransaction tx, byte[] r, Column c, long amount){
* tx.mutate().row(r).col(c).increment(amount);
* }
* </code>
* </pre>
*
* <h3>Null return types</h3>
*
* <p>
* When using the basic API, you must ensure the return type is not null before converting a string
* or long.
* </p>
*
* <pre>
* <code>
*
* void process(Transaction tx, byte[] row, Column col, long amount) {
* Bytes val = tx.get(Bytes.of(row), col);
* if(val == null)
* return;
* long balance = Long.parseLong(val.toString());
* }
* </code>
* </pre>
*
* <p>
* With {@link TypedTransactionBase} if no default value is supplied, then the null is passed
* through.
* </p>
*
* <pre>
* <code>
*
* void process(TypedTransaction tx, byte[] r, Column c, long amount){
* Long balance = tx.get().row(r).col(c).toLong();
* if(balance == null)
* return;
* }
* </code>
* </pre>
*
* <h3>Defaulted maps</h3>
*
* <p>
* The operations that return maps, return defaulted maps which make it easy to specify defaults and
* avoid null.
* </p>
*
* <pre>
* {@code
* // pretend this method has curly braces. javadoc has issues with less than.
*
* void process(TypedTransaction tx, byte[] r, Column c1, Column c2, Column c3, long amount)
*
* Map<Column, Value> columns = tx.get().row(r).columns(c1,c2,c3);
*
* // If c1 does not exist in map, a Value that wraps null will be returned.
* // When c1 does not exist val1 will be set to null and no NPE will be thrown.
* String val1 = columns.get(c1).toString();
*
* // If c2 does not exist in map, then val2 will be set to empty string.
* String val2 = columns.get(c2).toString("");
*
 * // If c3 does not exist in map, then val3 will be set to 9.
* Long val3 = columns.get(c3).toLong(9);
* }
* </pre>
*
* <p>
* This also applies to getting sets of rows.
* </p>
*
* <pre>
* {@code
* // pretend this method has curly braces. javadoc has issues with less than.
*
* void process(TypedTransaction tx, List<String> rows, Column c1, Column c2, Column c3,
* long amount)
*
* Map<String,Map<Column,Value>> rowCols =
* tx.get().rowsString(rows).columns(c1,c2,c3).toStringMap();
*
* // this will set val1 to null if row does not exist in map and/or column does not
* // exist in child map
* String val1 = rowCols.get("row1").get(c1).toString();
* }
* </pre>
*
* @since 1.0.0
*/
public class TypeLayer {
private Encoder encoder;
static class Data {
Bytes row;
Bytes family;
Bytes qual;
Bytes vis;
Column getCol() {
if (qual == null) {
return new Column(family);
} else if (vis == null) {
return new Column(family, qual);
} else {
return new Column(family, qual, vis);
}
}
}
/**
* @since 1.0.0
*/
public abstract class RowMethods<R> {
abstract R create(Data data);
public R row(String row) {
return row(encoder.encode(row));
}
public R row(int row) {
return row(encoder.encode(row));
}
public R row(long row) {
return row(encoder.encode(row));
}
public R row(byte[] row) {
return row(Bytes.of(row));
}
public R row(ByteBuffer row) {
return row(Bytes.of(row));
}
public R row(Bytes row) {
Data data = new Data();
data.row = row;
R result = create(data);
return result;
}
}
/**
* @since 1.0.0
*/
public abstract class SimpleFamilyMethods<R1> {
Data data;
SimpleFamilyMethods(Data data) {
this.data = data;
}
abstract R1 create1(Data data);
public R1 fam(String family) {
return fam(encoder.encode(family));
}
public R1 fam(int family) {
return fam(encoder.encode(family));
}
public R1 fam(long family) {
return fam(encoder.encode(family));
}
public R1 fam(byte[] family) {
return fam(Bytes.of(family));
}
public R1 fam(ByteBuffer family) {
return fam(Bytes.of(family));
}
public R1 fam(Bytes family) {
data.family = family;
return create1(data);
}
}
/**
* @since 1.0.0
*/
public abstract class FamilyMethods<R1, R2> extends SimpleFamilyMethods<R1> {
FamilyMethods(Data data) {
super(data);
}
abstract R2 create2(Data data);
public R2 col(Column col) {
data.family = col.getFamily();
data.qual = col.getQualifier();
data.vis = col.getVisibility();
return create2(data);
}
}
/**
* @since 1.0.0
*/
public abstract class QualifierMethods<R> {
private Data data;
QualifierMethods(Data data) {
this.data = data;
}
abstract R create(Data data);
public R qual(String qualifier) {
return qual(encoder.encode(qualifier));
}
public R qual(int qualifier) {
return qual(encoder.encode(qualifier));
}
public R qual(long qualifier) {
return qual(encoder.encode(qualifier));
}
public R qual(byte[] qualifier) {
return qual(Bytes.of(qualifier));
}
public R qual(ByteBuffer qualifier) {
return qual(Bytes.of(qualifier));
}
public R qual(Bytes qualifier) {
data.qual = qualifier;
return create(data);
}
}
/**
* @since 1.0.0
*/
public static class VisibilityMethods {
private Data data;
VisibilityMethods(Data data) {
this.data = data;
}
public Column vis() {
return new Column(data.family, data.qual);
}
public Column vis(String cv) {
return vis(Bytes.of(cv));
}
public Column vis(Bytes cv) {
return new Column(data.family, data.qual, cv);
}
public Column vis(ByteBuffer cv) {
return vis(Bytes.of(cv));
}
public Column vis(byte[] cv) {
return vis(Bytes.of(cv));
}
}
/**
* @since 1.0.0
*/
public class CQB extends QualifierMethods<VisibilityMethods> {
CQB(Data data) {
super(data);
}
@Override
VisibilityMethods create(Data data) {
return new VisibilityMethods(data);
}
}
/**
* @since 1.0.0
*/
public class CFB extends SimpleFamilyMethods<CQB> {
CFB() {
super(new Data());
}
@Override
CQB create1(Data data) {
return new CQB(data);
}
}
public TypeLayer(Encoder encoder) {
this.encoder = encoder;
}
/**
* Initiates the chain of calls needed to build a column.
*
* @return a column builder
*/
public CFB bc() {
return new CFB();
}
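  // Illustrative usage (added; names are hypothetical): Column col = typeLayer.bc().fam("f1").qual("q1").vis();
  // a visibility label can be attached with typeLayer.bc().fam("f1").qual("q1").vis("A&B") instead.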
public TypedSnapshot wrap(Snapshot snap) {
return new TypedSnapshot(snap, encoder, this);
}
public TypedTransactionBase wrap(TransactionBase tx) {
return new TypedTransactionBase(tx, encoder, this);
}
public TypedTransaction wrap(Transaction tx) {
return new TypedTransaction(tx, encoder, this);
}
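  // Usage sketch (added; mirrors the examples in the class javadoc): given a TypeLayer tl,
  // tl.wrap(tx).mutate().row("r1").fam("f1").qual("q1").set(5) writes a typed value, and
  // tl.wrap(snap).get().row("r1").fam("f1").qual("q1").toString("") reads it back, defaulting to "".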
}
|
|
package org.teachingkidsprogramming.section03ifs;
import org.junit.Assert;
import org.junit.Test;
import org.teachingextensions.logo.utils.EventUtils.MessageBox;
@SuppressWarnings("unused")
public class DeepDive03Ifs
{
// Step 1: SELECT the method name (doesABear on line 19), then click the Run Button
// Keyboard shortcut to run -> PC: Ctrl+F11 or Mac: Command+fn+F11
// Step 2: READ the name of the method that failed
// Step 3: FILL IN the blank (___) to make that method pass
// Step 4: SAY at least one thing you just learned
// Step 5: GO to the next method
// IMPORTANT - Do NOT change anything except the blank (___)//
//
@Test
public void doesABear() throws Exception
{
String bearPoopPlace = "";
if (true)
{
bearPoopPlace = "woods";
}
Assert.assertEquals("woods", bearPoopPlace);
}
@Test
public void neverEverEver() throws Exception
{
String dessert = "chocolate";
if (false)
{
dessert = "ketchup";
}
Assert.assertEquals("chocolate", dessert);
}
@Test
public void notEverEverEver() throws Exception
{
String dessert = "chocolate";
if (!true)
{
dessert = "ketchup";
}
Assert.assertEquals("chocolate", dessert);
}
@Test
public void isThePopeCatholic() throws Exception
{
String pope = "";
if (true)
{
pope = "Catholic";
}
Assert.assertEquals("Catholic", pope);
}
@Test
public void trueOrFalse() throws Exception
{
String animal = "cat";
boolean elephant = true;
if (elephant)
{
animal = "flat " + animal;
}
Assert.assertEquals("flat cat", animal);
}
@Test
public void letSleepingBabiesLie() throws Exception
{
String babySounds = "";
boolean sleeping = false;
if (sleeping)
{
babySounds = "zzzzzzzzzzzz";
}
else
{
babySounds = "waaaaaahhh!";
}
Assert.assertEquals("waaaaaahhh!", babySounds);
}
@Test
public void howCoachThinks() throws Exception
{
String coachSays = "try harder";
int percentEffort = 110;
if (percentEffort == 110)
{
coachSays = "good job";
}
Assert.assertEquals("good job", coachSays);
}
@Test
public void lessThan() throws Exception
{
String modeOfTransportation = "";
int age = 1;
if (age < 16)
{
modeOfTransportation = "keep walking";
}
else
{
modeOfTransportation = "drive away";
}
Assert.assertEquals("keep walking", modeOfTransportation);
}
@Test
public void greaterThan() throws Exception
{
String kidSays = "";
int numberOfIceCreams = 7;
if (numberOfIceCreams > 4)
{
kidSays = "I think I'm gonna barf";
}
else
{
kidSays = "More ice cream!";
}
Assert.assertEquals("I think I'm gonna barf", kidSays);
}
@Test
public void notEqual() throws Exception
{
String playerSays = "";
int cards = 52;
if (cards != 52)
{
playerSays = "Not playing with a full deck!";
}
else
{
playerSays = "Game on!";
}
Assert.assertEquals("Game on!", playerSays);
}
@Test
public void equalsForStrings() throws Exception
{
String knockKnock = "";
String whosThere = "bananas";
if (whosThere.equals("bananas"))
{
knockKnock = "Who's there?";
}
else if (whosThere.equals("orange"))
{
knockKnock = "Orange you glad I didn't say bananas?";
}
Assert.assertEquals("Who's there?", knockKnock);
}
@Test
public void thisAndThat() throws Exception
{
String time = "";
int score = 4;
int years = 7;
if (score == 4 && years == 7)
{
time = "Presidential";
}
Assert.assertEquals("Presidential", time);
}
@Test
public void theBeginningOrEnd() throws Exception
{
String shoppingList = "";
int age = 90;
if (age <= 2 || 90 <= age)
{
shoppingList = "diapers";
}
Assert.assertEquals("diapers", shoppingList);
}
@Test
public void ifInHighSchool() throws Exception
{
String status = "";
int age = 16;
if (age <= 15)
{
status = "smarty";
}
else if (age > 19)
{
status = "dummy";
}
else
{
status = "normal";
}
Assert.assertEquals("normal", status);
}
@Test
public void nestedIfOrPigsInABlanket() throws Exception
{
String status = "";
String animal = "PIG";
boolean isWinningKarate = false;
if (animal.equalsIgnoreCase("pig"))
{
if (isWinningKarate)
{
status = "pork chop";
}
else
{
status = "hambulance";
}
}
Assert.assertEquals("hambulance", status);
}
@Test
public void semicolonsMessUpIfStatements() throws Exception
{
String dessert = "chocolate";
if (false)
{
dessert = "ketchup";
}
Assert.assertEquals("chocolate", dessert);
}
@Test
public void cyoaInputTest() throws Exception
{
String result = MessageBox.askForTextInput("Do you want to 'wake up' or 'explore' the dream?");
    //test that the user entered "wake up"
    Assert.assertEquals("wake up", result);
}
@Test
public void cyoaIncorrectInputTest() throws Exception
{
String result = MessageBox.askForTextInput("Do you want to 'wake up' or 'explore' the dream?");
Assert.assertEquals("", result);
}
@Test
public void ifStatements() throws Exception
{
String teacherSays = "may";
if (false)
{
teacherSays = "can";
}
Assert.assertEquals("may", teacherSays);
}
@Test
public void TwoPlusTwo() throws Exception
{
String TwoPlusTwo = "5";
if (true)
{
TwoPlusTwo = "4";
}
Assert.assertEquals("4", TwoPlusTwo);
}
/**
* Ignore the following, It's needed to run the deep dive
*
*
*
*
*
*
*
*
*
*
*/
public boolean _____ = false;
//public boolean _____ = false;
public boolean ______ = true;
public String ___ = "You need to fill in the blank ___";
public Integer ____ = null;
public String ___()
{
return ___;
}
}
|
|
/**
* Copyright 2015 StreamSets Inc.
*
* Licensed under the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.lib.io;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.UUID;
public class TestLiveDirectoryScanner {
private File testDir;
@Before
public void setUp() throws IOException {
testDir = new File("target", UUID.randomUUID().toString());
Assert.assertTrue(testDir.mkdirs());
}
@Test
public void testNoFilesInSpoolDir() throws Exception {
LiveDirectoryScanner spooler = new LiveDirectoryScanner(testDir.getAbsolutePath(), null,
LogRollModeFactory.REVERSE_COUNTER.get("my.log", ""));
Assert.assertNull(spooler.scan(null));
}
@Test
public void testLiveFileOnlyInSpoolDir() throws Exception {
Path file = new File(testDir, "my.log").toPath();
Files.createFile(file);
LiveDirectoryScanner spooler = new LiveDirectoryScanner(testDir.getAbsolutePath(), null,
LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""));
LiveFile lf = spooler.scan(null);
Assert.assertNotNull(lf);
Assert.assertEquals(new LiveFile(file), lf);
}
@Test
public void testDirectoryMatchingName() throws Exception {
Path file = new File(testDir, "my.log").toPath();
Files.createDirectories(file);
LiveDirectoryScanner spooler = new LiveDirectoryScanner(testDir.getAbsolutePath(), null,
LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""));
Assert.assertNull(spooler.scan(null));
}
@Test(expected = IllegalArgumentException.class)
public void testGetUsingLiveFile() throws Exception {
Path file = new File(testDir, "my.log").toPath();
Files.createFile(file);
LiveDirectoryScanner spooler = new LiveDirectoryScanner(testDir.getAbsolutePath(), null,
LogRollModeFactory.REVERSE_COUNTER.get(file.getFileName().toString(), ""));
spooler.scan(new LiveFile(file));
}
@Test
public void testWithRolledFileAndNoLiveFileInSpoolDir() throws Exception {
Path rolledFile = new File(testDir, "my.log.1").toPath();
Files.createFile(rolledFile);
Path liveFile = new File(testDir, "my.log").toPath();
LiveDirectoryScanner spooler = new LiveDirectoryScanner(testDir.getAbsolutePath(), null,
LogRollModeFactory.REVERSE_COUNTER.get(liveFile.getFileName().toString(), ""));
LiveFile lf = spooler.scan(null);
Assert.assertNotNull(lf);
Assert.assertEquals(new LiveFile(rolledFile), lf);
lf = spooler.scan(lf);
Assert.assertNull(lf);
}
@Test
public void testWithRolledFileAndLiveFileInSpoolDir() throws Exception {
Path rolledFile = new File(testDir, "my.log.1").toPath();
Files.createFile(rolledFile);
Path liveFile = new File(testDir, "my.log").toPath();
Files.createFile(liveFile);
LiveDirectoryScanner spooler = new LiveDirectoryScanner(testDir.getAbsolutePath(), null,
LogRollModeFactory.REVERSE_COUNTER.get(liveFile.getFileName().toString(), ""));
LiveFile lf = spooler.scan(null);
Assert.assertNotNull(lf);
Assert.assertEquals(new LiveFile(rolledFile), lf);
lf = spooler.scan(lf);
Assert.assertNotNull(lf);
Assert.assertEquals(new LiveFile(liveFile), lf);
}
@Test
public void testRolledFilesOrderReverseNumberInSpoolDir() throws Exception {
Path rolledFile1 = new File(testDir, "my.log.12").toPath();
Path rolledFile2 = new File(testDir, "my.log.2").toPath();
Files.createFile(rolledFile1);
Files.createFile(rolledFile2);
Path liveFile = new File(testDir, "my.log").toPath();
Files.createFile(liveFile);
LiveDirectoryScanner spooler = new LiveDirectoryScanner(testDir.getAbsolutePath(), null,
LogRollModeFactory.REVERSE_COUNTER.get(liveFile.getFileName().toString(), ""));
LiveFile lf = spooler.scan(null);
Assert.assertNotNull(lf);
Assert.assertEquals(new LiveFile(rolledFile1), lf);
lf = spooler.scan(lf);
Assert.assertNotNull(lf);
Assert.assertEquals(new LiveFile(rolledFile2), lf);
lf = spooler.scan(lf);
Assert.assertNotNull(lf);
Assert.assertEquals(new LiveFile(liveFile), lf);
}
@Test
public void testRolledFilesOrderAlphabeticalInSpoolDir() throws Exception {
Path rolledFile1 = new File(testDir, "my.log.13").toPath();
Path rolledFile2 = new File(testDir, "my.log.2").toPath();
Files.createFile(rolledFile1);
Files.createFile(rolledFile2);
Path liveFile = new File(testDir, "my.log").toPath();
Files.createFile(liveFile);
LiveDirectoryScanner spooler = new LiveDirectoryScanner(testDir.getAbsolutePath(), null,
LogRollModeFactory.ALPHABETICAL.get(liveFile.getFileName().toString(), ""));
LiveFile lf = spooler.scan(null);
Assert.assertNotNull(lf);
Assert.assertEquals(new LiveFile(rolledFile1), lf);
lf = spooler.scan(lf);
Assert.assertNotNull(lf);
Assert.assertEquals(new LiveFile(rolledFile2), lf);
lf = spooler.scan(lf);
Assert.assertNotNull(lf);
Assert.assertEquals(new LiveFile(liveFile), lf);
}
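  // Note (added, based on the two ordering tests above): REVERSE_COUNTER compares the numeric
  // counter suffix in reverse, so my.log.12 is scanned before my.log.2, while ALPHABETICAL sorts
  // file names lexicographically, so my.log.13 still precedes my.log.2.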
@Test
public void testRefreshedRolledFiles() throws Exception {
Path rolledFile1 = new File(testDir, "my.log.2").toPath();
Path rolledFile2 = new File(testDir, "my.log.1").toPath();
Files.createFile(rolledFile1);
Files.createFile(rolledFile2);
Path liveFile = new File(testDir, "my.log").toPath();
Files.createFile(liveFile);
LiveDirectoryScanner spooler = new LiveDirectoryScanner(testDir.getAbsolutePath(), null,
LogRollModeFactory.REVERSE_COUNTER.get(liveFile.getFileName().toString(), ""));
LiveFile lf = spooler.scan(null);
Assert.assertNotNull(lf);
//got my.log.2
Assert.assertEquals(new LiveFile(rolledFile1), lf);
    //shifting files: 2 -> 3, 1 -> 2
Path rolledFile0 = new File(testDir, "my.log.3").toPath();
Files.move(rolledFile1, rolledFile0);
Files.move(rolledFile2, rolledFile1);
// a refresh should get us to my.log.3
lf = lf.refresh();
Assert.assertEquals(rolledFile0.toAbsolutePath(), lf.getPath());
// getting the file should get us the new 2
lf = spooler.scan(lf);
Assert.assertNotNull(lf);
Assert.assertEquals(new LiveFile(rolledFile1), lf);
}
@Test
public void testUsingFirstFile() throws Exception {
Path rolledFile1 = new File(testDir, "my.log.13").toPath();
Path rolledFile2 = new File(testDir, "my.log.2").toPath();
Files.createFile(rolledFile1);
Files.createFile(rolledFile2);
Path liveFile = new File(testDir, "my.log").toPath();
Files.createFile(liveFile);
LiveDirectoryScanner spooler = new LiveDirectoryScanner(testDir.getAbsolutePath(),
rolledFile2.getFileName().toString(),
LogRollModeFactory.ALPHABETICAL.get(liveFile.getFileName().toString(), ""));
LiveFile lf = spooler.scan(null);
Assert.assertEquals(new LiveFile(rolledFile2), lf);
lf = spooler.scan(lf);
Assert.assertNotNull(lf);
Assert.assertEquals(new LiveFile(liveFile), lf);
}
@Test
public void testPendingFilesWithReverseCounter() throws Exception {
testPendingFiles("2", "1", LogRollModeFactory.REVERSE_COUNTER);
}
@Test
public void testPendingFilesWithAlphabetical() throws Exception {
testPendingFiles("a", "b", LogRollModeFactory.ALPHABETICAL);
}
@Test
public void testPendingFilesWithDATE_YYYY_MM() throws Exception {
testPendingFiles("2017-05", "2017-06", LogRollModeFactory.DATE_YYYY_MM);
}
@Test
public void testPendingFilesWithDATE_YYYY_MM_DD() throws Exception {
testPendingFiles("2017-05-06", "2017-05-07", LogRollModeFactory.DATE_YYYY_MM_DD);
}
@Test
public void testPendingFilesWithDATE_YYYY_MM_DD_HH() throws Exception {
testPendingFiles("2017-05-06-01", "2017-05-06-02", LogRollModeFactory.DATE_YYYY_MM_DD_HH);
}
@Test
public void testPendingFilesWithDATE_YYYY_MM_DD_HH_MM() throws Exception {
testPendingFiles("2017-05-06-01-01", "2017-05-06-01-02", LogRollModeFactory.DATE_YYYY_MM_DD_HH_MM);
}
private void testPendingFiles(
String rolledFileNameSuffix1,
String rolledFileNameSuffix2,
LogRollModeFactory factory
) throws Exception {
Path rolledFile1 = new File(testDir, "my.log." + rolledFileNameSuffix1).toPath();
Path rolledFile2 = new File(testDir, "my.log." + rolledFileNameSuffix2).toPath();
Files.createFile(rolledFile1);
Files.createFile(rolledFile2);
Path liveFile = new File(testDir, "my.log").toPath();
Files.createFile(liveFile);
LiveDirectoryScanner spooler = new LiveDirectoryScanner(
testDir.getAbsolutePath(),
null,
factory.get(liveFile.getFileName().toString(), "")
);
LiveFile lf = spooler.scan(null);
Assert.assertNotNull(lf);
    //got the first rolled file; exactly one rolled file is still pending
Assert.assertEquals(new LiveFile(rolledFile1), lf);
Assert.assertEquals(1, spooler.getPendingFiles(lf));
lf = spooler.scan(lf);
    //got the second rolled file; no rolled files remain pending
Assert.assertEquals(new LiveFile(rolledFile2), lf);
Assert.assertEquals(0, spooler.getPendingFiles(lf));
lf = spooler.scan(lf);
Assert.assertEquals(new LiveFile(liveFile), lf);
Assert.assertEquals(0, spooler.getPendingFiles(lf));
}
}
|
|
package org.jcodec.codecs.vpx;
import java.nio.ByteBuffer;
/**
* This class is part of JCodec ( www.jcodec.org ) This software is distributed
* under FreeBSD License
*
* @author The JCodec project
*
*/
public class VPXBooleanDecoder {
int bit_count; /* # of bits shifted out of value, at most 7 */
ByteBuffer input;
int offset; /* pointer to next compressed data byte */
int range; /* always identical to encoder's range */
int value; /* contains at least 24 significant bits */
long callCounter=0;
@SuppressWarnings("unused")
private String debugName;
public VPXBooleanDecoder(ByteBuffer input, int offset) {
this.input = input;
this.offset = offset;
initBoolDecoder();
}
void initBoolDecoder() {
value = 0; /* value = first 16 input bits */
// data.position(offset);
value = (input.get() & 0xFF) << 8; // readUnsignedByte() << 8;
// value = (data[offset]) << 8;
offset++;
range = 255; /* initial range is full */
bit_count = 0; /* have not yet shifted out any bits */
}
public int decodeBit() {
return decodeBool(128);
}
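    // Descriptive note (added): decodeBool splits the current range in proportion to the given
    // probability: split = 1 + (((range - 1) * probability) >> 8). The value register carries 8
    // extra low-order bits, so it is compared against split << 8. Below that threshold the bit is 0
    // and the range shrinks to split; otherwise the bit is 1 and both value and range are reduced
    // by the split. The range is then renormalized by shifting left until it is at least 128 again,
    // pulling in a fresh input byte once the buffered bits are exhausted (bit_count <= 0).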
public int decodeBool(int probability) {
int bit = 0;
int range = this.range;
int value = this.value;
int split = 1 + (((range - 1) * probability) >> 8);
int bigsplit = (split << 8);
this.callCounter++;
// System.out.println();
// System.out.println("this.range: " + this.range + " binary: " + Integer.toBinaryString(this.range));
// System.out.println("split: " + split + " binary: " + Integer.toBinaryString(split));
// System.out.println("SPLIT: " + bigsplit + " binary: " + Integer.toBinaryString(bigsplit));
// System.out.println("value: " + value + " binary: " + Integer.toBinaryString(value));
range = split;
if (value >= bigsplit) {
range = this.range - range;
value = value - bigsplit;
bit = 1;
}
int count = this.bit_count;
int shift = leadingZeroCountInByte((byte)range);
range <<= shift;
value <<= shift;
count -= shift;
if (count <= 0) {
value |= (input.get() & 0xFF) << (-count);
// System.out.println("read value: " + value + " binary: " + Integer.toBinaryString(value));
offset++;
count += 8;
}
this.bit_count = count;
this.value = value;
this.range = range;
return bit;
}
/*
     * Convenience function that reads a "literal", that is, a sizeInBits-wide unsigned value whose bits come high- to low-order, with each bit coded at probability 128/256 (i.e., 1/2).
*/
public int decodeInt(int sizeInBits) {
int v = 0;
while (sizeInBits-- > 0)
v = (v << 1) | decodeBool(128);
return v;
}
/* root: "0", "1" subtrees */
/* "00" = 0th value, "01" = 1st value */
/* "10" = 2nd value, "11" = 3rd value */
/**
*
* General formula in VP8 trees.
* <ul>
* <li> if tree element is a positive number it is treated as index of the child elements <pre>tree[i] > 0</pre>
* <ul>
* <li> left child is assumed to have index <pre>i</pre> and value <pre>tree[i]</pre> </li>
* <li> right child is assumed to have index <pre>i+1</pre> and value <pre>tree[i+1]</pre></li>
* </ul>
* </li>
* <li> a negative tree value means a leaf node was reached and it's negated value should be returned <pre>-tree[i]</pre></li>
* </ul>
*
* Here's a real example of a tree coded according to this formula in VP8 spec.
* <pre>
* const tree_index mb_segment_tree [2 * (4-1)] =
* // +-------+---+
* // | | |
* { 2, 4, -0, -1, -2, -3 };
* // | | |
* // +-----------+---+
* </pre>
*
* If presented in hierarchical form this tree would look like:
* <pre>
* +---------------+
* | root |
* | / \ |
* | 2 4 |
* | / \ / \ |
* | -0 -1 -2 -3 |
* +---------------+
     * </pre>
*
* On the other hand probabilities are coded only for non-leaf nodes.
* Thus tree array has twice as many nodes as probabilities array
* Consider (3>>1) == 1 == (2>>1), and (0>>1) == 0 == (1>>1)
* Thus single probability element refers to single parent element in tree.
* if (using that probability) a '0' is coded, algorithm goes to the left
* branch, correspondingly if '1' is coded, algorithm goes to
* the right branch (see tree structure above).
*
* The process is repeated until a negative tree element is found.
*
*/
public int readTree(int tree[],int probability[]) {
int i = 0;
/*
* 1. pick corresponding probability probability[i >> 1]
* 2. pick left or right branch from coded info decodeBool(probability)
* 3. tree[i+decodedBool] get corresponding (left of right) value
* 4. repeat until tree[i+decodedBool] is positive
*/
while ((i = tree[i + decodeBool(probability[i >> 1])]) > 0) {
}
return -i; /* negate the return value */
}
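    // Worked example (added note, using the mb_segment_tree from the javadoc above):
    // tree = { 2, 4, -0, -1, -2, -3 }. Start at i = 0 and read a bool with probability[0 >> 1].
    // Reading a 1 gives i = tree[0 + 1] = 4; the next read uses probability[4 >> 1] = probability[2].
    // Reading a 0 then gives i = tree[4 + 0] = -2, which is not positive, so readTree returns 2.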
public int readTreeSkip(int t[], /* tree specification */
int p[], /* corresponding interior node probabilities */
int skip_branches) {
int i = skip_branches * 2; /* begin at root */
/* Descend tree until leaf is reached */
while ((i = t[i + decodeBool(p[i >> 1])]) > 0) {
}
return -i; /* return value is negation of nonpositive index */
}
public void seek() {
input.position(offset);
}
public String toString() {
return "bc: " + value;
}
public static int getBitInBytes(byte[] bs, int i) {
int byteIndex = i >> 3;
int bitIndex = i & 0x07;
return (bs[byteIndex] >> (0x07 - bitIndex)) & 0x01;
}
public static int getBitsInBytes(byte[] bytes, int idx, int len){
int val = 0;
for(int i=0;i<len;i++){
val = (val << 1) | getBitInBytes(bytes, idx+i);
}
return val;
}
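    // Example (added note): for bytes = { (byte) 0xB0 } (binary 1011 0000),
    // getBitsInBytes(bytes, 0, 4) consumes the four most significant bits MSB-first and
    // returns 0b1011 = 11.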
public static int leadingZeroCountInByte(byte b) {
int i = b&0xFF;
if (i>=128 || i == 0)
return 0;
return Integer.numberOfLeadingZeros(b)-24;
/*
* if-less alternative:
* http://aggregate.ee.engr.uky.edu/MAGIC/#Leading Zero Count
*/
}
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.facebook.presto.hive.util.HadoopApiStats;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ColumnType;
import com.facebook.presto.spi.ConnectorMetadata;
import com.facebook.presto.spi.ConnectorRecordSetProvider;
import com.facebook.presto.spi.ConnectorRecordSinkProvider;
import com.facebook.presto.spi.ConnectorSplitManager;
import com.facebook.presto.spi.ConnectorTableMetadata;
import com.facebook.presto.spi.Domain;
import com.facebook.presto.spi.OutputTableHandle;
import com.facebook.presto.spi.Partition;
import com.facebook.presto.spi.PartitionResult;
import com.facebook.presto.spi.RecordCursor;
import com.facebook.presto.spi.RecordSet;
import com.facebook.presto.spi.RecordSink;
import com.facebook.presto.spi.SchemaNotFoundException;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.SchemaTablePrefix;
import com.facebook.presto.spi.Split;
import com.facebook.presto.spi.SplitSource;
import com.facebook.presto.spi.TableHandle;
import com.facebook.presto.spi.TableNotFoundException;
import com.facebook.presto.spi.TupleDomain;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.net.HostAndPort;
import io.airlift.log.Logger;
import io.airlift.units.Duration;
import org.joda.time.DateTime;
import org.testng.annotations.Test;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import static com.facebook.presto.hive.HiveBucketing.HiveBucket;
import static com.facebook.presto.hive.HiveUtil.partitionIdGetter;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.Maps.uniqueIndex;
import static com.google.common.util.concurrent.MoreExecutors.sameThreadExecutor;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.testing.Assertions.assertInstanceOf;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
@Test(groups = "hive")
public abstract class AbstractTestHiveClient
{
protected static final String INVALID_DATABASE = "totally_invalid_database";
protected static final String INVALID_COLUMN = "totally_invalid_column_name";
protected static final byte[] EMPTY_STRING = new byte[0];
protected String database;
protected SchemaTableName table;
protected SchemaTableName tableUnpartitioned;
protected SchemaTableName tableOffline;
protected SchemaTableName tableOfflinePartition;
protected SchemaTableName view;
protected SchemaTableName invalidTable;
protected SchemaTableName tableBucketedStringInt;
protected SchemaTableName tableBucketedBigintBoolean;
protected SchemaTableName tableBucketedDoubleFloat;
protected SchemaTableName temporaryCreateTable;
protected String tableOwner;
protected TableHandle invalidTableHandle;
protected ColumnHandle dsColumn;
protected ColumnHandle fileFormatColumn;
protected ColumnHandle dummyColumn;
protected ColumnHandle intColumn;
protected ColumnHandle invalidColumnHandle;
protected Set<Partition> partitions;
protected Set<Partition> unpartitionedPartitions;
protected Partition invalidPartition;
protected CachingHiveMetastore metastoreClient;
protected ConnectorMetadata metadata;
protected ConnectorSplitManager splitManager;
protected ConnectorRecordSetProvider recordSetProvider;
protected ConnectorRecordSinkProvider recordSinkProvider;
protected void setupHive(String connectorId, String databaseName)
{
database = databaseName;
table = new SchemaTableName(database, "presto_test");
tableUnpartitioned = new SchemaTableName(database, "presto_test_unpartitioned");
tableOffline = new SchemaTableName(database, "presto_test_offline");
tableOfflinePartition = new SchemaTableName(database, "presto_test_offline_partition");
view = new SchemaTableName(database, "presto_test_view");
invalidTable = new SchemaTableName(database, "totally_invalid_table_name");
tableBucketedStringInt = new SchemaTableName(database, "presto_test_bucketed_by_string_int");
tableBucketedBigintBoolean = new SchemaTableName(database, "presto_test_bucketed_by_bigint_boolean");
tableBucketedDoubleFloat = new SchemaTableName(database, "presto_test_bucketed_by_double_float");
String random = UUID.randomUUID().toString().toLowerCase().replace("-", "");
temporaryCreateTable = new SchemaTableName(database, "tmp_presto_test_create_" + random);
tableOwner = "presto_test";
invalidTableHandle = new HiveTableHandle("hive", database, "totally_invalid_table_name");
dsColumn = new HiveColumnHandle(connectorId, "ds", 0, HiveType.STRING, -1, true);
fileFormatColumn = new HiveColumnHandle(connectorId, "file_format", 1, HiveType.STRING, -1, true);
dummyColumn = new HiveColumnHandle(connectorId, "dummy", 2, HiveType.INT, -1, true);
intColumn = new HiveColumnHandle(connectorId, "t_int", 0, HiveType.INT, -1, true);
invalidColumnHandle = new HiveColumnHandle(connectorId, INVALID_COLUMN, 0, HiveType.STRING, 0, false);
partitions = ImmutableSet.<Partition>of(
new HivePartition(table,
"ds=2012-12-29/file_format=rcfile-text/dummy=0",
ImmutableMap.<ColumnHandle, Comparable<?>>of(dsColumn, "2012-12-29", fileFormatColumn, "rcfile-text", dummyColumn, 0L),
Optional.<HiveBucket>absent()),
new HivePartition(table,
"ds=2012-12-29/file_format=rcfile-binary/dummy=2",
ImmutableMap.<ColumnHandle, Comparable<?>>of(dsColumn, "2012-12-29", fileFormatColumn, "rcfile-binary", dummyColumn, 2L),
Optional.<HiveBucket>absent()),
new HivePartition(table,
"ds=2012-12-29/file_format=sequencefile/dummy=4",
ImmutableMap.<ColumnHandle, Comparable<?>>of(dsColumn, "2012-12-29", fileFormatColumn, "sequencefile", dummyColumn, 4L),
Optional.<HiveBucket>absent()),
new HivePartition(table,
"ds=2012-12-29/file_format=textfile/dummy=6",
ImmutableMap.<ColumnHandle, Comparable<?>>of(dsColumn, "2012-12-29", fileFormatColumn, "textfile", dummyColumn, 6L),
Optional.<HiveBucket>absent()));
unpartitionedPartitions = ImmutableSet.<Partition>of(new HivePartition(tableUnpartitioned));
invalidPartition = new HivePartition(invalidTable, "unknown", ImmutableMap.<ColumnHandle, Comparable<?>>of(), Optional.<HiveBucket>absent());
}
protected void setup(String host, int port, String databaseName)
{
setup(host, port, databaseName, "hive-test", 100, 50);
}
protected void setup(String host, int port, String databaseName, String connectorName, int maxOutstandingSplits, int maxThreads)
{
setupHive(connectorName, databaseName);
HiveClientConfig hiveClientConfig = new HiveClientConfig();
String proxy = System.getProperty("hive.metastore.thrift.client.socks-proxy");
if (proxy != null) {
hiveClientConfig.setMetastoreSocksProxy(HostAndPort.fromString(proxy));
}
HiveCluster hiveCluster = new TestingHiveCluster(hiveClientConfig, host, port);
ExecutorService executor = newCachedThreadPool(daemonThreadsNamed("hive-%s"));
metastoreClient = new CachingHiveMetastore(hiveCluster, executor, Duration.valueOf("1m"), Duration.valueOf("15s"));
HiveConnectorId hiveConnectorId = new HiveConnectorId(connectorName);
HiveClient client = new HiveClient(
hiveConnectorId,
metastoreClient,
new HadoopApiStats(),
new HdfsEnvironment(new HdfsConfiguration(hiveClientConfig, hiveConnectorId)),
sameThreadExecutor(),
hiveClientConfig.getMaxSplitSize(),
maxOutstandingSplits,
maxThreads,
hiveClientConfig.getMinPartitionBatchSize(),
hiveClientConfig.getMaxPartitionBatchSize());
metadata = client;
splitManager = client;
recordSetProvider = client;
recordSinkProvider = client;
}
@Test
public void testGetDatabaseNames()
throws Exception
{
List<String> databases = metadata.listSchemaNames();
assertTrue(databases.contains(database));
}
@Test
public void testGetTableNames()
throws Exception
{
List<SchemaTableName> tables = metadata.listTables(database);
assertTrue(tables.contains(table));
}
// disabled until metadata manager is updated to handle invalid catalogs and schemas
@Test(enabled = false, expectedExceptions = SchemaNotFoundException.class)
public void testGetTableNamesException()
throws Exception
{
metadata.listTables(INVALID_DATABASE);
}
@Test
public void testListUnknownSchema()
{
assertNull(metadata.getTableHandle(new SchemaTableName("totally_invalid_database_name", "dual")));
assertEquals(metadata.listTables("totally_invalid_database_name"), ImmutableList.of());
assertEquals(metadata.listTableColumns(new SchemaTablePrefix("totally_invalid_database_name", "dual")), ImmutableMap.of());
}
@Test
public void testGetPartitions()
throws Exception
{
TableHandle tableHandle = getTableHandle(table);
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.all());
assertExpectedPartitions(partitionResult.getPartitions());
}
@Test
public void testGetPartitionsWithBindings()
throws Exception
{
TableHandle tableHandle = getTableHandle(table);
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.withColumnDomains(ImmutableMap.<ColumnHandle, Domain>of(intColumn, Domain.singleValue(5L))));
assertExpectedPartitions(partitionResult.getPartitions());
}
@Test(expectedExceptions = TableNotFoundException.class)
public void testGetPartitionsException()
throws Exception
{
splitManager.getPartitions(invalidTableHandle, TupleDomain.all());
}
@Test
public void testGetPartitionNames()
throws Exception
{
TableHandle tableHandle = getTableHandle(table);
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.all());
assertExpectedPartitions(partitionResult.getPartitions());
}
protected void assertExpectedPartitions(List<Partition> actualPartitions)
{
Map<String, Partition> actualById = uniqueIndex(actualPartitions, partitionIdGetter());
for (Partition expected : partitions) {
assertInstanceOf(expected, HivePartition.class);
HivePartition expectedPartition = (HivePartition) expected;
Partition actual = actualById.get(expectedPartition.getPartitionId());
assertEquals(actual, expected);
assertInstanceOf(actual, HivePartition.class);
HivePartition actualPartition = (HivePartition) actual;
assertNotNull(actualPartition, "partition " + expectedPartition.getPartitionId());
assertEquals(actualPartition.getPartitionId(), expectedPartition.getPartitionId());
assertEquals(actualPartition.getKeys(), expectedPartition.getKeys());
assertEquals(actualPartition.getTableName(), expectedPartition.getTableName());
assertEquals(actualPartition.getBucket(), expectedPartition.getBucket());
assertEquals(actualPartition.getTupleDomain(), expectedPartition.getTupleDomain());
}
}
@Test
public void testGetPartitionNamesUnpartitioned()
throws Exception
{
TableHandle tableHandle = getTableHandle(tableUnpartitioned);
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.all());
assertEquals(partitionResult.getPartitions().size(), 1);
assertEquals(partitionResult.getPartitions(), unpartitionedPartitions);
}
@Test(expectedExceptions = TableNotFoundException.class)
public void testGetPartitionNamesException()
throws Exception
{
splitManager.getPartitions(invalidTableHandle, TupleDomain.all());
}
@SuppressWarnings({"ValueOfIncrementOrDecrementUsed", "UnusedAssignment"})
@Test
public void testGetTableSchema()
throws Exception
{
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(getTableHandle(table));
Map<String, ColumnMetadata> map = uniqueIndex(tableMetadata.getColumns(), columnNameGetter());
int i = 0;
assertPrimitiveField(map, i++, "t_string", ColumnType.STRING, false);
assertPrimitiveField(map, i++, "t_tinyint", ColumnType.LONG, false);
assertPrimitiveField(map, i++, "t_smallint", ColumnType.LONG, false);
assertPrimitiveField(map, i++, "t_int", ColumnType.LONG, false);
assertPrimitiveField(map, i++, "t_bigint", ColumnType.LONG, false);
assertPrimitiveField(map, i++, "t_float", ColumnType.DOUBLE, false);
assertPrimitiveField(map, i++, "t_double", ColumnType.DOUBLE, false);
assertPrimitiveField(map, i++, "t_map", ColumnType.STRING, false); // Currently mapped as a string
assertPrimitiveField(map, i++, "t_boolean", ColumnType.BOOLEAN, false);
assertPrimitiveField(map, i++, "t_timestamp", ColumnType.LONG, false);
assertPrimitiveField(map, i++, "t_binary", ColumnType.STRING, false);
assertPrimitiveField(map, i++, "t_array_string", ColumnType.STRING, false); // Currently mapped as a string
assertPrimitiveField(map, i++, "t_complex", ColumnType.STRING, false); // Currently mapped as a string
assertPrimitiveField(map, i++, "ds", ColumnType.STRING, true);
assertPrimitiveField(map, i++, "file_format", ColumnType.STRING, true);
assertPrimitiveField(map, i++, "dummy", ColumnType.LONG, true);
}
@Test
public void testGetTableSchemaUnpartitioned()
throws Exception
{
TableHandle tableHandle = getTableHandle(tableUnpartitioned);
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);
Map<String, ColumnMetadata> map = uniqueIndex(tableMetadata.getColumns(), columnNameGetter());
assertPrimitiveField(map, 0, "t_string", ColumnType.STRING, false);
assertPrimitiveField(map, 1, "t_tinyint", ColumnType.LONG, false);
}
@Test
public void testGetTableSchemaOffline()
throws Exception
{
TableHandle tableHandle = getTableHandle(tableOffline);
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);
Map<String, ColumnMetadata> map = uniqueIndex(tableMetadata.getColumns(), columnNameGetter());
assertPrimitiveField(map, 0, "t_string", ColumnType.STRING, false);
}
@Test
public void testGetTableSchemaOfflinePartition()
throws Exception
{
TableHandle tableHandle = getTableHandle(tableOfflinePartition);
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);
Map<String, ColumnMetadata> map = uniqueIndex(tableMetadata.getColumns(), columnNameGetter());
assertPrimitiveField(map, 0, "t_string", ColumnType.STRING, false);
}
@Test
public void testGetTableSchemaException()
throws Exception
{
assertNull(metadata.getTableHandle(invalidTable));
}
@Test
public void testGetPartitionSplitsBatch()
throws Exception
{
TableHandle tableHandle = getTableHandle(table);
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.all());
SplitSource splitSource = splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions());
assertEquals(getSplitCount(splitSource), partitions.size());
}
@Test
public void testGetPartitionSplitsBatchUnpartitioned()
throws Exception
{
TableHandle tableHandle = getTableHandle(tableUnpartitioned);
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.all());
SplitSource splitSource = splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions());
assertEquals(getSplitCount(splitSource), 1);
}
@Test(expectedExceptions = TableNotFoundException.class)
public void testGetPartitionSplitsBatchInvalidTable()
throws Exception
{
splitManager.getPartitionSplits(invalidTableHandle, ImmutableList.of(invalidPartition));
}
@Test
public void testGetPartitionSplitsEmpty()
throws Exception
{
SplitSource splitSource = splitManager.getPartitionSplits(invalidTableHandle, ImmutableList.<Partition>of());
// fetch full list
getSplitCount(splitSource);
}
@Test
public void testGetPartitionTableOffline()
throws Exception
{
TableHandle tableHandle = getTableHandle(tableOffline);
try {
splitManager.getPartitions(tableHandle, TupleDomain.all());
fail("expected TableOfflineException");
}
catch (TableOfflineException e) {
assertEquals(e.getTableName(), tableOffline);
}
}
@Test
public void testGetPartitionSplitsTableOfflinePartition()
throws Exception
{
TableHandle tableHandle = getTableHandle(tableOfflinePartition);
assertNotNull(tableHandle);
ColumnHandle dsColumn = metadata.getColumnHandle(tableHandle, "ds");
assertNotNull(dsColumn);
TupleDomain tupleDomain = TupleDomain.withColumnDomains(ImmutableMap.<ColumnHandle, Domain>of(dsColumn, Domain.singleValue("2012-12-30")));
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, tupleDomain);
for (Partition partition : partitionResult.getPartitions()) {
if (Domain.singleValue("2012-12-30").equals(partition.getTupleDomain().getDomains().get(dsColumn))) {
try {
getSplitCount(splitManager.getPartitionSplits(tableHandle, ImmutableList.of(partition)));
fail("Expected PartitionOfflineException");
}
catch (PartitionOfflineException e) {
assertEquals(e.getTableName(), tableOfflinePartition);
assertEquals(e.getPartition(), "ds=2012-12-30");
}
}
else {
getSplitCount(splitManager.getPartitionSplits(tableHandle, ImmutableList.of(partition)));
}
}
}
@Test
public void testBucketedTableStringInt()
throws Exception
{
TableHandle tableHandle = getTableHandle(tableBucketedStringInt);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
Map<String, Integer> columnIndex = indexColumns(columnHandles);
assertTableIsBucketed(tableHandle);
String testString = "sequencefile test";
Long testInt = 413L;
Long testSmallint = 412L;
// Reverse the order of bindings as compared to bucketing order
ImmutableMap<ColumnHandle, Comparable<?>> bindings = ImmutableMap.<ColumnHandle, Comparable<?>>builder()
.put(columnHandles.get(columnIndex.get("t_int")), testInt)
.put(columnHandles.get(columnIndex.get("t_string")), testString)
.put(columnHandles.get(columnIndex.get("t_smallint")), testSmallint)
.build();
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.withFixedValues(bindings));
List<Split> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
assertEquals(splits.size(), 1);
boolean rowFound = false;
try (RecordCursor cursor = recordSetProvider.getRecordSet(splits.get(0), columnHandles).cursor()) {
while (cursor.advanceNextPosition()) {
if (testString.equals(new String(cursor.getString(columnIndex.get("t_string")))) &&
testInt == cursor.getLong(columnIndex.get("t_int")) &&
testSmallint == cursor.getLong(columnIndex.get("t_smallint"))) {
rowFound = true;
}
}
assertTrue(rowFound);
}
}
@Test
public void testBucketedTableBigintBoolean()
throws Exception
{
TableHandle tableHandle = getTableHandle(tableBucketedBigintBoolean);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
Map<String, Integer> columnIndex = indexColumns(columnHandles);
assertTableIsBucketed(tableHandle);
String testString = "textfile test";
// This needs to match one of the rows where t_string is not empty or null, and where t_bigint is not null
// (i.e. (testBigint - 604) % 19 > 1 and (testBigint - 604) % 13 != 0)
Long testBigint = 608L;
Boolean testBoolean = true;
ImmutableMap<ColumnHandle, Comparable<?>> bindings = ImmutableMap.<ColumnHandle, Comparable<?>>builder()
.put(columnHandles.get(columnIndex.get("t_string")), testString)
.put(columnHandles.get(columnIndex.get("t_bigint")), testBigint)
.put(columnHandles.get(columnIndex.get("t_boolean")), testBoolean)
.build();
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.withFixedValues(bindings));
List<Split> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
assertEquals(splits.size(), 1);
boolean rowFound = false;
try (RecordCursor cursor = recordSetProvider.getRecordSet(splits.get(0), columnHandles).cursor()) {
while (cursor.advanceNextPosition()) {
if (testString.equals(new String(cursor.getString(columnIndex.get("t_string")))) &&
testBigint == cursor.getLong(columnIndex.get("t_bigint")) &&
testBoolean == cursor.getBoolean(columnIndex.get("t_boolean"))) {
rowFound = true;
break;
}
}
assertTrue(rowFound);
}
}
@Test
public void testBucketedTableDoubleFloat()
throws Exception
{
TableHandle tableHandle = getTableHandle(tableBucketedDoubleFloat);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
Map<String, Integer> columnIndex = indexColumns(columnHandles);
assertTableIsBucketed(tableHandle);
ImmutableMap<ColumnHandle, Comparable<?>> bindings = ImmutableMap.<ColumnHandle, Comparable<?>>builder()
.put(columnHandles.get(columnIndex.get("t_float")), 406.1000061035156)
.put(columnHandles.get(columnIndex.get("t_double")), 407.2)
.build();
// floats and doubles are not supported, so we should see all splits
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.withFixedValues(bindings));
List<Split> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
assertEquals(splits.size(), 32);
int count = 0;
for (Split split : splits) {
try (RecordCursor cursor = recordSetProvider.getRecordSet(split, columnHandles).cursor()) {
while (cursor.advanceNextPosition()) {
count++;
}
}
}
assertEquals(count, 300);
}
private void assertTableIsBucketed(TableHandle tableHandle)
throws Exception
{
// the bucketed test tables should have exactly 32 splits
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.all());
List<Split> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
assertEquals(splits.size(), 32);
// verify all paths are unique
Set<String> paths = new HashSet<>();
for (Split split : splits) {
assertTrue(paths.add(((HiveSplit) split).getPath()));
}
}
@Test
public void testGetRecords()
throws Exception
{
TableHandle tableHandle = getTableHandle(table);
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
Map<String, Integer> columnIndex = indexColumns(columnHandles);
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.all());
List<Split> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
assertEquals(splits.size(), this.partitions.size());
for (Split split : splits) {
HiveSplit hiveSplit = (HiveSplit) split;
List<HivePartitionKey> partitionKeys = hiveSplit.getPartitionKeys();
String ds = partitionKeys.get(0).getValue();
String fileType = partitionKeys.get(1).getValue();
long dummy = Long.parseLong(partitionKeys.get(2).getValue());
long baseValue = getBaseValueForFileType(fileType);
assertEquals(dummy * 100, baseValue);
long rowNumber = 0;
long completedBytes = 0;
try (RecordCursor cursor = recordSetProvider.getRecordSet(hiveSplit, columnHandles).cursor()) {
assertRecordCursorType(cursor, fileType);
assertEquals(cursor.getTotalBytes(), hiveSplit.getLength());
while (cursor.advanceNextPosition()) {
try {
assertReadFields(cursor, tableMetadata.getColumns());
}
catch (RuntimeException e) {
throw new RuntimeException("row " + rowNumber, e);
}
rowNumber++;
if (rowNumber % 19 == 0) {
assertTrue(cursor.isNull(columnIndex.get("t_string")));
}
else if (rowNumber % 19 == 1) {
assertEquals(cursor.getString(columnIndex.get("t_string")), EMPTY_STRING);
}
else {
assertEquals(cursor.getString(columnIndex.get("t_string")), (fileType + " test").getBytes(Charsets.UTF_8));
}
assertEquals(cursor.getLong(columnIndex.get("t_tinyint")), (long) ((byte) (baseValue + 1 + rowNumber)));
assertEquals(cursor.getLong(columnIndex.get("t_smallint")), baseValue + 2 + rowNumber);
assertEquals(cursor.getLong(columnIndex.get("t_int")), baseValue + 3 + rowNumber);
if (rowNumber % 13 == 0) {
assertTrue(cursor.isNull(columnIndex.get("t_bigint")));
}
else {
assertEquals(cursor.getLong(columnIndex.get("t_bigint")), baseValue + 4 + rowNumber);
}
assertEquals(cursor.getDouble(columnIndex.get("t_float")), baseValue + 5.1 + rowNumber, 0.001);
assertEquals(cursor.getDouble(columnIndex.get("t_double")), baseValue + 6.2 + rowNumber);
if (rowNumber % 3 == 2) {
assertTrue(cursor.isNull(columnIndex.get("t_boolean")));
}
else {
assertEquals(cursor.getBoolean(columnIndex.get("t_boolean")), rowNumber % 3 != 0);
}
if (rowNumber % 17 == 0) {
assertTrue(cursor.isNull(columnIndex.get("t_timestamp")));
}
else {
long seconds = MILLISECONDS.toSeconds(new DateTime(2011, 5, 6, 7, 8, 9, 123).getMillis());
assertEquals(cursor.getLong(columnIndex.get("t_timestamp")), seconds);
}
if (rowNumber % 23 == 0) {
assertTrue(cursor.isNull(columnIndex.get("t_binary")));
}
else {
assertEquals(new String(cursor.getString(columnIndex.get("t_binary"))), (fileType + " test"));
}
if (rowNumber % 29 == 0) {
assertTrue(cursor.isNull(columnIndex.get("t_map")));
}
else {
String expectedJson = "{\"format\":\"" + fileType + "\"}";
assertEquals(cursor.getString(columnIndex.get("t_map")), expectedJson.getBytes(Charsets.UTF_8));
}
if (rowNumber % 27 == 0) {
assertTrue(cursor.isNull(columnIndex.get("t_array_string")));
}
else {
String expectedJson = "[\"" + fileType + "\",\"test\",\"data\"]";
assertEquals(cursor.getString(columnIndex.get("t_array_string")), expectedJson.getBytes(Charsets.UTF_8));
}
if (rowNumber % 31 == 0) {
assertTrue(cursor.isNull(columnIndex.get("t_complex")));
}
else {
String expectedJson = "{\"1\":[{\"s_string\":\"" + fileType + "-a\",\"s_double\":0.1},{\"s_string\":\"" + fileType + "-b\",\"s_double\":0.2}]}";
assertEquals(cursor.getString(columnIndex.get("t_complex")), expectedJson.getBytes(Charsets.UTF_8));
}
assertEquals(cursor.getString(columnIndex.get("ds")), ds.getBytes(Charsets.UTF_8));
assertEquals(cursor.getString(columnIndex.get("file_format")), fileType.getBytes(Charsets.UTF_8));
assertEquals(cursor.getLong(columnIndex.get("dummy")), dummy);
long newCompletedBytes = cursor.getCompletedBytes();
assertTrue(newCompletedBytes >= completedBytes);
assertTrue(newCompletedBytes <= hiveSplit.getLength());
completedBytes = newCompletedBytes;
}
}
assertTrue(completedBytes <= hiveSplit.getLength());
assertEquals(rowNumber, 100);
}
}
@Test
public void testGetPartialRecords()
throws Exception
{
TableHandle tableHandle = getTableHandle(table);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
Map<String, Integer> columnIndex = indexColumns(columnHandles);
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.all());
List<Split> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
assertEquals(splits.size(), this.partitions.size());
for (Split split : splits) {
HiveSplit hiveSplit = (HiveSplit) split;
List<HivePartitionKey> partitionKeys = hiveSplit.getPartitionKeys();
String ds = partitionKeys.get(0).getValue();
String fileType = partitionKeys.get(1).getValue();
long dummy = Long.parseLong(partitionKeys.get(2).getValue());
long baseValue = getBaseValueForFileType(fileType);
long rowNumber = 0;
try (RecordCursor cursor = recordSetProvider.getRecordSet(hiveSplit, columnHandles).cursor()) {
assertRecordCursorType(cursor, fileType);
while (cursor.advanceNextPosition()) {
rowNumber++;
assertEquals(cursor.getDouble(columnIndex.get("t_double")), baseValue + 6.2 + rowNumber);
assertEquals(cursor.getString(columnIndex.get("ds")), ds.getBytes(Charsets.UTF_8));
assertEquals(cursor.getString(columnIndex.get("file_format")), fileType.getBytes(Charsets.UTF_8));
assertEquals(cursor.getLong(columnIndex.get("dummy")), dummy);
}
}
assertEquals(rowNumber, 100);
}
}
@Test
public void testGetRecordsUnpartitioned()
throws Exception
{
TableHandle tableHandle = getTableHandle(tableUnpartitioned);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
Map<String, Integer> columnIndex = indexColumns(columnHandles);
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.all());
List<Split> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
assertEquals(splits.size(), 1);
for (Split split : splits) {
HiveSplit hiveSplit = (HiveSplit) split;
assertEquals(hiveSplit.getPartitionKeys(), ImmutableList.of());
long rowNumber = 0;
try (RecordCursor cursor = recordSetProvider.getRecordSet(split, columnHandles).cursor()) {
assertRecordCursorType(cursor, "textfile");
assertEquals(cursor.getTotalBytes(), hiveSplit.getLength());
while (cursor.advanceNextPosition()) {
rowNumber++;
if (rowNumber % 19 == 0) {
assertTrue(cursor.isNull(columnIndex.get("t_string")));
}
else if (rowNumber % 19 == 1) {
assertEquals(cursor.getString(columnIndex.get("t_string")), EMPTY_STRING);
}
else {
assertEquals(cursor.getString(columnIndex.get("t_string")), "unpartitioned".getBytes(Charsets.UTF_8));
}
assertEquals(cursor.getLong(columnIndex.get("t_tinyint")), 1 + rowNumber);
}
}
assertEquals(rowNumber, 100);
}
}
@Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = ".*" + INVALID_COLUMN + ".*")
public void testGetRecordsInvalidColumn()
throws Exception
{
TableHandle table = getTableHandle(tableUnpartitioned);
PartitionResult partitionResult = splitManager.getPartitions(table, TupleDomain.all());
Split split = Iterables.getFirst(getAllSplits(splitManager.getPartitionSplits(table, partitionResult.getPartitions())), null);
RecordSet recordSet = recordSetProvider.getRecordSet(split, ImmutableList.of(invalidColumnHandle));
recordSet.cursor();
}
@Test
public void testViewsAreNotSupported()
throws Exception
{
try {
getTableHandle(view);
fail("Expected HiveViewNotSupportedException");
}
catch (HiveViewNotSupportedException e) {
assertEquals(e.getTableName(), view);
}
}
@Test
public void testTableCreation()
throws Exception
{
try {
doCreateTable();
}
finally {
dropTable(temporaryCreateTable);
}
}
private void doCreateTable()
throws InterruptedException
{
// begin creating the table
List<ColumnMetadata> columns = ImmutableList.<ColumnMetadata>builder()
.add(new ColumnMetadata("id", ColumnType.LONG, 1, false))
.add(new ColumnMetadata("t_string", ColumnType.STRING, 2, false))
.add(new ColumnMetadata("t_bigint", ColumnType.LONG, 3, false))
.add(new ColumnMetadata("t_double", ColumnType.DOUBLE, 4, false))
.add(new ColumnMetadata("t_boolean", ColumnType.BOOLEAN, 5, false))
.build();
ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(temporaryCreateTable, columns, tableOwner);
OutputTableHandle outputHandle = metadata.beginCreateTable(tableMetadata);
// write the records
RecordSink sink = recordSinkProvider.getRecordSink(outputHandle);
sink.beginRecord(1);
sink.appendLong(1);
sink.appendString("hello".getBytes(UTF_8));
sink.appendLong(123);
sink.appendDouble(43.5);
sink.appendBoolean(true);
sink.finishRecord();
sink.beginRecord(1);
sink.appendLong(2);
sink.appendNull();
sink.appendNull();
sink.appendNull();
sink.appendNull();
sink.finishRecord();
sink.beginRecord(1);
sink.appendLong(3);
sink.appendString("bye".getBytes(UTF_8));
sink.appendLong(456);
sink.appendDouble(98.1);
sink.appendBoolean(false);
sink.finishRecord();
String fragment = sink.commit();
// commit the table
metadata.commitCreateTable(outputHandle, ImmutableList.of(fragment));
// load the new table
TableHandle tableHandle = getTableHandle(temporaryCreateTable);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
// verify the metadata
tableMetadata = metadata.getTableMetadata(getTableHandle(temporaryCreateTable));
assertEquals(tableMetadata.getOwner(), tableOwner);
Map<String, ColumnMetadata> columnMap = uniqueIndex(tableMetadata.getColumns(), columnNameGetter());
assertPrimitiveField(columnMap, 0, "id", ColumnType.LONG, false);
assertPrimitiveField(columnMap, 1, "t_string", ColumnType.STRING, false);
assertPrimitiveField(columnMap, 2, "t_bigint", ColumnType.LONG, false);
assertPrimitiveField(columnMap, 3, "t_double", ColumnType.DOUBLE, false);
assertPrimitiveField(columnMap, 4, "t_boolean", ColumnType.BOOLEAN, false);
// verify the data
PartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.all());
assertEquals(partitionResult.getPartitions().size(), 1);
SplitSource splitSource = splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions());
Split split = getOnlyElement(splitSource.getNextBatch(1000));
assertTrue(splitSource.isFinished());
try (RecordCursor cursor = recordSetProvider.getRecordSet(split, columnHandles).cursor()) {
assertRecordCursorType(cursor, "rcfile-binary");
assertTrue(cursor.advanceNextPosition());
assertEquals(cursor.getLong(0), 1);
assertEquals(cursor.getString(1), "hello".getBytes(UTF_8));
assertEquals(cursor.getLong(2), 123);
assertEquals(cursor.getDouble(3), 43.5);
assertEquals(cursor.getBoolean(4), true);
assertTrue(cursor.advanceNextPosition());
assertEquals(cursor.getLong(0), 2);
assertTrue(cursor.isNull(1));
assertTrue(cursor.isNull(2));
assertTrue(cursor.isNull(3));
assertTrue(cursor.isNull(4));
assertTrue(cursor.advanceNextPosition());
assertEquals(cursor.getLong(0), 3);
assertEquals(cursor.getString(1), "bye".getBytes(UTF_8));
assertEquals(cursor.getLong(2), 456);
assertEquals(cursor.getDouble(3), 98.1);
assertEquals(cursor.getBoolean(4), false);
assertFalse(cursor.advanceNextPosition());
}
}
private void dropTable(SchemaTableName table)
{
try {
metastoreClient.dropTable(table.getSchemaName(), table.getTableName());
}
catch (RuntimeException e) {
Logger.get(getClass()).warn(e, "Failed to drop table: %s", table);
}
}
private TableHandle getTableHandle(SchemaTableName tableName)
{
TableHandle handle = metadata.getTableHandle(tableName);
checkArgument(handle != null, "table not found: %s", tableName);
return handle;
}
private static int getSplitCount(SplitSource splitSource)
throws InterruptedException
{
int splitCount = 0;
while (!splitSource.isFinished()) {
List<Split> batch = splitSource.getNextBatch(1000);
splitCount += batch.size();
}
return splitCount;
}
private static List<Split> getAllSplits(SplitSource splitSource)
throws InterruptedException
{
ImmutableList.Builder<Split> splits = ImmutableList.builder();
while (!splitSource.isFinished()) {
List<Split> batch = splitSource.getNextBatch(1000);
splits.addAll(batch);
}
return splits.build();
}
private static long getBaseValueForFileType(String fileType)
{
switch (fileType) {
case "rcfile-text":
return 0;
case "rcfile-binary":
return 200;
case "sequencefile":
return 400;
case "textfile":
return 600;
default:
throw new IllegalArgumentException("Unexpected fileType key " + fileType);
}
}
private static void assertRecordCursorType(RecordCursor cursor, String fileType)
{
if (fileType.equals("rcfile-text")) {
assertInstanceOf(cursor, ColumnarTextHiveRecordCursor.class, fileType);
}
else if (fileType.equals("rcfile-binary")) {
assertInstanceOf(cursor, ColumnarBinaryHiveRecordCursor.class, fileType);
}
else {
assertInstanceOf(cursor, GenericHiveRecordCursor.class, fileType);
}
}
private static void assertReadFields(RecordCursor cursor, List<ColumnMetadata> schema)
{
for (int columnIndex = 0; columnIndex < schema.size(); columnIndex++) {
ColumnMetadata column = schema.get(columnIndex);
if (!cursor.isNull(columnIndex)) {
switch (column.getType()) {
case BOOLEAN:
cursor.getBoolean(columnIndex);
break;
case LONG:
cursor.getLong(columnIndex);
break;
case DOUBLE:
cursor.getDouble(columnIndex);
break;
case STRING:
try {
cursor.getString(columnIndex);
}
catch (RuntimeException e) {
throw new RuntimeException("column " + column, e);
}
break;
default:
fail("Unknown primitive type " + columnIndex);
}
}
}
}
private static void assertPrimitiveField(Map<String, ColumnMetadata> map, int position, String name, ColumnType type, boolean partitionKey)
{
assertTrue(map.containsKey(name));
ColumnMetadata column = map.get(name);
assertEquals(column.getOrdinalPosition(), position);
assertEquals(column.getType(), type, name);
assertEquals(column.isPartitionKey(), partitionKey, name);
}
private static ImmutableMap<String, Integer> indexColumns(List<ColumnHandle> columnHandles)
{
ImmutableMap.Builder<String, Integer> index = ImmutableMap.builder();
int i = 0;
for (ColumnHandle columnHandle : columnHandles) {
checkArgument(columnHandle instanceof HiveColumnHandle, "columnHandle is not an instance of HiveColumnHandle");
HiveColumnHandle hiveColumnHandle = (HiveColumnHandle) columnHandle;
index.put(hiveColumnHandle.getName(), i);
i++;
}
return index.build();
}
private static Function<ColumnMetadata, String> columnNameGetter()
{
return new Function<ColumnMetadata, String>()
{
@Override
public String apply(ColumnMetadata input)
{
return input.getName();
}
};
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexer;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import org.apache.commons.io.FileUtils;
import org.apache.druid.data.input.Firehose;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.CSVParseSpec;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.StringInputRowParser;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.hll.HyperLogLogCollector;
import org.apache.druid.indexer.hadoop.WindowedDataSegment;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.QueryableIndexStorageAdapter;
import org.apache.druid.segment.StorageAdapter;
import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec;
import org.apache.druid.segment.loading.LocalDataSegmentPuller;
import org.apache.druid.segment.realtime.firehose.IngestSegmentFirehose;
import org.apache.druid.segment.realtime.firehose.WindowedStorageAdapter;
import org.apache.druid.segment.transform.TransformSpec;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.HashBasedNumberedShardSpec;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class BatchDeltaIngestionTest
{
@Rule
public final TemporaryFolder temporaryFolder = new TemporaryFolder();
private static final ObjectMapper MAPPER;
private static final IndexIO INDEX_IO;
private static final Interval INTERVAL_FULL = Intervals.of("2014-10-22T00:00:00Z/P1D");
private static final Interval INTERVAL_PARTIAL = Intervals.of("2014-10-22T00:00:00Z/PT2H");
private static final DataSegment SEGMENT;
static {
MAPPER = new DefaultObjectMapper();
MAPPER.registerSubtypes(new NamedType(HashBasedNumberedShardSpec.class, "hashed"));
InjectableValues inject = new InjectableValues.Std()
.addValue(ObjectMapper.class, MAPPER)
.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT);
MAPPER.setInjectableValues(inject);
INDEX_IO = HadoopDruidIndexerConfig.INDEX_IO;
try {
SEGMENT = MAPPER
.readValue(
BatchDeltaIngestionTest.class.getClassLoader().getResource("test-segment/descriptor.json"),
DataSegment.class
)
.withLoadSpec(
ImmutableMap.of(
"type",
"local",
"path",
BatchDeltaIngestionTest.class.getClassLoader().getResource("test-segment/index.zip").getPath()
)
);
}
catch (IOException e) {
throw Throwables.propagate(e);
}
}
@Test
public void testReindexing() throws Exception
{
List<WindowedDataSegment> segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_FULL));
HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig(
ImmutableMap.of(
"type",
"dataSource",
"ingestionSpec",
ImmutableMap.of(
"dataSource",
"testds",
"interval",
INTERVAL_FULL
),
"segments",
segments
),
temporaryFolder.newFolder()
);
List<ImmutableMap<String, Object>> expectedRows = ImmutableList.of(
ImmutableMap.of(
"time", DateTimes.of("2014-10-22T00:00:00.000Z"),
"host", ImmutableList.of("a.example.com"),
"visited_sum", 100L,
"unique_hosts", 1.0d
),
ImmutableMap.of(
"time", DateTimes.of("2014-10-22T01:00:00.000Z"),
"host", ImmutableList.of("b.example.com"),
"visited_sum", 150L,
"unique_hosts", 1.0d
),
ImmutableMap.of(
"time", DateTimes.of("2014-10-22T02:00:00.000Z"),
"host", ImmutableList.of("c.example.com"),
"visited_sum", 200L,
"unique_hosts", 1.0d
)
);
testIngestion(
config,
expectedRows,
Iterables.getOnlyElement(segments),
ImmutableList.of("host"),
ImmutableList.of("visited_sum", "unique_hosts")
);
}
/**
 * By default, re-indexing expects the same aggregators as those used by the original indexing job. But with the
 * additional flag "useNewAggs" in DatasourcePathSpec, the user can optionally supply any set of aggregators.
 * See https://github.com/apache/incubator-druid/issues/5277.
*/
@Test
public void testReindexingWithNewAggregators() throws Exception
{
List<WindowedDataSegment> segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_FULL));
AggregatorFactory[] aggregators = new AggregatorFactory[]{
new LongSumAggregatorFactory("visited_sum2", "visited_sum"),
new HyperUniquesAggregatorFactory("unique_hosts2", "unique_hosts")
};
Map<String, Object> inputSpec = ImmutableMap.of(
"type",
"dataSource",
"ingestionSpec",
ImmutableMap.of(
"dataSource",
"testds",
"interval",
INTERVAL_FULL
),
"segments",
segments,
"useNewAggs", true
);
File tmpDir = temporaryFolder.newFolder();
HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig(
inputSpec,
tmpDir,
aggregators
);
List<ImmutableMap<String, Object>> expectedRows = ImmutableList.of(
ImmutableMap.of(
"time", DateTimes.of("2014-10-22T00:00:00.000Z"),
"host", ImmutableList.of("a.example.com"),
"visited_sum2", 100L,
"unique_hosts2", 1.0d
),
ImmutableMap.of(
"time", DateTimes.of("2014-10-22T01:00:00.000Z"),
"host", ImmutableList.of("b.example.com"),
"visited_sum2", 150L,
"unique_hosts2", 1.0d
),
ImmutableMap.of(
"time", DateTimes.of("2014-10-22T02:00:00.000Z"),
"host", ImmutableList.of("c.example.com"),
"visited_sum2", 200L,
"unique_hosts2", 1.0d
)
);
testIngestion(
config,
expectedRows,
Iterables.getOnlyElement(segments),
ImmutableList.of("host"),
ImmutableList.of("visited_sum2", "unique_hosts2")
);
}
@Test
public void testReindexingWithPartialWindow() throws Exception
{
List<WindowedDataSegment> segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_PARTIAL));
HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig(
ImmutableMap.of(
"type",
"dataSource",
"ingestionSpec",
ImmutableMap.of(
"dataSource",
"testds",
"interval",
INTERVAL_FULL
),
"segments",
segments
),
temporaryFolder.newFolder()
);
List<ImmutableMap<String, Object>> expectedRows = ImmutableList.of(
ImmutableMap.of(
"time", DateTimes.of("2014-10-22T00:00:00.000Z"),
"host", ImmutableList.of("a.example.com"),
"visited_sum", 100L,
"unique_hosts", 1.0d
),
ImmutableMap.of(
"time", DateTimes.of("2014-10-22T01:00:00.000Z"),
"host", ImmutableList.of("b.example.com"),
"visited_sum", 150L,
"unique_hosts", 1.0d
)
);
testIngestion(
config,
expectedRows,
Iterables.getOnlyElement(segments),
ImmutableList.of("host"),
ImmutableList.of("visited_sum", "unique_hosts")
);
}
@Test
public void testDeltaIngestion() throws Exception
{
File tmpDir = temporaryFolder.newFolder();
File dataFile1 = new File(tmpDir, "data1");
FileUtils.writeLines(
dataFile1,
ImmutableList.of(
"2014102200,a.example.com,a.example.com,90",
"2014102201,b.example.com,b.example.com,25"
)
);
File dataFile2 = new File(tmpDir, "data2");
FileUtils.writeLines(
dataFile2,
ImmutableList.of(
"2014102202,c.example.com,c.example.com,70"
)
);
        // Use a hadoop glob path to verify that delta ingestion keeps working with hadoop
        // MultipleInputs and is not affected by
        // https://issues.apache.org/jira/browse/MAPREDUCE-5061
String inputPath = tmpDir.getPath() + "/{data1,data2}";
List<WindowedDataSegment> segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_FULL));
HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig(
ImmutableMap.of(
"type",
"multi",
"children",
ImmutableList.of(
ImmutableMap.of(
"type",
"dataSource",
"ingestionSpec",
ImmutableMap.of(
"dataSource",
"testds",
"interval",
INTERVAL_FULL
),
"segments",
segments
),
ImmutableMap.<String, Object>of(
"type",
"static",
"paths",
inputPath
)
)
),
temporaryFolder.newFolder()
);
List<ImmutableMap<String, Object>> expectedRows = ImmutableList.of(
ImmutableMap.of(
"time", DateTimes.of("2014-10-22T00:00:00.000Z"),
"host", ImmutableList.of("a.example.com"),
"visited_sum", 190L,
"unique_hosts", 1.0d
),
ImmutableMap.of(
"time", DateTimes.of("2014-10-22T01:00:00.000Z"),
"host", ImmutableList.of("b.example.com"),
"visited_sum", 175L,
"unique_hosts", 1.0d
),
ImmutableMap.of(
"time", DateTimes.of("2014-10-22T02:00:00.000Z"),
"host", ImmutableList.of("c.example.com"),
"visited_sum", 270L,
"unique_hosts", 1.0d
)
);
testIngestion(
config,
expectedRows,
Iterables.getOnlyElement(segments),
ImmutableList.of("host"),
ImmutableList.of("visited_sum", "unique_hosts")
);
}
private void testIngestion(
HadoopDruidIndexerConfig config,
List<ImmutableMap<String, Object>> expectedRowsGenerated,
WindowedDataSegment windowedDataSegment,
List<String> expectedDimensions,
List<String> expectedMetrics
) throws Exception
{
IndexGeneratorJob job = new IndexGeneratorJob(config);
Assert.assertTrue(JobHelper.runJobs(ImmutableList.of(job), config));
File segmentFolder = new File(
StringUtils.format(
"%s/%s/%s_%s/%s/0",
config.getSchema().getIOConfig().getSegmentOutputPath(),
config.getSchema().getDataSchema().getDataSource(),
INTERVAL_FULL.getStart().toString(),
INTERVAL_FULL.getEnd().toString(),
config.getSchema().getTuningConfig().getVersion()
)
);
Assert.assertTrue(segmentFolder.exists());
File descriptor = new File(segmentFolder, "descriptor.json");
File indexZip = new File(segmentFolder, "index.zip");
Assert.assertTrue(descriptor.exists());
Assert.assertTrue(indexZip.exists());
DataSegment dataSegment = MAPPER.readValue(descriptor, DataSegment.class);
Assert.assertEquals("website", dataSegment.getDataSource());
Assert.assertEquals(config.getSchema().getTuningConfig().getVersion(), dataSegment.getVersion());
Assert.assertEquals(INTERVAL_FULL, dataSegment.getInterval());
Assert.assertEquals("local", dataSegment.getLoadSpec().get("type"));
Assert.assertEquals(indexZip.getCanonicalPath(), dataSegment.getLoadSpec().get("path"));
Assert.assertEquals(expectedDimensions, dataSegment.getDimensions());
Assert.assertEquals(expectedMetrics, dataSegment.getMetrics());
Assert.assertEquals(Integer.valueOf(9), dataSegment.getBinaryVersion());
HashBasedNumberedShardSpec spec = (HashBasedNumberedShardSpec) dataSegment.getShardSpec();
Assert.assertEquals(0, spec.getPartitionNum());
Assert.assertEquals(1, spec.getPartitions());
File tmpUnzippedSegmentDir = temporaryFolder.newFolder();
new LocalDataSegmentPuller().getSegmentFiles(dataSegment, tmpUnzippedSegmentDir);
QueryableIndex index = INDEX_IO.loadIndex(tmpUnzippedSegmentDir);
StorageAdapter adapter = new QueryableIndexStorageAdapter(index);
Firehose firehose = new IngestSegmentFirehose(
ImmutableList.of(new WindowedStorageAdapter(adapter, windowedDataSegment.getInterval())),
TransformSpec.NONE,
expectedDimensions,
expectedMetrics,
null
);
List<InputRow> rows = new ArrayList<>();
while (firehose.hasMore()) {
rows.add(firehose.nextRow());
}
verifyRows(expectedRowsGenerated, rows, expectedDimensions, expectedMetrics);
}
private HadoopDruidIndexerConfig makeHadoopDruidIndexerConfig(Map<String, Object> inputSpec, File tmpDir)
throws Exception
{
return makeHadoopDruidIndexerConfig(inputSpec, tmpDir, null);
}
private HadoopDruidIndexerConfig makeHadoopDruidIndexerConfig(
Map<String, Object> inputSpec,
File tmpDir,
AggregatorFactory[] aggregators
)
throws Exception
{
HadoopDruidIndexerConfig config = new HadoopDruidIndexerConfig(
new HadoopIngestionSpec(
new DataSchema(
"website",
MAPPER.convertValue(
new StringInputRowParser(
new CSVParseSpec(
new TimestampSpec("timestamp", "yyyyMMddHH", null),
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null),
null,
ImmutableList.of("timestamp", "host", "host2", "visited_num"),
false,
0
),
null
),
Map.class
),
aggregators != null ? aggregators : new AggregatorFactory[]{
new LongSumAggregatorFactory("visited_sum", "visited_num"),
new HyperUniquesAggregatorFactory("unique_hosts", "host2")
},
new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, ImmutableList.of(INTERVAL_FULL)),
null,
MAPPER
),
new HadoopIOConfig(
inputSpec,
null,
tmpDir.getCanonicalPath()
),
new HadoopTuningConfig(
tmpDir.getCanonicalPath(),
null,
null,
null,
null,
null,
null,
false,
false,
false,
false,
null,
false,
false,
null,
null,
null,
false,
false,
null,
null,
null
)
)
);
config.setShardSpecs(
ImmutableMap.of(
INTERVAL_FULL.getStartMillis(),
ImmutableList.of(
new HadoopyShardSpec(
new HashBasedNumberedShardSpec(0, 1, null, HadoopDruidIndexerConfig.JSON_MAPPER),
0
)
)
)
);
config = HadoopDruidIndexerConfig.fromSpec(config.getSchema());
return config;
}
private void verifyRows(
List<ImmutableMap<String, Object>> expectedRows,
List<InputRow> actualRows,
List<String> expectedDimensions,
List<String> expectedMetrics
)
{
Assert.assertEquals(expectedRows.size(), actualRows.size());
for (int i = 0; i < expectedRows.size(); i++) {
Map<String, Object> expected = expectedRows.get(i);
InputRow actual = actualRows.get(i);
Assert.assertEquals(expected.get("time"), actual.getTimestamp());
Assert.assertEquals(expectedDimensions, actual.getDimensions());
expectedDimensions.forEach(s -> Assert.assertEquals(expected.get(s), actual.getDimension(s)));
for (String metric : expectedMetrics) {
Object actualValue = actual.getRaw(metric);
if (actualValue instanceof HyperLogLogCollector) {
Assert.assertEquals(
(Double) expected.get(metric),
(Double) HyperUniquesAggregatorFactory.estimateCardinality(actualValue, false),
0.001
);
} else {
Assert.assertEquals(expected.get(metric), actual.getMetric(metric));
}
}
}
}
}
|
|
/*
* Copyright 2016 Hippo Seven
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hippo.image.example;
/*
* Created by Hippo on 8/19/2016.
*/
import android.graphics.Bitmap;
import android.support.annotation.Nullable;
import com.hippo.image.ImageData;
import com.hippo.image.ImageRenderer;
/**
* This class uses {@link Bitmap} to represent {@link ImageRenderer}.
* Call {@link #getBitmap()} to get the bitmap.
*/
public class ImageBitmap {
private int mRatio;
private int mWidth;
private int mHeight;
private int mFormat;
private boolean mOpaque;
private int mFrameCount;
private int[] mDelayArray;
private int mByteCount;
@Nullable
private Bitmap mBitmap;
@Nullable
private ImageRenderer mImageRenderer;
/**
     * The ImageData must be completed and the ratio must satisfy
     * {@code ratio <= width} and {@code ratio <= height};
     * otherwise an IllegalStateException is thrown.
*/
public ImageBitmap(ImageData imageData, int ratio) {
// Only completed image supported
if (!imageData.isCompleted()) {
throw new IllegalStateException("ImageBitmap can only handle completed ImageData");
}
int width = imageData.getWidth();
int height = imageData.getHeight();
if (ratio > width || ratio > height) {
throw new IllegalStateException("Ratio is too big");
}
mRatio = ratio;
mWidth = width / ratio;
mHeight = height / ratio;
mFormat = imageData.getFormat();
mOpaque = imageData.isOpaque();
mFrameCount = imageData.getFrameCount();
mByteCount = imageData.getByteCount();
mDelayArray = new int[mFrameCount];
for (int i = 0; i < mFrameCount; i++) {
mDelayArray[i] = imageData.getDelay(i);
}
mBitmap = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
// Render first frame
ImageRenderer imageRenderer = imageData.createImageRenderer();
imageRenderer.reset();
imageRenderer.render(mBitmap, 0, 0, 0, 0, width, height, ratio, false, 0);
if (mFrameCount == 1) {
// Recycle image renderer if it is not animated
imageRenderer.recycle();
} else {
// Store image renderer if it is animated
mImageRenderer = imageRenderer;
}
}
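    // Illustrative usage sketch, not part of the original class: it shows the intended
    // call sequence for a completed ImageData. The method name and the "imageData"
    // parameter are hypothetical and exist only for documentation purposes.
    @SuppressWarnings("unused")
    private static Bitmap exampleUsage(ImageData imageData) {
        ImageBitmap imageBitmap = new ImageBitmap(imageData, 1); // ratio 1 keeps the full size
        Bitmap bitmap = imageBitmap.getBitmap();                 // draw this Bitmap somewhere
        if (imageBitmap.isAnimated()) {
            imageBitmap.advance();                               // renders the next frame into the same Bitmap
            int delay = imageBitmap.getCurrentDelay();           // how long to wait before the next advance()
        }
        return bitmap;                                           // the caller must call recycle() when done
    }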
/**
* Recycle the Bitmap and ImageRenderer.
*/
public void recycle() {
if (mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
}
if (mImageRenderer != null) {
mImageRenderer.recycle();
            mImageRenderer = null;
}
}
/**
* Draw first frame to bitmap.
*/
public void reset() {
if (mBitmap != null && mImageRenderer != null) {
mImageRenderer.reset();
mImageRenderer.render(mBitmap, 0, 0, 0, 0, mWidth, mHeight, mRatio, false, 0);
}
}
/**
* Draw next frame to bitmap.
*/
public void advance() {
if (mBitmap != null && mImageRenderer != null) {
mImageRenderer.advance();
mImageRenderer.render(mBitmap, 0, 0, 0, 0, mWidth, mHeight, mRatio, false, 0);
}
}
/**
* Get the Bitmap.
*/
@Nullable
public Bitmap getBitmap() {
return mBitmap;
}
/**
* Return the ratio.
*/
public int getRatio() {
return mRatio;
}
/**
* Return the width of the Bitmap.
*/
public int getWidth() {
return mWidth;
}
/**
* Return the height of the Bitmap.
*/
public int getHeight() {
return mHeight;
}
/**
* Return the format of the ImageData.
*/
public int getFormat() {
return mFormat;
}
/**
* Return true if the ImageData is opaque.
*/
public boolean isOpaque() {
return mOpaque;
}
/**
* Return the frame count of the ImageData.
*/
public int getFrameCount() {
return mFrameCount;
}
/**
     * Return true if this is an animated image.
*/
public boolean isAnimated() {
return mFrameCount > 1;
}
/**
* Return the delay of the frame.
*/
public int getDelay(int frame) {
return mDelayArray[frame];
}
/**
     * Return the delay of the current frame.
     * Return Integer.MAX_VALUE if no ImageRenderer is available.
*/
public int getCurrentDelay() {
if (mImageRenderer != null) {
return mImageRenderer.getCurrentDelay();
} else {
return Integer.MAX_VALUE;
}
}
/**
* Return the byte count of the ImageData.
*/
public int getByteCount() {
return mByteCount;
}
}
|
|
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Yahoo! Inc., Seiji Sogabe,
* Andrew Bayer
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.model;
import hudson.PluginManager;
import hudson.PluginWrapper;
import hudson.lifecycle.Lifecycle;
import hudson.model.UpdateCenter.UpdateCenterJob;
import hudson.util.FormValidation;
import hudson.util.FormValidation.Kind;
import hudson.util.HttpResponses;
import hudson.util.IOUtils;
import hudson.util.TextFile;
import hudson.util.VersionNumber;
import jenkins.model.Jenkins;
import jenkins.util.JSONSignatureValidator;
import net.sf.json.JSONException;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.HttpResponse;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.export.Exported;
import org.kohsuke.stapler.export.ExportedBean;
import org.kohsuke.stapler.interceptor.RequirePOST;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.Future;
import java.util.logging.Level;
import java.util.logging.Logger;
import static hudson.util.TimeUnit2.*;
/**
* Source of the update center information, like "http://jenkins-ci.org/update-center.json"
*
* <p>
* Jenkins can have multiple {@link UpdateSite}s registered in the system, so that it can pick up plugins
* from different locations.
*
* @author Andrew Bayer
* @author Kohsuke Kawaguchi
* @since 1.333
*/
@ExportedBean
public class UpdateSite {
/**
* What's the time stamp of data file?
*/
private transient long dataTimestamp = -1;
/**
* When was the last time we asked a browser to check the data for us?
*
* <p>
* There's normally some delay between when we send HTML that includes the check code,
     * until we get the data back, so this variable is used to avoid asking too many browsers
* all at once.
*/
private transient volatile long lastAttempt = -1;
/**
* If the attempt to fetch data fails, we progressively use longer time out before retrying,
* to avoid overloading the server.
*/
private transient volatile long retryWindow;
/**
* lastModified time of the data file when it was last read.
*/
private transient long dataLastReadFromFile;
/**
* Latest data as read from the data file.
*/
private Data data;
/**
* ID string for this update source.
*/
private final String id;
/**
* Path to <tt>update-center.json</tt>, like <tt>http://jenkins-ci.org/update-center.json</tt>.
*/
private final String url;
public UpdateSite(String id, String url) {
this.id = id;
this.url = url;
}
/**
* When read back from XML, initialize them back to -1.
*/
private Object readResolve() {
dataTimestamp = lastAttempt = -1;
return this;
}
/**
* Get ID string.
*/
@Exported
public String getId() {
return id;
}
@Exported
public long getDataTimestamp() {
return dataTimestamp;
}
/**
* This is the endpoint that receives the update center data file from the browser.
*/
public FormValidation doPostBack(StaplerRequest req) throws IOException, GeneralSecurityException {
dataTimestamp = System.currentTimeMillis();
String json = IOUtils.toString(req.getInputStream(),"UTF-8");
JSONObject o = JSONObject.fromObject(json);
int v = o.getInt("updateCenterVersion");
if(v !=1)
throw new IllegalArgumentException("Unrecognized update center version: "+v);
if (signatureCheck) {
FormValidation e = verifySignature(o);
if (e.kind!=Kind.OK) {
LOGGER.severe(e.renderHtml());
return e;
}
}
LOGGER.info("Obtained the latest update center data file for UpdateSource " + id);
retryWindow = 0;
getDataFile().write(json);
return FormValidation.ok();
}
public FormValidation doVerifySignature() throws IOException {
return verifySignature(getJSONObject());
}
/**
* Verifies the signature in the update center data file.
*/
private FormValidation verifySignature(JSONObject o) throws IOException {
return new JSONSignatureValidator("update site '"+id+"'").verifySignature(o);
}
/**
* Returns true if it's time for us to check for new version.
*/
public boolean isDue() {
if(neverUpdate) return false;
if(dataTimestamp==-1)
dataTimestamp = getDataFile().file.lastModified();
long now = System.currentTimeMillis();
retryWindow = Math.max(retryWindow,SECONDS.toMillis(15));
boolean due = now - dataTimestamp > DAY && now - lastAttempt > retryWindow;
if(due) {
lastAttempt = now;
retryWindow = Math.min(retryWindow*2, HOURS.toMillis(1)); // exponential back off but at most 1 hour
}
return due;
}
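    // Illustrative note, not in the original source: after a failed fetch the retry window
    // doubles on every due check, e.g. 15s -> 30s -> 60s -> ..., capped at one hour, so an
    // unreachable update site is retried at most roughly once per hour until doPostBack()
    // succeeds and resets retryWindow to 0.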
/**
* Invalidates the cached data and force retrieval.
*
* @since 1.432
*/
@RequirePOST
public HttpResponse doInvalidateData() {
Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);
dataTimestamp = 0;
return HttpResponses.ok();
}
/**
* Loads the update center data, if any and if modified since last read.
*
* @return null if no data is available.
*/
public Data getData() {
TextFile df = getDataFile();
if (df.exists() && dataLastReadFromFile != df.file.lastModified()) {
JSONObject o = getJSONObject();
if (o!=null) {
data = new Data(o);
dataLastReadFromFile = df.file.lastModified();
} else {
data = null;
}
}
return data;
}
/**
* Gets the raw update center JSON data.
*/
public JSONObject getJSONObject() {
TextFile df = getDataFile();
if(df.exists()) {
try {
return JSONObject.fromObject(df.read());
} catch (JSONException e) {
LOGGER.log(Level.SEVERE,"Failed to parse "+df,e);
df.delete(); // if we keep this file, it will cause repeated failures
return null;
} catch (IOException e) {
LOGGER.log(Level.SEVERE,"Failed to parse "+df,e);
df.delete(); // if we keep this file, it will cause repeated failures
return null;
}
} else {
return null;
}
}
/**
* Returns a list of plugins that should be shown in the "available" tab.
* These are "all plugins - installed plugins".
*/
@Exported
public List<Plugin> getAvailables() {
List<Plugin> r = new ArrayList<Plugin>();
Data data = getData();
if(data==null) return Collections.emptyList();
for (Plugin p : data.plugins.values()) {
if(p.getInstalled()==null)
r.add(p);
}
return r;
}
/**
* Gets the information about a specific plugin.
*
* @param artifactId
* The short name of the plugin. Corresponds to {@link PluginWrapper#getShortName()}.
*
* @return
* null if no such information is found.
*/
public Plugin getPlugin(String artifactId) {
Data dt = getData();
if(dt==null) return null;
return dt.plugins.get(artifactId);
}
public Api getApi() {
return new Api(this);
}
/**
* Returns an "always up" server for Internet connectivity testing, or null if we are going to skip the test.
*/
@Exported
public String getConnectionCheckUrl() {
Data dt = getData();
if(dt==null) return "http://www.google.com/";
return dt.connectionCheckUrl;
}
/**
* This is where we store the update center data.
*/
private TextFile getDataFile() {
return new TextFile(new File(Jenkins.getInstance().getRootDir(),
"updates/" + getId()+".json"));
}
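    // Illustrative note: for the default update site this typically resolves to
    // "$JENKINS_HOME/updates/default.json" (assuming the default site id is "default").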
/**
* Returns the list of plugins that are updates to currently installed ones.
*
* @return
* can be empty but never null.
*/
@Exported
public List<Plugin> getUpdates() {
Data data = getData();
if(data==null) return Collections.emptyList(); // fail to determine
List<Plugin> r = new ArrayList<Plugin>();
for (PluginWrapper pw : Jenkins.getInstance().getPluginManager().getPlugins()) {
Plugin p = pw.getUpdateInfo();
if(p!=null) r.add(p);
}
return r;
}
/**
* Does any of the plugin has updates?
*/
@Exported
public boolean hasUpdates() {
Data data = getData();
if(data==null) return false;
for (PluginWrapper pw : Jenkins.getInstance().getPluginManager().getPlugins()) {
if(!pw.isBundled() && pw.getUpdateInfo()!=null)
                // do not advertise updates to bundled plugins, since we generally want users to get them
// as a part of jenkins.war updates. This also avoids unnecessary pinning of plugins.
return true;
}
return false;
}
/**
* Exposed to get rid of hardcoding of the URL that serves up update-center.json
* in Javascript.
*/
@Exported
public String getUrl() {
return url;
}
/**
* Where to actually download the update center?
*
* @deprecated
* Exposed only for UI.
*/
public String getDownloadUrl() {
/*
HACKISH:
Loading scripts in HTTP from HTTPS pages cause browsers to issue a warning dialog.
The elegant way to solve the problem is to always load update center from HTTPS,
but our backend mirroring scheme isn't ready for that. So this hack serves regular
traffic in HTTP server, and only use HTTPS update center for Jenkins in HTTPS.
We'll monitor the traffic to see if we can sustain this added traffic.
*/
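        // Illustrative example of the rewrite below:
        //   "http://updates.jenkins-ci.org/update-center.json"
        //     -> "https://updates.jenkins-ci.org/update-center.json"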
if (url.equals("http://updates.jenkins-ci.org/update-center.json") && Jenkins.getInstance().isRootUrlSecure())
return "https"+url.substring(4);
return url;
}
/**
* Is this the legacy default update center site?
*/
public boolean isLegacyDefault() {
        return id.equals(UpdateCenter.ID_DEFAULT) && (url.startsWith("http://hudson-ci.org/") || url.startsWith("http://updates.hudson-labs.org/"));
}
/**
* In-memory representation of the update center data.
*/
public final class Data {
/**
* The {@link UpdateSite} ID.
*/
public final String sourceId;
/**
* The latest jenkins.war.
*/
public final Entry core;
/**
* Plugins in the repository, keyed by their artifact IDs.
*/
public final Map<String,Plugin> plugins = new TreeMap<String,Plugin>(String.CASE_INSENSITIVE_ORDER);
/**
* If this is non-null, Jenkins is going to check the connectivity to this URL to make sure
* the network connection is up. Null to skip the check.
*/
public final String connectionCheckUrl;
Data(JSONObject o) {
this.sourceId = (String)o.get("id");
if (sourceId.equals(UpdateCenter.ID_DEFAULT)) {
core = new Entry(sourceId, o.getJSONObject("core"), url);
}
else {
core = null;
}
for(Map.Entry<String,JSONObject> e : (Set<Map.Entry<String,JSONObject>>)o.getJSONObject("plugins").entrySet()) {
plugins.put(e.getKey(),new Plugin(sourceId, e.getValue()));
}
connectionCheckUrl = (String)o.get("connectionCheckUrl");
}
/**
* Is there a new version of the core?
*/
public boolean hasCoreUpdates() {
return core != null && core.isNewerThan(Jenkins.VERSION);
}
/**
* Do we support upgrade?
*/
public boolean canUpgrade() {
return Lifecycle.get().canRewriteHudsonWar();
}
}
@ExportedBean
public static class Entry {
/**
* {@link UpdateSite} ID.
*/
@Exported
public final String sourceId;
/**
* Artifact ID.
*/
@Exported
public final String name;
/**
* The version.
*/
@Exported
public final String version;
/**
* Download URL.
*/
@Exported
public final String url;
public Entry(String sourceId, JSONObject o) {
this(sourceId, o, null);
}
Entry(String sourceId, JSONObject o, String baseURL) {
this.sourceId = sourceId;
this.name = o.getString("name");
this.version = o.getString("version");
String url = o.getString("url");
if (!URI.create(url).isAbsolute()) {
if (baseURL == null) {
throw new IllegalArgumentException("Cannot resolve " + url + " without a base URL");
}
url = URI.create(baseURL).resolve(url).toString();
}
this.url = url;
}
/**
* Checks if the specified "current version" is older than the version of this entry.
*
* @param currentVersion
* The string that represents the version number to be compared.
* @return
* true if the version listed in this entry is newer.
* false otherwise, including the situation where the strings couldn't be parsed as version numbers.
*/
public boolean isNewerThan(String currentVersion) {
try {
return new VersionNumber(currentVersion).compareTo(new VersionNumber(version)) < 0;
} catch (IllegalArgumentException e) {
// couldn't parse as the version number.
return false;
}
}
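        // Illustrative sketch (assumption: hudson.util.VersionNumber orders version components
        // numerically). For an Entry whose version field is "2.0":
        //   entry.isNewerThan("1.651.3");  // true  -> this entry is an available update
        //   entry.isNewerThan("2.0");      // false -> the current version is already up to date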
public Api getApi() {
return new Api(this);
}
}
public final class Plugin extends Entry {
/**
* Optional URL to the Wiki page that discusses this plugin.
*/
@Exported
public final String wiki;
/**
* Human readable title of the plugin, taken from Wiki page.
* Can be null.
*
* <p>
* beware of XSS vulnerability since this data comes from Wiki
*/
@Exported
public final String title;
/**
* Optional excerpt string.
*/
@Exported
public final String excerpt;
/**
* Optional version # from which this plugin release is configuration-compatible.
*/
@Exported
public final String compatibleSinceVersion;
/**
* Version of Jenkins core this plugin was compiled against.
*/
@Exported
public final String requiredCore;
/**
* Categories for grouping plugins, taken from labels assigned to wiki page.
* Can be null.
*/
@Exported
public final String[] categories;
/**
* Dependencies of this plugin.
*/
@Exported
public final Map<String,String> dependencies = new HashMap<String,String>();
@DataBoundConstructor
public Plugin(String sourceId, JSONObject o) {
super(sourceId, o, UpdateSite.this.url);
this.wiki = get(o,"wiki");
this.title = get(o,"title");
this.excerpt = get(o,"excerpt");
this.compatibleSinceVersion = get(o,"compatibleSinceVersion");
this.requiredCore = get(o,"requiredCore");
this.categories = o.has("labels") ? (String[])o.getJSONArray("labels").toArray(new String[0]) : null;
for(Object jo : o.getJSONArray("dependencies")) {
JSONObject depObj = (JSONObject) jo;
// Make sure there's a name attribute, that that name isn't maven-plugin - we ignore that one -
// and that the optional value isn't true.
if (get(depObj,"name")!=null
&& !get(depObj,"name").equals("maven-plugin")
&& get(depObj,"optional").equals("false")) {
dependencies.put(get(depObj,"name"), get(depObj,"version"));
}
}
}
private String get(JSONObject o, String prop) {
if(o.has(prop))
return o.getString(prop);
else
return null;
}
public String getDisplayName() {
if(title!=null) return title;
return name;
}
/**
* If some version of this plugin is currently installed, return {@link PluginWrapper}.
* Otherwise null.
*/
@Exported
public PluginWrapper getInstalled() {
PluginManager pm = Jenkins.getInstance().getPluginManager();
return pm.getPlugin(name);
}
/**
* If the plugin is already installed, and the new version of the plugin has a "compatibleSinceVersion"
* value (i.e., it's only directly compatible with that version or later), this will check to
* see if the installed version is older than the compatible-since version. If it is older, it'll return false.
* If it's not older, or it's not installed, or it's installed but there's no compatibleSinceVersion
* specified, it'll return true.
*/
@Exported
public boolean isCompatibleWithInstalledVersion() {
PluginWrapper installedVersion = getInstalled();
if (installedVersion != null) {
if (compatibleSinceVersion != null) {
if (new VersionNumber(installedVersion.getVersion())
.isOlderThan(new VersionNumber(compatibleSinceVersion))) {
return false;
}
}
}
return true;
}
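        // Illustrative sketch with hypothetical version numbers: if the installed plugin is 1.4
        // and this update declares compatibleSinceVersion = "1.5", the method returns false,
        // flagging a potentially configuration-incompatible upgrade; with a compatibleSinceVersion
        // of "1.3", or none at all, it returns true.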
/**
* Returns a list of dependent plugins which need to be installed or upgraded for this plugin to work.
*/
@Exported
public List<Plugin> getNeededDependencies() {
List<Plugin> deps = new ArrayList<Plugin>();
for(Map.Entry<String,String> e : dependencies.entrySet()) {
Plugin depPlugin = Jenkins.getInstance().getUpdateCenter().getPlugin(e.getKey());
if (depPlugin == null) {
LOGGER.log(Level.WARNING, "Could not find dependency {0} of {1}", new Object[] {e.getKey(), name});
continue;
}
VersionNumber requiredVersion = new VersionNumber(e.getValue());
// Is the plugin installed already? If not, add it.
PluginWrapper current = depPlugin.getInstalled();
if (current ==null) {
deps.add(depPlugin);
}
// If the dependency plugin is installed, is the version we depend on newer than
// what's installed? If so, upgrade.
else if (current.isOlderThan(requiredVersion)) {
deps.add(depPlugin);
}
}
return deps;
}
public boolean isForNewerHudson() {
try {
return requiredCore!=null && new VersionNumber(requiredCore).isNewerThan(
new VersionNumber(Jenkins.VERSION.replaceFirst("SHOT *\\(private.*\\)", "SHOT")));
} catch (NumberFormatException nfe) {
return true; // If unable to parse version
}
}
/**
* @deprecated as of 1.326
* Use {@link #deploy()}.
*/
public void install() {
deploy();
}
public Future<UpdateCenterJob> deploy() {
return deploy(false);
}
/**
* Schedules the installation of this plugin.
*
* <p>
* This is mainly intended to be called from the UI. The actual installation work happens
* asynchronously in another thread.
*
* @param dynamicLoad
* If true, the plugin will be dynamically loaded into this Jenkins. If false,
* the plugin will only take effect after the reboot.
* See {@link UpdateCenter#isRestartRequiredForCompletion()}
*/
public Future<UpdateCenterJob> deploy(boolean dynamicLoad) {
Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);
UpdateCenter uc = Jenkins.getInstance().getUpdateCenter();
for (Plugin dep : getNeededDependencies()) {
UpdateCenter.InstallationJob job = uc.getJob(dep);
if (job == null || job.status instanceof UpdateCenter.DownloadJob.Failure) {
LOGGER.log(Level.WARNING, "Adding dependent install of " + dep.name + " for plugin " + name);
dep.deploy(dynamicLoad);
} else {
LOGGER.log(Level.WARNING, "Dependent install of " + dep.name + " for plugin " + name + " already added, skipping");
}
}
return uc.addJob(uc.new InstallationJob(this, UpdateSite.this, Jenkins.getAuthentication(), dynamicLoad));
}
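        // Illustrative usage sketch, not part of the original API: a caller holding the
        // ADMINISTER permission could schedule a dynamic install and wait for it. The "git"
        // plugin id and the "site" variable are hypothetical.
        //   UpdateSite.Plugin p = site.getPlugin("git");
        //   Future<UpdateCenterJob> f = p.deploy(true); // dynamic load, no restart required
        //   f.get();                                     // block until the installation job completes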
/**
* Schedules the downgrade of this plugin.
*/
public Future<UpdateCenterJob> deployBackup() {
Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);
UpdateCenter uc = Jenkins.getInstance().getUpdateCenter();
return uc.addJob(uc.new PluginDowngradeJob(this, UpdateSite.this, Jenkins.getAuthentication()));
}
/**
* Making the installation web bound.
*/
@RequirePOST
public HttpResponse doInstall() throws IOException {
deploy(false);
return HttpResponses.redirectTo("../..");
}
@RequirePOST
public HttpResponse doInstallNow() throws IOException {
deploy(true);
return HttpResponses.redirectTo("../..");
}
/**
* Performs the downgrade of the plugin.
*/
@RequirePOST
public HttpResponse doDowngrade() throws IOException {
deployBackup();
return HttpResponses.redirectTo("../..");
}
}
private static final long DAY = DAYS.toMillis(1);
private static final Logger LOGGER = Logger.getLogger(UpdateSite.class.getName());
    // The name uses UpdateCenter for compatibility reasons.
public static boolean neverUpdate = Boolean.getBoolean(UpdateCenter.class.getName()+".never");
/**
     * Whether to verify the signature of the update center data file. Enabled by default.
*/
public static boolean signatureCheck = true; // Boolean.getBoolean(UpdateCenter.class.getName()+".signatureCheck");
}
|
|
package io.agora.chatdemo.group;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import androidx.annotation.Nullable;
import com.google.android.material.snackbar.Snackbar;
import androidx.appcompat.app.AlertDialog;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import androidx.appcompat.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Toast;
import io.agora.chat.ChatClient;
import io.agora.chat.CursorResult;
import io.agora.chat.Group;
import io.agora.chatdemo.DemoHelper;
import io.agora.chatdemo.R;
import io.agora.chatdemo.ui.BaseActivity;
import io.agora.chatdemo.utils.ThreadPoolManager;
import io.agora.easeui.widget.EaseListItemClickListener;
import io.agora.easeui.widget.EaseSwipeLayout;
import io.agora.easeui.widget.RecyclerSwipeView;
import io.agora.exceptions.ChatException;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import static io.agora.chatdemo.group.GroupListActivity.toolbar;
/**
* Created by benson on 2016/10/25.
*/
public class GroupMembersListActivity extends BaseActivity {
@BindView(R.id.recycler_members) RecyclerSwipeView recyclerView;
private final int REQUEST_CODE_MEMBER_REFRESH = 1;
LinearLayoutManager manager;
List<String> membersList = new ArrayList<>();
boolean isOwner = false;
String groupId;
Group group;
ProgressDialog progressDialog;
private boolean isChange;
MucMembersHorizontalAdapter adapter;
Handler handler = new Handler();
GroupUtils.LoadMoreData<String> loadMoreData;
GroupUtils.MucRoleJudge mucRoleJudge = new GroupUtils.MucRoleJudgeImpl();
@Override protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.em_activity_members);
ButterKnife.bind(this);
groupId = getIntent().getExtras().getString("groupId");
group = ChatClient.getInstance().groupManager().getGroup(groupId);
isOwner = group.getOwner().equals(ChatClient.getInstance().getCurrentUser());
manager = new LinearLayoutManager(this);
manager.setOrientation(LinearLayoutManager.VERTICAL);
recyclerView.setLayoutManager(manager);
adapter = new MucMembersHorizontalAdapter(this, membersList, mucRoleJudge, recyclerView.getSwipeListener());
recyclerView.setAdapter(adapter);
recyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() {
@Override
public void onScrollStateChanged(RecyclerView view, int scrollState) {
if(scrollState == RecyclerView.SCROLL_STATE_IDLE){
if(adapter.getItemCount() != 0){
int lastVisibleItem = manager.findLastVisibleItemPosition();
int totalItemCount = manager.getItemCount();
if(loadMoreData.hasMoreData() && !loadMoreData.isLoading() && lastVisibleItem == totalItemCount-1){
loadMoreData.load();
}
}
}
}
});
final Toolbar toolbar = getActionBarToolbar();
toolbar.setTitle(getString(R.string.em_group_members) + "(" + group.getMemberCount() + ")");
toolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override public void onClick(View v) {
onBackPressed();
}
});
toolbar.setOnMenuItemClickListener(onMenuItemClick);
adapter.setItemClickListener(new EaseListItemClickListener() {
@Override public void onItemClick(View view, int position) {
}
@Override public void onItemLongClick(View view, final int position) {
String[] menus = { getString(R.string.em_group_member_alert_item_add_mute),
getString(R.string.em_group_member_alert_item_add_black),
getString(R.string.em_group_member_alert_item_remove) };
if (isOwner || group.getAdminList().contains(membersList.get(position))) {
new AlertDialog.Builder(GroupMembersListActivity.this).setTitle(getString(R.string.em_group_member))
.setItems(menus, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, final int i) {
DemoHelper.getInstance().execute(new Runnable() {
@Override
public void run() {
try {
final String username = membersList.get(i);
switch (i) {
case 0: // mute
List<String> members = new ArrayList<>();
members.add(username);
ChatClient.getInstance().groupManager().muteGroupMembers(groupId, members, 24 * 60 * 60 * 1000); // mute for 24h; Long.MAX_VALUE would be preferable, but it currently triggers a server error
runOnUiThread(new Runnable() {
@Override
public void run() {
mucRoleJudge.update(group);
adapter.notifyDataSetChanged();
}
});
break;
case 1: // block
ChatClient.getInstance().groupManager().blockUser(groupId, username);
runOnUiThread(new Runnable() {
@Override
public void run() {
membersList.remove(username);
mucRoleJudge.update(group);
adapter.notifyDataSetChanged();
}
});
break;
case 2: // remove
confirmRemoveMember(username);
break;
default:
break;
}
updateUIList();
} catch (ChatException e) {
e.printStackTrace();
}
}
});
}
}).show();
}
}
});
EaseSwipeLayout.SwipeAction muteAction = new EaseSwipeLayout.SwipeAction("mute", "#ADB9C1", new View.OnClickListener() {
@Override
public void onClick(final View view) {
Toast.makeText(GroupMembersListActivity.this, "mute", Toast.LENGTH_SHORT).show();
DemoHelper.getInstance().execute(new Runnable() {
@Override
public void run() {
Integer position = (Integer) view.getTag();
List<String> members = new ArrayList<>();
members.add(membersList.get(position.intValue()));
try {
ChatClient.getInstance().groupManager().muteGroupMembers(groupId, members, 24 * 60 * 60 * 1000); // mute for 24h; Long.MAX_VALUE would be preferable, but the current server does not support it
// TODO: update UI
updateUIList();
} catch (ChatException e) {
e.printStackTrace();
}
}
});
}
});
EaseSwipeLayout.SwipeAction blockAction = new EaseSwipeLayout.SwipeAction("block", "#405E7A", new View.OnClickListener() { // block
@Override
public void onClick(final View view) {
Toast.makeText(GroupMembersListActivity.this, "block", Toast.LENGTH_SHORT).show();
DemoHelper.getInstance().execute(new Runnable() {
@Override
public void run() {
final Integer position = (Integer) view.getTag();
try {
ChatClient.getInstance().groupManager().blockUser(groupId, membersList.get(position.intValue()));
// TODO: update UI
runOnUiThread(new Runnable() {
@Override
public void run() {
membersList.remove(position.intValue());
updateUIList();
}
});
} catch (ChatException e) {
e.printStackTrace();
}
}
});
}
});
EaseSwipeLayout.SwipeAction deleteAction = new EaseSwipeLayout.SwipeAction("delete", "#F52700", new View.OnClickListener() { // delete
@Override
public void onClick(final View view) {
Toast.makeText(GroupMembersListActivity.this, "delete", Toast.LENGTH_SHORT).show();
DemoHelper.getInstance().execute(new Runnable() {
@Override
public void run() {
final Integer position = (Integer) view.getTag();
try {
ChatClient.getInstance().groupManager().removeUserFromGroup(groupId, membersList.get(position.intValue()));
// TODO: update UI
runOnUiThread(new Runnable() {
@Override
public void run() {
membersList.remove(position.intValue());
updateUIList();
}
});
} catch (ChatException e) {
e.printStackTrace();
}
}
});
}
});
mucRoleJudge.update(group);
String currentUser = ChatClient.getInstance().getCurrentUser();
if (mucRoleJudge.isAdmin(currentUser) || mucRoleJudge.isOwner(currentUser)) {
adapter.setSwipeLayoutActions(muteAction, blockAction, deleteAction);
}
loadMoreData = new GroupUtils.LoadMoreData<>(this, membersList, adapter, new GroupUtils.LoadMoreData.onLoadListener() {
@Override
public void onInitialAction() {
try {
group = ChatClient.getInstance().groupManager().getGroupFromServer(groupId);
mucRoleJudge.update(group);
List<String> list = new ArrayList<>();
list.add(group.getOwner());
list.addAll(group.getAdminList());
CursorResult<String> result = new CursorResult<>();
result.setData(list);
loadMoreData.setFetchResult(result);
runOnUiThread(new Runnable() {
@Override
public void run() {
final Toolbar toolbar = getActionBarToolbar();
toolbar.setTitle(getString(R.string.em_group_members) + "(" + group.getMemberCount() + ")");
}
});
} catch (ChatException e) { e.printStackTrace(); }
}
@Override
public void onLoadAction() {
try {
loadMoreData.setFetchResult(ChatClient.getInstance().groupManager().fetchGroupMembers(groupId, loadMoreData.getCursor(), GroupUtils.LoadMoreData.PAGE_SIZE));
} catch (ChatException e) { e.printStackTrace(); }
}
@Override
public void onNoMoreDataAction() {
handler.postDelayed(new Runnable() {
@Override
public void run() {
Snackbar.make(recyclerView, "No more data", Snackbar.LENGTH_LONG).show();
}
}, 1000);
}
});
loadMoreData.load();
}
private void updateUIList() {
runOnUiThread(new Runnable() {
@Override
public void run() {
mucRoleJudge.update(group);
adapter.notifyDataSetChanged();
}
});
}
private void confirmRemoveMember(final String member) {
new AlertDialog.Builder(GroupMembersListActivity.this).setTitle(getString(R.string.em_group_member))
.setMessage(getString(R.string.em_group_delete_member))
.setPositiveButton(getString(R.string.em_ok), new DialogInterface.OnClickListener() {
@Override public void onClick(final DialogInterface dialog, int which) {
progressDialog = ProgressDialog.show(GroupMembersListActivity.this, getResources().getString(R.string.em_group_delete_member), getString(R.string.em_waiting), false);
if (!ChatClient.getInstance().getCurrentUser().equals(member)) {
ThreadPoolManager.getInstance().executeTask(new ThreadPoolManager.Task() {
@Override
public Object onRequest() throws ChatException {
ChatClient.getInstance().groupManager().removeUserFromGroup(groupId, member);
return null;
}
@Override
public void onSuccess(Object o) {
progressDialog.dismiss();
membersList.remove(member);
isChange = true;
adapter.notifyDataSetChanged();
toolbar.setTitle(getResources().getString(R.string.em_group_members) + "(" + membersList.size() + ")");
}
@Override
public void onError(ChatException e) {
e.printStackTrace();
progressDialog.dismiss();
Snackbar.make(toolbar, "delete failure" + e.getMessage(), Snackbar.LENGTH_SHORT).show();
}
});
} else {
Snackbar.make(toolbar, "you can not delete yourself", Snackbar.LENGTH_SHORT).show();
progressDialog.dismiss();
}
dialog.dismiss();
}
})
.setNegativeButton(getString(R.string.em_cancel), new DialogInterface.OnClickListener() {
@Override public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
})
.show();
}
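// Minimal sketch of the ThreadPoolManager.Task pattern used in confirmRemoveMember above
// (illustrative only): onRequest() runs off the UI thread and may throw ChatException,
// while onSuccess()/onError() receive the outcome. "someGroupId" and "someMember" are
// placeholder values.
//
//     ThreadPoolManager.getInstance().executeTask(new ThreadPoolManager.Task() {
//         @Override public Object onRequest() throws ChatException {
//             ChatClient.getInstance().groupManager().removeUserFromGroup("someGroupId", "someMember");
//             return null;
//         }
//         @Override public void onSuccess(Object o) { /* update the member list UI */ }
//         @Override public void onError(ChatException e) { /* surface the failure to the user */ }
//     });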
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.em_group_members_menu, menu);
menu.findItem(R.id.menu_item_group_members_add).setVisible(GroupUtils.isCanAddMember(group));
return true;
}
private Toolbar.OnMenuItemClickListener onMenuItemClick = new Toolbar.OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(MenuItem menuItem) {
switch (menuItem.getItemId()) {
case R.id.menu_item_group_members_add:
startActivityForResult(new Intent(GroupMembersListActivity.this, InviteMembersActivity.class).putExtra("groupId", groupId)
.putExtra("isOwner", isOwner)
.putStringArrayListExtra("members", (ArrayList<String>) membersList), REQUEST_CODE_MEMBER_REFRESH);
break;
default:
break;
}
return true;
}
};
@Override protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK) {
if (REQUEST_CODE_MEMBER_REFRESH == requestCode) {
if (data != null) {
List<String> list = data.getStringArrayListExtra("selectedMembers");
membersList.clear();
if (!group.getMembers().contains(list.get(0))) {
membersList.addAll(group.getMembers());
}
membersList.addAll(list);
adapter.notifyDataSetChanged();
}
}
}
}
@Override public void onBackPressed() {
if (isChange){
Intent intent = new Intent();
intent.putExtra("selectedMembers", (ArrayList<String>) membersList);
setResult(RESULT_OK,intent);
}
finish();
}
}
|
|
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.content.browser;
import android.content.Context;
import android.os.RemoteException;
import android.util.Log;
import android.view.Surface;
import java.util.ArrayList;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;
import org.chromium.base.ThreadUtils;
import org.chromium.content.app.LibraryLoader;
import org.chromium.content.common.CommandLine;
import org.chromium.content.common.ISandboxedProcessCallback;
import org.chromium.content.common.ISandboxedProcessService;
/**
* This class provides the method to start/stop SandboxedProcess called by
* native.
*/
@JNINamespace("content")
public class SandboxedProcessLauncher {
private static String TAG = "SandboxedProcessLauncher";
// The upper limit on the number of simultaneous service process instances supported.
// This must not exceed total number of SandboxedProcessServiceX classes declared in
// this package, and defined as services in the embedding application's manifest file.
// (See {@link SandboxedProcessService} for more details on defining the services.)
/* package */ static final int MAX_REGISTERED_SERVICES = 6;
private static final SandboxedProcessConnection[] mConnections =
new SandboxedProcessConnection[MAX_REGISTERED_SERVICES];
// The list of free slots in mConnections. When looking for a free connection,
// the first index in that list should be used. When a connection is freed, its index
// is added to the end of the list. This is so that we avoid immediately reusing a freed
// connection (see bug crbug.com/164069): the framework might keep a service process alive
// when it's been unbound for a short time. If a connection to that same service is bound
// at that point, the process is reused and bad things happen (mostly static variables are
// set when we don't expect them to).
// SHOULD BE ACCESSED WITH THE mConnections LOCK.
private static final ArrayList<Integer> mFreeConnectionIndices =
new ArrayList<Integer>(MAX_REGISTERED_SERVICES);
static {
for (int i = 0; i < MAX_REGISTERED_SERVICES; i++) {
mFreeConnectionIndices.add(i);
}
}
private static SandboxedProcessConnection allocateConnection(Context context) {
SandboxedProcessConnection.DeathCallback deathCallback =
new SandboxedProcessConnection.DeathCallback() {
@Override
public void onSandboxedProcessDied(int pid) {
stop(pid);
}
};
synchronized (mConnections) {
if (mFreeConnectionIndices.isEmpty()) {
Log.w(TAG, "Ran out of sandboxed services.");
return null;
}
int slot = mFreeConnectionIndices.remove(0);
assert mConnections[slot] == null;
mConnections[slot] = new SandboxedProcessConnection(context, slot, deathCallback);
return mConnections[slot];
}
}
private static SandboxedProcessConnection allocateBoundConnection(Context context,
String[] commandLine) {
SandboxedProcessConnection connection = allocateConnection(context);
if (connection != null) {
String libraryName = LibraryLoader.getLibraryToLoad();
assert libraryName != null : "Attempting to launch a sandbox process without first "
+ "calling LibraryLoader.setLibraryToLoad";
connection.bind(libraryName, commandLine);
}
return connection;
}
private static void freeConnection(SandboxedProcessConnection connection) {
if (connection == null) {
return;
}
int slot = connection.getServiceNumber();
synchronized (mConnections) {
if (mConnections[slot] != connection) {
int occupier = mConnections[slot] == null ?
-1 : mConnections[slot].getServiceNumber();
Log.e(TAG, "Unable to find connection to free in slot: " + slot +
" already occupied by service: " + occupier);
assert false;
} else {
mConnections[slot] = null;
assert !mFreeConnectionIndices.contains(slot);
mFreeConnectionIndices.add(slot);
}
}
}
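// Illustrative note (not part of this class): the slot-recycling policy described above boils
// down to taking from the front on allocation and appending to the back on release, so a slot
// that was just freed is reused as late as possible.
//
//     int slot = mFreeConnectionIndices.remove(0); // allocate: oldest free slot first
//     mFreeConnectionIndices.add(slot);            // free: this slot is recycled last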
public static int getNumberOfConnections() {
synchronized (mConnections) {
return mFreeConnectionIndices.size();
}
}
// Represents an invalid process handle; same as base/process.h kNullProcessHandle.
private static final int NULL_PROCESS_HANDLE = 0;
// Map from pid to SandboxedService connection.
private static Map<Integer, SandboxedProcessConnection> mServiceMap =
new ConcurrentHashMap<Integer, SandboxedProcessConnection>();
// A pre-allocated and pre-bound connection ready for connection setup, or null.
static SandboxedProcessConnection mSpareConnection = null;
/**
* Returns the sandboxed process service interface for the given pid. This may be called on
* any thread, but the caller must assume that the service can disconnect at any time. All
* service calls should catch and handle android.os.RemoteException.
*
* @param pid The pid (process handle) of the service obtained from {@link #start}.
* @return The ISandboxedProcessService or null if the service no longer exists.
*/
public static ISandboxedProcessService getSandboxedService(int pid) {
SandboxedProcessConnection connection = mServiceMap.get(pid);
if (connection != null) {
return connection.getService();
}
return null;
}
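// Illustrative caller sketch (assumption, not part of this class): per the javadoc above, every
// call on the returned binder should handle RemoteException, because the service can disconnect
// at any time. "somePid" and the setSurface() arguments are placeholders.
//
//     ISandboxedProcessService service = SandboxedProcessLauncher.getSandboxedService(somePid);
//     if (service != null) {
//         try {
//             service.setSurface(type, surface, primaryID, secondaryID);
//         } catch (RemoteException e) {
//             Log.e(TAG, "Sandboxed service call failed: " + e);
//         }
//     }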
/**
* Should be called early in startup so the work needed to spawn the sandboxed process can
* be done in parallel to other startup work. Must not be called on the UI thread.
* @param context the application context used for the connection.
*/
public static synchronized void warmUp(Context context) {
assert !ThreadUtils.runningOnUiThread();
if (mSpareConnection == null) {
mSpareConnection = allocateBoundConnection(context, null);
}
}
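// Illustrative startup sketch (assumption, not part of this class): warmUp() is meant to run
// early and off the UI thread so the spare connection is bound before the first start() call.
//
//     new Thread(new Runnable() {
//         @Override public void run() {
//             SandboxedProcessLauncher.warmUp(appContext); // appContext is a placeholder Context
//         }
//     }).start();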
/**
* Spawns and connects to a sandboxed process. May be called on any thread. It will not
* block, but will instead callback to {@link #nativeOnSandboxedProcessStarted} when the
* connection is established. Note this callback will not necessarily be from the same thread
* (currently it always comes from the main thread).
*
* @param context Context used to obtain the application context.
* @param commandLine The sandboxed process command line argv.
* @param fileIds The IDs that should be used when mapping files in the created process.
* @param fileFds The file descriptors that should be mapped in the created process.
* @param fileAutoClose Whether each file descriptor should be closed once it has been passed
* to the created process.
* @param clientContext Arbitrary parameter used by the client to distinguish this connection.
*/
@CalledByNative
static void start(
Context context,
final String[] commandLine,
int[] fileIds,
int[] fileFds,
boolean[] fileAutoClose,
final int clientContext) {
assert fileIds.length == fileFds.length && fileFds.length == fileAutoClose.length;
FileDescriptorInfo[] filesToBeMapped = new FileDescriptorInfo[fileFds.length];
for (int i = 0; i < fileFds.length; i++) {
filesToBeMapped[i] =
new FileDescriptorInfo(fileIds[i], fileFds[i], fileAutoClose[i]);
}
assert clientContext != 0;
SandboxedProcessConnection allocatedConnection;
synchronized (SandboxedProcessLauncher.class) {
allocatedConnection = mSpareConnection;
mSpareConnection = null;
}
if (allocatedConnection == null) {
allocatedConnection = allocateBoundConnection(context, commandLine);
if (allocatedConnection == null) {
// Notify the native code so it can free the heap allocated callback.
nativeOnSandboxedProcessStarted(clientContext, 0);
return;
}
}
final SandboxedProcessConnection connection = allocatedConnection;
Log.d(TAG, "Setting up connection to process: slot=" + connection.getServiceNumber());
// Note: This runnable will be executed when the sandboxed connection is setup.
final Runnable onConnect = new Runnable() {
@Override
public void run() {
final int pid = connection.getPid();
Log.d(TAG, "on connect callback, pid=" + pid + " context=" + clientContext);
if (pid != NULL_PROCESS_HANDLE) {
mServiceMap.put(pid, connection);
} else {
freeConnection(connection);
}
nativeOnSandboxedProcessStarted(clientContext, pid);
}
};
connection.setupConnection(commandLine, filesToBeMapped, createCallback(), onConnect);
}
/**
* Terminates a sandboxed process. This may be called from any thread.
*
* @param pid The pid (process handle) of the service connection obtained from {@link #start}.
*/
@CalledByNative
static void stop(int pid) {
Log.d(TAG, "stopping sandboxed connection: pid=" + pid);
SandboxedProcessConnection connection = mServiceMap.remove(pid);
if (connection == null) {
Log.w(TAG, "Tried to stop non-existent connection to pid: " + pid);
return;
}
connection.unbind();
freeConnection(connection);
}
/**
* Bind a sandboxed process as a high priority process so that it has the same
* priority as the main process. This can be used for the foreground renderer
* process to distinguish it from the background renderer process.
*
* @param pid The process handle of the service connection obtained from {@link #start}.
*/
static void bindAsHighPriority(int pid) {
SandboxedProcessConnection connection = mServiceMap.get(pid);
if (connection == null) {
Log.w(TAG, "Tried to bind a non-existent connection to pid: " + pid);
return;
}
connection.bindHighPriority();
}
/**
* Unbind a high priority process which is bound by {@link #bindAsHighPriority}.
*
* @param pid The process handle of the service obtained from {@link #start}.
*/
static void unbindAsHighPriority(int pid) {
SandboxedProcessConnection connection = mServiceMap.get(pid);
if (connection == null) {
Log.w(TAG, "Tried to unbind non-existent connection to pid: " + pid);
return;
}
connection.unbindHighPriority(false);
}
static void establishSurfacePeer(
int pid, int type, Surface surface, int primaryID, int secondaryID) {
Log.d(TAG, "establishSurfaceTexturePeer: pid = " + pid + ", " +
"type = " + type + ", " +
"primaryID = " + primaryID + ", " +
"secondaryID = " + secondaryID);
ISandboxedProcessService service = SandboxedProcessLauncher.getSandboxedService(pid);
if (service == null) {
Log.e(TAG, "Unable to get SandboxedProcessService from pid.");
return;
}
try {
service.setSurface(type, surface, primaryID, secondaryID);
} catch (RemoteException e) {
Log.e(TAG, "Unable to call setSurface: " + e);
}
}
/**
* This implementation is used to receive callbacks from the remote service.
*/
private static ISandboxedProcessCallback createCallback() {
return new ISandboxedProcessCallback.Stub() {
/**
* This is called by the remote service regularly to tell us about
* new values. Note that IPC calls are dispatched through a thread
* pool running in each process, so the code executing here will
* NOT be running in our main thread -- so, to update the UI, we need
* to use a Handler.
*/
public void establishSurfacePeer(
int pid, int type, Surface surface, int primaryID, int secondaryID) {
SandboxedProcessLauncher.establishSurfacePeer(pid, type, surface,
primaryID, secondaryID);
// The SandboxProcessService now holds a reference to the
// Surface's resources, so we release our reference to it now to
// avoid waiting for the finalizer to get around to it.
if (surface != null) {
surface.release();
}
}
};
}
private static native void nativeOnSandboxedProcessStarted(int clientContext, int pid);
}
|
|
/**
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.nbp;
import static org.junit.Assert.*;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import java.util.function.*;
import org.junit.Test;
import org.mockito.*;
import io.reactivex.*;
import io.reactivex.NbpObservable.*;
import io.reactivex.Observable;
import io.reactivex.disposables.Disposable;
import io.reactivex.internal.disposables.EmptyDisposable;
import io.reactivex.internal.subscriptions.BooleanSubscription;
import io.reactivex.observables.nbp.NbpGroupedObservable;
import io.reactivex.schedulers.Schedulers;
import io.reactivex.subjects.nbp.NbpPublishSubject;
import io.reactivex.subscribers.nbp.NbpTestSubscriber;
public class NbpOperatorRetryTest {
@Test
public void iterativeBackoff() {
NbpSubscriber<String> consumer = TestHelper.mockNbpSubscriber();
NbpObservable<String> producer = NbpObservable.create(new NbpOnSubscribe<String>() {
private AtomicInteger count = new AtomicInteger(4);
long last = System.currentTimeMillis();
@Override
public void accept(NbpSubscriber<? super String> t1) {
t1.onSubscribe(EmptyDisposable.INSTANCE);
System.out.println(count.get() + " @ " + String.valueOf(last - System.currentTimeMillis()));
last = System.currentTimeMillis();
if (count.getAndDecrement() == 0) {
t1.onNext("hello");
t1.onComplete();
}
else
t1.onError(new RuntimeException());
}
});
NbpTestSubscriber<String> ts = new NbpTestSubscriber<>(consumer);
producer.retryWhen(new Function<NbpObservable<? extends Throwable>, NbpObservable<?>>() {
@Override
public NbpObservable<?> apply(NbpObservable<? extends Throwable> attempts) {
// Worker w = Schedulers.computation().createWorker();
return attempts
.map(new Function<Throwable, Tuple>() {
@Override
public Tuple apply(Throwable n) {
return new Tuple(new Long(1), n);
}})
.scan(new BiFunction<Tuple, Tuple, Tuple>(){
@Override
public Tuple apply(Tuple t, Tuple n) {
return new Tuple(t.count + n.count, n.n);
}})
.flatMap(new Function<Tuple, NbpObservable<Long>>() {
@Override
public NbpObservable<Long> apply(Tuple t) {
System.out.println("Retry # "+t.count);
return t.count > 20 ?
NbpObservable.<Long>error(t.n) :
NbpObservable.timer(t.count *1L, TimeUnit.MILLISECONDS);
}});
}
}).subscribe(ts);
ts.awaitTerminalEvent();
ts.assertNoErrors();
InOrder inOrder = inOrder(consumer);
inOrder.verify(consumer, never()).onError(any(Throwable.class));
inOrder.verify(consumer, times(1)).onNext("hello");
inOrder.verify(consumer, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
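// Condensed sketch of the retryWhen backoff pattern exercised by iterativeBackoff above
// (illustrative only): each error is paired with a running retry count, the count drives a
// growing timer delay, and the original error is re-emitted once the cap (20 here, matching
// the test) is exceeded.
//
//     producer.retryWhen(attempts -> attempts
//             .map(e -> new Tuple(1L, e))
//             .scan((t, n) -> new Tuple(t.count + n.count, n.n))
//             .flatMap(t -> t.count > 20
//                     ? NbpObservable.<Long>error(t.n)
//                     : NbpObservable.timer(t.count, TimeUnit.MILLISECONDS)));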
public static class Tuple {
Long count;
Throwable n;
Tuple(Long c, Throwable n) {
count = c;
this.n = n;
}
}
@Test
public void testRetryIndefinitely() {
NbpSubscriber<String> NbpObserver = TestHelper.mockNbpSubscriber();
int NUM_RETRIES = 20;
NbpObservable<String> origin = NbpObservable.create(new FuncWithErrors(NUM_RETRIES));
origin.retry().unsafeSubscribe(new NbpTestSubscriber<>(NbpObserver));
InOrder inOrder = inOrder(NbpObserver);
// should show NUM_RETRIES + 1 attempts
inOrder.verify(NbpObserver, times(NUM_RETRIES + 1)).onNext("beginningEveryTime");
// should have no errors
inOrder.verify(NbpObserver, never()).onError(any(Throwable.class));
// should have a single success
inOrder.verify(NbpObserver, times(1)).onNext("onSuccessOnly");
// should have a single successful onCompleted
inOrder.verify(NbpObserver, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
@Test
public void testSchedulingNotificationHandler() {
NbpSubscriber<String> NbpObserver = TestHelper.mockNbpSubscriber();
int NUM_RETRIES = 2;
NbpObservable<String> origin = NbpObservable.create(new FuncWithErrors(NUM_RETRIES));
NbpTestSubscriber<String> NbpSubscriber = new NbpTestSubscriber<>(NbpObserver);
origin.retryWhen(new Function<NbpObservable<? extends Throwable>, NbpObservable<Integer>>() {
@Override
public NbpObservable<Integer> apply(NbpObservable<? extends Throwable> t1) {
return t1.observeOn(Schedulers.computation()).map(new Function<Throwable, Integer>() {
@Override
public Integer apply(Throwable t1) {
return 1;
}
}).startWith(1);
}
})
.doOnError(Throwable::printStackTrace)
.subscribe(NbpSubscriber);
NbpSubscriber.awaitTerminalEvent();
InOrder inOrder = inOrder(NbpObserver);
// should show 3 attempts
inOrder.verify(NbpObserver, times(1 + NUM_RETRIES)).onNext("beginningEveryTime");
// should have no errors
inOrder.verify(NbpObserver, never()).onError(any(Throwable.class));
// should have a single success
inOrder.verify(NbpObserver, times(1)).onNext("onSuccessOnly");
// should have a single successful onCompleted
inOrder.verify(NbpObserver, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
@Test
public void testOnNextFromNotificationHandler() {
NbpSubscriber<String> NbpObserver = TestHelper.mockNbpSubscriber();
int NUM_RETRIES = 2;
NbpObservable<String> origin = NbpObservable.create(new FuncWithErrors(NUM_RETRIES));
origin.retryWhen(new Function<NbpObservable<? extends Throwable>, NbpObservable<?>>() {
@Override
public NbpObservable<?> apply(NbpObservable<? extends Throwable> t1) {
return t1.map(new Function<Throwable, Integer>() {
@Override
public Integer apply(Throwable t1) {
return 0;
}
}).startWith(0);
}
}).subscribe(NbpObserver);
InOrder inOrder = inOrder(NbpObserver);
// should show 3 attempts
inOrder.verify(NbpObserver, times(NUM_RETRIES + 1)).onNext("beginningEveryTime");
// should have no errors
inOrder.verify(NbpObserver, never()).onError(any(Throwable.class));
// should have a single success
inOrder.verify(NbpObserver, times(1)).onNext("onSuccessOnly");
// should have a single successful onCompleted
inOrder.verify(NbpObserver, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
@Test
public void testOnCompletedFromNotificationHandler() {
NbpSubscriber<String> NbpObserver = TestHelper.mockNbpSubscriber();
NbpObservable<String> origin = NbpObservable.create(new FuncWithErrors(1));
NbpTestSubscriber<String> NbpSubscriber = new NbpTestSubscriber<>(NbpObserver);
origin.retryWhen(new Function<NbpObservable<? extends Throwable>, NbpObservable<?>>() {
@Override
public NbpObservable<?> apply(NbpObservable<? extends Throwable> t1) {
return NbpObservable.empty();
}
}).subscribe(NbpSubscriber);
InOrder inOrder = inOrder(NbpObserver);
inOrder.verify(NbpObserver).onSubscribe((Disposable)notNull());
inOrder.verify(NbpObserver, never()).onNext("beginningEveryTime");
inOrder.verify(NbpObserver, never()).onNext("onSuccessOnly");
inOrder.verify(NbpObserver, times(1)).onComplete();
inOrder.verify(NbpObserver, never()).onError(any(Exception.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void testOnErrorFromNotificationHandler() {
NbpSubscriber<String> NbpObserver = TestHelper.mockNbpSubscriber();
NbpObservable<String> origin = NbpObservable.create(new FuncWithErrors(2));
origin.retryWhen(new Function<NbpObservable<? extends Throwable>, NbpObservable<?>>() {
@Override
public NbpObservable<?> apply(NbpObservable<? extends Throwable> t1) {
return NbpObservable.error(new RuntimeException());
}
}).subscribe(NbpObserver);
InOrder inOrder = inOrder(NbpObserver);
inOrder.verify(NbpObserver).onSubscribe((Disposable)notNull());
inOrder.verify(NbpObserver, never()).onNext("beginningEveryTime");
inOrder.verify(NbpObserver, never()).onNext("onSuccessOnly");
inOrder.verify(NbpObserver, never()).onComplete();
inOrder.verify(NbpObserver, times(1)).onError(any(IllegalStateException.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void testSingleSubscriptionOnFirst() throws Exception {
final AtomicInteger inc = new AtomicInteger(0);
NbpOnSubscribe<Integer> onSubscribe = new NbpOnSubscribe<Integer>() {
@Override
public void accept(NbpSubscriber<? super Integer> NbpSubscriber) {
NbpSubscriber.onSubscribe(EmptyDisposable.INSTANCE);
final int emit = inc.incrementAndGet();
NbpSubscriber.onNext(emit);
NbpSubscriber.onComplete();
}
};
int first = NbpObservable.create(onSubscribe)
.retryWhen(new Function<NbpObservable<? extends Throwable>, NbpObservable<?>>() {
@Override
public NbpObservable<?> apply(NbpObservable<? extends Throwable> attempt) {
return attempt.zipWith(NbpObservable.just(1), new BiFunction<Throwable, Integer, Void>() {
@Override
public Void apply(Throwable o, Integer integer) {
return null;
}
});
}
})
.toBlocking()
.first();
assertEquals("NbpObserver did not receive the expected output", 1, first);
assertEquals("Subscribe was not called once", 1, inc.get());
}
@Test
public void testOriginFails() {
NbpSubscriber<String> NbpObserver = TestHelper.mockNbpSubscriber();
NbpObservable<String> origin = NbpObservable.create(new FuncWithErrors(1));
origin.subscribe(NbpObserver);
InOrder inOrder = inOrder(NbpObserver);
inOrder.verify(NbpObserver, times(1)).onNext("beginningEveryTime");
inOrder.verify(NbpObserver, times(1)).onError(any(RuntimeException.class));
inOrder.verify(NbpObserver, never()).onNext("onSuccessOnly");
inOrder.verify(NbpObserver, never()).onComplete();
}
@Test
public void testRetryFail() {
int NUM_RETRIES = 1;
int NUM_FAILURES = 2;
NbpSubscriber<String> NbpObserver = TestHelper.mockNbpSubscriber();
NbpObservable<String> origin = NbpObservable.create(new FuncWithErrors(NUM_FAILURES));
origin.retry(NUM_RETRIES).subscribe(NbpObserver);
InOrder inOrder = inOrder(NbpObserver);
// should show 2 attempts (first time fail, second time (1st retry) fail)
inOrder.verify(NbpObserver, times(1 + NUM_RETRIES)).onNext("beginningEveryTime");
// should only retry once, fail again and emit onError
inOrder.verify(NbpObserver, times(1)).onError(any(RuntimeException.class));
// no success
inOrder.verify(NbpObserver, never()).onNext("onSuccessOnly");
inOrder.verify(NbpObserver, never()).onComplete();
inOrder.verifyNoMoreInteractions();
}
@Test
public void testRetrySuccess() {
int NUM_FAILURES = 1;
NbpSubscriber<String> NbpObserver = TestHelper.mockNbpSubscriber();
NbpObservable<String> origin = NbpObservable.create(new FuncWithErrors(NUM_FAILURES));
origin.retry(3).subscribe(NbpObserver);
InOrder inOrder = inOrder(NbpObserver);
// should show 2 attempts (the initial subscription plus one retry)
inOrder.verify(NbpObserver, times(1 + NUM_FAILURES)).onNext("beginningEveryTime");
// should have no errors
inOrder.verify(NbpObserver, never()).onError(any(Throwable.class));
// should have a single success
inOrder.verify(NbpObserver, times(1)).onNext("onSuccessOnly");
// should have a single successful onCompleted
inOrder.verify(NbpObserver, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
@Test
public void testInfiniteRetry() {
int NUM_FAILURES = 20;
NbpSubscriber<String> NbpObserver = TestHelper.mockNbpSubscriber();
NbpObservable<String> origin = NbpObservable.create(new FuncWithErrors(NUM_FAILURES));
origin.retry().subscribe(NbpObserver);
InOrder inOrder = inOrder(NbpObserver);
// should show NUM_FAILURES + 1 attempts
inOrder.verify(NbpObserver, times(1 + NUM_FAILURES)).onNext("beginningEveryTime");
// should have no errors
inOrder.verify(NbpObserver, never()).onError(any(Throwable.class));
// should have a single success
inOrder.verify(NbpObserver, times(1)).onNext("onSuccessOnly");
// should have a single successful onCompleted
inOrder.verify(NbpObserver, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
/**
* Checks in a simple and synchronous way that retry resubscribes
* after error. This test fails against 0.16.1-0.17.4, hangs on 0.17.5 and
* passes in 0.17.6 thanks to fix for issue #1027.
*/
@SuppressWarnings("unchecked")
@Test
public void testRetrySubscribesAgainAfterError() {
// record emitted values with this action
Consumer<Integer> record = mock(Consumer.class);
InOrder inOrder = inOrder(record);
// always throw an exception with this action
Consumer<Integer> throwException = mock(Consumer.class);
doThrow(new RuntimeException()).when(throwException).accept(Mockito.anyInt());
// create a retrying NbpObservable based on a NbpPublishSubject
NbpPublishSubject<Integer> subject = NbpPublishSubject.create();
subject
// record item
.doOnNext(record)
// throw a RuntimeException
.doOnNext(throwException)
// retry on error
.retry()
// subscribe and ignore
.subscribe();
inOrder.verifyNoMoreInteractions();
subject.onNext(1);
inOrder.verify(record).accept(1);
subject.onNext(2);
inOrder.verify(record).accept(2);
subject.onNext(3);
inOrder.verify(record).accept(3);
inOrder.verifyNoMoreInteractions();
}
public static class FuncWithErrors implements NbpOnSubscribe<String> {
private final int numFailures;
private final AtomicInteger count = new AtomicInteger(0);
FuncWithErrors(int count) {
this.numFailures = count;
}
@Override
public void accept(final NbpSubscriber<? super String> o) {
o.onSubscribe(EmptyDisposable.INSTANCE);
o.onNext("beginningEveryTime");
int i = count.getAndIncrement();
if (i < numFailures) {
o.onError(new RuntimeException("forced failure: " + (i + 1)));
} else {
o.onNext("onSuccessOnly");
o.onComplete();
}
}
}
@Test
public void testUnsubscribeFromRetry() {
NbpPublishSubject<Integer> subject = NbpPublishSubject.create();
final AtomicInteger count = new AtomicInteger(0);
Disposable sub = subject.retry().subscribe(new Consumer<Integer>() {
@Override
public void accept(Integer n) {
count.incrementAndGet();
}
});
subject.onNext(1);
sub.dispose();
subject.onNext(2);
assertEquals(1, count.get());
}
@Test
public void testRetryAllowsSubscriptionAfterAllSubscriptionsUnsubscribed() throws InterruptedException {
final AtomicInteger subsCount = new AtomicInteger(0);
NbpOnSubscribe<String> onSubscribe = new NbpOnSubscribe<String>() {
@Override
public void accept(NbpSubscriber<? super String> s) {
subsCount.incrementAndGet();
s.onSubscribe(() -> {
subsCount.decrementAndGet();
});
}
};
NbpObservable<String> stream = NbpObservable.create(onSubscribe);
NbpObservable<String> streamWithRetry = stream.retry();
Disposable sub = streamWithRetry.subscribe();
assertEquals(1, subsCount.get());
sub.dispose();
assertEquals(0, subsCount.get());
streamWithRetry.subscribe();
assertEquals(1, subsCount.get());
}
@Test
public void testSourceObservableCallsUnsubscribe() throws InterruptedException {
final AtomicInteger subsCount = new AtomicInteger(0);
final NbpTestSubscriber<String> ts = new NbpTestSubscriber<>();
NbpOnSubscribe<String> onSubscribe = new NbpOnSubscribe<String>() {
@Override
public void accept(NbpSubscriber<? super String> s) {
BooleanSubscription bs = new BooleanSubscription();
// if isUnsubscribed is true that means we have a bug such as
// https://github.com/ReactiveX/RxJava/issues/1024
if (!bs.isCancelled()) {
subsCount.incrementAndGet();
s.onError(new RuntimeException("failed"));
// it unsubscribes the child directly
// this simulates various error/completion scenarios that could occur
// or just a source that proactively triggers cleanup
// FIXME can't unsubscribe child
// s.unsubscribe();
bs.cancel();
} else {
s.onError(new RuntimeException());
}
}
};
NbpObservable.create(onSubscribe).retry(3).subscribe(ts);
assertEquals(4, subsCount.get()); // 1 + 3 retries
}
@Test
public void testSourceObservableRetry1() throws InterruptedException {
final AtomicInteger subsCount = new AtomicInteger(0);
final NbpTestSubscriber<String> ts = new NbpTestSubscriber<>();
NbpOnSubscribe<String> onSubscribe = new NbpOnSubscribe<String>() {
@Override
public void accept(NbpSubscriber<? super String> s) {
s.onSubscribe(EmptyDisposable.INSTANCE);
subsCount.incrementAndGet();
s.onError(new RuntimeException("failed"));
}
};
NbpObservable.create(onSubscribe).retry(1).subscribe(ts);
assertEquals(2, subsCount.get());
}
@Test
public void testSourceObservableRetry0() throws InterruptedException {
final AtomicInteger subsCount = new AtomicInteger(0);
final NbpTestSubscriber<String> ts = new NbpTestSubscriber<>();
NbpOnSubscribe<String> onSubscribe = new NbpOnSubscribe<String>() {
@Override
public void accept(NbpSubscriber<? super String> s) {
s.onSubscribe(EmptyDisposable.INSTANCE);
subsCount.incrementAndGet();
s.onError(new RuntimeException("failed"));
}
};
NbpObservable.create(onSubscribe).retry(0).subscribe(ts);
assertEquals(1, subsCount.get());
}
static final class SlowObservable implements NbpOnSubscribe<Long> {
final AtomicInteger efforts = new AtomicInteger(0);
final AtomicInteger active = new AtomicInteger(0), maxActive = new AtomicInteger(0);
final AtomicInteger nextBeforeFailure;
private final int emitDelay;
public SlowObservable(int emitDelay, int countNext) {
this.emitDelay = emitDelay;
this.nextBeforeFailure = new AtomicInteger(countNext);
}
@Override
public void accept(final NbpSubscriber<? super Long> NbpSubscriber) {
final AtomicBoolean terminate = new AtomicBoolean(false);
NbpSubscriber.onSubscribe(() -> {
terminate.set(true);
active.decrementAndGet();
});
efforts.getAndIncrement();
active.getAndIncrement();
maxActive.set(Math.max(active.get(), maxActive.get()));
final Thread thread = new Thread() {
@Override
public void run() {
long nr = 0;
try {
while (!terminate.get()) {
Thread.sleep(emitDelay);
if (nextBeforeFailure.getAndDecrement() > 0) {
NbpSubscriber.onNext(nr++);
} else {
NbpSubscriber.onError(new RuntimeException("expected-failed"));
}
}
} catch (InterruptedException t) {
}
}
};
thread.start();
}
}
/** NbpObserver for a listener on a separate thread */
static final class AsyncObserver<T> extends NbpObserver<T> {
protected CountDownLatch latch = new CountDownLatch(1);
protected NbpSubscriber<T> target;
/** Wrap existing NbpObserver */
public AsyncObserver(NbpSubscriber<T> target) {
this.target = target;
}
/** Wait */
public void await() {
try {
latch.await();
} catch (InterruptedException e) {
fail("Test interrupted");
}
}
// NbpObserver implementation
@Override
public void onComplete() {
target.onComplete();
latch.countDown();
}
@Override
public void onError(Throwable t) {
target.onError(t);
latch.countDown();
}
@Override
public void onNext(T v) {
target.onNext(v);
}
}
@Test(timeout = 10000)
public void testUnsubscribeAfterError() {
NbpSubscriber<Long> NbpObserver = TestHelper.mockNbpSubscriber();
// NbpObservable that always fails after 100ms
SlowObservable so = new SlowObservable(100, 0);
NbpObservable<Long> o = NbpObservable.create(so).retry(5);
AsyncObserver<Long> async = new AsyncObserver<>(NbpObserver);
o.subscribe(async);
async.await();
InOrder inOrder = inOrder(NbpObserver);
// Should fail once
inOrder.verify(NbpObserver, times(1)).onError(any(Throwable.class));
inOrder.verify(NbpObserver, never()).onComplete();
assertEquals("Start 6 threads, retry 5 then fail on 6", 6, so.efforts.get());
assertEquals("Only 1 active subscription", 1, so.maxActive.get());
}
@Test(timeout = 10000)
public void testTimeoutWithRetry() {
@SuppressWarnings("unchecked")
NbpObserver<Long> NbpObserver = mock(NbpObserver.class);
// NbpObservable that sends every 100ms (timeout fails instead)
SlowObservable so = new SlowObservable(100, 10);
NbpObservable<Long> o = NbpObservable.create(so).timeout(80, TimeUnit.MILLISECONDS).retry(5);
AsyncObserver<Long> async = new AsyncObserver<>(NbpObserver);
o.subscribe(async);
async.await();
InOrder inOrder = inOrder(NbpObserver);
// Should fail once
inOrder.verify(NbpObserver, times(1)).onError(any(Throwable.class));
inOrder.verify(NbpObserver, never()).onComplete();
assertEquals("Start 6 threads, retry 5 then fail on 6", 6, so.efforts.get());
}
@Test//(timeout = 15000)
public void testRetryWithBackpressure() throws InterruptedException {
final int NUM_LOOPS = 1;
for (int j=0;j<NUM_LOOPS;j++) {
final int NUM_RETRIES = Observable.bufferSize() * 2;
for (int i = 0; i < 400; i++) {
NbpSubscriber<String> NbpObserver = TestHelper.mockNbpSubscriber();
NbpObservable<String> origin = NbpObservable.create(new FuncWithErrors(NUM_RETRIES));
NbpTestSubscriber<String> ts = new NbpTestSubscriber<>(NbpObserver);
origin.retry().observeOn(Schedulers.computation()).unsafeSubscribe(ts);
ts.awaitTerminalEvent(5, TimeUnit.SECONDS);
InOrder inOrder = inOrder(NbpObserver);
// should have no errors
verify(NbpObserver, never()).onError(any(Throwable.class));
// should show NUM_RETRIES + 1 attempts
inOrder.verify(NbpObserver, times(NUM_RETRIES + 1)).onNext("beginningEveryTime");
// should have a single success
inOrder.verify(NbpObserver, times(1)).onNext("onSuccessOnly");
// should have a single successful onCompleted
inOrder.verify(NbpObserver, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
}
}
@Test//(timeout = 15000)
public void testRetryWithBackpressureParallel() throws InterruptedException {
final int NUM_LOOPS = 1;
final int NUM_RETRIES = Observable.bufferSize() * 2;
int ncpu = Runtime.getRuntime().availableProcessors();
ExecutorService exec = Executors.newFixedThreadPool(Math.max(ncpu / 2, 2));
try {
for (int r = 0; r < NUM_LOOPS; r++) {
if (r % 10 == 0) {
System.out.println("testRetryWithBackpressureParallelLoop -> " + r);
}
final AtomicInteger timeouts = new AtomicInteger();
final Map<Integer, List<String>> data = new ConcurrentHashMap<>();
int m = 5000;
final CountDownLatch cdl = new CountDownLatch(m);
for (int i = 0; i < m; i++) {
final int j = i;
exec.execute(new Runnable() {
@Override
public void run() {
final AtomicInteger nexts = new AtomicInteger();
try {
NbpObservable<String> origin = NbpObservable.create(new FuncWithErrors(NUM_RETRIES));
NbpTestSubscriber<String> ts = new NbpTestSubscriber<>();
origin.retry()
.observeOn(Schedulers.computation()).unsafeSubscribe(ts);
ts.awaitTerminalEvent(2500, TimeUnit.MILLISECONDS);
List<String> onNextEvents = new ArrayList<>(ts.values());
if (onNextEvents.size() != NUM_RETRIES + 2) {
for (Throwable t : ts.errors()) {
onNextEvents.add(t.toString());
}
for (long err = ts.completions(); err != 0; err--) {
onNextEvents.add("onCompleted");
}
data.put(j, onNextEvents);
}
} catch (Throwable t) {
timeouts.incrementAndGet();
System.out.println(j + " | " + cdl.getCount() + " !!! " + nexts.get());
}
cdl.countDown();
}
});
}
cdl.await();
assertEquals(0, timeouts.get());
if (data.size() > 0) {
fail("Data content mismatch: " + allSequenceFrequency(data));
}
}
} finally {
exec.shutdown();
}
}
static <T> StringBuilder allSequenceFrequency(Map<Integer, List<T>> its) {
StringBuilder b = new StringBuilder();
for (Map.Entry<Integer, List<T>> e : its.entrySet()) {
if (b.length() > 0) {
b.append(", ");
}
b.append(e.getKey()).append("={");
b.append(sequenceFrequency(e.getValue()));
b.append("}");
}
return b;
}
static <T> StringBuilder sequenceFrequency(Iterable<T> it) {
StringBuilder sb = new StringBuilder();
Object prev = null;
int cnt = 0;
for (Object curr : it) {
if (sb.length() > 0) {
if (!curr.equals(prev)) {
if (cnt > 1) {
sb.append(" x ").append(cnt);
cnt = 1;
}
sb.append(", ");
sb.append(curr);
} else {
cnt++;
}
} else {
sb.append(curr);
cnt++;
}
prev = curr;
}
if (cnt > 1) {
sb.append(" x ").append(cnt);
}
return sb;
}
@Test//(timeout = 3000)
public void testIssue1900() throws InterruptedException {
NbpSubscriber<String> NbpObserver = TestHelper.mockNbpSubscriber();
final int NUM_MSG = 1034;
final AtomicInteger count = new AtomicInteger();
NbpObservable<String> origin = NbpObservable.range(0, NUM_MSG)
.map(new Function<Integer, String>() {
@Override
public String apply(Integer t1) {
return "msg: " + count.incrementAndGet();
}
});
origin.retry()
.groupBy(new Function<String, String>() {
@Override
public String apply(String t1) {
return t1;
}
})
.flatMap(new Function<NbpGroupedObservable<String,String>, NbpObservable<String>>() {
@Override
public NbpObservable<String> apply(NbpGroupedObservable<String, String> t1) {
return t1.take(1);
}
})
.unsafeSubscribe(new NbpTestSubscriber<>(NbpObserver));
InOrder inOrder = inOrder(NbpObserver);
// should see NUM_MSG onNext emissions
inOrder.verify(NbpObserver, times(NUM_MSG)).onNext(any(java.lang.String.class));
// should have no errors
inOrder.verify(NbpObserver, never()).onError(any(Throwable.class));
// should have a single success
//inOrder.verify(NbpObserver, times(1)).onNext("onSuccessOnly");
// should have a single successful onCompleted
inOrder.verify(NbpObserver, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
@Test//(timeout = 3000)
public void testIssue1900SourceNotSupportingBackpressure() {
NbpSubscriber<String> NbpObserver = TestHelper.mockNbpSubscriber();
final int NUM_MSG = 1034;
final AtomicInteger count = new AtomicInteger();
NbpObservable<String> origin = NbpObservable.create(new NbpOnSubscribe<String>() {
@Override
public void accept(NbpSubscriber<? super String> o) {
o.onSubscribe(EmptyDisposable.INSTANCE);
for(int i=0; i<NUM_MSG; i++) {
o.onNext("msg:" + count.incrementAndGet());
}
o.onComplete();
}
});
origin.retry()
.groupBy(new Function<String, String>() {
@Override
public String apply(String t1) {
return t1;
}
})
.flatMap(new Function<NbpGroupedObservable<String,String>, NbpObservable<String>>() {
@Override
public NbpObservable<String> apply(NbpGroupedObservable<String, String> t1) {
return t1.take(1);
}
})
.unsafeSubscribe(new NbpTestSubscriber<>(NbpObserver));
InOrder inOrder = inOrder(NbpObserver);
// should see NUM_MSG onNext emissions
inOrder.verify(NbpObserver, times(NUM_MSG)).onNext(any(java.lang.String.class));
// should have no errors
inOrder.verify(NbpObserver, never()).onError(any(Throwable.class));
// should have a single success
//inOrder.verify(NbpObserver, times(1)).onNext("onSuccessOnly");
// should have a single successful onCompleted
inOrder.verify(NbpObserver, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.webapp;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.servlet.RequestScoped;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Map;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import static org.apache.hadoop.yarn.util.StringHelper.*;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Base class for all views
*/
@InterfaceAudience.LimitedPrivate({"YARN", "MapReduce"})
public abstract class View implements Params {
public static final Logger LOG = LoggerFactory.getLogger(View.class);
@RequestScoped
public static class ViewContext {
final Controller.RequestContext rc;
int nestLevel = 0;
boolean wasInline;
@Inject ViewContext(Controller.RequestContext ctx) {
rc = ctx;
}
public int nestLevel() { return nestLevel; }
public boolean wasInline() { return wasInline; }
public void set(int nestLevel, boolean wasInline) {
this.nestLevel = nestLevel;
this.wasInline = wasInline;
}
public Controller.RequestContext requestContext() { return rc; }
}
private ViewContext vc;
@Inject Injector injector;
public View() {
// Makes injection in subclasses optional.
// Time will tell if this buys us more than the NPEs :)
}
public View(ViewContext ctx) {
vc = ctx;
}
/**
* The API to render the view
*/
public abstract void render();
public ViewContext context() {
if (vc == null) {
if (injector == null) {
// One downside of making the injection in subclasses optional
throw new WebAppException(join("Error accessing ViewContext from a\n",
"child constructor, either move the usage of the View methods\n",
"out of the constructor or inject the ViewContext into the\n",
"constructor"));
}
vc = injector.getInstance(ViewContext.class);
}
return vc;
}
public Throwable error() { return context().rc.error; }
public int status() { return context().rc.status; }
public boolean inDevMode() { return context().rc.devMode; }
public Injector injector() { return context().rc.injector; }
public <T> T getInstance(Class<T> cls) {
return injector().getInstance(cls);
}
public HttpServletRequest request() {
return context().rc.request;
}
public HttpServletResponse response() {
return context().rc.response;
}
public Map<String, String> moreParams() {
return context().rc.moreParams();
}
/**
* Get the cookies
* @return the cookies map
*/
public Map<String, Cookie> cookies() {
return context().rc.cookies();
}
public ServletOutputStream outputStream() {
try {
return response().getOutputStream();
} catch (IOException e) {
throw new WebAppException(e);
}
}
public PrintWriter writer() {
try {
return response().getWriter();
} catch (IOException e) {
throw new WebAppException(e);
}
}
/**
* Lookup a value from the current context.
* @param key to lookup
* @param defaultValue if key is missing
* @return the value of the key or the default value
*/
public String $(String key, String defaultValue) {
// moreParams take precedence
String value = moreParams().get(key);
if (value == null) {
value = request().getParameter(key);
}
return value == null ? defaultValue : value;
}
/**
* Lookup a value from the current context
* @param key to lookup
* @return the value of the key or empty string
*/
public String $(String key) {
return $(key, "");
}
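// Illustrative usage sketch (assumption, not part of this class): the lookup above consults
// moreParams() first, then the request parameter, then falls back to the supplied default.
// "app.user" is a placeholder key; TITLE comes from the Params constants used elsewhere here.
//
//     String user  = $("app.user", "unknown"); // "unknown" only if neither source defines the key
//     String title = $(TITLE);                 // empty string when TITLE has not been set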
/**
* Set a context value. (e.g. UI properties for sub views.)
* Try to avoid any application (vs view/ui) logic.
* @param key to set
* @param value to set
*/
public void set(String key, String value) {
moreParams().put(key, value);
}
public String root() {
String root = System.getenv(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV);
if(root == null || root.isEmpty()) {
root = "/";
}
return root;
}
public String prefix() {
if(context().rc.prefix == null) {
return root();
} else {
return ujoin(root(), context().rc.prefix);
}
}
public void setTitle(String title) {
set(TITLE, title);
}
public void setTitle(String title, String url) {
setTitle(title);
set(TITLE_LINK, url);
}
/**
* Create a URL from URL components
* @param parts components to join
* @return a URL string
*/
public String root_url(String... parts) {
return ujoin(root(), parts);
}
/**
* Create a URL from URL components
* @param parts components to join
* @return a URL string
*/
public String url(String... parts) {
return ujoin(prefix(), parts);
}
public ResponseInfo info(String about) {
return getInstance(ResponseInfo.class).about(about);
}
/**
* Render a sub-view
* @param cls the class of the sub-view
*/
public void render(Class<? extends SubView> cls) {
int saved = context().nestLevel;
getInstance(cls).renderPartial();
if (context().nestLevel != saved) {
throw new WebAppException("View "+ cls.getSimpleName() +" not complete");
}
}
}
|
|
/*******************************************************************************
* Copyright 2009-2016 Amazon Services. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
*
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at: http://aws.amazon.com/apache2.0
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*******************************************************************************
* Library Version: 2016-10-05
* Generated: Wed Oct 05 06:15:34 PDT 2016
*/
package com.amazonservices.mws.FulfillmentInboundShipment.model;
import javax.xml.bind.annotation.XmlRegistry;
/**
* Object factory for jaxb.
*/
@XmlRegistry
public class ObjectFactory {
/**
* Create PutTransportContentRequest
*
* @return new PutTransportContentRequest
*/
public PutTransportContentRequest createPutTransportContentRequest() {
return new PutTransportContentRequest();
}
/**
* Create TransportDetailInput
*
* @return new TransportDetailInput
*/
public TransportDetailInput createTransportDetailInput() {
return new TransportDetailInput();
}
/**
* Create PartneredSmallParcelDataInput
*
* @return new PartneredSmallParcelDataInput
*/
public PartneredSmallParcelDataInput createPartneredSmallParcelDataInput() {
return new PartneredSmallParcelDataInput();
}
/**
* Create PartneredSmallParcelPackageInputList
*
* @return new PartneredSmallParcelPackageInputList
*/
public PartneredSmallParcelPackageInputList createPartneredSmallParcelPackageInputList() {
return new PartneredSmallParcelPackageInputList();
}
/**
* Create PartneredSmallParcelPackageInput
*
* @return new PartneredSmallParcelPackageInput
*/
public PartneredSmallParcelPackageInput createPartneredSmallParcelPackageInput() {
return new PartneredSmallParcelPackageInput();
}
/**
* Create Dimensions
*
* @return new Dimensions
*/
public Dimensions createDimensions() {
return new Dimensions();
}
/**
* Create Weight
*
* @return new Weight
*/
public Weight createWeight() {
return new Weight();
}
/**
* Create NonPartneredSmallParcelDataInput
*
* @return new NonPartneredSmallParcelDataInput
*/
public NonPartneredSmallParcelDataInput createNonPartneredSmallParcelDataInput() {
return new NonPartneredSmallParcelDataInput();
}
/**
* Create NonPartneredSmallParcelPackageInputList
*
* @return new NonPartneredSmallParcelPackageInputList
*/
public NonPartneredSmallParcelPackageInputList createNonPartneredSmallParcelPackageInputList() {
return new NonPartneredSmallParcelPackageInputList();
}
/**
* Create NonPartneredSmallParcelPackageInput
*
* @return new NonPartneredSmallParcelPackageInput
*/
public NonPartneredSmallParcelPackageInput createNonPartneredSmallParcelPackageInput() {
return new NonPartneredSmallParcelPackageInput();
}
/**
* Create PartneredLtlDataInput
*
* @return new PartneredLtlDataInput
*/
public PartneredLtlDataInput createPartneredLtlDataInput() {
return new PartneredLtlDataInput();
}
/**
* Create Contact
*
* @return new Contact
*/
public Contact createContact() {
return new Contact();
}
/**
* Create PalletList
*
* @return new PalletList
*/
public PalletList createPalletList() {
return new PalletList();
}
/**
* Create Pallet
*
* @return new Pallet
*/
public Pallet createPallet() {
return new Pallet();
}
/**
* Create Amount
*
* @return new Amount
*/
public Amount createAmount() {
return new Amount();
}
/**
* Create NonPartneredLtlDataInput
*
* @return new NonPartneredLtlDataInput
*/
public NonPartneredLtlDataInput createNonPartneredLtlDataInput() {
return new NonPartneredLtlDataInput();
}
/**
* Create TransportResult
*
* @return new TransportResult
*/
public TransportResult createTransportResult() {
return new TransportResult();
}
/**
* Create GetInboundGuidanceForSKURequest
*
* @return new GetInboundGuidanceForSKURequest
*/
public GetInboundGuidanceForSKURequest createGetInboundGuidanceForSKURequest() {
return new GetInboundGuidanceForSKURequest();
}
/**
* Create SellerSKUList
*
* @return new SellerSKUList
*/
public SellerSKUList createSellerSKUList() {
return new SellerSKUList();
}
/**
* Create SKUInboundGuidanceList
*
* @return new SKUInboundGuidanceList
*/
public SKUInboundGuidanceList createSKUInboundGuidanceList() {
return new SKUInboundGuidanceList();
}
/**
* Create SKUInboundGuidance
*
* @return new SKUInboundGuidance
*/
public SKUInboundGuidance createSKUInboundGuidance() {
return new SKUInboundGuidance();
}
/**
* Create GuidanceReasonList
*
* @return new GuidanceReasonList
*/
public GuidanceReasonList createGuidanceReasonList() {
return new GuidanceReasonList();
}
/**
* Create InvalidSKUList
*
* @return new InvalidSKUList
*/
public InvalidSKUList createInvalidSKUList() {
return new InvalidSKUList();
}
/**
* Create InvalidSKU
*
* @return new InvalidSKU
*/
public InvalidSKU createInvalidSKU() {
return new InvalidSKU();
}
/**
* Create ConfirmTransportInputRequest
*
* @return new ConfirmTransportInputRequest
*/
public ConfirmTransportInputRequest createConfirmTransportInputRequest() {
return new ConfirmTransportInputRequest();
}
/**
* Create ConfirmPreorderRequest
*
* @return new ConfirmPreorderRequest
*/
public ConfirmPreorderRequest createConfirmPreorderRequest() {
return new ConfirmPreorderRequest();
}
/**
* Create ListInboundShipmentsRequest
*
* @return new ListInboundShipmentsRequest
*/
public ListInboundShipmentsRequest createListInboundShipmentsRequest() {
return new ListInboundShipmentsRequest();
}
/**
* Create ShipmentStatusList
*
* @return new ShipmentStatusList
*/
public ShipmentStatusList createShipmentStatusList() {
return new ShipmentStatusList();
}
/**
* Create ShipmentIdList
*
* @return new ShipmentIdList
*/
public ShipmentIdList createShipmentIdList() {
return new ShipmentIdList();
}
/**
* Create InboundShipmentList
*
* @return new InboundShipmentList
*/
public InboundShipmentList createInboundShipmentList() {
return new InboundShipmentList();
}
/**
* Create InboundShipmentInfo
*
* @return new InboundShipmentInfo
*/
public InboundShipmentInfo createInboundShipmentInfo() {
return new InboundShipmentInfo();
}
/**
* Create Address
*
* @return new Address
*/
public Address createAddress() {
return new Address();
}
/**
* Create BoxContentsFeeDetails
*
* @return new BoxContentsFeeDetails
*/
public BoxContentsFeeDetails createBoxContentsFeeDetails() {
return new BoxContentsFeeDetails();
}
/**
* Create GetPalletLabelsRequest
*
* @return new GetPalletLabelsRequest
*/
public GetPalletLabelsRequest createGetPalletLabelsRequest() {
return new GetPalletLabelsRequest();
}
/**
* Create TransportDocument
*
* @return new TransportDocument
*/
public TransportDocument createTransportDocument() {
return new TransportDocument();
}
/**
* Create EstimateTransportInputRequest
*
* @return new EstimateTransportInputRequest
*/
public EstimateTransportInputRequest createEstimateTransportInputRequest() {
return new EstimateTransportInputRequest();
}
/**
* Create ListInboundShipmentItemsByNextTokenRequest
*
* @return new ListInboundShipmentItemsByNextTokenRequest
*/
public ListInboundShipmentItemsByNextTokenRequest createListInboundShipmentItemsByNextTokenRequest() {
return new ListInboundShipmentItemsByNextTokenRequest();
}
/**
* Create InboundShipmentItemList
*
* @return new InboundShipmentItemList
*/
public InboundShipmentItemList createInboundShipmentItemList() {
return new InboundShipmentItemList();
}
/**
* Create InboundShipmentItem
*
* @return new InboundShipmentItem
*/
public InboundShipmentItem createInboundShipmentItem() {
return new InboundShipmentItem();
}
/**
* Create PrepDetailsList
*
* @return new PrepDetailsList
*/
public PrepDetailsList createPrepDetailsList() {
return new PrepDetailsList();
}
/**
* Create PrepDetails
*
* @return new PrepDetails
*/
public PrepDetails createPrepDetails() {
return new PrepDetails();
}
/**
* Create GetTransportContentRequest
*
* @return new GetTransportContentRequest
*/
public GetTransportContentRequest createGetTransportContentRequest() {
return new GetTransportContentRequest();
}
/**
* Create TransportContent
*
* @return new TransportContent
*/
public TransportContent createTransportContent() {
return new TransportContent();
}
/**
* Create TransportHeader
*
* @return new TransportHeader
*/
public TransportHeader createTransportHeader() {
return new TransportHeader();
}
/**
* Create TransportDetailOutput
*
* @return new TransportDetailOutput
*/
public TransportDetailOutput createTransportDetailOutput() {
return new TransportDetailOutput();
}
/**
* Create PartneredSmallParcelDataOutput
*
* @return new PartneredSmallParcelDataOutput
*/
public PartneredSmallParcelDataOutput createPartneredSmallParcelDataOutput() {
return new PartneredSmallParcelDataOutput();
}
/**
* Create PartneredSmallParcelPackageOutputList
*
* @return new PartneredSmallParcelPackageOutputList
*/
public PartneredSmallParcelPackageOutputList createPartneredSmallParcelPackageOutputList() {
return new PartneredSmallParcelPackageOutputList();
}
/**
* Create PartneredSmallParcelPackageOutput
*
* @return new PartneredSmallParcelPackageOutput
*/
public PartneredSmallParcelPackageOutput createPartneredSmallParcelPackageOutput() {
return new PartneredSmallParcelPackageOutput();
}
/**
* Create PartneredEstimate
*
* @return new PartneredEstimate
*/
public PartneredEstimate createPartneredEstimate() {
return new PartneredEstimate();
}
/**
* Create NonPartneredSmallParcelDataOutput
*
* @return new NonPartneredSmallParcelDataOutput
*/
public NonPartneredSmallParcelDataOutput createNonPartneredSmallParcelDataOutput() {
return new NonPartneredSmallParcelDataOutput();
}
/**
* Create NonPartneredSmallParcelPackageOutputList
*
* @return new NonPartneredSmallParcelPackageOutputList
*/
public NonPartneredSmallParcelPackageOutputList createNonPartneredSmallParcelPackageOutputList() {
return new NonPartneredSmallParcelPackageOutputList();
}
/**
* Create NonPartneredSmallParcelPackageOutput
*
* @return new NonPartneredSmallParcelPackageOutput
*/
public NonPartneredSmallParcelPackageOutput createNonPartneredSmallParcelPackageOutput() {
return new NonPartneredSmallParcelPackageOutput();
}
/**
* Create PartneredLtlDataOutput
*
* @return new PartneredLtlDataOutput
*/
public PartneredLtlDataOutput createPartneredLtlDataOutput() {
return new PartneredLtlDataOutput();
}
/**
* Create NonPartneredLtlDataOutput
*
* @return new NonPartneredLtlDataOutput
*/
public NonPartneredLtlDataOutput createNonPartneredLtlDataOutput() {
return new NonPartneredLtlDataOutput();
}
/**
* Create CreateInboundShipmentPlanRequest
*
* @return new CreateInboundShipmentPlanRequest
*/
public CreateInboundShipmentPlanRequest createCreateInboundShipmentPlanRequest() {
return new CreateInboundShipmentPlanRequest();
}
/**
* Create InboundShipmentPlanRequestItemList
*
* @return new InboundShipmentPlanRequestItemList
*/
public InboundShipmentPlanRequestItemList createInboundShipmentPlanRequestItemList() {
return new InboundShipmentPlanRequestItemList();
}
/**
* Create InboundShipmentPlanRequestItem
*
* @return new InboundShipmentPlanRequestItem
*/
public InboundShipmentPlanRequestItem createInboundShipmentPlanRequestItem() {
return new InboundShipmentPlanRequestItem();
}
/**
* Create InboundShipmentPlanList
*
* @return new InboundShipmentPlanList
*/
public InboundShipmentPlanList createInboundShipmentPlanList() {
return new InboundShipmentPlanList();
}
/**
* Create InboundShipmentPlan
*
* @return new InboundShipmentPlan
*/
public InboundShipmentPlan createInboundShipmentPlan() {
return new InboundShipmentPlan();
}
/**
* Create InboundShipmentPlanItemList
*
* @return new InboundShipmentPlanItemList
*/
public InboundShipmentPlanItemList createInboundShipmentPlanItemList() {
return new InboundShipmentPlanItemList();
}
/**
* Create InboundShipmentPlanItem
*
* @return new InboundShipmentPlanItem
*/
public InboundShipmentPlanItem createInboundShipmentPlanItem() {
return new InboundShipmentPlanItem();
}
/**
* Create GetPrepInstructionsForASINRequest
*
* @return new GetPrepInstructionsForASINRequest
*/
public GetPrepInstructionsForASINRequest createGetPrepInstructionsForASINRequest() {
return new GetPrepInstructionsForASINRequest();
}
/**
     * Create ASINList
     *
     * @return new ASINList
*/
public ASINList createAsinList() {
return new ASINList();
}
/**
* Create ASINPrepInstructionsList
*
* @return new ASINPrepInstructionsList
*/
public ASINPrepInstructionsList createASINPrepInstructionsList() {
return new ASINPrepInstructionsList();
}
/**
* Create ASINPrepInstructions
*
* @return new ASINPrepInstructions
*/
public ASINPrepInstructions createASINPrepInstructions() {
return new ASINPrepInstructions();
}
/**
* Create PrepInstructionList
*
* @return new PrepInstructionList
*/
public PrepInstructionList createPrepInstructionList() {
return new PrepInstructionList();
}
/**
* Create InvalidASINList
*
* @return new InvalidASINList
*/
public InvalidASINList createInvalidASINList() {
return new InvalidASINList();
}
/**
* Create InvalidASIN
*
* @return new InvalidASIN
*/
public InvalidASIN createInvalidASIN() {
return new InvalidASIN();
}
/**
* Create GetPreorderInfoRequest
*
* @return new GetPreorderInfoRequest
*/
public GetPreorderInfoRequest createGetPreorderInfoRequest() {
return new GetPreorderInfoRequest();
}
/**
* Create GetUniquePackageLabelsRequest
*
* @return new GetUniquePackageLabelsRequest
*/
public GetUniquePackageLabelsRequest createGetUniquePackageLabelsRequest() {
return new GetUniquePackageLabelsRequest();
}
/**
* Create PackageIdentifiers
*
* @return new PackageIdentifiers
*/
public PackageIdentifiers createPackageIdentifiers() {
return new PackageIdentifiers();
}
/**
* Create GetServiceStatusRequest
*
* @return new GetServiceStatusRequest
*/
public GetServiceStatusRequest createGetServiceStatusRequest() {
return new GetServiceStatusRequest();
}
/**
* Create GetPrepInstructionsForSKURequest
*
* @return new GetPrepInstructionsForSKURequest
*/
public GetPrepInstructionsForSKURequest createGetPrepInstructionsForSKURequest() {
return new GetPrepInstructionsForSKURequest();
}
/**
* Create SKUPrepInstructionsList
*
* @return new SKUPrepInstructionsList
*/
public SKUPrepInstructionsList createSKUPrepInstructionsList() {
return new SKUPrepInstructionsList();
}
/**
* Create SKUPrepInstructions
*
* @return new SKUPrepInstructions
*/
public SKUPrepInstructions createSKUPrepInstructions() {
return new SKUPrepInstructions();
}
/**
* Create AmazonPrepFeesDetailsList
*
* @return new AmazonPrepFeesDetailsList
*/
public AmazonPrepFeesDetailsList createAmazonPrepFeesDetailsList() {
return new AmazonPrepFeesDetailsList();
}
/**
* Create AmazonPrepFeesDetails
*
* @return new AmazonPrepFeesDetails
*/
public AmazonPrepFeesDetails createAmazonPrepFeesDetails() {
return new AmazonPrepFeesDetails();
}
/**
* Create GetPackageLabelsRequest
*
* @return new GetPackageLabelsRequest
*/
public GetPackageLabelsRequest createGetPackageLabelsRequest() {
return new GetPackageLabelsRequest();
}
/**
* Create GetBillOfLadingRequest
*
* @return new GetBillOfLadingRequest
*/
public GetBillOfLadingRequest createGetBillOfLadingRequest() {
return new GetBillOfLadingRequest();
}
/**
* Create ListInboundShipmentsByNextTokenRequest
*
* @return new ListInboundShipmentsByNextTokenRequest
*/
public ListInboundShipmentsByNextTokenRequest createListInboundShipmentsByNextTokenRequest() {
return new ListInboundShipmentsByNextTokenRequest();
}
/**
* Create GetInboundGuidanceForASINRequest
*
* @return new GetInboundGuidanceForASINRequest
*/
public GetInboundGuidanceForASINRequest createGetInboundGuidanceForASINRequest() {
return new GetInboundGuidanceForASINRequest();
}
/**
* Create ASINList
*
* @return new ASINList
*/
public ASINList createASINList() {
return new ASINList();
}
/**
* Create ASINInboundGuidanceList
*
* @return new ASINInboundGuidanceList
*/
public ASINInboundGuidanceList createASINInboundGuidanceList() {
return new ASINInboundGuidanceList();
}
/**
* Create ASINInboundGuidance
*
* @return new ASINInboundGuidance
*/
public ASINInboundGuidance createASINInboundGuidance() {
return new ASINInboundGuidance();
}
/**
* Create UpdateInboundShipmentRequest
*
* @return new UpdateInboundShipmentRequest
*/
public UpdateInboundShipmentRequest createUpdateInboundShipmentRequest() {
return new UpdateInboundShipmentRequest();
}
/**
* Create InboundShipmentHeader
*
* @return new InboundShipmentHeader
*/
public InboundShipmentHeader createInboundShipmentHeader() {
return new InboundShipmentHeader();
}
/**
* Create CreateInboundShipmentRequest
*
* @return new CreateInboundShipmentRequest
*/
public CreateInboundShipmentRequest createCreateInboundShipmentRequest() {
return new CreateInboundShipmentRequest();
}
/**
* Create VoidTransportInputRequest
*
* @return new VoidTransportInputRequest
*/
public VoidTransportInputRequest createVoidTransportInputRequest() {
return new VoidTransportInputRequest();
}
/**
* Create ListInboundShipmentItemsRequest
*
* @return new ListInboundShipmentItemsRequest
*/
public ListInboundShipmentItemsRequest createListInboundShipmentItemsRequest() {
return new ListInboundShipmentItemsRequest();
}
/**
* Create ResponseMetadata
*
* @return new ResponseMetadata
*/
public ResponseMetadata createResponseMetadata() {
return new ResponseMetadata();
}
/**
* Create ResponseHeaderMetadata
*
* @return new ResponseHeaderMetadata
*/
public ResponseHeaderMetadata createResponseHeaderMetadata() {
return new ResponseHeaderMetadata();
}
/**
* Create ConfirmPreorderResult
*
* @return new ConfirmPreorderResult
*/
public ConfirmPreorderResult createConfirmPreorderResult() {
return new ConfirmPreorderResult();
}
/**
* Create ConfirmPreorderResponse
*
* @return new ConfirmPreorderResponse
*/
public ConfirmPreorderResponse createConfirmPreorderResponse() {
return new ConfirmPreorderResponse();
}
/**
* Create ConfirmTransportRequestResult
*
* @return new ConfirmTransportRequestResult
*/
public ConfirmTransportRequestResult createConfirmTransportRequestResult() {
return new ConfirmTransportRequestResult();
}
/**
* Create ConfirmTransportRequestResponse
*
* @return new ConfirmTransportRequestResponse
*/
public ConfirmTransportRequestResponse createConfirmTransportRequestResponse() {
return new ConfirmTransportRequestResponse();
}
/**
* Create CreateInboundShipmentResult
*
* @return new CreateInboundShipmentResult
*/
public CreateInboundShipmentResult createCreateInboundShipmentResult() {
return new CreateInboundShipmentResult();
}
/**
* Create CreateInboundShipmentResponse
*
* @return new CreateInboundShipmentResponse
*/
public CreateInboundShipmentResponse createCreateInboundShipmentResponse() {
return new CreateInboundShipmentResponse();
}
/**
* Create CreateInboundShipmentPlanResult
*
* @return new CreateInboundShipmentPlanResult
*/
public CreateInboundShipmentPlanResult createCreateInboundShipmentPlanResult() {
return new CreateInboundShipmentPlanResult();
}
/**
* Create CreateInboundShipmentPlanResponse
*
* @return new CreateInboundShipmentPlanResponse
*/
public CreateInboundShipmentPlanResponse createCreateInboundShipmentPlanResponse() {
return new CreateInboundShipmentPlanResponse();
}
/**
* Create EstimateTransportRequestResult
*
* @return new EstimateTransportRequestResult
*/
public EstimateTransportRequestResult createEstimateTransportRequestResult() {
return new EstimateTransportRequestResult();
}
/**
* Create EstimateTransportRequestResponse
*
* @return new EstimateTransportRequestResponse
*/
public EstimateTransportRequestResponse createEstimateTransportRequestResponse() {
return new EstimateTransportRequestResponse();
}
/**
* Create GetBillOfLadingResult
*
* @return new GetBillOfLadingResult
*/
public GetBillOfLadingResult createGetBillOfLadingResult() {
return new GetBillOfLadingResult();
}
/**
* Create GetBillOfLadingResponse
*
* @return new GetBillOfLadingResponse
*/
public GetBillOfLadingResponse createGetBillOfLadingResponse() {
return new GetBillOfLadingResponse();
}
/**
* Create GetInboundGuidanceForASINResult
*
* @return new GetInboundGuidanceForASINResult
*/
public GetInboundGuidanceForASINResult createGetInboundGuidanceForASINResult() {
return new GetInboundGuidanceForASINResult();
}
/**
* Create GetInboundGuidanceForASINResponse
*
* @return new GetInboundGuidanceForASINResponse
*/
public GetInboundGuidanceForASINResponse createGetInboundGuidanceForASINResponse() {
return new GetInboundGuidanceForASINResponse();
}
/**
* Create GetInboundGuidanceForSKUResult
*
* @return new GetInboundGuidanceForSKUResult
*/
public GetInboundGuidanceForSKUResult createGetInboundGuidanceForSKUResult() {
return new GetInboundGuidanceForSKUResult();
}
/**
* Create GetInboundGuidanceForSKUResponse
*
* @return new GetInboundGuidanceForSKUResponse
*/
public GetInboundGuidanceForSKUResponse createGetInboundGuidanceForSKUResponse() {
return new GetInboundGuidanceForSKUResponse();
}
/**
* Create GetPackageLabelsResult
*
* @return new GetPackageLabelsResult
*/
public GetPackageLabelsResult createGetPackageLabelsResult() {
return new GetPackageLabelsResult();
}
/**
* Create GetPackageLabelsResponse
*
* @return new GetPackageLabelsResponse
*/
public GetPackageLabelsResponse createGetPackageLabelsResponse() {
return new GetPackageLabelsResponse();
}
/**
* Create GetPalletLabelsResult
*
* @return new GetPalletLabelsResult
*/
public GetPalletLabelsResult createGetPalletLabelsResult() {
return new GetPalletLabelsResult();
}
/**
* Create GetPalletLabelsResponse
*
* @return new GetPalletLabelsResponse
*/
public GetPalletLabelsResponse createGetPalletLabelsResponse() {
return new GetPalletLabelsResponse();
}
/**
* Create GetPreorderInfoResult
*
* @return new GetPreorderInfoResult
*/
public GetPreorderInfoResult createGetPreorderInfoResult() {
return new GetPreorderInfoResult();
}
/**
* Create GetPreorderInfoResponse
*
* @return new GetPreorderInfoResponse
*/
public GetPreorderInfoResponse createGetPreorderInfoResponse() {
return new GetPreorderInfoResponse();
}
/**
* Create GetPrepInstructionsForASINResult
*
* @return new GetPrepInstructionsForASINResult
*/
public GetPrepInstructionsForASINResult createGetPrepInstructionsForASINResult() {
return new GetPrepInstructionsForASINResult();
}
/**
* Create GetPrepInstructionsForASINResponse
*
* @return new GetPrepInstructionsForASINResponse
*/
public GetPrepInstructionsForASINResponse createGetPrepInstructionsForASINResponse() {
return new GetPrepInstructionsForASINResponse();
}
/**
* Create GetPrepInstructionsForSKUResult
*
* @return new GetPrepInstructionsForSKUResult
*/
public GetPrepInstructionsForSKUResult createGetPrepInstructionsForSKUResult() {
return new GetPrepInstructionsForSKUResult();
}
/**
* Create GetPrepInstructionsForSKUResponse
*
* @return new GetPrepInstructionsForSKUResponse
*/
public GetPrepInstructionsForSKUResponse createGetPrepInstructionsForSKUResponse() {
return new GetPrepInstructionsForSKUResponse();
}
/**
* Create GetServiceStatusResult
*
* @return new GetServiceStatusResult
*/
public GetServiceStatusResult createGetServiceStatusResult() {
return new GetServiceStatusResult();
}
/**
* Create GetServiceStatusResponse
*
* @return new GetServiceStatusResponse
*/
public GetServiceStatusResponse createGetServiceStatusResponse() {
return new GetServiceStatusResponse();
}
/**
* Create GetTransportContentResult
*
* @return new GetTransportContentResult
*/
public GetTransportContentResult createGetTransportContentResult() {
return new GetTransportContentResult();
}
/**
* Create GetTransportContentResponse
*
* @return new GetTransportContentResponse
*/
public GetTransportContentResponse createGetTransportContentResponse() {
return new GetTransportContentResponse();
}
/**
* Create GetUniquePackageLabelsResult
*
* @return new GetUniquePackageLabelsResult
*/
public GetUniquePackageLabelsResult createGetUniquePackageLabelsResult() {
return new GetUniquePackageLabelsResult();
}
/**
* Create GetUniquePackageLabelsResponse
*
* @return new GetUniquePackageLabelsResponse
*/
public GetUniquePackageLabelsResponse createGetUniquePackageLabelsResponse() {
return new GetUniquePackageLabelsResponse();
}
/**
* Create ListInboundShipmentItemsResult
*
* @return new ListInboundShipmentItemsResult
*/
public ListInboundShipmentItemsResult createListInboundShipmentItemsResult() {
return new ListInboundShipmentItemsResult();
}
/**
* Create ListInboundShipmentItemsResponse
*
* @return new ListInboundShipmentItemsResponse
*/
public ListInboundShipmentItemsResponse createListInboundShipmentItemsResponse() {
return new ListInboundShipmentItemsResponse();
}
/**
* Create ListInboundShipmentItemsByNextTokenResult
*
* @return new ListInboundShipmentItemsByNextTokenResult
*/
public ListInboundShipmentItemsByNextTokenResult createListInboundShipmentItemsByNextTokenResult() {
return new ListInboundShipmentItemsByNextTokenResult();
}
/**
* Create ListInboundShipmentItemsByNextTokenResponse
*
* @return new ListInboundShipmentItemsByNextTokenResponse
*/
public ListInboundShipmentItemsByNextTokenResponse createListInboundShipmentItemsByNextTokenResponse() {
return new ListInboundShipmentItemsByNextTokenResponse();
}
/**
* Create ListInboundShipmentsResult
*
* @return new ListInboundShipmentsResult
*/
public ListInboundShipmentsResult createListInboundShipmentsResult() {
return new ListInboundShipmentsResult();
}
/**
* Create ListInboundShipmentsResponse
*
* @return new ListInboundShipmentsResponse
*/
public ListInboundShipmentsResponse createListInboundShipmentsResponse() {
return new ListInboundShipmentsResponse();
}
/**
* Create ListInboundShipmentsByNextTokenResult
*
* @return new ListInboundShipmentsByNextTokenResult
*/
public ListInboundShipmentsByNextTokenResult createListInboundShipmentsByNextTokenResult() {
return new ListInboundShipmentsByNextTokenResult();
}
/**
* Create ListInboundShipmentsByNextTokenResponse
*
* @return new ListInboundShipmentsByNextTokenResponse
*/
public ListInboundShipmentsByNextTokenResponse createListInboundShipmentsByNextTokenResponse() {
return new ListInboundShipmentsByNextTokenResponse();
}
/**
* Create PutTransportContentResult
*
* @return new PutTransportContentResult
*/
public PutTransportContentResult createPutTransportContentResult() {
return new PutTransportContentResult();
}
/**
* Create PutTransportContentResponse
*
* @return new PutTransportContentResponse
*/
public PutTransportContentResponse createPutTransportContentResponse() {
return new PutTransportContentResponse();
}
/**
* Create UpdateInboundShipmentResult
*
* @return new UpdateInboundShipmentResult
*/
public UpdateInboundShipmentResult createUpdateInboundShipmentResult() {
return new UpdateInboundShipmentResult();
}
/**
* Create UpdateInboundShipmentResponse
*
* @return new UpdateInboundShipmentResponse
*/
public UpdateInboundShipmentResponse createUpdateInboundShipmentResponse() {
return new UpdateInboundShipmentResponse();
}
/**
* Create VoidTransportRequestResult
*
* @return new VoidTransportRequestResult
*/
public VoidTransportRequestResult createVoidTransportRequestResult() {
return new VoidTransportRequestResult();
}
/**
* Create VoidTransportRequestResponse
*
* @return new VoidTransportRequestResponse
*/
public VoidTransportRequestResponse createVoidTransportRequestResponse() {
return new VoidTransportRequestResponse();
}
/**
* Create a new ObjectFactory.
*/
public ObjectFactory() {
//
}
}
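// A minimal usage sketch, assuming typical JAXB-style client code around this generated model;
// the helper class below is illustrative and not part of the generated API.
class ObjectFactoryUsageSketch {

    /** The factory methods only construct empty model instances; callers populate them afterwards. */
    static PutTransportContentRequest newEmptyPutTransportContentRequest() {
        ObjectFactory factory = new ObjectFactory();
        return factory.createPutTransportContentRequest();
    }
}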
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.exercise.security.user;
import javax.jcr.Credentials;
import javax.jcr.Node;
import javax.jcr.Property;
import javax.jcr.RepositoryException;
import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.api.security.user.User;
import org.apache.jackrabbit.api.security.user.UserManager;
import org.apache.jackrabbit.oak.exercise.ExerciseUtility;
import org.apache.jackrabbit.oak.exercise.security.user.action.L2_AuthorizableActionTest;
import org.apache.jackrabbit.test.AbstractJCRTest;
import static org.apache.jackrabbit.oak.exercise.ExerciseUtility.TEST_PW;
/**
* <pre>
* Module: User Management
* =============================================================================
*
* Title: Password Test
* -----------------------------------------------------------------------------
*
* Goal:
 * Become familiar with the password-related parts of the user management API and
 * get to know some implementation details.
*
* Exercises:
*
* - {@link #testGetCredentials()}
 * Understand that the password is not exposed as a plain-text property of
 * the user. Look at the return value of the {@link org.apache.jackrabbit.api.security.user.User#getCredentials()}
 * call and what it looks like. Fix the test-case accordingly.
*
* Question: Can you use the exposed Credentials to login to the repository?
*
* - {@link #testPasswordInContent()}
 * Creates a new user with a valid password. Inspect how the password is being
 * stored in the repository (Note: implementation detail!) and fill in the
 * right property name to make the test-case pass.
* Explain why the password property doesn't contain the password string.
*
* - {@link #testCreateUserAndLogin()}
 * Same as {@link #testPasswordInContent()} but additionally aims to log in as
 * the new user.
 * Fix the test by creating the correct {@link javax.jcr.Credentials}
 * (an illustrative sketch of such credentials follows after this class).
*
* - {@link #testCreateUserWithoutPassword()}
* This test creates a new user with a 'null' password. Inspect the user node
 * created by this method and add the correct assertion with respect to the password.
*
* - {@link #testCreateUserWithoutPasswordAndLogin()}
* Same as {@link #testCreateUserWithoutPassword()}. This time fix the test
* case to properly reflect the expected behavior upon login for that new user.
*
* - {@link #testChangePassword()}
* Change the password of an existing user. Use both variants and get familiar
 * with the implementation-specific constraints.
*
*
* Additional Exercises:
* -----------------------------------------------------------------------------
*
 * In an OSGi-based Oak installation (Sling|Granite|CQ) you can easily perform the
 * following additional test.
* Note: You can also do that in Java by building a new Jcr/Oak repository with
* the corresponding configuration parameters set.
*
* - Go to the system console and change the default configuration parameters
* in the 'Apache Jackrabbit Oak UserConfiguration' and play with the following
* configuration parameters:
* - {@link org.apache.jackrabbit.oak.spi.security.user.UserConstants#PARAM_PASSWORD_HASH_ALGORITHM}
* - {@link org.apache.jackrabbit.oak.spi.security.user.UserConstants#PARAM_PASSWORD_HASH_ITERATIONS}
* - {@link org.apache.jackrabbit.oak.spi.security.user.UserConstants#PARAM_PASSWORD_SALT_SIZE}
* Change the password of a test user and observe the changes.
*
* - Go to the system console and look for the 'Apache Jackrabbit Oak AuthorizableActionProvider'.
* Enable the password validation action and then change the password of
* an existing test user.
*
*
* Advanced Exercises:
* -----------------------------------------------------------------------------
*
* - Write a custom password validation action and plug it into your repository.
* See Oak documentation for some hints.
*
*
* Related Exercises:
* -----------------------------------------------------------------------------
*
 * - {@link L12_PasswordExpiryTest}
 * - {@link L2_AuthorizableActionTest}
*
* </pre>
*
* @see User#changePassword(String, String)
* @see User#changePassword(String)
* @see org.apache.jackrabbit.oak.spi.security.user.action.PasswordValidationAction
* @see org.apache.jackrabbit.oak.spi.security.user.util.PasswordUtil
*/
public class L11_PasswordTest extends AbstractJCRTest {
private UserManager userManager;
private String testId;
private User testUser;
@Override
protected void setUp() throws Exception {
super.setUp();
userManager = ((JackrabbitSession) superuser).getUserManager();
testId = ExerciseUtility.getTestId("testUser");
}
@Override
protected void tearDown() throws Exception {
try {
if (testUser != null) {
testUser.remove();
}
superuser.save();
} finally {
super.tearDown();
}
}
public void testGetCredentials() throws RepositoryException {
testUser = userManager.createUser(testId, TEST_PW);
Credentials creds = testUser.getCredentials();
// EXERCISE fix the expectation
Credentials expected = null;
assertEquals(expected, creds);
// EXERCISE : complete and explain the expected behavior
getHelper().getRepository().login(creds).logout();
}
public void testPasswordInContent() throws RepositoryException {
testUser = userManager.createUser(testId, TEST_PW);
superuser.save();
Node userNode = superuser.getNode(testUser.getPath());
String pwPropertyName = null; // EXERCISE: fill in
Property pwProperty = userNode.getProperty(pwPropertyName);
// EXERCISE: explain why the password property doesn't contain the 'pw' string
assertFalse(TEST_PW.equals(pwProperty.getString()));
}
public void testCreateUserAndLogin() throws RepositoryException {
testUser = userManager.createUser(testId, TEST_PW);
superuser.save();
Credentials creds = null; // EXERCISE build the credentials
getHelper().getRepository().login(creds).logout();
}
public void testCreateUserWithoutPassword() throws RepositoryException {
testUser = userManager.createUser(testId, null);
superuser.save();
// EXERCISE: look at the user node. does it have a password property set?
// EXERCISE: add the correct assertion
Node userNode = superuser.getNode(testUser.getPath());
}
public void testCreateUserWithoutPasswordAndLogin() throws RepositoryException {
testUser = userManager.createUser(testId, null);
superuser.save();
// EXERCISE: build the credentials and fix the test-case such that it no longer fails
Credentials creds = null;
getHelper().getRepository().login(creds).logout();
}
public void testChangePassword() throws RepositoryException {
testUser = userManager.createUser(testId, null);
superuser.save();
String newPassword = null; // EXERCISE : define valid value(s)
testUser.changePassword(newPassword);
String oldPassword = null; // EXERCISE : fill in the correct value
newPassword = null; // EXERCISE : fill in a valid value; Q: can you use null?
testUser.changePassword(newPassword, oldPassword);
superuser.save();
}
}
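// An illustrative sketch of the kind of credentials the login exercises above ask for
// (assumption: the default login module accepts SimpleCredentials carrying the plain test password);
// this helper is not part of the exercise itself.
class PasswordTestCredentialsSketch {

    /** Build SimpleCredentials for the given user id and log in once to verify them. */
    static void loginWithSimpleCredentials(javax.jcr.Repository repository, String userId)
            throws RepositoryException {
        Credentials creds = new javax.jcr.SimpleCredentials(userId, TEST_PW.toCharArray());
        // Log in with the freshly built credentials and close the session again,
        // mirroring the login/logout pattern used by the test methods above.
        repository.login(creds).logout();
    }
}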
|
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datastream/v1/datastream_resources.proto
package com.google.cloud.datastream.v1;
/**
*
*
* <pre>
* A validation to perform on a stream.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1.Validation}
*/
public final class Validation extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datastream.v1.Validation)
ValidationOrBuilder {
private static final long serialVersionUID = 0L;
// Use Validation.newBuilder() to construct.
private Validation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Validation() {
description_ = "";
state_ = 0;
message_ = java.util.Collections.emptyList();
code_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Validation();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private Validation(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
description_ = s;
break;
}
case 16:
{
int rawValue = input.readEnum();
state_ = rawValue;
break;
}
case 26:
{
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
message_ =
new java.util.ArrayList<com.google.cloud.datastream.v1.ValidationMessage>();
mutable_bitField0_ |= 0x00000001;
}
message_.add(
input.readMessage(
com.google.cloud.datastream.v1.ValidationMessage.parser(),
extensionRegistry));
break;
}
case 34:
{
java.lang.String s = input.readStringRequireUtf8();
code_ = s;
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
message_ = java.util.Collections.unmodifiableList(message_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datastream.v1.DatastreamResourcesProto
.internal_static_google_cloud_datastream_v1_Validation_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datastream.v1.DatastreamResourcesProto
.internal_static_google_cloud_datastream_v1_Validation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datastream.v1.Validation.class,
com.google.cloud.datastream.v1.Validation.Builder.class);
}
/**
*
*
* <pre>
* Validation execution state.
* </pre>
*
* Protobuf enum {@code google.cloud.datastream.v1.Validation.State}
*/
public enum State implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* Unspecified state.
* </pre>
*
* <code>STATE_UNSPECIFIED = 0;</code>
*/
STATE_UNSPECIFIED(0),
/**
*
*
* <pre>
* Validation did not execute.
* </pre>
*
* <code>NOT_EXECUTED = 1;</code>
*/
NOT_EXECUTED(1),
/**
*
*
* <pre>
* Validation failed.
* </pre>
*
* <code>FAILED = 2;</code>
*/
FAILED(2),
/**
*
*
* <pre>
* Validation passed.
* </pre>
*
* <code>PASSED = 3;</code>
*/
PASSED(3),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* Unspecified state.
* </pre>
*
* <code>STATE_UNSPECIFIED = 0;</code>
*/
public static final int STATE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* Validation did not execute.
* </pre>
*
* <code>NOT_EXECUTED = 1;</code>
*/
public static final int NOT_EXECUTED_VALUE = 1;
/**
*
*
* <pre>
* Validation failed.
* </pre>
*
* <code>FAILED = 2;</code>
*/
public static final int FAILED_VALUE = 2;
/**
*
*
* <pre>
* Validation passed.
* </pre>
*
* <code>PASSED = 3;</code>
*/
public static final int PASSED_VALUE = 3;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static State valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static State forNumber(int value) {
switch (value) {
case 0:
return STATE_UNSPECIFIED;
case 1:
return NOT_EXECUTED;
case 2:
return FAILED;
case 3:
return PASSED;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<State> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<State> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<State>() {
public State findValueByNumber(int number) {
return State.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.datastream.v1.Validation.getDescriptor().getEnumTypes().get(0);
}
private static final State[] VALUES = values();
public static State valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private State(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.datastream.v1.Validation.State)
}
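  // Usage sketch (illustrative comment only): raw wire values can be mapped back to the enum via
  // State.forNumber, which returns null for values this generated class does not know about, e.g.
  //   Validation.State known   = Validation.State.forNumber(2);  // FAILED
  //   Validation.State unknown = Validation.State.forNumber(99); // null - caller decides how to react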
public static final int DESCRIPTION_FIELD_NUMBER = 1;
private volatile java.lang.Object description_;
/**
*
*
* <pre>
* A short description of the validation.
* </pre>
*
* <code>string description = 1;</code>
*
* @return The description.
*/
@java.lang.Override
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
description_ = s;
return s;
}
}
/**
*
*
* <pre>
* A short description of the validation.
* </pre>
*
* <code>string description = 1;</code>
*
* @return The bytes for description.
*/
@java.lang.Override
public com.google.protobuf.ByteString getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
description_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int STATE_FIELD_NUMBER = 2;
private int state_;
/**
*
*
* <pre>
* Validation execution status.
* </pre>
*
* <code>.google.cloud.datastream.v1.Validation.State state = 2;</code>
*
* @return The enum numeric value on the wire for state.
*/
@java.lang.Override
public int getStateValue() {
return state_;
}
/**
*
*
* <pre>
* Validation execution status.
* </pre>
*
* <code>.google.cloud.datastream.v1.Validation.State state = 2;</code>
*
* @return The state.
*/
@java.lang.Override
public com.google.cloud.datastream.v1.Validation.State getState() {
@SuppressWarnings("deprecation")
com.google.cloud.datastream.v1.Validation.State result =
com.google.cloud.datastream.v1.Validation.State.valueOf(state_);
return result == null ? com.google.cloud.datastream.v1.Validation.State.UNRECOGNIZED : result;
}
public static final int MESSAGE_FIELD_NUMBER = 3;
private java.util.List<com.google.cloud.datastream.v1.ValidationMessage> message_;
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.datastream.v1.ValidationMessage> getMessageList() {
return message_;
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.datastream.v1.ValidationMessageOrBuilder>
getMessageOrBuilderList() {
return message_;
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
@java.lang.Override
public int getMessageCount() {
return message_.size();
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
@java.lang.Override
public com.google.cloud.datastream.v1.ValidationMessage getMessage(int index) {
return message_.get(index);
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
@java.lang.Override
public com.google.cloud.datastream.v1.ValidationMessageOrBuilder getMessageOrBuilder(int index) {
return message_.get(index);
}
public static final int CODE_FIELD_NUMBER = 4;
private volatile java.lang.Object code_;
/**
*
*
* <pre>
* A custom code identifying this validation.
* </pre>
*
* <code>string code = 4;</code>
*
* @return The code.
*/
@java.lang.Override
public java.lang.String getCode() {
java.lang.Object ref = code_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
code_ = s;
return s;
}
}
/**
*
*
* <pre>
* A custom code identifying this validation.
* </pre>
*
* <code>string code = 4;</code>
*
* @return The bytes for code.
*/
@java.lang.Override
public com.google.protobuf.ByteString getCodeBytes() {
java.lang.Object ref = code_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
code_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, description_);
}
if (state_ != com.google.cloud.datastream.v1.Validation.State.STATE_UNSPECIFIED.getNumber()) {
output.writeEnum(2, state_);
}
for (int i = 0; i < message_.size(); i++) {
output.writeMessage(3, message_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(code_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, code_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, description_);
}
if (state_ != com.google.cloud.datastream.v1.Validation.State.STATE_UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, state_);
}
for (int i = 0; i < message_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, message_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(code_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, code_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datastream.v1.Validation)) {
return super.equals(obj);
}
com.google.cloud.datastream.v1.Validation other =
(com.google.cloud.datastream.v1.Validation) obj;
if (!getDescription().equals(other.getDescription())) return false;
if (state_ != other.state_) return false;
if (!getMessageList().equals(other.getMessageList())) return false;
if (!getCode().equals(other.getCode())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
hash = (53 * hash) + getDescription().hashCode();
hash = (37 * hash) + STATE_FIELD_NUMBER;
hash = (53 * hash) + state_;
if (getMessageCount() > 0) {
hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getMessageList().hashCode();
}
hash = (37 * hash) + CODE_FIELD_NUMBER;
hash = (53 * hash) + getCode().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datastream.v1.Validation parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1.Validation parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1.Validation parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1.Validation parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1.Validation parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1.Validation parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1.Validation parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1.Validation parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datastream.v1.Validation parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1.Validation parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datastream.v1.Validation parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1.Validation parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.datastream.v1.Validation prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A validation to perform on a stream.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1.Validation}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1.Validation)
com.google.cloud.datastream.v1.ValidationOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datastream.v1.DatastreamResourcesProto
.internal_static_google_cloud_datastream_v1_Validation_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datastream.v1.DatastreamResourcesProto
.internal_static_google_cloud_datastream_v1_Validation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datastream.v1.Validation.class,
com.google.cloud.datastream.v1.Validation.Builder.class);
}
// Construct using com.google.cloud.datastream.v1.Validation.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getMessageFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
description_ = "";
state_ = 0;
if (messageBuilder_ == null) {
message_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
messageBuilder_.clear();
}
code_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datastream.v1.DatastreamResourcesProto
.internal_static_google_cloud_datastream_v1_Validation_descriptor;
}
@java.lang.Override
public com.google.cloud.datastream.v1.Validation getDefaultInstanceForType() {
return com.google.cloud.datastream.v1.Validation.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datastream.v1.Validation build() {
com.google.cloud.datastream.v1.Validation result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datastream.v1.Validation buildPartial() {
com.google.cloud.datastream.v1.Validation result =
new com.google.cloud.datastream.v1.Validation(this);
int from_bitField0_ = bitField0_;
result.description_ = description_;
result.state_ = state_;
if (messageBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
message_ = java.util.Collections.unmodifiableList(message_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.message_ = message_;
} else {
result.message_ = messageBuilder_.build();
}
result.code_ = code_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datastream.v1.Validation) {
return mergeFrom((com.google.cloud.datastream.v1.Validation) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datastream.v1.Validation other) {
if (other == com.google.cloud.datastream.v1.Validation.getDefaultInstance()) return this;
if (!other.getDescription().isEmpty()) {
description_ = other.description_;
onChanged();
}
if (other.state_ != 0) {
setStateValue(other.getStateValue());
}
if (messageBuilder_ == null) {
if (!other.message_.isEmpty()) {
if (message_.isEmpty()) {
message_ = other.message_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureMessageIsMutable();
message_.addAll(other.message_);
}
onChanged();
}
} else {
if (!other.message_.isEmpty()) {
if (messageBuilder_.isEmpty()) {
messageBuilder_.dispose();
messageBuilder_ = null;
message_ = other.message_;
bitField0_ = (bitField0_ & ~0x00000001);
messageBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getMessageFieldBuilder()
: null;
} else {
messageBuilder_.addAllMessages(other.message_);
}
}
}
if (!other.getCode().isEmpty()) {
code_ = other.code_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.datastream.v1.Validation parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.datastream.v1.Validation) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object description_ = "";
/**
*
*
* <pre>
* A short description of the validation.
* </pre>
*
* <code>string description = 1;</code>
*
* @return The description.
*/
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
description_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A short description of the validation.
* </pre>
*
* <code>string description = 1;</code>
*
* @return The bytes for description.
*/
public com.google.protobuf.ByteString getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
description_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A short description of the validation.
* </pre>
*
* <code>string description = 1;</code>
*
* @param value The description to set.
* @return This builder for chaining.
*/
public Builder setDescription(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
description_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* A short description of the validation.
* </pre>
*
* <code>string description = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearDescription() {
description_ = getDefaultInstance().getDescription();
onChanged();
return this;
}
/**
*
*
* <pre>
* A short description of the validation.
* </pre>
*
* <code>string description = 1;</code>
*
* @param value The bytes for description to set.
* @return This builder for chaining.
*/
public Builder setDescriptionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
description_ = value;
onChanged();
return this;
}
private int state_ = 0;
/**
*
*
* <pre>
* Validation execution status.
* </pre>
*
* <code>.google.cloud.datastream.v1.Validation.State state = 2;</code>
*
* @return The enum numeric value on the wire for state.
*/
@java.lang.Override
public int getStateValue() {
return state_;
}
/**
*
*
* <pre>
* Validation execution status.
* </pre>
*
* <code>.google.cloud.datastream.v1.Validation.State state = 2;</code>
*
* @param value The enum numeric value on the wire for state to set.
* @return This builder for chaining.
*/
public Builder setStateValue(int value) {
state_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Validation execution status.
* </pre>
*
* <code>.google.cloud.datastream.v1.Validation.State state = 2;</code>
*
* @return The state.
*/
@java.lang.Override
public com.google.cloud.datastream.v1.Validation.State getState() {
@SuppressWarnings("deprecation")
com.google.cloud.datastream.v1.Validation.State result =
com.google.cloud.datastream.v1.Validation.State.valueOf(state_);
return result == null ? com.google.cloud.datastream.v1.Validation.State.UNRECOGNIZED : result;
}
/**
*
*
* <pre>
* Validation execution status.
* </pre>
*
* <code>.google.cloud.datastream.v1.Validation.State state = 2;</code>
*
* @param value The state to set.
* @return This builder for chaining.
*/
public Builder setState(com.google.cloud.datastream.v1.Validation.State value) {
if (value == null) {
throw new NullPointerException();
}
state_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Validation execution status.
* </pre>
*
* <code>.google.cloud.datastream.v1.Validation.State state = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearState() {
state_ = 0;
onChanged();
return this;
}
private java.util.List<com.google.cloud.datastream.v1.ValidationMessage> message_ =
java.util.Collections.emptyList();
private void ensureMessageIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
message_ =
new java.util.ArrayList<com.google.cloud.datastream.v1.ValidationMessage>(message_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datastream.v1.ValidationMessage,
com.google.cloud.datastream.v1.ValidationMessage.Builder,
com.google.cloud.datastream.v1.ValidationMessageOrBuilder>
messageBuilder_;
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public java.util.List<com.google.cloud.datastream.v1.ValidationMessage> getMessageList() {
if (messageBuilder_ == null) {
return java.util.Collections.unmodifiableList(message_);
} else {
return messageBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public int getMessageCount() {
if (messageBuilder_ == null) {
return message_.size();
} else {
return messageBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public com.google.cloud.datastream.v1.ValidationMessage getMessage(int index) {
if (messageBuilder_ == null) {
return message_.get(index);
} else {
return messageBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public Builder setMessage(int index, com.google.cloud.datastream.v1.ValidationMessage value) {
if (messageBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMessageIsMutable();
message_.set(index, value);
onChanged();
} else {
messageBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public Builder setMessage(
int index, com.google.cloud.datastream.v1.ValidationMessage.Builder builderForValue) {
if (messageBuilder_ == null) {
ensureMessageIsMutable();
message_.set(index, builderForValue.build());
onChanged();
} else {
messageBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public Builder addMessage(com.google.cloud.datastream.v1.ValidationMessage value) {
if (messageBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMessageIsMutable();
message_.add(value);
onChanged();
} else {
messageBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public Builder addMessage(int index, com.google.cloud.datastream.v1.ValidationMessage value) {
if (messageBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMessageIsMutable();
message_.add(index, value);
onChanged();
} else {
messageBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public Builder addMessage(
com.google.cloud.datastream.v1.ValidationMessage.Builder builderForValue) {
if (messageBuilder_ == null) {
ensureMessageIsMutable();
message_.add(builderForValue.build());
onChanged();
} else {
messageBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public Builder addMessage(
int index, com.google.cloud.datastream.v1.ValidationMessage.Builder builderForValue) {
if (messageBuilder_ == null) {
ensureMessageIsMutable();
message_.add(index, builderForValue.build());
onChanged();
} else {
messageBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public Builder addAllMessage(
java.lang.Iterable<? extends com.google.cloud.datastream.v1.ValidationMessage> values) {
if (messageBuilder_ == null) {
ensureMessageIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, message_);
onChanged();
} else {
messageBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public Builder clearMessage() {
if (messageBuilder_ == null) {
message_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
messageBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public Builder removeMessage(int index) {
if (messageBuilder_ == null) {
ensureMessageIsMutable();
message_.remove(index);
onChanged();
} else {
messageBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public com.google.cloud.datastream.v1.ValidationMessage.Builder getMessageBuilder(int index) {
return getMessageFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public com.google.cloud.datastream.v1.ValidationMessageOrBuilder getMessageOrBuilder(
int index) {
if (messageBuilder_ == null) {
return message_.get(index);
} else {
return messageBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public java.util.List<? extends com.google.cloud.datastream.v1.ValidationMessageOrBuilder>
getMessageOrBuilderList() {
if (messageBuilder_ != null) {
return messageBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(message_);
}
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public com.google.cloud.datastream.v1.ValidationMessage.Builder addMessageBuilder() {
return getMessageFieldBuilder()
.addBuilder(com.google.cloud.datastream.v1.ValidationMessage.getDefaultInstance());
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public com.google.cloud.datastream.v1.ValidationMessage.Builder addMessageBuilder(int index) {
return getMessageFieldBuilder()
.addBuilder(index, com.google.cloud.datastream.v1.ValidationMessage.getDefaultInstance());
}
/**
*
*
* <pre>
* Messages reflecting the validation results.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.ValidationMessage message = 3;</code>
*/
public java.util.List<com.google.cloud.datastream.v1.ValidationMessage.Builder>
getMessageBuilderList() {
return getMessageFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datastream.v1.ValidationMessage,
com.google.cloud.datastream.v1.ValidationMessage.Builder,
com.google.cloud.datastream.v1.ValidationMessageOrBuilder>
getMessageFieldBuilder() {
if (messageBuilder_ == null) {
messageBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datastream.v1.ValidationMessage,
com.google.cloud.datastream.v1.ValidationMessage.Builder,
com.google.cloud.datastream.v1.ValidationMessageOrBuilder>(
message_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
message_ = null;
}
return messageBuilder_;
}
private java.lang.Object code_ = "";
/**
*
*
* <pre>
* A custom code identifying this validation.
* </pre>
*
* <code>string code = 4;</code>
*
* @return The code.
*/
public java.lang.String getCode() {
java.lang.Object ref = code_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
code_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A custom code identifying this validation.
* </pre>
*
* <code>string code = 4;</code>
*
* @return The bytes for code.
*/
public com.google.protobuf.ByteString getCodeBytes() {
java.lang.Object ref = code_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
code_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A custom code identifying this validation.
* </pre>
*
* <code>string code = 4;</code>
*
* @param value The code to set.
* @return This builder for chaining.
*/
public Builder setCode(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
code_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* A custom code identifying this validation.
* </pre>
*
* <code>string code = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearCode() {
code_ = getDefaultInstance().getCode();
onChanged();
return this;
}
/**
*
*
* <pre>
* A custom code identifying this validation.
* </pre>
*
* <code>string code = 4;</code>
*
* @param value The bytes for code to set.
* @return This builder for chaining.
*/
public Builder setCodeBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
code_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datastream.v1.Validation)
}
// @@protoc_insertion_point(class_scope:google.cloud.datastream.v1.Validation)
private static final com.google.cloud.datastream.v1.Validation DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datastream.v1.Validation();
}
public static com.google.cloud.datastream.v1.Validation getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<Validation> PARSER =
new com.google.protobuf.AbstractParser<Validation>() {
@java.lang.Override
public Validation parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Validation(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<Validation> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Validation> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.datastream.v1.Validation getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
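/*
 * A minimal usage sketch (not generated code): it assumes the standard protobuf-java entry point
 * Validation.newBuilder() and only chains builder methods defined above (setDescription,
 * setStateValue, addMessage, setCode). The field values are illustrative placeholders.
 */
class ValidationBuilderUsageSketch {
  static com.google.cloud.datastream.v1.Validation buildSample() {
    return com.google.cloud.datastream.v1.Validation.newBuilder()
        .setDescription("Validate source connectivity")                                      // string description = 1
        .setStateValue(0)                                                                    // enum numeric wire value for state = 2
        .addMessage(com.google.cloud.datastream.v1.ValidationMessage.getDefaultInstance())   // repeated message = 3
        .setCode("SOURCE_CONNECTIVITY")                                                      // string code = 4
        .build();
  }
}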
|
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.aggregations.bucket.histogram;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.util.CollectionUtil;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.DoubleArray;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.bucket.BestBucketsDeferringCollector;
import org.elasticsearch.search.aggregations.bucket.DeferableBucketAggregator;
import org.elasticsearch.search.aggregations.bucket.DeferringBucketCollector;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.function.LongUnaryOperator;
public class VariableWidthHistogramAggregator extends DeferableBucketAggregator {
/**
* This aggregator goes through multiple phases of collection. Each phase has a different CollectionPhase::collectValue
* implementation
*
* Running a clustering algorithm like K-Means is infeasible because large indices don't fit into memory.
* But having multiple collection phases lets us accurately bucket the docs in one pass.
*/
private abstract class CollectionPhase implements Releasable {
/**
* This method will collect the doc and then either return itself or a new CollectionPhase
* It is responsible for determining when a phase is over and what phase will run next
*/
abstract CollectionPhase collectValue(LeafBucketCollector sub, int doc, double val) throws IOException;
/**
* @return the final number of buckets that will be used
* If this is not the final phase, then an instance of the next phase is created and it is asked for this answer.
*/
abstract int finalNumBuckets();
/**
* If this CollectionPhase is the final phase then this method will build and return the i'th bucket
* Otherwise, it will create an instance of the next phase and ask it for the i'th bucket (naturally, if that phase
* is not the last phase then it will do the same, and so on...)
*/
abstract InternalVariableWidthHistogram.Bucket buildBucket(int bucketOrd, InternalAggregations subAggregations) throws IOException;
}
/**
* Phase 1: Build up a buffer of docs (i.e. give each new doc its own bucket). No clustering decisions are made here.
* Building this buffer lets us analyze the distribution of the data before we begin clustering.
*/
private class BufferValuesPhase extends CollectionPhase{
private DoubleArray buffer;
private int bufferSize;
private int bufferLimit;
private MergeBucketsPhase mergeBucketsPhase;
BufferValuesPhase(int bufferLimit){
this.buffer = bigArrays().newDoubleArray(1);
this.bufferSize = 0;
this.bufferLimit = bufferLimit;
this.mergeBucketsPhase = null;
}
@Override
public CollectionPhase collectValue(LeafBucketCollector sub, int doc, double val) throws IOException{
if (bufferSize < bufferLimit) {
// Add to the buffer, i.e. store the doc in a new bucket
buffer = bigArrays().grow(buffer, bufferSize + 1);
buffer.set((long) bufferSize, val);
collectBucket(sub, doc, bufferSize);
bufferSize += 1;
}
if(bufferSize == bufferLimit) {
// We have hit the buffer limit. Switch to merge mode
CollectionPhase mergeBuckets = new MergeBucketsPhase(buffer, bufferSize);
Releasables.close(this);
return mergeBuckets;
} else {
// There is still room in the buffer
return this;
}
}
int finalNumBuckets(){
return getMergeBucketPhase().finalNumBuckets();
}
@Override
InternalVariableWidthHistogram.Bucket buildBucket(int bucketOrd, InternalAggregations subAggregations) throws IOException{
InternalVariableWidthHistogram.Bucket bucket = getMergeBucketPhase().buildBucket(bucketOrd, subAggregations);
return bucket;
}
MergeBucketsPhase getMergeBucketPhase(){
if(mergeBucketsPhase == null){
mergeBucketsPhase = new MergeBucketsPhase(buffer, bufferSize);
}
return mergeBucketsPhase;
}
@Override
public void close() {
if(mergeBucketsPhase != null){
Releasables.close(mergeBucketsPhase);
}
Releasables.close(buffer);
}
}
/**
* Phase 2: This phase is initialized with the buffer created in Phase 1.
* It is responsible for merging the buffered docs into a smaller number of buckets and then determining which existing
* bucket all subsequent docs belong to. New buckets will be created for docs that are distant from all existing ones
*/
private class MergeBucketsPhase extends CollectionPhase{
/**
* "Cluster" refers to intermediate buckets during collection
* They are kept sorted by centroid. The i'th index in all these arrays always refers to the i'th cluster
*/
public DoubleArray clusterMaxes;
public DoubleArray clusterMins;
public DoubleArray clusterCentroids;
public DoubleArray clusterSizes; // clusterSizes != bucketDocCounts when clusters are in the middle of a merge
public int numClusters;
private double avgBucketDistance;
MergeBucketsPhase(DoubleArray buffer, int bufferSize) {
// Cluster the documents to reduce the number of buckets
bucketBufferedDocs(buffer, bufferSize, mergePhaseInitialBucketCount(shardSize));
if(bufferSize > 1) {
updateAvgBucketDistance();
}
}
/**
* Sorts the <b>indices</b> of <code>values</code> by their underlying value
* This will produce a merge map whose application will sort <code>values</code>
*/
private class ClusterSorter extends InPlaceMergeSorter {
final DoubleArray values;
final long[] indexes;
ClusterSorter(DoubleArray values, int length){
this.values = values;
this.indexes = new long[length];
for(int i = 0; i < indexes.length; i++){
indexes[i] = i;
}
}
@Override
protected int compare(int i, int j) {
double iVal = values.get(indexes[i]);
double jVal = values.get(indexes[j]);
return Double.compare(iVal, jVal);
}
@Override
protected void swap(int i, int j) {
long hold = indexes[i];
indexes[i] = indexes[j];
indexes[j] = hold;
}
/**
* Produces a merge map where `mergeMap[i]` represents the index that <code>values[i]</code>
* would be moved to <b>if</b> <code>values</code> were sorted
* In other words, this method produces a merge map that will sort <code>values</code>
*
* See BucketsAggregator::mergeBuckets to learn more about the merge map
*/
public long[] generateMergeMap(){
sort(0, indexes.length);
return indexes;
}
}
/**
* Sorting the documents by key lets us bucket the documents into groups with a single linear scan
*
* But we can't do this by just sorting <code>buffer</code>, because we also need to generate a merge map
* for every change we make to the list, so that we can apply the changes to the underlying buckets as well.
*
* By just creating a merge map, we eliminate the need to actually sort <code>buffer</code>. We can just
* use the merge map to find any doc's sorted index.
*/
private void bucketBufferedDocs(final DoubleArray buffer, final int bufferSize, final int numBuckets) {
// Allocate space for the clusters about to be created
clusterMins = bigArrays().newDoubleArray(1);
clusterMaxes = bigArrays().newDoubleArray(1);
clusterCentroids = bigArrays().newDoubleArray(1);
clusterSizes = bigArrays().newDoubleArray(1);
numClusters = 0;
ClusterSorter sorter = new ClusterSorter(buffer, bufferSize);
long[] mergeMap = sorter.generateMergeMap();
// Naively use basic linear separation to group the first bufferSize docs into initialNumBuckets buckets
// This will require modifying the merge map, which currently represents a sorted list of buckets with 1 doc / bucket
int docsPerBucket = (int) Math.ceil((double) bufferSize / (double) numBuckets);
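// e.g. with bufferSize = 100 and numBuckets = 75, docsPerBucket = ceil(100 / 75) = 2, so consecutive pairs of sorted docs start out sharing a cluster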
int bucketOrd = 0;
for(int i = 0; i < mergeMap.length; i++){
// mergeMap[i] is the index of the i'th smallest doc
double val = buffer.get(mergeMap[i]);
// Put the i'th smallest doc into the bucket at bucketOrd
mergeMap[i] = (int)(mergeMap[i]/docsPerBucket);
if(bucketOrd == numClusters){
createAndAppendNewCluster(val);
} else {
addToCluster(bucketOrd, val);
}
if((i + 1) % docsPerBucket == 0){
// This bucket is full. Make a new one
bucketOrd += 1;
}
}
LongUnaryOperator howToRewrite = b -> mergeMap[(int) b];
rewriteBuckets(bucketOrd + 1, howToRewrite);
if (deferringCollector != null) {
deferringCollector.rewriteBuckets(howToRewrite);
}
}
@Override
public CollectionPhase collectValue(LeafBucketCollector sub, int doc, double val) throws IOException{
int bucketOrd = getNearestBucket(val);
double distance = Math.abs(clusterCentroids.get(bucketOrd) - val);
if(bucketOrd == -1 || distance > (2 * avgBucketDistance) && numClusters < shardSize) {
// Make a new bucket since the document is distant from all existing buckets
// TODO: (maybe) Create a new bucket for <b>all</b> distant docs and merge down to shardSize buckets at end
createAndAppendNewCluster(val);
collectBucket(sub, doc, numClusters - 1);
if(val > clusterCentroids.get(bucketOrd)){
/*
* If the new value is bigger than the nearest bucket then insert
* just ahead of bucketOrd so that the array remains sorted.
*/
bucketOrd += 1;
}
moveLastCluster(bucketOrd);
// We've added a new bucket so update the average distance between the buckets
updateAvgBucketDistance();
} else {
addToCluster(bucketOrd, val);
collectExistingBucket(sub, doc, bucketOrd);
if (bucketOrd == 0 || bucketOrd == numClusters - 1) {
// Only update average distance if the centroid of one of the end buckets is modified.
updateAvgBucketDistance();
}
}
return this;
}
private void updateAvgBucketDistance() {
// Centroids are sorted, so the gaps between adjacent centroids telescope: their average is (last - first) / (numClusters - 1).
avgBucketDistance = (clusterCentroids.get(numClusters - 1) - clusterCentroids.get(0)) / (numClusters - 1);
}
/**
* Creates a new cluster with <code>value</code> and appends it to the cluster arrays
*/
private void createAndAppendNewCluster(double value){
// Ensure there is space for the cluster
clusterMaxes = bigArrays().grow(clusterMaxes, numClusters + 1); // + 1 because indexing starts at 0
clusterMins = bigArrays().grow(clusterMins, numClusters + 1);
clusterCentroids = bigArrays().grow(clusterCentroids, numClusters + 1);
clusterSizes = bigArrays().grow(clusterSizes, numClusters + 1);
// Initialize the cluster at the end of the array
clusterMaxes.set(numClusters, value);
clusterMins.set(numClusters, value);
clusterCentroids.set(numClusters, value);
clusterSizes.set(numClusters, 1);
numClusters += 1;
}
/**
* Move the last cluster to position <code>idx</code>
* This is expensive because a merge map of size <code>numClusters</code> is created, so don't call this method too often
*
* TODO: Make this more efficient
*/
private void moveLastCluster(int index){
if(index != numClusters - 1) {
// Move the cluster metadata
double holdMax = clusterMaxes.get(numClusters-1);
double holdMin = clusterMins.get(numClusters-1);
double holdCentroid = clusterCentroids.get(numClusters-1);
double holdSize = clusterSizes.get(numClusters-1);
for (int i = numClusters - 1; i > index; i--) {
// The clusters in range {index ... numClusters - 1} move up 1 index to make room for the new cluster
clusterMaxes.set(i, clusterMaxes.get(i-1));
clusterMins.set(i, clusterMins.get(i-1));
clusterCentroids.set(i, clusterCentroids.get(i-1));
clusterSizes.set(i, clusterSizes.get(i-1));
}
clusterMaxes.set(index, holdMax);
clusterMins.set(index, holdMin);
clusterCentroids.set(index, holdCentroid);
clusterSizes.set(index, holdSize);
// Move the underlying buckets
LongUnaryOperator mergeMap = new LongUnaryOperator() {
@Override
public long applyAsLong(long i) {
if(i < index) {
// The clusters in range {0 ... idx - 1} don't move
return i;
}
if(i == numClusters - 1) {
// The new cluster moves to index
return (long)index;
}
// The clusters in range {index ... numClusters - 1} shift forward
return i + 1;
}
};
rewriteBuckets(numClusters, mergeMap);
if (deferringCollector != null) {
deferringCollector.rewriteBuckets(mergeMap);
}
}
}
/**
* Adds <code>val</code> to the cluster at index <code>bucketOrd</code>.
* The cluster's centroid, min, max, and size are recalculated.
*/
private void addToCluster(int bucketOrd, double val){
assert bucketOrd < numClusters;
double max = Math.max(clusterMaxes.get(bucketOrd), val);
double min = Math.min(clusterMins.get(bucketOrd), val);
// Recalculate the centroid
double oldCentroid = clusterCentroids.get(bucketOrd);
double size = clusterSizes.get(bucketOrd);
double newCentroid = ((oldCentroid * size) + val) / (size + 1);
clusterMaxes.set(bucketOrd, max);
clusterMins.set(bucketOrd, min);
clusterCentroids.set(bucketOrd, newCentroid);
clusterSizes.increment(bucketOrd, 1);
}
/**
* Returns the ordinal of the bucket whose centroid is closest to <code>val</code>, or -1 if there are no buckets.
**/
private int getNearestBucket(double value){
if (numClusters == 0){
return -1;
}
BigArrays.DoubleBinarySearcher binarySearcher = new BigArrays.DoubleBinarySearcher(clusterCentroids);
return binarySearcher.search(0, numClusters - 1, value);
}
@Override
int finalNumBuckets(){
return numClusters;
}
@Override
InternalVariableWidthHistogram.Bucket buildBucket(int bucketOrd, InternalAggregations subAggregations){
return new InternalVariableWidthHistogram.Bucket(
clusterCentroids.get(bucketOrd),
new InternalVariableWidthHistogram.Bucket.BucketBounds(clusterMins.get(bucketOrd), clusterMaxes.get(bucketOrd)),
bucketDocCount(bucketOrd),
formatter,
subAggregations);
}
@Override
public void close() {
Releasables.close(clusterMaxes, clusterMins, clusterCentroids, clusterSizes);
}
}
private final ValuesSource.Numeric valuesSource;
private final DocValueFormat formatter;
// Aggregation parameters
private final int numBuckets;
private final int shardSize;
private final int bufferLimit;
private CollectionPhase collector;
private BestBucketsDeferringCollector deferringCollector;
VariableWidthHistogramAggregator(String name, AggregatorFactories factories, int numBuckets, int shardSize,
int initialBuffer, @Nullable ValuesSourceConfig valuesSourceConfig,
AggregationContext context, Aggregator parent,
Map<String, Object> metadata) throws IOException{
super(name, factories, context, parent, metadata);
this.numBuckets = numBuckets;
this.valuesSource = (ValuesSource.Numeric) valuesSourceConfig.getValuesSource();
this.formatter = valuesSourceConfig.format();
this.shardSize = shardSize;
this.bufferLimit = initialBuffer;
collector = new BufferValuesPhase(this.bufferLimit);
String scoringAgg = subAggsNeedScore();
String nestedAgg = descendsFromNestedAggregator(parent);
if (scoringAgg != null && nestedAgg != null) {
/*
* Terms agg would force the collect mode to depth_first here, because
* we need to access the score of nested documents in a sub-aggregation
* and we are not able to generate this score while replaying deferred documents.
*
* But the VariableWidthHistogram agg _must_ execute in breadth first since it relies on
* deferring execution, so we just have to throw up our hands and refuse
*/
throw new IllegalStateException("VariableWidthHistogram agg [" + name() + "] is the child of the nested agg [" + nestedAgg
+ "], and also has a scoring child agg [" + scoringAgg + "]. This combination is not supported because " +
"it requires executing in [depth_first] mode, which the VariableWidthHistogram agg cannot do.");
}
}
private String subAggsNeedScore() {
for (Aggregator subAgg : subAggregators) {
if (subAgg.scoreMode().needsScores()) {
return subAgg.name();
}
}
return null;
}
private String descendsFromNestedAggregator(Aggregator parent) {
while (parent != null) {
if (parent.getClass() == NestedAggregator.class) {
return parent.name();
}
parent = parent.parent();
}
return null;
}
@Override
public ScoreMode scoreMode() {
if (valuesSource != null && valuesSource.needsScores()) {
return ScoreMode.COMPLETE;
}
return super.scoreMode();
}
@Override
protected boolean shouldDefer(Aggregator aggregator) {
return true;
}
@Override
public DeferringBucketCollector buildDeferringCollector() {
deferringCollector = new BestBucketsDeferringCollector(topLevelQuery(), searcher(), descendsFromGlobalAggregator(parent()));
return deferringCollector;
}
@Override
protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException {
if (valuesSource == null) {
return LeafBucketCollector.NO_OP_COLLECTOR;
}
final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx);
return new LeafBucketCollectorBase(sub, values){
@Override
public void collect(int doc, long bucket) throws IOException {
assert bucket == 0;
if(values.advanceExact(doc)){
final int valuesCount = values.docValueCount();
double prevVal = Double.NEGATIVE_INFINITY;
for (int i = 0; i < valuesCount; ++i) {
double val = values.nextValue();
assert val >= prevVal;
if (val == prevVal){
continue;
}
collector = collector.collectValue(sub, doc, val);
}
}
}
};
}
@Override
public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
int numClusters = collector.finalNumBuckets();
long[] bucketOrdsToCollect = new long[numClusters];
for (int i = 0; i < numClusters; i++) {
bucketOrdsToCollect[i] = i;
}
InternalAggregations[] subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
List<InternalVariableWidthHistogram.Bucket> buckets = new ArrayList<>(numClusters);
for (int bucketOrd = 0; bucketOrd < numClusters; bucketOrd++) {
buckets.add(collector.buildBucket(bucketOrd, subAggregationResults[bucketOrd]));
}
Function<List<InternalVariableWidthHistogram.Bucket>, InternalAggregation> resultBuilder = bucketsToFormat -> {
// The contract of the histogram aggregation is that shards must return
// buckets ordered by centroid in ascending order
CollectionUtil.introSort(bucketsToFormat, BucketOrder.key(true).comparator());
InternalVariableWidthHistogram.EmptyBucketInfo emptyBucketInfo = new InternalVariableWidthHistogram.EmptyBucketInfo(
buildEmptySubAggregations());
return new InternalVariableWidthHistogram(name, bucketsToFormat, emptyBucketInfo, numBuckets, formatter, metadata());
};
return new InternalAggregation[] { resultBuilder.apply(buckets) };
}
@Override
public InternalAggregation buildEmptyAggregation() {
InternalVariableWidthHistogram.EmptyBucketInfo emptyBucketInfo = new InternalVariableWidthHistogram.EmptyBucketInfo(
buildEmptySubAggregations()
);
return new InternalVariableWidthHistogram(name(), Collections.emptyList(), emptyBucketInfo, numBuckets, formatter, metadata());
}
@Override
public void doClose() {
Releasables.close(collector);
}
public static int mergePhaseInitialBucketCount(int shardSize) {
// Target shardSize * (3/4) buckets so that there's room for more distant buckets to be added during the rest of collection
return (int) ((long) shardSize * 3 / 4);
}
}
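/*
 * A standalone sketch (not part of the aggregator above) of the index-sorting trick used by
 * ClusterSorter: rather than sorting the values themselves, sort an array of indices by the values
 * they point at, so entry i ends up holding the original position of the i'th smallest value
 * (this is how bucketBufferedDocs reads the result: "mergeMap[i] is the index of the i'th smallest doc").
 * Plain arrays stand in for BigArrays here.
 */
class MergeMapSketch {
    /** Returns an array whose i'th entry is the original index of the i'th smallest value. */
    static long[] sortedIndices(double[] values) {
        Integer[] order = new Integer[values.length];
        for (int i = 0; i < order.length; i++) {
            order[i] = i;
        }
        // Compare the referenced values, mirroring ClusterSorter.compare(i, j)
        java.util.Arrays.sort(order, (a, b) -> Double.compare(values[a], values[b]));
        long[] indices = new long[values.length];
        for (int i = 0; i < order.length; i++) {
            indices[i] = order[i];
        }
        return indices;
    }

    public static void main(String[] args) {
        // For {3.0, 1.0, 2.0} the ascending order of original positions is [1, 2, 0]
        System.out.println(java.util.Arrays.toString(sortedIndices(new double[] { 3.0, 1.0, 2.0 })));
    }
}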
|
|
package com.ru.andr.walkinggame;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.FragmentManager;
import android.app.ProgressDialog;
import android.content.Intent;
import android.graphics.Point;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.FragmentActivity;
import android.util.Log;
import android.view.Display;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;
import com.shephertz.app42.gaming.multiplayer.client.ConnectionState;
import com.shephertz.app42.gaming.multiplayer.client.WarpClient;
import com.shephertz.app42.gaming.multiplayer.client.command.WarpResponseResultCode;
import com.shephertz.app42.gaming.multiplayer.client.events.ChatEvent;
import com.shephertz.app42.gaming.multiplayer.client.events.ConnectEvent;
import com.shephertz.app42.gaming.multiplayer.client.events.LiveRoomInfoEvent;
import com.shephertz.app42.gaming.multiplayer.client.events.LobbyData;
import com.shephertz.app42.gaming.multiplayer.client.events.MoveEvent;
import com.shephertz.app42.gaming.multiplayer.client.events.RoomData;
import com.shephertz.app42.gaming.multiplayer.client.events.RoomEvent;
import com.shephertz.app42.gaming.multiplayer.client.events.UpdateEvent;
import com.shephertz.app42.gaming.multiplayer.client.listener.ChatRequestListener;
import com.shephertz.app42.gaming.multiplayer.client.listener.ConnectionRequestListener;
import com.shephertz.app42.gaming.multiplayer.client.listener.NotifyListener;
import com.shephertz.app42.gaming.multiplayer.client.listener.RoomRequestListener;
import java.util.ArrayList;
import java.util.HashMap;
public class ChatActivity extends Activity implements RoomRequestListener, NotifyListener, ConnectionRequestListener, ChatRequestListener {
private ProgressDialog progressDialog;
private WarpClient theClient;
private ImageView imageView;
private Button btn;
private UserListAdapter userListAdapter;
private Handler handler = new Handler();
private ArrayList<Player> onlineUserList = new ArrayList<Player>();
private Player player;
private int myscore;
private int enemyscore;
private float startPos;
private int stepSize;
private int winDiff = 50;
@Override
public void onCreate(Bundle savedInstanceState){
super.onCreate(savedInstanceState);
// Inflate the layout before any findViewById call, otherwise the lookups return null
setContentView(R.layout.activity_chat);
btn = (Button)findViewById(R.id.sendBtn);
userListAdapter = new UserListAdapter(this);
imageView = (ImageView)findViewById(R.id.gameProgress);
Point mPointT = new Point();
Display d = getWindowManager().getDefaultDisplay();
d.getSize(mPointT);
startPos = 0;
stepSize = mPointT.x / 100;
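// one step is 1% of the screen width; the marker below moves one step per point of score difference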
imageView.setTranslationX(startPos);
player = Player.getPlayer(this);
myscore = 0;
enemyscore = 0;
try{
theClient = WarpClient.getInstance();
}catch(Exception e){
e.printStackTrace();
}
runOnUiThread(update);
}
@Override
protected void onStart(){
super.onStart();
theClient.addConnectionRequestListener(this);
theClient.addRoomRequestListener(this);
theClient.addNotificationListener(this);
theClient.addChatRequestListener(this);
if(theClient.getConnectionState()== ConnectionState.CONNECTED){
progressDialog = ProgressDialog.show(this, "", "Please wait..");
theClient.getLiveRoomInfo(Constants.roomId);
}else{
theClient.RecoverConnection();
}
}
@Override
protected void onStop(){
super.onStop();
theClient.removeConnectionRequestListener(this);
theClient.removeRoomRequestListener(this);
theClient.removeNotificationListener(this);
theClient.removeChatRequestListener(this);
}
@Override
public void onDestroy(){
super.onDestroy();
if(theClient!=null){
theClient.removeConnectionRequestListener(this);
theClient.removeRoomRequestListener(this);
theClient.removeNotificationListener(this);
theClient.removeChatRequestListener(this);
handleLeaveRoom();
}
}
@Override
public void onBackPressed(){
super.onBackPressed();
}
private void handleLeaveRoom(){
if(theClient!=null){
theClient.unsubscribeRoom(Utils.roomID);
theClient.leaveRoom(Utils.roomID);
theClient.disconnect();
}
}
public void onSendClicked(View view){
int val = player.getStrength();
myscore += val;
theClient.sendChat(String.valueOf(val));
runOnUiThread(update);
}
@Override
public void onGetLiveRoomInfoDone(final LiveRoomInfoEvent event) {
if(progressDialog!=null){
runOnUiThread(new Runnable() {
@Override
public void run() {
progressDialog.dismiss();
}
});
}
if(event.getResult()== WarpResponseResultCode.SUCCESS){
onlineUserList.clear();
if (event.getJoinedUsers() == null){return;}
if(event.getJoinedUsers().length>1){// if more than one user is online
final String onlineUser[] = Utils.removeLocalUserNameFromArray(event.getJoinedUsers());
for(int i=0;i<onlineUser.length;i++){
Player user = new Player(onlineUser[i].toString(), true);
Log.d(onlineUser[i].toString(), onlineUser[i].toString());
onlineUserList.add(user);
}
resetAdapter();
}else{
showToastOnUIThread("No online user found");
}
}else{
showToastOnUIThread("onGetLiveRoomInfoDone Failed with ErrorCode: " + event.getResult());
}
}
private void resetAdapter(){
runOnUiThread(new Runnable() {
@Override
public void run() {
if (onlineUserList.size() > 0) {
userListAdapter.setData(onlineUserList);
} else {
userListAdapter.clear();
}
}
});
}
// region room management empty methods
@Override
public void onJoinRoomDone(RoomEvent arg0) {
}
@Override
public void onLeaveRoomDone(RoomEvent arg0) {
}
@Override
public void onSetCustomRoomDataDone(LiveRoomInfoEvent arg0) {
}
@Override
public void onSubscribeRoomDone(RoomEvent arg0) {
}
@Override
public void onUnSubscribeRoomDone(RoomEvent arg0) {
}
@Override
public void onUpdatePropertyDone(LiveRoomInfoEvent arg0) {
}
// endregion
@Override
public void onChatReceived(final ChatEvent event) {
if (event.getSender().compareTo(player.getName()) != 0){
enemyscore += Integer.parseInt(event.getMessage());
}
runOnUiThread(update);
int diff = myscore - enemyscore;
if (diff >= winDiff){
handleLeaveRoom();
//btn.setEnabled(false);
GameOverDialog d = new GameOverDialog("you win :) ");
d.show(this.getFragmentManager(), null);
}else if (diff <= -winDiff){
handleLeaveRoom();
//btn.setEnabled(false);
GameOverDialog d = new GameOverDialog("You lost :( ");
d.show(this.getFragmentManager(), null);
}
}
// region empty interface methods
@Override
public void onPrivateChatReceived(final String userName, final String message) {
}
@Override
public void onRoomCreated(RoomData arg0) {
}
@Override
public void onRoomDestroyed(RoomData arg0) {
}
@Override
public void onUpdatePeersReceived(UpdateEvent arg0) {
}
@Override
public void onUserJoinedLobby(LobbyData arg0, String arg1) {
}
@Override
public void onUserLeftLobby(LobbyData arg0, String arg1) {
}
@Override
public void onMoveCompleted(MoveEvent arg0) {
}
@Override
public void onLockPropertiesDone(byte arg0) {
}
@Override
public void onUnlockPropertiesDone(byte arg0) {
}
@Override
public void onDisconnectDone(ConnectEvent arg0) {
}
@Override
public void onInitUDPDone(byte result) {
}
@Override
public void onSendPrivateChatDone(byte result) {
}
// endregion
@Override
public void onUserJoinedRoom(final RoomData roomData, final String userName) {
if(!userName.equals(Utils.USER_NAME)){
onlineUserList.add(new Player(userName, true));
resetAdapter();
}
}
@Override
public void onUserLeftRoom(final RoomData roomData, final String userName) {
for(int i=0;i<onlineUserList.size();i++){
Player user = onlineUserList.get(i);
if(user.getName().equals(userName)){
onlineUserList.remove(user);
}
}
resetAdapter();
}
@Override
public void onUserPaused(String locid, boolean isLobby, String userName) {
for(int i=0;i<onlineUserList.size();i++){
Player user = onlineUserList.get(i);
if(user.getName().equals(userName)){
user.setStatus(false);
}
}
resetAdapter();
}
@Override
public void onUserResumed(String locid, boolean isLobby, String userName) {
for(int i=0;i<onlineUserList.size();i++){
Player user = onlineUserList.get(i);
if(user.getName().equals(userName)){
user.setStatus(true);
}
}
resetAdapter();
}
@Override
public void onConnectDone(final ConnectEvent event) {
if(progressDialog!=null){
runOnUiThread(new Runnable() {
@Override
public void run() {
progressDialog.dismiss();
}
});
}
if(event.getResult() == WarpResponseResultCode.SUCCESS){
showToastOnUIThread("Connection success");
}
else if(event.getResult() == WarpResponseResultCode.SUCCESS_RECOVERED){
showToastOnUIThread("Connection recovered");
runOnUiThread(new Runnable() {
@Override
public void run() {
if(progressDialog!=null){
progressDialog.dismiss();
}
progressDialog = ProgressDialog.show(ChatActivity.this, "", "Please wait..");
}
});
theClient.getLiveRoomInfo(Constants.roomId);
}
else if(event.getResult() == WarpResponseResultCode.CONNECTION_ERROR_RECOVERABLE){
runOnUiThread(new Runnable() {
@Override
public void run() {
progressDialog = ProgressDialog.show(ChatActivity.this, "", "Recoverable connection error. Recovering session after 5 seconds");
}
});
handler.postDelayed(new Runnable() {
@Override
public void run() {
progressDialog.setMessage("Recovering...");
theClient.RecoverConnection();
}
}, 5000);
}
else{
showToastOnUIThread("Non-recoverable connection error."+event.getResult());
handleLeaveRoom();
this.finish();
}
}
@Override
public void onSendChatDone(byte result) {
if(result!=WarpResponseResultCode.SUCCESS){
showToastOnUIThread("onSendChatDone Failed with ErrorCode: " + result);
}
}
private void showToastOnUIThread(final String message){
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(ChatActivity.this, message, Toast.LENGTH_LONG).show();
}
});
}
private Runnable update = new Runnable() {
@Override
public void run() {
TextView mytext = (TextView)findViewById(R.id.myscore);
TextView enemy = (TextView)findViewById(R.id.oponentcore);
mytext.setText("myScore: " + String.valueOf(myscore));
imageView.setTranslationX(startPos + (enemyscore - myscore)*stepSize);
enemy.setText("opponentScore: " + String.valueOf(enemyscore));
}
};
// region autogenerated stubs
@Override
public void onGameStarted(String arg0, String arg1, String arg2) {
// TODO Auto-generated method stub
}
@Override
public void onGameStopped(String arg0, String arg1) {
// TODO Auto-generated method stub
}
@Override
public void onUserChangeRoomProperty(RoomData arg0, String arg1,
HashMap<String, Object> arg2, HashMap<String, String> arg3) {
// TODO Auto-generated method stub
}
@Override
public void onNextTurnRequest(String arg0) {
// TODO Auto-generated method stub
}
@Override
public void onPrivateUpdateReceived(String arg0, byte[] arg1, boolean arg2) {
// TODO Auto-generated method stub
}
// endregion
}
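/*
 * A standalone sketch (not part of ChatActivity; names are illustrative) of the progress-bar math
 * used above: one step is screenWidth / 100 pixels and the marker is offset by one step per point
 * of score difference, so the winDiff of 50 points corresponds to roughly half a screen of travel.
 */
class ProgressMathSketch {
    static float markerOffsetPx(int screenWidthPx, int myScore, int enemyScore) {
        int stepSizePx = screenWidthPx / 100;         // 1% of the screen per point, as in onCreate()
        return (enemyScore - myScore) * stepSizePx;   // positive when the opponent is ahead
    }

    public static void main(String[] args) {
        // A 50 point lead on a 1080 px wide screen offsets the marker by -500 px, about half the width.
        System.out.println(markerOffsetPx(1080, 50, 0));
    }
}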
|
|
/***
* ASM: a very small and fast Java bytecode manipulation framework Copyright (c) 2000-2011 INRIA, France Telecom All
* rights reserved.
* <p>
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation and/or other materials provided with the
* distribution. 3. Neither the name of the copyright holders nor the names of its contributors may be used to endorse
* or promote products derived from this software without specific prior written permission.
* <p>
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package dyvilx.tools.asm;
public class ClassWriter implements ClassVisitor
{
public static final int COMPUTE_MAXS = 1;
public static final int COMPUTE_FRAMES = 2;
static final int ACC_SYNTHETIC_ATTRIBUTE = 0x40000;
static final int TO_ACC_SYNTHETIC = ACC_SYNTHETIC_ATTRIBUTE / ASMConstants.ACC_SYNTHETIC;
static final int NOARG_INSN = 0;
static final int SBYTE_INSN = 1;
static final int SHORT_INSN = 2;
static final int VAR_INSN = 3;
static final int IMPLVAR_INSN = 4;
static final int TYPE_INSN = 5;
static final int FIELDORMETH_INSN = 6;
static final int ITFMETH_INSN = 7;
static final int INDYMETH_INSN = 8;
static final int LABEL_INSN = 9;
static final int LABELW_INSN = 10;
static final int LDC_INSN = 11;
static final int LDCW_INSN = 12;
static final int IINC_INSN = 13;
static final int TABL_INSN = 14;
static final int LOOK_INSN = 15;
static final int MANA_INSN = 16;
static final int WIDE_INSN = 17;
static final byte[] TYPE;
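// The constant pool item kinds below, up to INDY, use the tag values defined by the JVM class file
// format; HANDLE_BASE and the TYPE_* / BSM values are ASM-internal pseudo-tags.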
static final int CLASS = 7;
static final int FIELD = 9;
static final int METH = 10;
static final int IMETH = 11;
static final int STR = 8;
static final int INT = 3;
static final int FLOAT = 4;
static final int LONG = 5;
static final int DOUBLE = 6;
static final int NAME_TYPE = 12;
static final int UTF8 = 1;
static final int MTYPE = 16;
static final int HANDLE = 15;
static final int INDY = 18;
static final int HANDLE_BASE = 20;
static final int TYPE_NORMAL = 30;
static final int TYPE_UNINIT = 31;
static final int TYPE_MERGED = 32;
static final int BSM = 33;
ClassReader cr;
int version;
int index;
final ByteVector pool;
Item[] items;
int threshold;
final Item key;
final Item key2;
final Item key3;
final Item key4;
Item[] typeTable;
private short typeCount;
private int access;
private int name;
String thisName;
private int signature;
private int superName;
private int interfaceCount;
private int[] interfaces;
private int sourceFile;
private ByteVector sourceDebug;
private int enclosingMethodOwner;
private int enclosingMethod;
private AnnotationWriter anns;
private AnnotationWriter ianns;
private AnnotationWriter tanns;
private AnnotationWriter itanns;
private Attribute attrs;
private int innerClassesCount;
private ByteVector innerClasses;
int bootstrapMethodsCount;
ByteVector bootstrapMethods;
FieldWriter firstField;
FieldWriter lastField;
MethodWriter firstMethod;
MethodWriter lastMethod;
private boolean computeMaxs;
private boolean computeFrames;
boolean invalidFrames;
static
{
int i;
byte[] b = new byte[220];
String s = "AAAAAAAAAAAAAAAABCLMMDDDDDEEEEEEEEEEEEEEEEEEEEAAAAAAAADD"
+ "DDDEEEEEEEEEEEEEEEEEEEEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+ "AAAAAAAAAAAAAAAAANAAAAAAAAAAAAAAAAAAAAJJJJJJJJJJJJJJJJDOPAA"
+ "AAAAGGGGGGGHIFBFAAFFAARQJJKKJJJJJJJJJJJJJJJJJJ";
for (i = 0; i < b.length; ++i)
{
b[i] = (byte) (s.charAt(i) - 'A');
}
TYPE = b;
}
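// Each character of the string above encodes the instruction format of one opcode as an offset from
// 'A' ('A' = NOARG_INSN, 'B' = SBYTE_INSN, ...), so TYPE[opcode] yields one of the *_INSN constants.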
public ClassWriter(final int flags)
{
this.index = 1;
this.pool = new ByteVector();
this.items = new Item[256];
this.threshold = (int) (0.75d * this.items.length);
this.key = new Item();
this.key2 = new Item();
this.key3 = new Item();
this.key4 = new Item();
this.computeMaxs = (flags & COMPUTE_MAXS) != 0;
this.computeFrames = (flags & COMPUTE_FRAMES) != 0;
}
public ClassWriter(final ClassReader classReader, final int flags)
{
this(flags);
classReader.copyPool(this);
this.cr = classReader;
}
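/*
 * Minimal usage sketch (illustrative only, kept as a comment so the class body is unchanged).
 * The calls mirror the visitor methods defined below; the version and access values are
 * placeholders, not constants defined in this file.
 *
 *   ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);
 *   cw.visit(version, access, "pkg/Demo", null, "java/lang/Object", null);
 *   cw.visitEnd();
 *   byte[] classFile = cw.toByteArray();
 */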
@Override
public final void visit(final int version, final int access, final String name, final String signature, final String superName, final String[] interfaces)
{
this.version = version;
this.access = access;
this.name = this.newClass(name);
this.thisName = name;
if (signature != null)
{
this.signature = this.newUTF8(signature);
}
this.superName = superName == null ? 0 : this.newClass(superName);
if (interfaces != null && interfaces.length > 0)
{
this.interfaceCount = interfaces.length;
this.interfaces = new int[this.interfaceCount];
for (int i = 0; i < this.interfaceCount; ++i)
{
this.interfaces[i] = this.newClass(interfaces[i]);
}
}
}
@Override
public final void visitSource(final String file, final String debug)
{
if (file != null)
{
this.sourceFile = this.newUTF8(file);
}
if (debug != null)
{
this.sourceDebug = new ByteVector().encodeUTF8(debug, 0, Integer.MAX_VALUE);
}
}
@Override
public final void visitOuterClass(final String owner, final String name, final String desc)
{
this.enclosingMethodOwner = this.newClass(owner);
if (name != null && desc != null)
{
this.enclosingMethod = this.newNameType(name, desc);
}
}
@Override
public final AnnotationVisitor visitAnnotation(final String desc, final boolean visible)
{
ByteVector bv = new ByteVector();
// write type, and reserve space for values count
bv.putShort(this.newUTF8(desc)).putShort(0);
AnnotationWriter aw = new AnnotationWriter(this, true, bv, bv, 2);
if (visible)
{
aw.next = this.anns;
this.anns = aw;
}
else
{
aw.next = this.ianns;
this.ianns = aw;
}
return aw;
}
@Override
public final AnnotationVisitor visitTypeAnnotation(int typeRef, TypePath typePath, final String desc, final boolean visible)
{
ByteVector bv = new ByteVector();
// write target_type and target_info
AnnotationWriter.putTarget(typeRef, typePath, bv);
// write type, and reserve space for values count
bv.putShort(this.newUTF8(desc)).putShort(0);
AnnotationWriter aw = new AnnotationWriter(this, true, bv, bv, bv.length - 2);
if (visible)
{
aw.next = this.tanns;
this.tanns = aw;
}
else
{
aw.next = this.itanns;
this.itanns = aw;
}
return aw;
}
@Override
public final void visitAttribute(final Attribute attr)
{
attr.next = this.attrs;
this.attrs = attr;
}
@Override
public final void visitInnerClass(final String name, final String outerName, final String innerName, final int access)
{
if (this.innerClasses == null)
{
this.innerClasses = new ByteVector();
}
Item nameItem = this.newClassItem(name);
if (nameItem.intVal == 0)
{
++this.innerClassesCount;
this.innerClasses.putShort(nameItem.index);
this.innerClasses.putShort(outerName == null ? 0 : this.newClass(outerName));
this.innerClasses.putShort(innerName == null ? 0 : this.newUTF8(innerName));
this.innerClasses.putShort(access);
nameItem.intVal = this.innerClassesCount;
}
}
@Override
public final FieldVisitor visitField(final int access, final String name, final String desc, final String signature, final Object value)
{
return new FieldWriter(this, access, name, desc, signature, value);
}
@Override
public final MethodVisitor visitMethod(final int access, final String name, final String desc, final String signature, final String[] exceptions)
{
return new MethodWriter(this, access, name, desc, signature, exceptions, this.computeMaxs, this.computeFrames);
}
@Override
public final void visitEnd()
{
}
public byte[] toByteArray()
{
if (this.index > 0xFFFF)
{
throw new RuntimeException("Class file too large!");
}
// computes the real size of the bytecode of this class
int size = 24 + 2 * this.interfaceCount;
int nbFields = 0;
FieldWriter fb = this.firstField;
while (fb != null)
{
++nbFields;
size += fb.getSize();
fb = fb.next;
}
int nbMethods = 0;
MethodWriter mb = this.firstMethod;
while (mb != null)
{
++nbMethods;
size += mb.getSize();
mb = mb.next;
}
int attributeCount = 0;
if (this.bootstrapMethods != null)
{
// we put it as first attribute in order to improve a bit
// ClassReader.copyBootstrapMethods
++attributeCount;
size += 8 + this.bootstrapMethods.length;
this.newUTF8("BootstrapMethods");
}
if (this.signature != 0)
{
++attributeCount;
size += 8;
this.newUTF8("Signature");
}
if (this.sourceFile != 0)
{
++attributeCount;
size += 8;
this.newUTF8("SourceFile");
}
if (this.sourceDebug != null)
{
++attributeCount;
size += this.sourceDebug.length + 6;
this.newUTF8("SourceDebugExtension");
}
if (this.enclosingMethodOwner != 0)
{
++attributeCount;
size += 10;
this.newUTF8("EnclosingMethod");
}
if ((this.access & ASMConstants.ACC_DEPRECATED) != 0)
{
++attributeCount;
size += 6;
this.newUTF8("Deprecated");
}
if ((this.access & ASMConstants.ACC_SYNTHETIC) != 0)
{
if ((this.version & 0xFFFF) < ASMConstants.V1_5 || (this.access & ACC_SYNTHETIC_ATTRIBUTE) != 0)
{
++attributeCount;
size += 6;
this.newUTF8("Synthetic");
}
}
if (this.innerClasses != null)
{
++attributeCount;
size += 8 + this.innerClasses.length;
this.newUTF8("InnerClasses");
}
if (this.anns != null)
{
++attributeCount;
size += 8 + this.anns.getSize();
this.newUTF8("RuntimeVisibleAnnotations");
}
if (this.ianns != null)
{
++attributeCount;
size += 8 + this.ianns.getSize();
this.newUTF8("RuntimeInvisibleAnnotations");
}
if (this.tanns != null)
{
++attributeCount;
size += 8 + this.tanns.getSize();
this.newUTF8("RuntimeVisibleTypeAnnotations");
}
if (this.itanns != null)
{
++attributeCount;
size += 8 + this.itanns.getSize();
this.newUTF8("RuntimeInvisibleTypeAnnotations");
}
if (this.attrs != null)
{
attributeCount += this.attrs.getCount();
size += this.attrs.getSize(this, null, 0, -1, -1);
}
size += this.pool.length;
// allocates a byte vector of this size, in order to avoid unnecessary
// arraycopy operations in the ByteVector.enlarge() method
ByteVector out = new ByteVector(size);
out.putInt(0xCAFEBABE).putInt(this.version);
out.putShort(this.index).putByteArray(this.pool.data, 0, this.pool.length);
int mask = ASMConstants.ACC_DEPRECATED | ACC_SYNTHETIC_ATTRIBUTE
| (this.access & ACC_SYNTHETIC_ATTRIBUTE) / TO_ACC_SYNTHETIC;
out.putShort(this.access & ~mask).putShort(this.name).putShort(this.superName);
out.putShort(this.interfaceCount);
for (int i = 0; i < this.interfaceCount; ++i)
{
out.putShort(this.interfaces[i]);
}
out.putShort(nbFields);
fb = this.firstField;
while (fb != null)
{
fb.put(out);
fb = fb.next;
}
out.putShort(nbMethods);
mb = this.firstMethod;
while (mb != null)
{
mb.put(out);
mb = mb.next;
}
out.putShort(attributeCount);
if (this.bootstrapMethods != null)
{
out.putShort(this.newUTF8("BootstrapMethods"));
out.putInt(this.bootstrapMethods.length + 2).putShort(this.bootstrapMethodsCount);
out.putByteArray(this.bootstrapMethods.data, 0, this.bootstrapMethods.length);
}
if (this.signature != 0)
{
out.putShort(this.newUTF8("Signature")).putInt(2).putShort(this.signature);
}
if (this.sourceFile != 0)
{
out.putShort(this.newUTF8("SourceFile")).putInt(2).putShort(this.sourceFile);
}
if (this.sourceDebug != null)
{
int len = this.sourceDebug.length;
out.putShort(this.newUTF8("SourceDebugExtension")).putInt(len);
out.putByteArray(this.sourceDebug.data, 0, len);
}
if (this.enclosingMethodOwner != 0)
{
out.putShort(this.newUTF8("EnclosingMethod")).putInt(4);
out.putShort(this.enclosingMethodOwner).putShort(this.enclosingMethod);
}
if ((this.access & ASMConstants.ACC_DEPRECATED) != 0)
{
out.putShort(this.newUTF8("Deprecated")).putInt(0);
}
if ((this.access & ASMConstants.ACC_SYNTHETIC) != 0)
{
if ((this.version & 0xFFFF) < ASMConstants.V1_5 || (this.access & ACC_SYNTHETIC_ATTRIBUTE) != 0)
{
out.putShort(this.newUTF8("Synthetic")).putInt(0);
}
}
if (this.innerClasses != null)
{
out.putShort(this.newUTF8("InnerClasses"));
out.putInt(this.innerClasses.length + 2).putShort(this.innerClassesCount);
out.putByteArray(this.innerClasses.data, 0, this.innerClasses.length);
}
if (this.anns != null)
{
out.putShort(this.newUTF8("RuntimeVisibleAnnotations"));
this.anns.put(out);
}
if (this.ianns != null)
{
out.putShort(this.newUTF8("RuntimeInvisibleAnnotations"));
this.ianns.put(out);
}
if (this.tanns != null)
{
out.putShort(this.newUTF8("RuntimeVisibleTypeAnnotations"));
this.tanns.put(out);
}
if (this.itanns != null)
{
out.putShort(this.newUTF8("RuntimeInvisibleTypeAnnotations"));
this.itanns.put(out);
}
if (this.attrs != null)
{
this.attrs.put(this, null, 0, -1, -1, out);
}
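// Editor's note (not in the original source): if invalid frames were detected while
// writing the methods, the class bytes built above are re-read with SKIP_FRAMES and fed
// back into this writer with computeFrames enabled, and the result of that second pass
// is returned instead of out.data.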
if (this.invalidFrames)
{
this.anns = null;
this.ianns = null;
this.attrs = null;
this.innerClassesCount = 0;
this.innerClasses = null;
this.bootstrapMethodsCount = 0;
this.bootstrapMethods = null;
this.firstField = null;
this.lastField = null;
this.firstMethod = null;
this.lastMethod = null;
this.computeMaxs = false;
this.computeFrames = true;
this.invalidFrames = false;
new ClassReader(out.data).accept(this, ClassReader.SKIP_FRAMES);
return this.toByteArray();
}
return out.data;
}
Item newConstItem(final Object cst)
{
if (cst instanceof Integer)
{
int val = (Integer) cst;
return this.newInteger(val);
}
else if (cst instanceof Byte)
{
int val = ((Byte) cst).intValue();
return this.newInteger(val);
}
else if (cst instanceof Character)
{
int val = (Character) cst;
return this.newInteger(val);
}
else if (cst instanceof Short)
{
int val = ((Short) cst).intValue();
return this.newInteger(val);
}
else if (cst instanceof Boolean)
{
int val = (Boolean) cst ? 1 : 0;
return this.newInteger(val);
}
else if (cst instanceof Float)
{
float val = (Float) cst;
return this.newFloat(val);
}
else if (cst instanceof Long)
{
long val = (Long) cst;
return this.newLong(val);
}
else if (cst instanceof Double)
{
double val = (Double) cst;
return this.newDouble(val);
}
else if (cst instanceof String)
{
return this.newString((String) cst);
}
else if (cst instanceof Type)
{
Type t = (Type) cst;
int s = t.getSort();
if (s == Type.OBJECT)
{
return this.newClassItem(t.getInternalName());
}
else if (s == Type.METHOD)
{
return this.newMethodTypeItem(t.getDescriptor());
}
else
{ // s == primitive type or array
return this.newClassItem(t.getDescriptor());
}
}
else if (cst instanceof Handle)
{
Handle h = (Handle) cst;
return this.newHandleItem(h.tag, h.owner, h.name, h.desc);
}
else
{
throw new IllegalArgumentException("value " + cst);
}
}
public int newConst(final Object cst)
{
return this.newConstItem(cst).index;
}
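// Usage sketch (editorial illustration, not part of the original source; "cw" stands for
// an instance of this writer class):
//   int intIdx = cw.newConst(Integer.valueOf(42));                 // CONSTANT_Integer
//   int strIdx = cw.newConst("hello");                             // CONSTANT_String
//   int clsIdx = cw.newConst(Type.getType("Ljava/lang/String;"));  // CONSTANT_Class
// Each call returns the index of the (possibly already existing) constant pool entry.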
public int newUTF8(final String value)
{
this.key.set(UTF8, value, null, null);
Item result = this.get(this.key);
if (result == null)
{
this.pool.putByte(UTF8).putUTF8(value);
result = new Item(this.index++, this.key);
this.put(result);
}
return result.index;
}
Item newClassItem(final String value)
{
this.key2.set(CLASS, value, null, null);
Item result = this.get(this.key2);
if (result == null)
{
this.pool.put12(CLASS, this.newUTF8(value));
result = new Item(this.index++, this.key2);
this.put(result);
}
return result;
}
public int newClass(final String value)
{
return this.newClassItem(value).index;
}
Item newMethodTypeItem(final String methodDesc)
{
this.key2.set(MTYPE, methodDesc, null, null);
Item result = this.get(this.key2);
if (result == null)
{
this.pool.put12(MTYPE, this.newUTF8(methodDesc));
result = new Item(this.index++, this.key2);
this.put(result);
}
return result;
}
public int newMethodType(final String methodDesc)
{
return this.newMethodTypeItem(methodDesc).index;
}
Item newHandleItem(final int tag, final String owner, final String name, final String desc)
{
this.key4.set(HANDLE_BASE + tag, owner, name, desc);
Item result = this.get(this.key4);
if (result == null)
{
if (tag <= ASMConstants.H_PUTSTATIC)
{
this.put112(HANDLE, tag, this.newField(owner, name, desc));
}
else
{
this.put112(HANDLE, tag, this.newMethod(owner, name, desc, tag == ASMConstants.H_INVOKEINTERFACE));
}
result = new Item(this.index++, this.key4);
this.put(result);
}
return result;
}
public int newHandle(final int tag, final String owner, final String name, final String desc)
{
return this.newHandleItem(tag, owner, name, desc).index;
}
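// Editor's note on the method below (comment added editorially): an invokedynamic constant
// is built in two steps. First the bootstrap method (handle plus static arguments) is
// appended to the BootstrapMethods byte vector and de-duplicated against previously
// recorded entries by comparing raw bytes under a BSM hash; then a CONSTANT_InvokeDynamic
// pool item is created (or reused) that pairs the bootstrap method index with the
// name-and-type of the call site.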
Item newInvokeDynamicItem(final String name, final String desc, final Handle bsm, final Object... bsmArgs)
{
// cache for performance
ByteVector bootstrapMethods = this.bootstrapMethods;
if (bootstrapMethods == null)
{
bootstrapMethods = this.bootstrapMethods = new ByteVector();
}
int position = bootstrapMethods.length; // record current position
int hashCode = bsm.hashCode();
bootstrapMethods.putShort(this.newHandle(bsm.tag, bsm.owner, bsm.name, bsm.desc));
int argsLength = bsmArgs.length;
bootstrapMethods.putShort(argsLength);
for (Object bsmArg : bsmArgs)
{
hashCode ^= bsmArg.hashCode();
bootstrapMethods.putShort(this.newConst(bsmArg));
}
byte[] data = bootstrapMethods.data;
int length = (1 + 1 + argsLength) << 1; // (bsm + argCount + arguments)
hashCode &= 0x7FFFFFFF;
Item result = this.items[hashCode % this.items.length];
loop:
while (result != null)
{
if (result.type != BSM || result.hashCode != hashCode)
{
result = result.next;
continue;
}
// because the data encodes the size of the arguments,
// we don't need to test whether these sizes are equal
int resultPosition = result.intVal;
for (int p = 0; p < length; p++)
{
if (data[position + p] != data[resultPosition + p])
{
result = result.next;
continue loop;
}
}
break;
}
int bootstrapMethodIndex;
if (result != null)
{
bootstrapMethodIndex = result.index;
bootstrapMethods.length = position; // revert to old position
}
else
{
bootstrapMethodIndex = this.bootstrapMethodsCount++;
result = new Item(bootstrapMethodIndex);
result.set(position, hashCode);
this.put(result);
}
// now, create the InvokeDynamic constant
this.key3.set(name, desc, bootstrapMethodIndex);
result = this.get(this.key3);
if (result == null)
{
this.put122(INDY, bootstrapMethodIndex, this.newNameType(name, desc));
result = new Item(this.index++, this.key3);
this.put(result);
}
return result;
}
public int newInvokeDynamic(final String name, final String desc, final Handle bsm, final Object... bsmArgs)
{
return this.newInvokeDynamicItem(name, desc, bsm, bsmArgs).index;
}
Item newFieldItem(final String owner, final String name, final String desc)
{
this.key3.set(FIELD, owner, name, desc);
Item result = this.get(this.key3);
if (result == null)
{
this.put122(FIELD, this.newClass(owner), this.newNameType(name, desc));
result = new Item(this.index++, this.key3);
this.put(result);
}
return result;
}
public int newField(final String owner, final String name, final String desc)
{
return this.newFieldItem(owner, name, desc).index;
}
Item newMethodItem(final String owner, final String name, final String desc, final boolean itf)
{
int type = itf ? IMETH : METH;
this.key3.set(type, owner, name, desc);
Item result = this.get(this.key3);
if (result == null)
{
this.put122(type, this.newClass(owner), this.newNameType(name, desc));
result = new Item(this.index++, this.key3);
this.put(result);
}
return result;
}
public int newMethod(final String owner, final String name, final String desc, final boolean itf)
{
return this.newMethodItem(owner, name, desc, itf).index;
}
Item newInteger(final int value)
{
this.key.set(value);
Item result = this.get(this.key);
if (result == null)
{
this.pool.putByte(INT).putInt(value);
result = new Item(this.index++, this.key);
this.put(result);
}
return result;
}
Item newFloat(final float value)
{
this.key.set(value);
Item result = this.get(this.key);
if (result == null)
{
this.pool.putByte(FLOAT).putInt(this.key.intVal);
result = new Item(this.index++, this.key);
this.put(result);
}
return result;
}
Item newLong(final long value)
{
this.key.set(value);
Item result = this.get(this.key);
if (result == null)
{
this.pool.putByte(LONG).putLong(value);
result = new Item(this.index, this.key);
this.index += 2;
this.put(result);
}
return result;
}
Item newDouble(final double value)
{
this.key.set(value);
Item result = this.get(this.key);
if (result == null)
{
this.pool.putByte(DOUBLE).putLong(this.key.longVal);
result = new Item(this.index, this.key);
this.index += 2;
this.put(result);
}
return result;
}
private Item newString(final String value)
{
this.key2.set(STR, value, null, null);
Item result = this.get(this.key2);
if (result == null)
{
this.pool.put12(STR, this.newUTF8(value));
result = new Item(this.index++, this.key2);
this.put(result);
}
return result;
}
public int newNameType(final String name, final String desc)
{
return this.newNameTypeItem(name, desc).index;
}
Item newNameTypeItem(final String name, final String desc)
{
this.key2.set(NAME_TYPE, name, desc, null);
Item result = this.get(this.key2);
if (result == null)
{
this.put122(NAME_TYPE, this.newUTF8(name), this.newUTF8(desc));
result = new Item(this.index++, this.key2);
this.put(result);
}
return result;
}
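// Editor's note (added comment): the addType/addUninitializedType/getMergedType entries
// below do not create constant pool items; they populate an internal type table used when
// computing stack map frames. TYPE_UNINIT additionally records the offset of the NEW
// instruction, and TYPE_MERGED caches the common super class of two reference types.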
int addType(final String type)
{
this.key.set(TYPE_NORMAL, type, null, null);
Item result = this.get(this.key);
if (result == null)
{
result = this.addType(this.key);
}
return result.index;
}
int addUninitializedType(final String type, final int offset)
{
this.key.type = TYPE_UNINIT;
this.key.intVal = offset;
this.key.strVal1 = type;
this.key.hashCode = 0x7FFFFFFF & TYPE_UNINIT + type.hashCode() + offset;
Item result = this.get(this.key);
if (result == null)
{
result = this.addType(this.key);
}
return result.index;
}
private Item addType(final Item item)
{
++this.typeCount;
Item result = new Item(this.typeCount, item);
this.put(result);
if (this.typeTable == null)
{
this.typeTable = new Item[16];
}
if (this.typeCount == this.typeTable.length)
{
Item[] newTable = new Item[2 * this.typeTable.length];
System.arraycopy(this.typeTable, 0, newTable, 0, this.typeTable.length);
this.typeTable = newTable;
}
this.typeTable[this.typeCount] = result;
return result;
}
int getMergedType(final int type1, final int type2)
{
this.key2.type = TYPE_MERGED;
this.key2.longVal = type1 | (long) type2 << 32;
this.key2.hashCode = 0x7FFFFFFF & TYPE_MERGED + type1 + type2;
Item result = this.get(this.key2);
if (result == null)
{
String t = this.typeTable[type1].strVal1;
String u = this.typeTable[type2].strVal1;
this.key2.intVal = this.addType(this.getCommonSuperClass(t, u));
result = new Item((short) 0, this.key2);
this.put(result);
}
return result.intVal;
}
protected String getCommonSuperClass(final String type1, final String type2)
{
Class<?> c, d;
ClassLoader classLoader = this.getClass().getClassLoader();
try
{
c = Class.forName(type1.replace('/', '.'), false, classLoader);
d = Class.forName(type2.replace('/', '.'), false, classLoader);
}
catch (Exception e)
{
throw new RuntimeException(e.toString());
}
if (c.isAssignableFrom(d))
{
return type1;
}
if (d.isAssignableFrom(c))
{
return type2;
}
if (c.isInterface() || d.isInterface())
{
return "java/lang/Object";
}
do
{
c = c.getSuperclass();
}
while (!c.isAssignableFrom(d));
return c.getName().replace('.', '/');
}
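// Editor's note (added comment): getCommonSuperClass above resolves both types through
// this class's ClassLoader, which may be undesirable or impossible in some environments;
// callers that cannot load the classes typically override it to derive the common super
// type from their own metadata instead.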
private Item get(final Item key)
{
Item i = this.items[key.hashCode % this.items.length];
while (i != null && (i.type != key.type || !key.isEqualTo(i)))
{
i = i.next;
}
return i;
}
private void put(final Item i)
{
if (this.index + this.typeCount > this.threshold)
{
int ll = this.items.length;
int nl = ll * 2 + 1;
Item[] newItems = new Item[nl];
for (int l = ll - 1; l >= 0; --l)
{
Item j = this.items[l];
while (j != null)
{
int index = j.hashCode % newItems.length;
Item k = j.next;
j.next = newItems[index];
newItems[index] = j;
j = k;
}
}
this.items = newItems;
this.threshold = (int) (nl * 0.75);
}
int index = i.hashCode % this.items.length;
i.next = this.items[index];
this.items[index] = i;
}
private void put122(final int b, final int s1, final int s2)
{
this.pool.put12(b, s1).putShort(s2);
}
private void put112(final int b1, final int b2, final int s)
{
this.pool.put11(b1, b2).putShort(s);
}
}
|
|
/**
* Licensed to Apereo under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright ownership. Apereo
* licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the License at the
* following location:
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apereo.portal.layout.dlm;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apereo.portal.PortalException;
import org.apereo.portal.layout.IUserLayoutStore;
import org.apereo.portal.security.IPerson;
import org.apereo.portal.spring.locator.UserLayoutStoreLocator;
import org.apereo.portal.xml.XmlUtilitiesImpl;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* Applies and updates position specifiers for child nodes in the composite layout.
*
* @since uPortal 2.5
*/
public class PositionManager {
private static Log LOG = LogFactory.getLog(PositionManager.class);
private static IUserLayoutStore dls = null;
/**
* Hands back the single instance of RDBMDistributedLayoutStore. There is already a method for
* acquiring a single instance of the configured layout store so we delegate over there so that
* all references refer to the same instance. This method is solely for convenience so that we
* don't have to keep calling UserLayoutStoreFactory and casting the resulting class.
*/
private static IUserLayoutStore getDLS() {
if (dls == null) {
dls = UserLayoutStoreLocator.getUserLayoutStore();
}
return dls;
}
/**
* This method and ones that it delegates to have the responsibility of organizing the child
* nodes of the passed in composite view parent node according to the order specified in the
* passed in position set and return via the passed in result set whether the personal layout
* fragment (one portion of which is the position set) or the incorporated layouts fragment (one
* portion of which is the compViewParent) were changed.
*
* <p>This may also include pulling nodes in from other parents under certain circumstances. For
* example, if allowed a user can move nodes that are not part of their personal layout fragment
* or PLF: the UI elements that they own. These nodes do not exist in their layout in the
* database but instead are merged in with their owned elements at log in and other times. So to
* move them during subsequent merges a position set can contain a position directive indicating
* the id of the node to be moved into a specific position in the sibling list, and that may well
* refer to a node not in the sibling list to begin with. If the node no longer exists in the
* composite view then that position directive can safely be discarded.
*
* <p>Positioning is meant to preserve as much as possible the user's specified ordering of user
* interface elements but always respecting movement restrictions placed on those elements that
* are incorporated by their owners. So the following rules apply from most important to least.
*
* <p>1) nodes with moveAllowed="false" prevent nodes of lower precedence from being to their
* left or higher, with left or higher defined as having a lower index in the sibling list.
* (applyLowerPrecRestriction)
*
* <p>2) nodes with moveAllowed="false" prevent nodes of equal precedence from moving from one
* side of this node to the other from their position as found in the compViewParent initially
* and prevents nodes with the same precedence from moving from other parents into this parent
* prior to the restricted node. Prior to implies a lower sibling index.
* (applyHoppingRestriction)
*
* <p>3) nodes with moveAllowed="false" prevent nodes of equal precedence lower in the sibling
* list from being reparented. (ie: moving to another parent) However, they can be deleted.
* (applyReparentingCheck)
*
* <p>4) nodes should be ordered as much as possible in the order specified by the user but in
* view of the above conditions. So if a user has moved nodes, thus specifying some order, and the
* owner of some node in that set then locks one of those nodes, some of those nodes will have to
* move back to their original positions to conform with the rules above, but the remaining
* nodes should be found in the same relative order specified by the user. (getOrder)
*
* <p>5) nodes not included in the order specified by the user (ie: nodes added since the user
* last ordered them) should maintain their relative order as much as possible and be appended
* to the end of the sibling list after all others rules have been applied. (getOrder)
*
* <p>Each of these rules is applied by a call to a method, 5 being applied first and 1 last, so
* that 1 has the highest precedence and last say. Once the final ordering is specified then it
* is applied to the children of the compViewParent and returned.
*/
static void applyPositions(
Element compViewParent,
Element positionSet,
IntegrationResult result,
NodeInfoTracker tracker)
throws PortalException {
if (positionSet == null || positionSet.getFirstChild() == null) return;
List<NodeInfo> order = new ArrayList<NodeInfo>();
applyOrdering(order, compViewParent, positionSet, tracker);
applyNoReparenting(order, compViewParent, positionSet);
applyNoHopping(order);
applyLowerPrecedence(order);
evaluateAndApply(order, compViewParent, positionSet, result);
}
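/*
 * Editor's illustration (not part of the original source; element names are assumed): a
 * position set is a small XML fragment whose child elements each carry a "name" attribute
 * referencing the ID of a node in the composite view, in the order the user arranged them,
 * roughly:
 *
 *   <positionSet>
 *     <position name="u12l1s5"/>
 *     <position name="u12l1s7"/>
 *   </positionSet>
 *
 * applyPositions() reconciles that requested order with the move restrictions described
 * above and then reorders the children of compViewParent to match.
 */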
/**
* This method determines if applying all of the positioning rules and restrictions ended up
* making changes to the compViewParent or the original position set. If changes are applicable
* to the CVP then they are applied. If the position set changed then the original stored in the
* PLF is updated.
*/
static void evaluateAndApply(
List<NodeInfo> order,
Element compViewParent,
Element positionSet,
IntegrationResult result)
throws PortalException {
adjustPositionSet(order, positionSet, result);
if (hasAffectOnCVP(order, compViewParent)) {
applyToNodes(order, compViewParent);
result.setChangedILF(true);
}
}
/**
* This method trims down the position set to the position directives on the node info elements
* still having a position directive. Any directives that violated restrictions were removed
* from the node info objects so the position set should be made to match the order of those
* still having one.
*/
static void adjustPositionSet(
List<NodeInfo> order, Element positionSet, IntegrationResult result) {
Node nodeToMatch = positionSet.getFirstChild();
Element nodeToInsertBefore = positionSet.getOwnerDocument().createElement("INSERT_POINT");
positionSet.insertBefore(nodeToInsertBefore, nodeToMatch);
for (Iterator<NodeInfo> iter = order.iterator(); iter.hasNext(); ) {
NodeInfo ni = iter.next();
if (ni.getPositionDirective() != null) {
// found one; check it against the current one in the position
// set to see if it is different. If so then indicate that
// something (the position set) has changed in the plf
if (ni.getPositionDirective() != nodeToMatch) result.setChangedPLF(true);
// now bump the insertion point forward prior to
// moving on to the next position node to be evaluated
if (nodeToMatch != null) nodeToMatch = nodeToMatch.getNextSibling();
// now insert it prior to insertion point
positionSet.insertBefore(ni.getPositionDirective(), nodeToInsertBefore);
}
}
// now for any left over after the insert point remove them.
while (nodeToInsertBefore.getNextSibling() != null)
positionSet.removeChild(nodeToInsertBefore.getNextSibling());
// now remove the insertion point
positionSet.removeChild(nodeToInsertBefore);
}
/**
* This method compares the children by id in the order list with the order in the
* compViewParent's ui visible children and returns true if the ordering differs, indicating that
* positioning is needed.
*/
static boolean hasAffectOnCVP(List<NodeInfo> order, Element compViewParent) {
if (order.size() == 0) return false;
int idx = 0;
Element child = (Element) compViewParent.getFirstChild();
NodeInfo ni = order.get(idx);
if (child == null && ni != null) // most likely nodes to be pulled in
return true;
while (child != null) {
if (child.getAttribute("hidden").equals("false")
&& (!child.getAttribute("chanID").equals("")
|| child.getAttribute("type").equals("regular"))) {
if (ni.getId().equals(child.getAttribute(Constants.ATT_ID))) {
if (idx >= order.size() - 1) // at end of order list
return false;
ni = order.get(++idx);
} else // if not equal then return true
return true;
}
child = (Element) child.getNextSibling();
}
if (idx < order.size()) return true; // represents nodes to be pulled in
return false;
}
/**
* This method applies the ordering specified in the passed in order list to the child nodes of
* the compViewParent. Nodes specified in the list but located elsewhere are pulled in.
*/
static void applyToNodes(List<NodeInfo> order, Element compViewParent) {
// first set up a bogus node to assist with inserting
Node insertPoint = compViewParent.getOwnerDocument().createElement("bogus");
Node first = compViewParent.getFirstChild();
if (first != null) compViewParent.insertBefore(insertPoint, first);
else compViewParent.appendChild(insertPoint);
// now pass through the order list inserting the nodes as you go
for (int i = 0; i < order.size(); i++)
compViewParent.insertBefore(order.get(i).getNode(), insertPoint);
compViewParent.removeChild(insertPoint);
}
/**
* This method is responsible for preventing nodes with lower precedence from being located to
* the left (lower sibling order) of nodes having a higher precedence and moveAllowed="false".
*/
static void applyLowerPrecedence(List<NodeInfo> order) {
for (int i = 0; i < order.size(); i++) {
NodeInfo ni = order.get(i);
if (ni.getNode().getAttribute(Constants.ATT_MOVE_ALLOWED).equals("false")) {
for (int j = 0; j < i; j++) {
NodeInfo lefty = order.get(j);
if (lefty.getPrecedence() == null
|| lefty.getPrecedence().isLessThan(ni.getPrecedence())) {
order.remove(j);
order.add(i, lefty);
}
}
}
}
}
/**
* This method is responsible for preventing nodes with identical precedence in the same parent
* from hopping over each other so that a layout fragment can lock two tabs that are next to
* each other and they can only be separated by tabs with higher precedence.
*
* <p>If this situation is detected then the positioning of all nodes currently in the
* compViewParent is left as they are found in the CVP with any nodes brought in from other
* parents appended at the end with their relative order preserved.
*/
static void applyNoHopping(List<NodeInfo> order) {
if (isIllegalHoppingSpecified(order) == true) {
ArrayList<NodeInfo> cvpNodeInfos = new ArrayList<>();
// pull those out of the position list from the CVP
for (int i = order.size() - 1; i >= 0; i--)
if (order.get(i).getIndexInCVP() != -1) cvpNodeInfos.add(order.remove(i));
// what is left is coming from other parents. Now push them back in
// in the order specified in the CVP
NodeInfo[] nodeInfos = cvpNodeInfos.toArray(new NodeInfo[cvpNodeInfos.size()]);
Arrays.sort(nodeInfos, new NodeInfoComparator());
List<NodeInfo> list = Arrays.asList(nodeInfos);
order.addAll(0, list);
}
}
/**
* This method determines if any illegal hopping is being specified. To determine if the
* positioning is specifying an ordering that will result in hopping I need to determine for
* each node n in the list if any of the nodes to be positioned to its right currently lie to
* its left in the CVP and have moveAllowed="false" and have the same precedence or if any of
* the nodes to be positioned to its left currently lie to its right in the CVP and have
* moveAllowed="false" and have the same precedence.
*/
static boolean isIllegalHoppingSpecified(List<NodeInfo> order) {
for (int i = 0; i < order.size(); i++) {
NodeInfo ni = (NodeInfo) order.get(i);
// look for move restricted nodes
if (!ni.getNode().getAttribute(Constants.ATT_MOVE_ALLOWED).equals("false")) continue;
// now check nodes in lower position to see if they "hopped" here
// or if they have similar precedence and came from another parent.
for (int j = 0; j < i; j++) {
NodeInfo niSib = (NodeInfo) order.get(j);
// skip lower precedence nodes from this parent. These will get
// bumped during the lower precedence check
if (niSib.getPrecedence() == Precedence.getUserPrecedence()) continue;
if (niSib.getPrecedence().isEqualTo(ni.getPrecedence())
&& (niSib.getIndexInCVP() == -1
|| // from another parent
ni.getIndexInCVP() < niSib.getIndexInCVP())) // niSib hopping left
return true;
}
// now check upper positioned nodes to see if they "hopped"
for (int j = i + 1; j < order.size(); j++) {
NodeInfo niSib = (NodeInfo) order.get(j);
// ignore nodes from other parents and user precedence nodes
if (niSib.getIndexInCVP() == -1
|| niSib.getPrecedence() == Precedence.getUserPrecedence()) continue;
if (ni.getIndexInCVP() > niSib.getIndexInCVP()
&& // niSib hopped right
niSib.getPrecedence().isEqualTo(ni.getPrecedence())) return true;
}
}
return false;
}
/**
* This method scans through the nodes in the ordered list and identifies those that are not in
* the passed in compViewParent. For those it then looks in its current parent and checks to see
* if there are any downstream (higher sibling index) siblings that have moveAllowed="false".
* If any such sibling is found then the node is not allowed to be reparented and is removed
* from the list.
*/
static void applyNoReparenting(
List<NodeInfo> order, Element compViewParent, Element positionSet) {
int i = 0;
while (i < order.size()) {
NodeInfo ni = order.get(i);
if (!ni.getNode().getParentNode().equals(compViewParent)) {
if (isNotReparentable(ni, compViewParent, positionSet)) {
LOG.info(
"Resetting the following NodeInfo because it is not reparentable: "
+ ni);
// this node should not be reparented. If it was placed
// here by way of a position directive then delete that
// directive out of the ni and posSet will be updated later
ni.setPositionDirective(null);
// now we need to remove it from the ordering list but
// skip incrementing i, deleted ni now filled by next ni
order.remove(i);
continue;
}
}
i++;
}
}
/**
* Return true if the passed in node or any downstream (higher index) siblings <strong>relative
* to its destination location</strong> have moveAllowed="false".
*/
private static boolean isNotReparentable(
NodeInfo ni, Element compViewParent, Element positionSet) {
// This one is easy -- can't re-parent a node with dlm:moveAllowed=false
if (ni.getNode().getAttribute(Constants.ATT_MOVE_ALLOWED).equals("false")) {
return true;
}
try {
/*
* Annoying to do in Java, but we need to find our own placeholder
* element in the positionSet
*/
final XPathFactory xpathFactory = XPathFactory.newInstance();
final XPath xpath = xpathFactory.newXPath();
final String findPlaceholderXpath =
".//*[local-name()='position' and @name='" + ni.getId() + "']";
final XPathExpression findPlaceholder = xpath.compile(findPlaceholderXpath);
final NodeList findPlaceholderList =
(NodeList) findPlaceholder.evaluate(positionSet, XPathConstants.NODESET);
switch (findPlaceholderList.getLength()) {
case 0:
LOG.warn(
"Node not found for XPathExpression=\""
+ findPlaceholderXpath
+ "\" in positionSet="
+ XmlUtilitiesImpl.toString(positionSet));
return true;
case 1:
// This is healthy
break;
default:
LOG.warn(
"More than one node found for XPathExpression=\""
+ findPlaceholderXpath
+ "\" in positionSet="
+ XmlUtilitiesImpl.toString(positionSet));
return true;
}
final Element placeholder = (Element) findPlaceholderList.item(0); // At last
for (Element nextPlaceholder =
(Element)
placeholder
.getNextSibling(); // Start with the next dlm:position element after placeholder
nextPlaceholder != null; // As long as we have a placeholder to look at
nextPlaceholder =
(Element)
nextPlaceholder
.getNextSibling()) { // Advance to the next placeholder
if (LOG.isDebugEnabled()) {
LOG.debug(
"Considering whether node ''"
+ ni.getId()
+ "' is Reparentable; subsequent sibling is: "
+ nextPlaceholder.getAttribute("name"));
}
/*
* Next task: we have to find the non-placeholder representation of
* the nextSiblingPlaceholder within the compViewParent
*/
final String unmaskPlaceholderXpath =
".//*[@ID='" + nextPlaceholder.getAttribute("name") + "']";
final XPathExpression unmaskPlaceholder = xpath.compile(unmaskPlaceholderXpath);
final NodeList unmaskPlaceholderList =
(NodeList)
unmaskPlaceholder.evaluate(compViewParent, XPathConstants.NODESET);
switch (unmaskPlaceholderList.getLength()) {
case 0:
// Not a problem; the nextSiblingPlaceholder also refers
// to a node that has been moved to this context (afaik)
continue;
case 1:
final Element nextSibling = (Element) unmaskPlaceholderList.item(0);
if (LOG.isDebugEnabled()) {
LOG.debug(
"Considering whether node ''"
+ ni.getId()
+ "' is Reparentable; subsequent sibling '"
+ nextSibling.getAttribute("ID")
+ "' has dlm:moveAllowed="
+ !nextSibling
.getAttribute(Constants.ATT_MOVE_ALLOWED)
.equals("false"));
}
// Need to perform some checks...
if (nextSibling.getAttribute(Constants.ATT_MOVE_ALLOWED).equals("false")) {
/*
* The following check is a bit strange; it seems to verify
* that the current NodeInfo and the nextSibling come from the
* same fragment. If they don't, the re-parenting is allowable.
* I believe this check could only be unsatisfied in the case
* of tabs.
*/
Precedence p =
Precedence.newInstance(
nextSibling.getAttribute(Constants.ATT_FRAGMENT));
if (ni.getPrecedence().isEqualTo(p)) {
return true;
}
}
break;
default:
LOG.warn(
"More than one node found for XPathExpression=\""
+ unmaskPlaceholderXpath
+ "\" in compViewParent");
return true;
}
}
} catch (XPathExpressionException xpe) {
throw new RuntimeException("Failed to evaluate XPATH", xpe);
}
return false; // Re-parenting is "not disallowed" (double-negative less readable)
}
/**
* This method assembles in the passed in order object a list of NodeInfo objects ordered first
* by those specified in the position set and whose nodes still exist in the composite view and
* then by any remaining children in the compViewParent.
*/
static void applyOrdering(
List<NodeInfo> order,
Element compViewParent,
Element positionSet,
NodeInfoTracker tracker) {
// first pull out all visible channel or visible folder children and
// put their id's in a list of available children and record their
// relative order in the CVP.
final Map<String, NodeInfo> available = new LinkedHashMap<String, NodeInfo>();
Element child = (Element) compViewParent.getFirstChild();
Element next = null;
int indexInCVP = 0;
while (child != null) {
next = (Element) child.getNextSibling();
if (child.getAttribute("hidden").equals("false")
&& (!child.getAttribute("chanID").equals("")
|| child.getAttribute("type").equals("regular"))) {
final NodeInfo nodeInfo = new NodeInfo(child, indexInCVP++);
tracker.track(order, compViewParent, positionSet);
final NodeInfo prevNode = available.put(nodeInfo.getId(), nodeInfo);
if (prevNode != null) {
throw new IllegalStateException(
"Infinite loop detected in layout. Triggered by "
+ nodeInfo.getId()
+ " with already visited node ids: "
+ available.keySet());
}
}
child = next;
}
// now fill the order list using id's from the position set if nodes
// having those ids exist in the composite view. Otherwise discard
// that position directive. As they are added to the list remove them
// from the available nodes in the parent.
Document CV = compViewParent.getOwnerDocument();
Element directive = (Element) positionSet.getFirstChild();
while (directive != null) {
next = (Element) directive.getNextSibling();
// id of child to move is in the name attrib on the position nodes
String id = directive.getAttribute("name");
child = CV.getElementById(id);
if (child != null) {
// look for the NodeInfo for this node in the available
// nodes and if found use that one. Otherwise use a new that
// does not include an index in the CVP parent. In either case
// indicate the position directive responsible for placing this
// NodeInfo object in the list.
final String childId = child.getAttribute(Constants.ATT_ID);
NodeInfo ni = available.remove(childId);
if (ni == null) {
ni = new NodeInfo(child);
tracker.track(order, compViewParent, positionSet);
}
ni.setPositionDirective(directive);
order.add(ni);
}
directive = next;
}
// now append any remaining ids from the available list maintaining
// the order that they have there.
order.addAll(available.values());
}
/**
* This method updates the positions recorded in a position set to reflect the ids of the nodes
* in the composite view of the layout. Any position nodes already in existence are reused to
* reduce database interaction needed to generate a new ID attribute. If any are left over after
* updating, those position elements are removed. If no position set existed, a new one is created
* for the parent. If no ILF nodes are found in the parent node then the position set as a whole
* is reclaimed.
*/
public static void updatePositionSet(Element compViewParent, Element plfParent, IPerson person)
throws PortalException {
if (LOG.isDebugEnabled()) LOG.debug("Updating Position Set");
if (compViewParent.getChildNodes().getLength() == 0) {
// no nodes to position. if set exists reclaim the space.
if (LOG.isDebugEnabled()) LOG.debug("No Nodes to position");
Element positions = getPositionSet(plfParent, person, false);
if (positions != null) plfParent.removeChild(positions);
return;
}
Element posSet = (Element) getPositionSet(plfParent, person, true);
Element position = (Element) posSet.getFirstChild();
Element viewNode = (Element) compViewParent.getFirstChild();
boolean ilfNodesFound = false;
while (viewNode != null) {
String ID = viewNode.getAttribute(Constants.ATT_ID);
String channelId = viewNode.getAttribute(Constants.ATT_CHANNEL_ID);
String type = viewNode.getAttribute(Constants.ATT_TYPE);
String hidden = viewNode.getAttribute(Constants.ATT_HIDDEN);
if (ID.startsWith(Constants.FRAGMENT_ID_USER_PREFIX)) ilfNodesFound = true;
if (!channelId.equals("")
|| // it's a channel node, or
(type.equals("regular")
&& // a regular, visible folder
hidden.equals("false"))) {
if (position != null) position.setAttribute(Constants.ATT_NAME, ID);
else position = createAndAppendPosition(ID, posSet, person);
position = (Element) position.getNextSibling();
}
viewNode = (Element) viewNode.getNextSibling();
}
if (ilfNodesFound == false) // only plf nodes, no pos set needed
plfParent.removeChild(posSet);
else {
// reclaim any leftover positions
while (position != null) {
Element nextPos = (Element) position.getNextSibling();
posSet.removeChild(position);
position = nextPos;
}
}
}
/**
* This method locates the position set element in the child list of the passed in plfParent or,
* if not found and the passed in create flag is true, creates one automatically and returns
* it.
*/
private static Element getPositionSet(Element plfParent, IPerson person, boolean create)
throws PortalException {
Node child = plfParent.getFirstChild();
while (child != null) {
if (child.getNodeName().equals(Constants.ELM_POSITION_SET)) return (Element) child;
child = child.getNextSibling();
}
if (create == false) return null;
String ID = null;
try {
ID = getDLS().getNextStructDirectiveId(person);
} catch (Exception e) {
throw new PortalException(
"Exception encountered while "
+ "generating new position set node "
+ "Id for userId="
+ person.getID(),
e);
}
Document plf = plfParent.getOwnerDocument();
Element positions = plf.createElement(Constants.ELM_POSITION_SET);
positions.setAttribute(Constants.ATT_TYPE, Constants.ELM_POSITION_SET);
positions.setAttribute(Constants.ATT_ID, ID);
plfParent.appendChild(positions);
return positions;
}
/**
* Create, append to the passed in position set, and return a position element that references
* the passed in elementID.
*/
private static Element createAndAppendPosition(
String elementID, Element positions, IPerson person) throws PortalException {
if (LOG.isDebugEnabled()) LOG.debug("Adding Position Set entry " + elementID + ".");
String ID = null;
try {
ID = getDLS().getNextStructDirectiveId(person);
} catch (Exception e) {
throw new PortalException(
"Exception encountered while "
+ "generating new position node "
+ "Id for userId="
+ person.getID(),
e);
}
Document plf = positions.getOwnerDocument();
Element position = plf.createElement(Constants.ELM_POSITION);
position.setAttribute(Constants.ATT_TYPE, Constants.ELM_POSITION);
position.setAttribute(Constants.ATT_ID, ID);
position.setAttributeNS(Constants.NS_URI, Constants.ATT_NAME, elementID);
positions.appendChild(position);
return position;
}
private static class NodeInfoComparator implements Comparator<NodeInfo> {
@Override
public int compare(NodeInfo o1, NodeInfo o2) {
return o1.getIndexInCVP() - o2.getIndexInCVP();
}
}
}
|
|
/*
* Copyright (C) 2014-2022 Philip Helger (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.html.hc.html.forms;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.helger.commons.annotation.ReturnsMutableCopy;
import com.helger.commons.collection.impl.ICommonsList;
import com.helger.commons.collection.impl.ICommonsSet;
import com.helger.commons.equals.EqualsHelper;
import com.helger.html.hc.impl.HCTextNode;
/**
* Interface for select elements
*
* @author Philip Helger
* @param <IMPLTYPE>
* Implementation type
*/
public interface IHCSelect <IMPLTYPE extends IHCSelect <IMPLTYPE>> extends IHCControl <IMPLTYPE>
{
boolean isAutoFocus ();
@Nonnull
IMPLTYPE setAutoFocus (final boolean bAutoFocus);
@Nullable
String getForm ();
@Nonnull
IMPLTYPE setForm (@Nullable String sForm);
boolean isMultiple ();
@Nonnull
IMPLTYPE setMultiple (final boolean bMultiple);
int getSize ();
@Nonnull
IMPLTYPE setSize (final int nSize);
@Nonnull
@ReturnsMutableCopy
ICommonsSet <String> getAllPreselectedValues ();
boolean isPreselectedValue (@Nullable String sValue);
@Nonnull
HCOption addOption (@Nonnull HCOption aOption);
/**
* Add a new option at the specified index.
*
* @param nIndex
* The index to use. Should be ≥ 0.
* @param aOption
* The option to be added. May not be <code>null</code>.
* @return this for chaining
*/
@Nonnull
HCOption addOptionAt (@Nonnegative int nIndex, @Nonnull HCOption aOption);
@Nonnull
default HCOption addOption (@Nullable final String sText)
{
return addOption (sText, sText);
}
@Nonnull
default HCOption addOption (@Nullable final String sText, final boolean bSelected)
{
return addOption (sText, sText, bSelected);
}
@Nonnull
default HCOption addOption (@Nullable final String sValue, @Nullable final String sText)
{
final HCOption aOption = new HCOption ().setValue (sValue).addChild (HCTextNode.createOnDemand (sText));
return addOption (aOption);
}
@Nonnull
default HCOption addOption (@Nullable final String sValue, @Nullable final String sText, final boolean bSelected)
{
return addOption (sValue, sText).setSelected (bSelected);
}
@Nonnull
default HCOption addOption (@Nullable final String sValue, @Nullable final String sText, @Nullable final String sSelectedValue)
{
return addOption (sValue, sText, EqualsHelper.equals (sValue, sSelectedValue));
}
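// Usage sketch (editorial illustration; assumes the concrete HCSelect implementation of
// this interface and a caller-supplied String sChosenValue):
//   final HCSelect aSelect = new HCSelect ();
//   aSelect.addOption ("r", "Red");
//   aSelect.addOption ("g", "Green", true);          // explicitly preselected
//   aSelect.addOption ("b", "Blue", sChosenValue);   // preselected only if sChosenValue equals "b"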
/**
* Add a new option at the specified index.
*
* @param nIndex
* The index to use. Should be ≥ 0.
* @param sValue
* The value of the option to be added. May be <code>null</code>.
* @param sText
* The text of the option to be added. May be <code>null</code>.
* @return this for chaining
*/
@Nonnull
default HCOption addOptionAt (@Nonnegative final int nIndex, @Nullable final String sValue, @Nullable final String sText)
{
final HCOption aOption = new HCOption ().setValue (sValue).addChild (HCTextNode.createOnDemand (sText));
return addOptionAt (nIndex, aOption);
}
@Nonnull
IMPLTYPE addOptionGroup (@Nullable HCOptGroup aOptGroup);
@Nonnull
IMPLTYPE removeAllOptions ();
@Nonnull
IMPLTYPE removeAllOptionGroups ();
/**
* Remove the option at the specified index.
*
* @param nIndex
* The index to be removed. Should be ≥ 0.
* @return this for chaining
*/
@Nonnull
IMPLTYPE removeOptionAt (@Nonnegative int nIndex);
/**
* Remove the option group at the specified index.
*
* @param nIndex
* The index to be removed. Should be ≥ 0.
* @return this for chaining
*/
@Nonnull
IMPLTYPE removeOptionGroupAt (@Nonnegative int nIndex);
/**
* @return The number of available options.
*/
@Nonnegative
int getOptionCount ();
/**
* @return The number of available option groups.
*/
@Nonnegative
int getOptionGroupCount ();
/**
* @return A non-<code>null</code> list of all available options.
*/
@Nonnull
@ReturnsMutableCopy
ICommonsList <HCOption> getAllOptions ();
/**
* @return A non-<code>null</code> list of all available option groups.
*/
@Nonnull
@ReturnsMutableCopy
ICommonsList <HCOptGroup> getAllOptionGroups ();
/**
* Get the option at the specified index
*
* @param nIndex
* The index to retrieve. Should always be ≥ 0.
* @return <code>null</code> if no option is available for the specified
* index.
*/
@Nullable
HCOption getOptionAtIndex (@Nonnegative int nIndex);
/**
* Get the option group at the specified index
*
* @param nIndex
* The index to retrieve. Should always be ≥ 0.
* @return <code>null</code> if no option group is available for the specified
* index.
*/
@Nullable
HCOptGroup getOptionGroupAtIndex (@Nonnegative int nIndex);
/**
* @return <code>true</code> if this select has at least one option.
*/
boolean hasOptions ();
/**
* @return <code>true</code> if this select has at least one option group.
*/
boolean hasOptionGroups ();
/**
* @return A non-<code>null</code> list of all selected options.
*/
@Nonnull
@ReturnsMutableCopy
ICommonsList <HCOption> getAllSelectedOptions ();
/**
* @return The first selected option. May be <code>null</code>.
*/
@Nullable
HCOption getFirstSelectedOption ();
@Nullable
default String getFirstSelectedOptionValue ()
{
final HCOption aOption = getFirstSelectedOption ();
return aOption == null ? null : aOption.getValue ();
}
/**
* @return The number of selected options. Always ≥ 0.
*/
@Nonnegative
int getSelectedOptionCount ();
/**
* Check if this select has at least one selected option
*
* @return <code>true</code> if at least one option is selected
*/
boolean hasSelectedOption ();
}
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.refactoring.memberPullUp;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.intention.AddAnnotationFix;
import com.intellij.lang.Language;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.MethodSignatureUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.refactoring.memberPullUp.PullUpData;
import com.intellij.refactoring.memberPullUp.PullUpHelper;
import com.intellij.refactoring.util.DocCommentPolicy;
import com.intellij.refactoring.util.RefactoringHierarchyUtil;
import com.intellij.refactoring.util.RefactoringUtil;
import com.intellij.refactoring.util.classMembers.MemberInfo;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.plugins.groovy.lang.groovydoc.psi.api.GrDocComment;
import org.jetbrains.plugins.groovy.lang.groovydoc.psi.api.GrDocCommentOwner;
import org.jetbrains.plugins.groovy.lang.groovydoc.psi.impl.GrDocCommentUtil;
import org.jetbrains.plugins.groovy.lang.psi.GrReferenceElement;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElement;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElementFactory;
import org.jetbrains.plugins.groovy.lang.psi.GroovyRecursiveElementVisitor;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrField;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrReferenceExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrExtendsClause;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrImplementsClause;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrReferenceList;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMember;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod;
import org.jetbrains.plugins.groovy.lang.psi.api.types.GrCodeReferenceElement;
import org.jetbrains.plugins.groovy.lang.psi.impl.PsiImplUtil;
import org.jetbrains.plugins.groovy.refactoring.classMembers.GrClassMemberReferenceVisitor;
import org.jetbrains.plugins.groovy.util.GroovyChangeContextUtil;
import java.util.*;
public class GrPullUpHelper implements PullUpHelper<MemberInfo> {
private static final Logger LOG = Logger.getInstance(GrPullUpHelper.class);
private static final Key<Boolean> SUPER_REF = Key.create("SUPER_REF");
private static final Key<Boolean> THIS_REF = Key.create("THIS_REF");
private static final Key<Boolean> PRESERVE_QUALIFIER = Key.create("PRESERVE_QUALIFIER");
private final PsiClass myTargetSuperClass;
private final Set<PsiMember> myMembersToMove;
private final PsiClass mySourceClass;
private final Project myProject;
private final DocCommentPolicy myDocCommentPolicy;
private final Set<PsiMember> myMembersAfterMove;
final ExplicitSuperDeleter myExplicitSuperDeleter;
final QualifiedThisSuperAdjuster myThisSuperAdjuster;
private final QualifiedThisSuperSearcher myQualifiedSearcher;
public GrPullUpHelper(PullUpData data) {
myTargetSuperClass = data.getTargetClass();
myMembersToMove = data.getMembersToMove();
mySourceClass = data.getSourceClass();
myProject = data.getProject();
myDocCommentPolicy = data.getDocCommentPolicy();
myMembersAfterMove = data.getMovedMembers();
myExplicitSuperDeleter = new ExplicitSuperDeleter();
myThisSuperAdjuster = new QualifiedThisSuperAdjuster();
myQualifiedSearcher = new QualifiedThisSuperSearcher();
}
@Override
public void encodeContextInfo(MemberInfo info) {
GroovyChangeContextUtil.encodeContextInfo(info.getMember());
((GroovyPsiElement)info.getMember()).accept(myQualifiedSearcher);
}
@Override
public void move(MemberInfo info, PsiSubstitutor substitutor) {
if (info.getMember() instanceof PsiMethod) {
doMoveMethod(substitutor, info);
}
else if (info.getMember() instanceof PsiField) {
doMoveField(substitutor, info);
}
else if (info.getMember() instanceof PsiClass) {
doMoveClass(substitutor, info);
}
}
@Override
public void postProcessMember(PsiMember member) {
((GrMember)member).accept(myExplicitSuperDeleter);
((GrMember)member).accept(myThisSuperAdjuster);
GroovyChangeContextUtil.decodeContextInfo(member, null, null);
((GroovyPsiElement)member).accept(new GroovyRecursiveElementVisitor() {
@Override
public void visitReferenceExpression(@NotNull GrReferenceExpression referenceExpression) {
if (processRef(referenceExpression)) return;
super.visitReferenceExpression(referenceExpression);
}
@Override
public void visitCodeReferenceElement(@NotNull GrCodeReferenceElement refElement) {
if (processRef(refElement)) return;
super.visitCodeReferenceElement(refElement);
}
private boolean processRef(@NotNull GrReferenceElement<? extends GroovyPsiElement> refElement) {
final PsiElement qualifier = refElement.getQualifier();
if (qualifier != null) {
final Boolean preserveQualifier = qualifier.getCopyableUserData(PRESERVE_QUALIFIER);
if (preserveQualifier != null && !preserveQualifier) {
refElement.setQualifier(null);
return true;
}
}
return false;
}
});
}
@Override
public void setCorrectVisibility(MemberInfo info) {
PsiModifierListOwner modifierListOwner = info.getMember();
if (myTargetSuperClass.isInterface()) {
PsiUtil.setModifierProperty(modifierListOwner, PsiModifier.PUBLIC, true);
}
else if (modifierListOwner.hasModifierProperty(PsiModifier.PRIVATE)) {
if (info.isToAbstract() || willBeUsedInSubclass(modifierListOwner, myMembersToMove, myTargetSuperClass, mySourceClass)) {
PsiUtil.setModifierProperty(modifierListOwner, PsiModifier.PROTECTED, true);
}
if (modifierListOwner instanceof GrTypeDefinition) {
((GrTypeDefinition)modifierListOwner).accept(new GroovyRecursiveElementVisitor() {
@Override
public void visitMethod(@NotNull GrMethod method) {
check(method);
}
@Override
public void visitField(@NotNull GrField field) {
check(field);
}
@Override
public void visitTypeDefinition(@NotNull GrTypeDefinition typeDefinition) {
check(typeDefinition);
super.visitTypeDefinition(typeDefinition);
}
private void check(PsiMember member) {
if (member.hasModifierProperty(PsiModifier.PRIVATE)) {
if (willBeUsedInSubclass(member, myMembersToMove, myTargetSuperClass, mySourceClass)) {
PsiUtil.setModifierProperty(member, PsiModifier.PROTECTED, true);
}
}
}
});
}
}
}
@Override
public void moveFieldInitializations(LinkedHashSet<PsiField> movedFields) {
//todo
}
@Override
public void updateUsage(PsiElement element) {
if (element instanceof GrReferenceExpression) {
GrExpression qualifierExpression = ((GrReferenceExpression)element).getQualifierExpression();
if (qualifierExpression instanceof GrReferenceExpression && ((GrReferenceExpression)qualifierExpression).resolve() == mySourceClass) {
((GrReferenceExpression)qualifierExpression).bindToElement(myTargetSuperClass);
}
}
}
private static boolean willBeUsedInSubclass(PsiElement member, Set<PsiMember> movedMembers, PsiClass superclass, PsiClass subclass) {
for (PsiReference ref : ReferencesSearch.search(member, new LocalSearchScope(subclass), false)) {
PsiElement element = ref.getElement();
if (!RefactoringHierarchyUtil.willBeInTargetClass(element, movedMembers, superclass, false)) {
return true;
}
}
return false;
}
private void doMoveMethod(PsiSubstitutor substitutor, MemberInfo info) {
GroovyPsiElementFactory elementFactory = GroovyPsiElementFactory.getInstance(myProject);
GrMethod method = (GrMethod)info.getMember();
PsiMethod sibling = method;
PsiMethod anchor = null;
while (sibling != null) {
sibling = PsiTreeUtil.getNextSiblingOfType(sibling, PsiMethod.class);
if (sibling != null) {
anchor = MethodSignatureUtil.findMethodInSuperClassBySignatureInDerived(method.getContainingClass(), myTargetSuperClass,
sibling.getSignature(PsiSubstitutor.EMPTY), false);
if (anchor != null) {
break;
}
}
}
GrMethod methodCopy = (GrMethod)method.copy();
if (method.findSuperMethods(myTargetSuperClass).length == 0) {
deleteOverrideAnnotationIfFound(methodCopy);
}
final boolean isOriginalMethodAbstract =
method.hasModifierProperty(PsiModifier.ABSTRACT) || method.hasModifierProperty(PsiModifier.DEFAULT);
if (myTargetSuperClass.isInterface() || info.isToAbstract()) {
GroovyChangeContextUtil.clearContextInfo(method);
RefactoringUtil.makeMethodAbstract(myTargetSuperClass, methodCopy);
if (myTargetSuperClass.isInterface()) {
PsiUtil.setModifierProperty(methodCopy, PsiModifier.ABSTRACT, false);
}
replaceMovedMemberTypeParameters(methodCopy, PsiUtil.typeParametersIterable(mySourceClass), substitutor, elementFactory);
final GrMethod movedElement =
anchor != null ? (GrMethod)myTargetSuperClass.addBefore(methodCopy, anchor) : (GrMethod)myTargetSuperClass.add(methodCopy);
CodeStyleSettings styleSettings = CodeStyleSettingsManager.getSettings(method.getProject());
if (styleSettings.INSERT_OVERRIDE_ANNOTATION) {
if (PsiUtil.isLanguageLevel5OrHigher(mySourceClass) && !myTargetSuperClass.isInterface() ||
PsiUtil.isLanguageLevel6OrHigher(mySourceClass)) {
new AddAnnotationFix(CommonClassNames.JAVA_LANG_OVERRIDE, method)
.invoke(method.getProject(), null, mySourceClass.getContainingFile());
}
}
GrDocComment oldDoc = method.getDocComment();
if (oldDoc != null) {
GrDocCommentUtil.setDocComment(movedElement, oldDoc);
}
myDocCommentPolicy.processCopiedJavaDoc(methodCopy.getDocComment(), oldDoc, isOriginalMethodAbstract);
myMembersAfterMove.add(movedElement);
if (isOriginalMethodAbstract) {
deleteMemberWithDocComment(method);
}
}
else {
if (isOriginalMethodAbstract) {
PsiUtil.setModifierProperty(myTargetSuperClass, PsiModifier.ABSTRACT, true);
}
fixReferencesToStatic(methodCopy);
replaceMovedMemberTypeParameters(methodCopy, PsiUtil.typeParametersIterable(mySourceClass), substitutor, elementFactory);
final PsiMethod superClassMethod = myTargetSuperClass.findMethodBySignature(methodCopy, false);
Language language = myTargetSuperClass.getLanguage();
final PsiMethod movedElement;
if (superClassMethod != null && superClassMethod.hasModifierProperty(PsiModifier.ABSTRACT)) {
movedElement = (PsiMethod)superClassMethod.replace(convertMethodToLanguage(methodCopy, language));
}
else {
movedElement = anchor != null
? (PsiMethod)myTargetSuperClass.addBefore(convertMethodToLanguage(methodCopy, language), anchor)
: (PsiMethod)myTargetSuperClass.add(convertMethodToLanguage(methodCopy, language));
myMembersAfterMove.add(movedElement);
}
if (movedElement instanceof GrMethod) {
GrDocCommentUtil.setDocComment((GrDocCommentOwner)movedElement, method.getDocComment());
}
deleteMemberWithDocComment(method);
}
}
private static void deleteMemberWithDocComment(GrDocCommentOwner docCommentOwner) {
GrDocComment oldDoc = docCommentOwner.getDocComment();
if (oldDoc != null) {
oldDoc.delete();
}
docCommentOwner.delete();
}
private static void deleteOverrideAnnotationIfFound(PsiMethod oMethod) {
final PsiAnnotation annotation = AnnotationUtil.findAnnotation(oMethod, CommonClassNames.JAVA_LANG_OVERRIDE);
if (annotation != null) {
PsiElement prev = annotation.getPrevSibling();
PsiElement next = annotation.getNextSibling();
if ((prev == null || org.jetbrains.plugins.groovy.lang.psi.util.PsiUtil.isLineFeed(prev)) &&
org.jetbrains.plugins.groovy.lang.psi.util.PsiUtil.isLineFeed(next)) {
next.delete();
}
annotation.delete();
}
}
public static void replaceMovedMemberTypeParameters(final PsiElement member,
final Iterable<PsiTypeParameter> parametersIterable,
final PsiSubstitutor substitutor,
final GroovyPsiElementFactory factory) {
final Map<PsiElement, PsiElement> replacement = new LinkedHashMap<>();
for (PsiTypeParameter parameter : parametersIterable) {
PsiType substitutedType = substitutor.substitute(parameter);
PsiType type = substitutedType != null ? substitutedType : TypeConversionUtil.erasure(factory.createType(parameter));
PsiElement scopeElement = member instanceof GrField ? member.getParent() : member;
for (PsiReference reference : ReferencesSearch.search(parameter, new LocalSearchScope(scopeElement))) {
final PsiElement element = reference.getElement();
final PsiElement parent = element.getParent();
if (parent instanceof PsiTypeElement) {
replacement.put(parent, factory.createTypeElement(type));
}
else if (element instanceof GrCodeReferenceElement && type instanceof PsiClassType) {
replacement.put(element, factory.createReferenceElementByType((PsiClassType)type));
}
}
}
final JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(member.getProject());
for (PsiElement element : replacement.keySet()) {
if (element.isValid()) {
final PsiElement replaced = element.replace(replacement.get(element));
codeStyleManager.shortenClassReferences(replaced);
}
}
}
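/**
* Rewrites references to static members that are used inside the moved element so they remain explicitly qualified
* after the move: with the member's containing class for members that sit between the source and target classes, or
* with the target superclass for members that are moved along. The new references are then shortened.
*/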
private void fixReferencesToStatic(GroovyPsiElement classMember) throws IncorrectOperationException {
final StaticReferencesCollector collector = new StaticReferencesCollector(myMembersToMove);
classMember.accept(collector);
ArrayList<GrReferenceElement> refs = collector.getReferences();
ArrayList<PsiElement> members = collector.getReferees();
ArrayList<PsiClass> classes = collector.getRefereeClasses();
GroovyPsiElementFactory factory = GroovyPsiElementFactory.getInstance(myProject);
for (int i = 0; i < refs.size(); i++) {
GrReferenceElement ref = refs.get(i);
PsiElement namedElement = members.get(i);
PsiClass aClass = classes.get(i);
if (namedElement instanceof PsiNamedElement) {
GrReferenceExpression newRef = (GrReferenceExpression)factory.createExpressionFromText("a." + ((PsiNamedElement)namedElement).getName(), null);
GrExpression qualifier = newRef.getQualifierExpression();
assert qualifier != null;
qualifier = (GrExpression)qualifier.replace(factory.createReferenceExpressionFromText(aClass.getQualifiedName()));
qualifier.putCopyableUserData(PRESERVE_QUALIFIER, ref.isQualified());
PsiElement replaced = ref.replace(newRef);
JavaCodeStyleManager.getInstance(myProject).shortenClassReferences(replaced);
}
}
}
private class StaticReferencesCollector extends GrClassMemberReferenceVisitor {
private final ArrayList<GrReferenceElement> myReferences = new ArrayList<>();
private final ArrayList<PsiElement> myReferees = new ArrayList<>();
private final ArrayList<PsiClass> myRefereeClasses = new ArrayList<>();
private final Set<PsiMember> myMovedMembers;
private StaticReferencesCollector(Set<PsiMember> movedMembers) {
super(mySourceClass);
myMovedMembers = movedMembers;
}
public ArrayList<PsiElement> getReferees() {
return myReferees;
}
public ArrayList<PsiClass> getRefereeClasses() {
return myRefereeClasses;
}
public ArrayList<GrReferenceElement> getReferences() {
return myReferences;
}
@Override
protected void visitClassMemberReferenceElement(GrMember classMember, GrReferenceElement classMemberReference) {
if (classMember.hasModifierProperty(PsiModifier.STATIC) /*&& classMemberReference.isQualified()*/) {
if (!myMovedMembers.contains(classMember) &&
RefactoringHierarchyUtil.isMemberBetween(myTargetSuperClass, mySourceClass, classMember)) {
myReferences.add(classMemberReference);
myReferees.add(classMember);
myRefereeClasses.add(classMember.getContainingClass());
}
else if (myMovedMembers.contains(classMember) || myMembersAfterMove.contains(classMember)) {
myReferences.add(classMemberReference);
myReferees.add(classMember);
myRefereeClasses.add(myTargetSuperClass);
}
}
}
}
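/**
* Strips {@code super.} qualifiers inside the processed members: the qualifier is removed for unresolved references
* and for methods that were neither moved nor already declared in the target superclass; a standalone {@code super}
* expression is replaced with {@code this}.
*/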
private class ExplicitSuperDeleter extends GroovyRecursiveElementVisitor {
private final GrExpression myThisExpression = GroovyPsiElementFactory.getInstance(myProject).createExpressionFromText("this", null);
@Override
public void visitReferenceExpression(@NotNull GrReferenceExpression expression) {
if(org.jetbrains.plugins.groovy.lang.psi.util.PsiUtil.isSuperReference(expression.getQualifierExpression())) {
PsiElement resolved = expression.resolve();
if (resolved == null || resolved instanceof PsiMethod && shouldFixSuper((PsiMethod) resolved)) {
expression.setQualifier(null);
}
}
else if (org.jetbrains.plugins.groovy.lang.psi.util.PsiUtil.isSuperReference(expression)) {
expression.replaceWithExpression(myThisExpression, true);
}
}
@Override
public void visitTypeDefinition(@NotNull GrTypeDefinition typeDefinition) {
//do nothing
}
private boolean shouldFixSuper(PsiMethod method) {
for (PsiMember element : myMembersAfterMove) {
if (element instanceof PsiMethod) {
PsiMethod member = (PsiMethod)element;
// if there is such a member among the moved members, the super
// qualifier should not be removed
final PsiManager manager = method.getManager();
if (manager.areElementsEquivalent(member.getContainingClass(), method.getContainingClass()) &&
MethodSignatureUtil.areSignaturesEqual(member, method)) {
return false;
}
}
}
final PsiMethod methodFromSuper = myTargetSuperClass.findMethodBySignature(method, false);
return methodFromSuper == null;
}
}
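/**
* Rewrites qualified {@code this}/{@code super} expressions previously marked by {@link QualifiedThisSuperSearcher}:
* {@code SourceClass.super}-style references are replaced with {@code TargetSuperClass.this}, and the qualifier of
* {@code SourceClass.this} references is re-bound to the target superclass.
*/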
private class QualifiedThisSuperAdjuster extends GroovyRecursiveElementVisitor {
@Override
public void visitReferenceExpression(@NotNull GrReferenceExpression expression) {
super.visitReferenceExpression(expression);
if (expression.getCopyableUserData(SUPER_REF) != null) {
expression.putCopyableUserData(SUPER_REF, null);
final GrExpression qualifier = expression.getQualifier();
if (qualifier instanceof GrReferenceExpression && ((GrReferenceExpression)qualifier).isReferenceTo(mySourceClass)) {
try {
GroovyPsiElementFactory factory = GroovyPsiElementFactory.getInstance(myProject);
GrExpression newExpr = factory.createExpressionFromText(myTargetSuperClass.getName() + ".this", null);
expression.replace(newExpr);
}
catch (IncorrectOperationException e) {
LOG.error(e);
}
}
}
else if (expression.getCopyableUserData(THIS_REF) != null) {
expression.putCopyableUserData(THIS_REF, null);
final GrExpression qualifier = expression.getQualifier();
if (qualifier instanceof GrReferenceExpression && ((GrReferenceExpression)qualifier).isReferenceTo(mySourceClass)) {
try {
((GrReferenceExpression)qualifier).bindToElement(myTargetSuperClass);
GroovyChangeContextUtil.clearContextInfo(qualifier);
}
catch (IncorrectOperationException e) {
LOG.error(e);
}
}
}
}
}
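/**
* Moves a single field to the target superclass: normalizes the declaration, substitutes the source class's type
* parameters, requalifies static references, makes the field public when the target is an interface, adds the
* (possibly language-converted) copy to the superclass and deletes the original together with its doc comment.
*/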
private void doMoveField(PsiSubstitutor substitutor, MemberInfo info) {
GroovyPsiElementFactory elementFactory = GroovyPsiElementFactory.getInstance(myProject);
GrField field = (GrField)info.getMember();
field.normalizeDeclaration();
replaceMovedMemberTypeParameters(field, PsiUtil.typeParametersIterable(mySourceClass), substitutor, elementFactory);
fixReferencesToStatic(field);
if (myTargetSuperClass.isInterface()) {
PsiUtil.setModifierProperty(field, PsiModifier.PUBLIC, true);
}
final PsiMember movedElement = (PsiMember)myTargetSuperClass.add(convertFieldToLanguage(field, myTargetSuperClass.getLanguage()));
myMembersAfterMove.add(movedElement);
deleteMemberWithDocComment(field);
}
private void doMoveClass(PsiSubstitutor substitutor, MemberInfo info) {
if (Boolean.FALSE.equals(info.getOverrides())) {
PsiClass aClass = (PsiClass)info.getMember();
if (myTargetSuperClass instanceof GrTypeDefinition) {
addClassToSupers(info, aClass, substitutor, (GrTypeDefinition)myTargetSuperClass);
}
}
else {
GrTypeDefinition aClass = (GrTypeDefinition)info.getMember();
GroovyPsiElementFactory elementFactory = GroovyPsiElementFactory.getInstance(myProject);
replaceMovedMemberTypeParameters(aClass, PsiUtil.typeParametersIterable(mySourceClass), substitutor, elementFactory);
fixReferencesToStatic(aClass);
PsiMember movedElement = (PsiMember)myTargetSuperClass.addAfter(convertClassToLanguage(aClass, myTargetSuperClass.getLanguage()), null);
myMembersAfterMove.add(movedElement);
deleteMemberWithDocComment(aClass);
}
}
private static PsiMethod convertMethodToLanguage(PsiMethod method, Language language) {
if (method.getLanguage().equals(language)) {
return method;
}
return JVMElementFactories.getFactory(language, method.getProject()).createMethodFromText(method.getText(), null);
}
private static PsiField convertFieldToLanguage(PsiField field, Language language) {
if (field.getLanguage().equals(language)) {
return field;
}
return JVMElementFactories.getFactory(language, field.getProject()).createField(field.getName(), field.getType());
}
private static PsiClass convertClassToLanguage(PsiClass clazz, Language language) {
//if (clazz.getLanguage().equals(language)) {
// return clazz;
//}
//PsiClass newClass = JVMElementFactories.getFactory(language, clazz.getProject()).createClass(clazz.getName());
return clazz;
}
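/**
* Moves an extends/implements entry for {@code aClass} to the target superclass: the reference is taken from the
* source reference list (and removed when that list belongs to the source class), its type parameters are
* substituted, and it is added to the superclass's extends clause (for interfaces) or implements clause, creating
* the clause when it does not exist yet. Nothing is added if the superclass already inherits from {@code aClass}.
*/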
private void addClassToSupers(MemberInfo info, PsiClass aClass, PsiSubstitutor substitutor, GrTypeDefinition targetSuperClass) {
final PsiReferenceList sourceReferenceList = info.getSourceReferenceList();
LOG.assertTrue(sourceReferenceList != null);
PsiQualifiedReferenceElement ref = mySourceClass.equals(sourceReferenceList.getParent()) ?
removeFromReferenceList(sourceReferenceList, aClass) :
findReferenceToClass(sourceReferenceList, aClass);
if (ref != null && !targetSuperClass.isInheritor(aClass, false)) {
GroovyPsiElementFactory elementFactory = GroovyPsiElementFactory.getInstance(myProject);
replaceMovedMemberTypeParameters(ref, PsiUtil.typeParametersIterable(mySourceClass), substitutor, elementFactory);
GrReferenceList referenceList;
if (targetSuperClass.isInterface()) {
referenceList = targetSuperClass.getExtendsClause();
if (referenceList == null) {
GrExtendsClause newClause = GroovyPsiElementFactory.getInstance(myProject).createExtendsClause();
PsiElement anchor = targetSuperClass.getTypeParameterList() != null ? targetSuperClass.getTypeParameterList():
targetSuperClass.getNameIdentifierGroovy();
referenceList = (GrReferenceList)targetSuperClass.addAfter(newClause, anchor);
addSpacesAround(referenceList);
}
}
else {
referenceList = targetSuperClass.getImplementsClause();
if (referenceList == null) {
GrImplementsClause newClause = GroovyPsiElementFactory.getInstance(myProject).createImplementsClause();
PsiElement anchor = targetSuperClass.getExtendsClause() != null ? targetSuperClass.getExtendsClause() :
targetSuperClass.getTypeParameterList() != null ? targetSuperClass.getTypeParameterList() :
targetSuperClass.getNameIdentifierGroovy();
referenceList = (GrReferenceList)targetSuperClass.addAfter(newClause, anchor);
addSpacesAround(referenceList);
}
}
assert referenceList != null;
referenceList.add(ref);
}
}
private static void addSpacesAround(@NotNull GrReferenceList list) {
PsiElement prev = list.getPrevSibling();
if (!PsiImplUtil.isWhiteSpaceOrNls(prev)) {
list.getParent().getNode().addLeaf(TokenType.WHITE_SPACE, " ", list.getNode());
}
PsiElement next = list.getNextSibling();
if (!PsiImplUtil.isWhiteSpaceOrNls(next)) {
list.getParent().getNode().addLeaf(TokenType.WHITE_SPACE, " ", list.getNode().getTreeNext());
}
}
public static PsiQualifiedReferenceElement findReferenceToClass(PsiReferenceList refList, PsiClass aClass) {
PsiQualifiedReferenceElement[] refs = refList instanceof GrReferenceList ? ((GrReferenceList)refList).getReferenceElementsGroovy()
: refList.getReferenceElements();
for (PsiQualifiedReferenceElement ref : refs) {
if (ref.isReferenceTo(aClass)) {
return ref;
}
}
return null;
}
/**
* Removes a reference to the specified class from the given reference list.
*
* @return the removed reference to the class, or null if the reference list contained no reference to it
*/
public static PsiQualifiedReferenceElement removeFromReferenceList(PsiReferenceList refList, PsiClass aClass) throws IncorrectOperationException {
List<? extends PsiQualifiedReferenceElement> refs = Arrays.asList(
refList instanceof GrReferenceList ? ((GrReferenceList)refList).getReferenceElementsGroovy() : refList.getReferenceElements());
for (PsiQualifiedReferenceElement ref : refs) {
if (ref.isReferenceTo(aClass)) {
PsiQualifiedReferenceElement refCopy = (PsiQualifiedReferenceElement)ref.copy();
ref.delete();
return refCopy;
}
}
return null;
}
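/**
* Marks {@code super} and {@code this} expressions qualified with the source class (via the SUPER_REF / THIS_REF
* copyable user data keys) so that {@link QualifiedThisSuperAdjuster} can rewrite them after the members are moved.
*/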
private class QualifiedThisSuperSearcher extends GroovyRecursiveElementVisitor {
@Override
public void visitReferenceExpression(@NotNull GrReferenceExpression expression) {
super.visitReferenceExpression(expression);
if (org.jetbrains.plugins.groovy.lang.psi.util.PsiUtil.isSuperReference(expression)) {
final GrExpression qualifier = expression.getQualifier();
if (qualifier instanceof GrReferenceExpression && ((GrReferenceExpression)qualifier).isReferenceTo(mySourceClass)) {
try {
expression.putCopyableUserData(SUPER_REF, Boolean.TRUE);
}
catch (IncorrectOperationException e) {
LOG.error(e);
}
}
}
else if (org.jetbrains.plugins.groovy.lang.psi.util.PsiUtil.isThisReference(expression)) {
final GrExpression qualifier = expression.getQualifier();
if (qualifier instanceof GrReferenceExpression && ((GrReferenceExpression)qualifier).isReferenceTo(mySourceClass)) {
try {
expression.putCopyableUserData(THIS_REF, Boolean.TRUE);
}
catch (IncorrectOperationException e) {
LOG.error(e);
}
}
}
}
}
}
|
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.impl;
import com.intellij.CommonBundle;
import com.intellij.ide.GeneralSettings;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.RecentProjectsManager;
import com.intellij.ide.highlighter.ProjectFileType;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.components.ServiceKt;
import com.intellij.openapi.components.StorageScheme;
import com.intellij.openapi.components.impl.stores.IComponentStore;
import com.intellij.openapi.components.impl.stores.IProjectStore;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ex.ProjectManagerEx;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.*;
import com.intellij.projectImport.ProjectOpenProcessor;
import com.intellij.ui.AppIcon;
import com.intellij.util.SystemProperties;
import org.jdom.JDOMException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.io.File;
import java.io.IOException;
/**
* @author Eugene Belyaev
*/
public class ProjectUtil {
private static final Logger LOG = Logger.getInstance("#com.intellij.ide.impl.ProjectUtil");
private ProjectUtil() { }
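/**
* Remembers the directory that contained the newly created project (the parent of the project directory, i.e. the
* grandparent of an .ipr file) as the last project-creation location in {@link RecentProjectsManager}.
*/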
public static void updateLastProjectLocation(final String projectFilePath) {
File lastProjectLocation = new File(projectFilePath);
if (lastProjectLocation.isFile()) {
lastProjectLocation = lastProjectLocation.getParentFile(); // for directory-based project storage
}
if (lastProjectLocation == null) { // the immediate parent of the ipr file
return;
}
lastProjectLocation = lastProjectLocation.getParentFile(); // the candidate directory to be saved
if (lastProjectLocation == null) {
return;
}
String path = lastProjectLocation.getPath();
try {
path = FileUtil.resolveShortWindowsName(path);
}
catch (IOException e) {
LOG.info(e);
return;
}
RecentProjectsManager.getInstance().setLastProjectCreationLocation(path.replace(File.separatorChar, '/'));
}
/**
* @param project cannot be null
*/
public static boolean closeAndDispose(@NotNull final Project project) {
return ProjectManagerEx.getInstanceEx().closeAndDispose(project);
}
/**
* @param path project file path
* @param projectToClose currently active project
* @param forceOpenInNewFrame forces opening in new frame
* @return the project at the given path if the path was recognized as an IDEA project file or as one of the project
* formats supported by installed importers (regardless of the opening/import result);
* null otherwise
*/
@Nullable
public static Project openOrImport(@NotNull String path, Project projectToClose, boolean forceOpenInNewFrame) {
VirtualFile virtualFile = LocalFileSystem.getInstance().refreshAndFindFileByPath(path);
if (virtualFile == null) return null;
virtualFile.refresh(false, false);
Project existing = findAndFocusExistingProjectForPath(path);
if (existing != null) return existing;
ProjectOpenProcessor strong = ProjectOpenProcessor.getStrongImportProvider(virtualFile);
if (strong != null) {
return strong.doOpenProject(virtualFile, projectToClose, forceOpenInNewFrame);
}
if (path.endsWith(ProjectFileType.DOT_DEFAULT_EXTENSION) ||
virtualFile.isDirectory() && virtualFile.findChild(Project.DIRECTORY_STORE_FOLDER) != null) {
return openProject(path, projectToClose, forceOpenInNewFrame);
}
if (virtualFile.isDirectory()) {
for (VirtualFile child : virtualFile.getChildren()) {
final String childPath = child.getPath();
if (childPath.endsWith(ProjectFileType.DOT_DEFAULT_EXTENSION)) {
return openProject(childPath, projectToClose, forceOpenInNewFrame);
}
}
}
ProjectOpenProcessor provider = ProjectOpenProcessor.getImportProvider(virtualFile);
if (provider != null) {
final Project project = provider.doOpenProject(virtualFile, projectToClose, forceOpenInNewFrame);
if (project != null) {
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
if (!project.isDisposed()) {
final ToolWindow toolWindow = ToolWindowManager.getInstance(project).getToolWindow(ToolWindowId.PROJECT_VIEW);
if (toolWindow != null) {
toolWindow.activate(null);
}
}
}
}, ModalityState.NON_MODAL);
}
return project;
}
return null;
}
@Nullable
public static Project openProject(final String path, @Nullable Project projectToClose, boolean forceOpenInNewFrame) {
File file = new File(path);
if (!file.exists()) {
Messages.showErrorDialog(IdeBundle.message("error.project.file.does.not.exist", path), CommonBundle.getErrorTitle());
return null;
}
if (file.isDirectory() && !new File(file, Project.DIRECTORY_STORE_FOLDER).exists()) {
String message = IdeBundle.message("error.project.file.does.not.exist", new File(file, Project.DIRECTORY_STORE_FOLDER).getPath());
Messages.showErrorDialog(message, CommonBundle.getErrorTitle());
return null;
}
Project existing = findAndFocusExistingProjectForPath(path);
if (existing != null) return existing;
Project[] openProjects = ProjectManager.getInstance().getOpenProjects();
if (!forceOpenInNewFrame && openProjects.length > 0) {
int exitCode = confirmOpenNewProject(false);
if (exitCode == GeneralSettings.OPEN_PROJECT_SAME_WINDOW) {
final Project toClose = projectToClose != null ? projectToClose : openProjects[openProjects.length - 1];
if (!closeAndDispose(toClose)) return null;
}
else if (exitCode != GeneralSettings.OPEN_PROJECT_NEW_WINDOW) {
return null;
}
}
ProjectManagerEx projectManager = ProjectManagerEx.getInstanceEx();
Project project = null;
try {
project = projectManager.loadAndOpenProject(path);
}
catch (IOException e) {
Messages.showMessageDialog(IdeBundle.message("error.cannot.load.project", e.getMessage()),
IdeBundle.message("title.cannot.load.project"), Messages.getErrorIcon());
}
catch (JDOMException e) {
LOG.info(e);
Messages.showMessageDialog(IdeBundle.message("error.project.file.is.corrupted"), IdeBundle.message("title.cannot.load.project"),
Messages.getErrorIcon());
}
catch (InvalidDataException e) {
LOG.info(e);
Messages.showMessageDialog(IdeBundle.message("error.project.file.is.corrupted"), IdeBundle.message("title.cannot.load.project"),
Messages.getErrorIcon());
}
return project;
}
@Nullable
private static Project findAndFocusExistingProjectForPath(String path) {
Project[] openProjects = ProjectManager.getInstance().getOpenProjects();
for (Project project : openProjects) {
if (!project.isDefault() && isSameProject(path, project)) {
focusProjectWindow(project, false);
return project;
}
}
return null;
}
/**
* @param isNewProject true when the prompt is shown for a newly created project (in that case no Cancel option is offered)
* @return {@link com.intellij.ide.GeneralSettings#OPEN_PROJECT_SAME_WINDOW},
* {@link com.intellij.ide.GeneralSettings#OPEN_PROJECT_NEW_WINDOW}, or
* {@link com.intellij.openapi.ui.Messages#CANCEL} if the user canceled the dialog
*/
public static int confirmOpenNewProject(boolean isNewProject) {
final GeneralSettings settings = GeneralSettings.getInstance();
int confirmOpenNewProject = ApplicationManager.getApplication().isUnitTestMode() ? GeneralSettings.OPEN_PROJECT_NEW_WINDOW : settings.getConfirmOpenNewProject();
if (confirmOpenNewProject == GeneralSettings.OPEN_PROJECT_ASK) {
if (isNewProject) {
int exitCode = Messages.showYesNoDialog(IdeBundle.message("prompt.open.project.in.new.frame"),
IdeBundle.message("title.new.project"),
IdeBundle.message("button.existingframe"),
IdeBundle.message("button.newframe"),
Messages.getQuestionIcon(),
new ProjectNewWindowDoNotAskOption());
return exitCode == Messages.YES ? GeneralSettings.OPEN_PROJECT_SAME_WINDOW : GeneralSettings.OPEN_PROJECT_NEW_WINDOW;
}
else {
int exitCode = Messages.showYesNoCancelDialog(IdeBundle.message("prompt.open.project.in.new.frame"),
IdeBundle.message("title.open.project"),
IdeBundle.message("button.existingframe"),
IdeBundle.message("button.newframe"),
CommonBundle.getCancelButtonText(),
Messages.getQuestionIcon(),
new ProjectNewWindowDoNotAskOption());
return exitCode == Messages.YES ? GeneralSettings.OPEN_PROJECT_SAME_WINDOW :
exitCode == Messages.NO ? GeneralSettings.OPEN_PROJECT_NEW_WINDOW : Messages.CANCEL;
}
}
return confirmOpenNewProject;
}
public static boolean isSameProject(String path, @NotNull Project project) {
IProjectStore projectStore = (IProjectStore)ServiceKt.getStateStore(project);
String toOpen = FileUtil.toSystemIndependentName(path);
String existing = projectStore.getProjectFilePath();
String existingBaseDir = projectStore.getProjectBasePath();
if (existingBaseDir == null) {
// could be null if not yet initialized
return false;
}
final File openFile = new File(toOpen);
if (openFile.isDirectory()) {
return FileUtil.pathsEqual(toOpen, existingBaseDir);
}
if (StorageScheme.DIRECTORY_BASED == projectStore.getStorageScheme()) {
// todo: check if IPR is located not under the project base dir
return FileUtil.pathsEqual(FileUtil.toSystemIndependentName(openFile.getParentFile().getPath()), existingBaseDir);
}
return FileUtil.pathsEqual(toOpen, existing);
}
public static void focusProjectWindow(final Project p, boolean executeIfAppInactive) {
FocusCommand cmd = new FocusCommand() {
@NotNull
@Override
public ActionCallback run() {
JFrame f = WindowManager.getInstance().getFrame(p);
if (f != null) {
f.toFront();
//f.requestFocus();
}
return ActionCallback.DONE;
}
};
if (executeIfAppInactive) {
AppIcon.getInstance().requestFocus((IdeFrame)WindowManager.getInstance().getFrame(p));
cmd.run();
} else {
IdeFocusManager.getInstance(p).requestFocus(cmd, true);
}
}
public static String getBaseDir() {
final String lastProjectLocation = RecentProjectsManager.getInstance().getLastProjectCreationLocation();
if (lastProjectLocation != null) {
return lastProjectLocation.replace('/', File.separatorChar);
}
final String userHome = SystemProperties.getUserHome();
//noinspection HardCodedStringLiteral
return userHome.replace('/', File.separatorChar) + File.separator + ApplicationNamesInfo.getInstance().getLowercaseProductName() +
"Projects";
}
public static boolean isDirectoryBased(@NotNull Project project) {
IComponentStore store = ServiceKt.getStateStore(project);
return store instanceof IProjectStore && StorageScheme.DIRECTORY_BASED.equals(((IProjectStore)store).getStorageScheme());
}
}
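// Illustrative usage sketch (not part of the original file); "path" and "currentProject" are assumed to be
// supplied by the caller:
//
//   Project opened = ProjectUtil.openOrImport(path, currentProject, false);
//   if (opened == null) {
//     // the path was neither an IDEA project nor a format handled by an installed importer
//   }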
|
|
/**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.interestrate.future.provider;
import com.opengamma.analytics.financial.interestrate.future.derivative.InterestRateFutureOptionMarginSecurity;
import com.opengamma.analytics.financial.model.option.pricing.analytic.formula.BlackFunctionData;
import com.opengamma.analytics.financial.model.option.pricing.analytic.formula.BlackPriceFunction;
import com.opengamma.analytics.financial.model.option.pricing.analytic.formula.EuropeanVanillaOption;
import com.opengamma.analytics.financial.model.volatility.BlackFormulaRepository;
import com.opengamma.analytics.financial.provider.description.interestrate.BlackSTIRFuturesProviderInterface;
import com.opengamma.analytics.financial.provider.description.interestrate.MulticurveProviderDiscount;
import com.opengamma.analytics.financial.provider.sensitivity.multicurve.MulticurveSensitivity;
import com.opengamma.analytics.util.amount.SurfaceValue;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.tuple.DoublesPair;
/**
* Method for pricing interest rate future options with a margining process.
* The pricing is done with a Black approach on the futures rate (1.0 - price).
* The Black parameters are represented by (expiration-strike-delay) surfaces.
* The "delay" is the time between option expiration and future last trading date,
* i.e. 0 for quarterly options and x for x-year mid-curve options.
* The future prices are computed without convexity adjustments.
*/
public class InterestRateFutureOptionMarginSecurityBlackRateMethod extends
InterestRateFutureOptionMarginSecurityGenericMethod<BlackSTIRFuturesProviderInterface> {
/**
* Creates the method unique instance.
*/
private static final InterestRateFutureOptionMarginSecurityBlackRateMethod INSTANCE =
new InterestRateFutureOptionMarginSecurityBlackRateMethod();
/**
* Constructor.
*/
public InterestRateFutureOptionMarginSecurityBlackRateMethod() {
}
/**
* Return the method unique instance.
* @return The instance.
*/
public static InterestRateFutureOptionMarginSecurityBlackRateMethod getInstance() {
return INSTANCE;
}
/**
* The Black function used in the pricing.
*/
private static final BlackPriceFunction BLACK_FUNCTION = new BlackPriceFunction();
/**
* The method used to compute the future price. It is a method without convexity adjustment.
*/
private static final InterestRateFutureSecurityDiscountingMethod METHOD_FUTURE =
InterestRateFutureSecurityDiscountingMethod.getInstance();
/**
* Computes the option security price from future price.
* @param security The future option security.
* @param blackData The Black volatility and multi-curves provider.
* @param priceFuture The price of the underlying future.
* @return The security price.
*/
public double priceFromFuturePrice(final InterestRateFutureOptionMarginSecurity security,
final BlackSTIRFuturesProviderInterface blackData, final double priceFuture) {
ArgumentChecker.notNull(security, "Option security");
ArgumentChecker.notNull(blackData, "Black data");
final double rateStrike = 1.0 - security.getStrike();
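// A call on the futures price with strike K is a put on the rate (1 - price) with strike 1 - K, hence the
// negated call/put flag below.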
final EuropeanVanillaOption option =
new EuropeanVanillaOption(rateStrike, security.getExpirationTime(), !security.isCall());
final double rateFutures = 1 - priceFuture;
final double delay = security.getUnderlyingFuture().getTradingLastTime() - security.getExpirationTime();
double volatility = blackData.getVolatility(security.getExpirationTime(), delay, security.getStrike(), priceFuture);
final BlackFunctionData dataBlack = new BlackFunctionData(rateFutures, 1.0, volatility);
final double priceSecurity = BLACK_FUNCTION.getPriceFunction(option).evaluate(dataBlack);
return priceSecurity;
}
/**
* Computes the option security price. The future price is computed without convexity adjustment.
* @param security The future option security.
* @param blackData The curve and Black volatility data.
* @return The security price.
*/
@Override
public double price(final InterestRateFutureOptionMarginSecurity security,
final BlackSTIRFuturesProviderInterface blackData) {
ArgumentChecker.notNull(security, "Option security");
ArgumentChecker.notNull(blackData, "Black data");
final double priceFuture = METHOD_FUTURE.price(security.getUnderlyingFuture(), blackData.getMulticurveProvider());
return priceFromFuturePrice(security, blackData, priceFuture);
}
/**
* Computes the option security price curve sensitivity. The future price is computed without convexity adjustment.
* It is supposed that for a given strike the volatility does not change with the curves (sticky strike).
* @param security The future option security.
* @param blackData The curve and Black volatility data.
* @return The security price curve sensitivity.
*/
@Override
public MulticurveSensitivity priceCurveSensitivity(final InterestRateFutureOptionMarginSecurity security,
final BlackSTIRFuturesProviderInterface blackData) {
ArgumentChecker.notNull(security, "Option security");
ArgumentChecker.notNull(blackData, "Black data");
// Forward sweep
final double priceFuture = METHOD_FUTURE.price(security.getUnderlyingFuture(), blackData.getMulticurveProvider());
final double rateStrike = 1.0 - security.getStrike();
final EuropeanVanillaOption option =
new EuropeanVanillaOption(rateStrike, security.getExpirationTime(), !security.isCall());
final double forward = 1 - priceFuture;
final double delay = security.getUnderlyingFuture().getTradingLastTime() - security.getExpirationTime();
double volatility = blackData.getVolatility(security.getExpirationTime(), delay, security.getStrike(), priceFuture);
final BlackFunctionData dataBlack = new BlackFunctionData(forward, 1.0, volatility);
final double[] priceAdjoint = BLACK_FUNCTION.getPriceAdjoint(option, dataBlack);
// Backward sweep
final double priceBar = 1.0;
final double forwardBar = priceAdjoint[1] * priceBar;
final double priceFutureBar = -forwardBar;
MulticurveSensitivity priceFutureDerivative =
METHOD_FUTURE.priceCurveSensitivity(security.getUnderlyingFuture(), blackData.getMulticurveProvider());
return priceFutureDerivative.multipliedBy(priceFutureBar);
}
/**
* Computes the option security price volatility sensitivity. The future price is computed without convexity adjustment.
* @param security The future option security.
* @param blackData The curve and Black volatility data.
* @return The security price Black volatility sensitivity.
*/
public SurfaceValue priceBlackSensitivity(final InterestRateFutureOptionMarginSecurity security,
final BlackSTIRFuturesProviderInterface blackData) {
ArgumentChecker.notNull(security, "Option security");
ArgumentChecker.notNull(blackData, "Black data");
// Forward sweep
final double priceFuture = METHOD_FUTURE.price(security.getUnderlyingFuture(), blackData.getMulticurveProvider());
final double strike = security.getStrike();
final double rateStrike = 1.0 - strike;
final EuropeanVanillaOption option = new EuropeanVanillaOption(rateStrike, security.getExpirationTime(), !security.isCall());
final double forward = 1 - priceFuture;
final double delay = security.getUnderlyingFuture().getTradingLastTime() - security.getExpirationTime();
double volatility = blackData.getVolatility(security.getExpirationTime(), delay, security.getStrike(), priceFuture);
final BlackFunctionData dataBlack = new BlackFunctionData(forward, 1.0, volatility);
final double[] priceAdjoint = BLACK_FUNCTION.getPriceAdjoint(option, dataBlack);
// Backward sweep
final double priceBar = 1.0;
final double volatilityBar = priceAdjoint[2] * priceBar;
final DoublesPair expiryStrikeDelay = DoublesPair.of(security.getExpirationTime(), strike);
final SurfaceValue sensi = SurfaceValue.from(expiryStrikeDelay, volatilityBar);
return sensi;
}
/**
* Computes the option security price delta, wrt the futures price dV/df. The futures price is computed without convexity adjustment.
* It is supposed that for a given strike the volatility does not change with the curves.
* @param security The future option security.
* @param blackData The curve and Black volatility data.
* @return The delta.
*/
public double priceDelta(final InterestRateFutureOptionMarginSecurity security,
final BlackSTIRFuturesProviderInterface blackData) {
ArgumentChecker.notNull(security, "Option security");
ArgumentChecker.notNull(blackData, "Black data");
// Forward sweep
final double priceFutures = METHOD_FUTURE.price(security.getUnderlyingFuture(), blackData);
final double rateStrike = 1.0 - security.getStrike();
final EuropeanVanillaOption option = new EuropeanVanillaOption(rateStrike, security.getExpirationTime(), !security.isCall());
final double forward = 1 - priceFutures;
final double delay = security.getUnderlyingFuture().getTradingLastTime() - security.getExpirationTime();
double volatility = blackData.getVolatility(security.getExpirationTime(), delay, security.getStrike(), priceFutures);
final BlackFunctionData dataBlack = new BlackFunctionData(forward, 1.0, volatility);
final double[] priceAdjoint = BLACK_FUNCTION.getPriceAdjoint(option, dataBlack);
return -priceAdjoint[1];
}
/**
* Computes the option's value gamma, the second derivative of the security price wrt underlying futures rate.
* The future price is computed without convexity adjustment.
* @param security The future option security.
* @param blackData The curve and Black volatility data.
* @return The gamma.
*/
public double priceGamma(final InterestRateFutureOptionMarginSecurity security,
final BlackSTIRFuturesProviderInterface blackData) {
ArgumentChecker.notNull(security, "Option security");
ArgumentChecker.notNull(blackData, "Black data");
// Forward sweep
final double priceFutures = METHOD_FUTURE.price(security.getUnderlyingFuture(), blackData.getMulticurveProvider());
final double strike = security.getStrike();
final double rateStrike = 1.0 - strike;
EuropeanVanillaOption option = new EuropeanVanillaOption(rateStrike, security.getExpirationTime(), !security.isCall());
final double forward = 1 - priceFutures;
final double delay = security.getUnderlyingFuture().getTradingLastTime() - security.getExpirationTime();
double volatility = blackData.getVolatility(security.getExpirationTime(), delay, security.getStrike(), priceFutures);
final BlackFunctionData dataBlack = new BlackFunctionData(forward, 1.0, volatility);
final double[] firstDerivs = new double[3];
final double[][] secondDerivs = new double[3][3];
BLACK_FUNCTION.getPriceAdjoint2(option, dataBlack, firstDerivs, secondDerivs);
return secondDerivs[0][0];
}
/**
* Computes the option security vega. The future price is computed without convexity adjustment.
* @param security The future option security.
* @param blackData The curve and Black volatility data.
* @return Black lognormal vega.
*/
public double priceVega(final InterestRateFutureOptionMarginSecurity security,
final BlackSTIRFuturesProviderInterface blackData) {
// Forward sweep
final double priceFutures = METHOD_FUTURE.price(security.getUnderlyingFuture(), blackData);
final double strike = security.getStrike();
final double rateStrike = 1.0 - strike;
EuropeanVanillaOption option = new EuropeanVanillaOption(rateStrike, security.getExpirationTime(), !security.isCall());
final double forward = 1 - priceFutures;
final double delay = security.getUnderlyingFuture().getTradingLastTime() - security.getExpirationTime();
double volatility = blackData.getVolatility(security.getExpirationTime(), delay, security.getStrike(), priceFutures);
final BlackFunctionData dataBlack = new BlackFunctionData(forward, 1.0, volatility);
final double[] priceAdjoint = BLACK_FUNCTION.getPriceAdjoint(option, dataBlack);
return priceAdjoint[2];
}
/**
* Computes the option's theta. The future price is computed without convexity adjustment.
* @param security The future option security.
* @param blackData The curve and Black volatility data.
* @return the theta.
*/
public double priceTheta(final InterestRateFutureOptionMarginSecurity security,
final BlackSTIRFuturesProviderInterface blackData) {
ArgumentChecker.notNull(security, "security");
ArgumentChecker.notNull(blackData, "black");
final double priceFutures = METHOD_FUTURE.price(security.getUnderlyingFuture(), blackData);
final double strike = security.getStrike();
final double rateStrike = 1.0 - strike;
final double forward = 1 - priceFutures;
double delay = security.getUnderlyingFuture().getTradingLastTime() - security.getExpirationTime();
double volatility = blackData.getVolatility(security.getExpirationTime(), delay, security.getStrike(), priceFutures);
double rate = ((MulticurveProviderDiscount) blackData.getMulticurveProvider()).getCurve(security.getCurrency()).
getInterestRate(security.getExpirationTime());
// the Black theta formula takes the time to expiry (not the expiry/last-trade delay); the option on the rate uses the negated call/put flag, as in the other methods of this class
return BlackFormulaRepository.theta(forward, rateStrike, security.getExpirationTime(), volatility, !security.isCall(), rate);
}
/**
* Interpolates and returns the option's implied volatility.
* The future price is computed without convexity adjustment.
* @param security The future option security.
* @param blackData The curve and Black volatility data.
* @return Lognormal Implied Volatility.
*/
public double impliedVolatility(final InterestRateFutureOptionMarginSecurity security,
final BlackSTIRFuturesProviderInterface blackData) {
ArgumentChecker.notNull(security, "Option security");
ArgumentChecker.notNull(blackData, "Black data");
double priceFutures = METHOD_FUTURE.price(security.getUnderlyingFuture(), blackData);
double delay = security.getUnderlyingFuture().getTradingLastTime() - security.getExpirationTime();
return blackData.getVolatility(security.getExpirationTime(), delay, security.getStrike(), priceFutures);
}
/**
* Computes the underlying future security price. The future price is computed without convexity adjustment.
* @param security The future option security.
* @param blackData The curve and Black volatility data.
* @return The underlying futures price.
*/
public double underlyingFuturesPrice(final InterestRateFutureOptionMarginSecurity security,
final BlackSTIRFuturesProviderInterface blackData) {
return METHOD_FUTURE.price(security.getUnderlyingFuture(), blackData.getMulticurveProvider());
}
}
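// Illustrative usage sketch (not part of the original file); "security" and "blackData" stand for an
// already-built InterestRateFutureOptionMarginSecurity and a populated BlackSTIRFuturesProviderInterface:
//
//   InterestRateFutureOptionMarginSecurityBlackRateMethod method =
//       InterestRateFutureOptionMarginSecurityBlackRateMethod.getInstance();
//   double price = method.price(security, blackData);
//   double vega = method.priceVega(security, blackData);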
|
|
/*
* Copyright 2007 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.CRC32;
/**
* A {@link Compiler} pass for aliasing strings. String declarations
* contribute to garbage collection, which becomes a problem in large
* applications. Strings that should be aliased occur many times in the code,
* or occur on codepaths that get executed frequently.
*
*/
class AliasStrings extends AbstractPostOrderCallback
implements CompilerPass {
private static final Logger logger =
Logger.getLogger(AliasStrings.class.getName());
/** Prefix for variable names for the aliased strings */
private static final String STRING_ALIAS_PREFIX = "$$S_";
private final AbstractCompiler compiler;
private final JSModuleGraph moduleGraph;
// Regular expression matcher for blacklisting strings from aliasing.
private Matcher blacklist = null;
/**
* Strings that can be aliased, or null if all strings except 'undefined'
* should be aliased
*/
private final Set<String> aliasableStrings;
private final boolean outputStringUsage;
private final SortedMap<String, StringInfo> stringInfoMap = new TreeMap<>();
private final Set<String> usedHashedAliases = new LinkedHashSet<>();
/**
* Map from module to the node in that module that should parent any string
* variable declarations that have to be moved into that module
*/
private final Map<JSModule, Node> moduleVarParentMap =
new HashMap<>();
/** Package private. This value is AND-ed with the computed hash value to allow
* unit tests to reduce the range of hash values and so exercise collision cases. */
long unitTestHashReductionMask = ~0L;
/**
* Creates an instance.
*
* @param compiler The compiler
* @param moduleGraph The module graph, or null if there are no modules
* @param strings Set of strings to be aliased. If null, all strings except
* 'undefined' will be aliased.
* @param blacklistRegex The regex to blacklist words in aliasing strings.
* @param outputStringUsage Outputs all strings and the number of times they
* were used in the application to the server log.
*/
AliasStrings(AbstractCompiler compiler,
JSModuleGraph moduleGraph,
Set<String> strings,
String blacklistRegex,
boolean outputStringUsage) {
this.compiler = compiler;
this.moduleGraph = moduleGraph;
this.aliasableStrings = strings;
if (blacklistRegex.length() != 0) {
this.blacklist = Pattern.compile(blacklistRegex).matcher("");
} else {
this.blacklist = null;
}
this.outputStringUsage = outputStringUsage;
}
@Override
public void process(Node externs, Node root) {
logger.fine("Aliasing common strings");
// Traverse the tree and collect strings
NodeTraversal.traverse(compiler, root, this);
// 1st edit pass: replace some strings with aliases
replaceStringsWithAliases();
// 2nd edit pass: add variable declarations for aliased strings.
addAliasDeclarationNodes();
if (outputStringUsage) {
outputStringUsage();
}
}
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
if (n.isString() &&
!parent.isGetProp() &&
!parent.isRegExp()) {
String str = n.getString();
// "undefined" is special-cased, since it needs to be used when JS code
// is unloading and therefore variable references aren't available.
// This is because of a bug in Firefox.
if ("undefined".equals(str)) {
return;
}
if (blacklist != null && blacklist.reset(str).find()) {
return;
}
if (aliasableStrings == null || aliasableStrings.contains(str)) {
StringOccurrence occurrence = new StringOccurrence(n, parent);
StringInfo info = getOrCreateStringInfo(str);
info.occurrences.add(occurrence);
info.numOccurrences++;
if (t.inGlobalScope() || isInThrowExpression(n)) {
info.numOccurrencesInfrequentlyExecuted++;
}
// The current module.
JSModule module = t.getModule();
if (info.numOccurrences != 1) {
// Check whether the current module depends on the module containing
// the declaration.
if (module != null &&
info.moduleToContainDecl != null &&
module != info.moduleToContainDecl &&
!moduleGraph.dependsOn(module, info.moduleToContainDecl)) {
// We need to declare this string in the deepest module in the
// module dependency graph that both of these modules depend on.
module = moduleGraph.getDeepestCommonDependency(
module, info.moduleToContainDecl);
} else {
// use the previously saved insertion location.
return;
}
}
Node varParent = moduleVarParentMap.get(module);
if (varParent == null) {
varParent = compiler.getNodeForCodeInsertion(module);
moduleVarParentMap.put(module, varParent);
}
info.moduleToContainDecl = module;
info.parentForNewVarDecl = varParent;
info.siblingToInsertVarDeclBefore = varParent.getFirstChild();
}
}
}
/**
* Looks up the {@link StringInfo} object for a JavaScript string. Creates
* it if necessary.
*/
private StringInfo getOrCreateStringInfo(String string) {
StringInfo info = stringInfoMap.get(string);
if (info == null) {
info = new StringInfo(stringInfoMap.size());
stringInfoMap.put(string, info);
}
return info;
}
/**
* Is the {@link Node} currently within a 'throw' expression?
*/
private static boolean isInThrowExpression(Node n) {
// Look up the traversal stack to find a THROW node
for (Node ancestor : n.getAncestors()) {
switch (ancestor.getType()) {
case Token.THROW:
return true;
case Token.IF:
case Token.WHILE:
case Token.DO:
case Token.FOR:
case Token.SWITCH:
case Token.CASE:
case Token.DEFAULT_CASE:
case Token.BLOCK:
case Token.SCRIPT:
case Token.FUNCTION:
case Token.TRY:
case Token.CATCH:
case Token.RETURN:
case Token.EXPR_RESULT:
// early exit - these node types can't be within a THROW
return false;
}
}
return false;
}
/**
* Replace strings with references to alias variables.
*/
private void replaceStringsWithAliases() {
for (Entry<String, StringInfo> entry : stringInfoMap.entrySet()) {
String literal = entry.getKey();
StringInfo info = entry.getValue();
if (shouldReplaceWithAlias(literal, info)) {
for (StringOccurrence occurrence : info.occurrences) {
replaceStringWithAliasName(
occurrence, info.getVariableName(literal), info);
}
}
}
}
/**
* Creates a var declaration for each aliased string. Var declarations are
* inserted as close to the first use of the string as possible.
*/
private void addAliasDeclarationNodes() {
for (Entry<String, StringInfo> entry : stringInfoMap.entrySet()) {
StringInfo info = entry.getValue();
if (!info.isAliased) {
continue;
}
String alias = info.getVariableName(entry.getKey());
Node var = IR.var(IR.name(alias), IR.string(entry.getKey()));
if (info.siblingToInsertVarDeclBefore == null) {
info.parentForNewVarDecl.addChildToFront(var);
} else {
info.parentForNewVarDecl.addChildBefore(
var, info.siblingToInsertVarDeclBefore);
}
compiler.reportCodeChange();
}
}
/**
* Dictates the policy for replacing a string with an alias.
*
* @param str The string literal
* @param info Accumulated information about a string
*/
private static boolean shouldReplaceWithAlias(String str, StringInfo info) {
// Optimize for application performance. If there are any uses of the
// string that are not 'infrequent uses', assume they are frequent and
// create an alias.
if (info.numOccurrences > info.numOccurrencesInfrequentlyExecuted) {
return true;
}
// Optimize for code size. Are aliases smaller than strings?
//
// This logic optimizes for the size of uncompressed code, but it tends to
// get good results for the size of the gzipped code too.
//
// gzip actually prefers that strings are not aliased - it compresses N
// string literals better than 1 string literal and N+1 short variable
// names, provided each string is within 32k of the previous copy. We
// follow the uncompressed logic as insurance against there being multiple
// strings more than 32k apart.
int sizeOfLiteral = 2 + str.length();
int sizeOfStrings = info.numOccurrences * sizeOfLiteral;
int sizeOfVariable = 3;
// '6' comes from: 'var =;' in var XXX="...";
int sizeOfAliases = 6 + sizeOfVariable + sizeOfLiteral // declaration
+ info.numOccurrences * sizeOfVariable; // + uses
return sizeOfAliases < sizeOfStrings;
}
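// Worked example for the size heuristic above: a 10-character string used 4 times costs
// 4 * (2 + 10) = 48 characters inline, while aliasing costs 6 + 3 + (2 + 10) + 4 * 3 = 33,
// so the string is aliased.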
/**
* Replaces a string literal with a reference to the string's alias variable.
*/
private void replaceStringWithAliasName(StringOccurrence occurrence,
String name,
StringInfo info) {
occurrence.parent.replaceChild(occurrence.node,
IR.name(name));
info.isAliased = true;
compiler.reportCodeChange();
}
/**
* Outputs a log of all strings used more than once in the code.
*/
private void outputStringUsage() {
StringBuilder sb = new StringBuilder("Strings used more than once:\n");
for (Entry<String, StringInfo> stringInfoEntry : stringInfoMap.entrySet()) {
StringInfo info = stringInfoEntry.getValue();
if (info.numOccurrences > 1) {
sb.append(info.numOccurrences);
sb.append(": ");
sb.append(stringInfoEntry.getKey());
sb.append('\n');
}
}
// TODO(user): Make this save to file OR output to the application
logger.fine(sb.toString());
}
// -------------------------------------------------------------------------
/**
* A class that holds the location of a single JavaScript string literal
*/
private static final class StringOccurrence {
final Node node;
final Node parent;
StringOccurrence(Node node, Node parent) {
this.node = node;
this.parent = parent;
}
}
/**
* A class that holds information about a JavaScript string that might become
* aliased.
*/
private final class StringInfo {
final int id;
boolean isAliased; // set to 'true' when reference to alias created
final List<StringOccurrence> occurrences;
int numOccurrences;
int numOccurrencesInfrequentlyExecuted;
JSModule moduleToContainDecl;
Node parentForNewVarDecl;
Node siblingToInsertVarDeclBefore;
String aliasName;
StringInfo(int id) {
this.id = id;
this.occurrences = new ArrayList<>();
this.isAliased = false;
}
/** Returns the JS variable name to be substituted for this string. */
String getVariableName(String stringLiteral) {
if (aliasName == null) {
aliasName =
encodeStringAsIdentifier(STRING_ALIAS_PREFIX, stringLiteral);
}
return aliasName;
}
/**
* Returns a legal identifier that uniquely characterizes string 's'.
*
* We want the identifier to be a function of the string value because that
* makes the identifiers stable as the program is changed.
*
* The digits of a good hash function would be adequate, but for short
* strings the following algorithm is easier to work with for unit tests.
*
* ASCII alphanumerics are mapped to themselves. Other characters are
* mapped to $XXX or $XXX_ where XXX is a variable number of hex digits.
* The underscore is inserted as necessary to avoid ambiguity when the
* character following is a hex digit. E.g. '\n1' maps to '$a_1',
* distinguished by the underscore from '\u00A1' which maps to '$a1'.
*
* If the string is short enough, this is sufficient. Longer strings are
* truncated after encoding an initial prefix and appended with a hash
* value.
*/
String encodeStringAsIdentifier(String prefix, String s) {
// Limit to avoid generating very long identifiers
final int maxLimit = 20;
final int length = s.length();
final int limit = Math.min(length, maxLimit);
StringBuilder sb = new StringBuilder();
sb.append(prefix);
boolean protectHex = false;
for (int i = 0; i < limit; i++) {
char ch = s.charAt(i);
if (protectHex) {
if ((ch >= '0' && ch <= '9') ||
(ch >= 'a' && ch <= 'f')) { // toHexString generates lowercase
sb.append('_');
}
protectHex = false;
}
if ((ch >= '0' && ch <= '9') ||
(ch >= 'A' && ch <= 'Z') ||
(ch >= 'a' && ch <= 'z')) {
sb.append(ch);
} else {
sb.append('$');
sb.append(Integer.toHexString(ch));
protectHex = true;
}
}
if (length == limit) {
return sb.toString();
}
// The identifier is not unique because we omitted part, so add a
// checksum as a hashcode.
CRC32 crc32 = new CRC32();
crc32.update(s.getBytes(UTF_8));
long hash = crc32.getValue() & unitTestHashReductionMask;
sb.append('_');
sb.append(Long.toHexString(hash));
String encoded = sb.toString();
if (!usedHashedAliases.add(encoded)) {
// A collision has been detected (which is very rare). Use the sequence
// id to break the tie. This means that the name is no longer invariant
// across source code changes and recompilations.
encoded += "_" + id;
}
return encoded;
}
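// Example of the encoding above: encodeStringAsIdentifier("$$S_", "a.b") yields "$$S_a$2e_b"
// ('.' is hex 2e and the following 'b' is a hex digit, so an underscore is inserted).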
}
}
|
|
/*
Copyright 2014-2016 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.scenekit;
import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.scenekit.struct.SCNVector3;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.ByValue;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
/**
* SCNPhysicsHingeJoint
* <p>
* SCNPhysicsHingeJoint makes two bodies move as if they were connected by a hinge. It is, for example, suitable for doors or chains.
*/
@Generated
@Library("SceneKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class SCNPhysicsHingeJoint extends SCNPhysicsBehavior {
static {
NatJ.register();
}
@Generated
protected SCNPhysicsHingeJoint(Pointer peer) {
super(peer);
}
@Generated
@Selector("accessInstanceVariablesDirectly")
public static native boolean accessInstanceVariablesDirectly();
@Generated
@Owned
@Selector("alloc")
public static native SCNPhysicsHingeJoint alloc();
@Owned
@Generated
@Selector("allocWithZone:")
public static native SCNPhysicsHingeJoint allocWithZone(VoidPtr zone);
@Generated
@Selector("automaticallyNotifiesObserversForKey:")
public static native boolean automaticallyNotifiesObserversForKey(String key);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:")
public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
@Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
@Mapped(ObjCObjectMapper.class) Object anArgument);
@Generated
@Selector("classFallbacksForKeyedArchiver")
public static native NSArray<String> classFallbacksForKeyedArchiver();
@Generated
@Selector("classForKeyedUnarchiver")
public static native Class classForKeyedUnarchiver();
@Generated
@Selector("debugDescription")
public static native String debugDescription_static();
@Generated
@Selector("description")
public static native String description_static();
@Generated
@Selector("hash")
@NUInt
public static native long hash_static();
@Generated
@Selector("instanceMethodForSelector:")
@FunctionPtr(name = "call_instanceMethodForSelector_ret")
public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);
@Generated
@Selector("instanceMethodSignatureForSelector:")
public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);
@Generated
@Selector("instancesRespondToSelector:")
public static native boolean instancesRespondToSelector(SEL aSelector);
@Generated
@Selector("isSubclassOfClass:")
public static native boolean isSubclassOfClass(Class aClass);
/**
* Initializes and returns a physics hinge joint.
* The hinge attaches body to a specific location in 3d space specified by "anchor" and relative to the node that owns the body. "axis" specifies the axis of rotation for "body".
*/
@Generated
@Selector("jointWithBody:axis:anchor:")
public static native SCNPhysicsHingeJoint jointWithBodyAxisAnchor(SCNPhysicsBody body, @ByValue SCNVector3 axis,
@ByValue SCNVector3 anchor);
/**
* Initializes and returns a physics hinge joint.
* The hinge attaches bodyA and bodyB on anchorA and anchorB respectively. "axisA" and "axisB" specify the axis of rotation for bodyA and bodyB.
*/
@Generated
@Selector("jointWithBodyA:axisA:anchorA:bodyB:axisB:anchorB:")
public static native SCNPhysicsHingeJoint jointWithBodyAAxisAAnchorABodyBAxisBAnchorB(SCNPhysicsBody bodyA,
@ByValue SCNVector3 axisA, @ByValue SCNVector3 anchorA, SCNPhysicsBody bodyB, @ByValue SCNVector3 axisB,
@ByValue SCNVector3 anchorB);
@Generated
@Selector("keyPathsForValuesAffectingValueForKey:")
public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);
@Generated
@Owned
@Selector("new")
public static native SCNPhysicsHingeJoint new_objc();
@Generated
@Selector("resolveClassMethod:")
public static native boolean resolveClassMethod(SEL sel);
@Generated
@Selector("resolveInstanceMethod:")
public static native boolean resolveInstanceMethod(SEL sel);
@Generated
@Selector("setVersion:")
public static native void setVersion_static(@NInt long aVersion);
@Generated
@Selector("superclass")
public static native Class superclass_static();
@Generated
@Selector("supportsSecureCoding")
public static native boolean supportsSecureCoding();
@Generated
@Selector("version")
@NInt
public static native long version_static();
/**
* the anchor point on which bodyA is attached
*/
@Generated
@Selector("anchorA")
@ByValue
public native SCNVector3 anchorA();
/**
* the anchor point on which bodyB is attached
*/
@Generated
@Selector("anchorB")
@ByValue
public native SCNVector3 anchorB();
/**
* the axis of rotation of bodyA
*/
@Generated
@Selector("axisA")
@ByValue
public native SCNVector3 axisA();
/**
* the axis of rotation of bodyB
*/
@Generated
@Selector("axisB")
@ByValue
public native SCNVector3 axisB();
/**
* the first body constrained by the hinge
*/
@Generated
@Selector("bodyA")
public native SCNPhysicsBody bodyA();
/**
* the second body attached to the hinge.
*/
@Generated
@Selector("bodyB")
public native SCNPhysicsBody bodyB();
@Generated
@Selector("init")
public native SCNPhysicsHingeJoint init();
@Generated
@Selector("initWithCoder:")
public native SCNPhysicsHingeJoint initWithCoder(NSCoder coder);
/**
* the anchor point on which bodyA is attached
*/
@Generated
@Selector("setAnchorA:")
public native void setAnchorA(@ByValue SCNVector3 value);
/**
* the anchor point on which bodyB is attached
*/
@Generated
@Selector("setAnchorB:")
public native void setAnchorB(@ByValue SCNVector3 value);
/**
* the axis of rotation of bodyA
*/
@Generated
@Selector("setAxisA:")
public native void setAxisA(@ByValue SCNVector3 value);
/**
* the axis of rotation of bodyB
*/
@Generated
@Selector("setAxisB:")
public native void setAxisB(@ByValue SCNVector3 value);
@Generated
@ProtocolClassMethod("supportsSecureCoding")
public boolean _supportsSecureCoding() {
return supportsSecureCoding();
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.cleaner;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FilterFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.StoppableImplementation;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category({MasterTests.class, SmallTests.class})
public class TestCleanerChore {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestCleanerChore.class);
private static final Logger LOG = LoggerFactory.getLogger(TestCleanerChore.class);
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
private static DirScanPool POOL;
@BeforeClass
public static void setup() {
POOL = DirScanPool.getHFileCleanerScanPool(UTIL.getConfiguration());
}
@AfterClass
public static void cleanup() throws Exception {
// delete the test directory and shut down the shared scan pool
UTIL.cleanupTestDir();
POOL.shutdownNow();
}
@Test
public void testSavesFilesOnRequest() throws Exception {
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
Path testDir = UTIL.getDataTestDir();
FileSystem fs = UTIL.getTestFileSystem();
String confKey = "hbase.test.cleaner.delegates";
conf.set(confKey, NeverDelete.class.getName());
AllValidPaths chore =
new AllValidPaths("test-file-cleaner", stop, conf, fs, testDir, confKey, POOL);
// create the directory layout in the directory to clean
Path parent = new Path(testDir, "parent");
Path file = new Path(parent, "someFile");
fs.mkdirs(parent);
// touch a new file
fs.create(file).close();
assertTrue("Test file didn't get created.", fs.exists(file));
// run the chore
chore.chore();
// verify all the files were preserved
assertTrue("File shouldn't have been deleted", fs.exists(file));
assertTrue("directory shouldn't have been deleted", fs.exists(parent));
}
@Test
public void retriesIOExceptionInStatus() throws Exception {
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
Path testDir = UTIL.getDataTestDir();
FileSystem fs = UTIL.getTestFileSystem();
String confKey = "hbase.test.cleaner.delegates";
Path child = new Path(testDir, "child");
Path file = new Path(child, "file");
fs.mkdirs(child);
fs.create(file).close();
assertTrue("test file didn't get created.", fs.exists(file));
final AtomicBoolean fails = new AtomicBoolean(true);
FilterFileSystem filtered = new FilterFileSystem(fs) {
public FileStatus[] listStatus(Path f) throws IOException {
if (fails.get()) {
throw new IOException("whomp whomp.");
}
return fs.listStatus(f);
}
};
AllValidPaths chore =
new AllValidPaths("test-retry-ioe", stop, conf, filtered, testDir, confKey, POOL);
// trouble talking to the filesystem
Boolean result = chore.runCleaner();
// verify that it couldn't clean the files.
assertTrue("test rig failed to inject failure.", fs.exists(file));
assertTrue("test rig failed to inject failure.", fs.exists(child));
// and verify that it accurately reported the failure.
assertFalse("chore should report that it failed.", result);
// filesystem is back
fails.set(false);
result = chore.runCleaner();
// verify everything is gone.
assertFalse("file should have been destroyed.", fs.exists(file));
assertFalse("directory should have been destroyed.", fs.exists(child));
// and verify that it accurately reported success.
assertTrue("chore should claim it succeeded.", result);
}
@Test
public void testDeletesEmptyDirectories() throws Exception {
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
Path testDir = UTIL.getDataTestDir();
FileSystem fs = UTIL.getTestFileSystem();
String confKey = "hbase.test.cleaner.delegates";
conf.set(confKey, AlwaysDelete.class.getName());
AllValidPaths chore =
new AllValidPaths("test-file-cleaner", stop, conf, fs, testDir, confKey, POOL);
// create the directory layout in the directory to clean
Path parent = new Path(testDir, "parent");
Path child = new Path(parent, "child");
Path emptyChild = new Path(parent, "emptyChild");
Path file = new Path(child, "someFile");
fs.mkdirs(child);
fs.mkdirs(emptyChild);
// touch a new file
fs.create(file).close();
// also create a file in the top level directory
Path topFile = new Path(testDir, "topFile");
fs.create(topFile).close();
assertTrue("Test file didn't get created.", fs.exists(file));
assertTrue("Test file didn't get created.", fs.exists(topFile));
// run the chore
chore.chore();
// verify all the files got deleted
assertFalse("File didn't get deleted", fs.exists(topFile));
assertFalse("File didn't get deleted", fs.exists(file));
assertFalse("Empty directory didn't get deleted", fs.exists(child));
assertFalse("Empty directory didn't get deleted", fs.exists(parent));
}
/**
* Test to make sure that we don't attempt to ask the delegate whether or not we should preserve a
* directory.
* @throws Exception on failure
*/
@Test
public void testDoesNotCheckDirectories() throws Exception {
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
Path testDir = UTIL.getDataTestDir();
FileSystem fs = UTIL.getTestFileSystem();
String confKey = "hbase.test.cleaner.delegates";
conf.set(confKey, AlwaysDelete.class.getName());
AllValidPaths chore =
new AllValidPaths("test-file-cleaner", stop, conf, fs, testDir, confKey, POOL);
// spy on the delegate to ensure that we don't check for directories
AlwaysDelete delegate = (AlwaysDelete) chore.cleanersChain.get(0);
AlwaysDelete spy = Mockito.spy(delegate);
chore.cleanersChain.set(0, spy);
// create the directory layout in the directory to clean
Path parent = new Path(testDir, "parent");
Path file = new Path(parent, "someFile");
fs.mkdirs(parent);
assertTrue("Test parent didn't get created.", fs.exists(parent));
// touch a new file
fs.create(file).close();
assertTrue("Test file didn't get created.", fs.exists(file));
FileStatus fStat = fs.getFileStatus(parent);
chore.chore();
// make sure we never checked the directory
Mockito.verify(spy, Mockito.never()).isFileDeletable(fStat);
Mockito.reset(spy);
}
@Test
public void testStoppedCleanerDoesNotDeleteFiles() throws Exception {
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
Path testDir = UTIL.getDataTestDir();
FileSystem fs = UTIL.getTestFileSystem();
String confKey = "hbase.test.cleaner.delegates";
conf.set(confKey, AlwaysDelete.class.getName());
AllValidPaths chore =
new AllValidPaths("test-file-cleaner", stop, conf, fs, testDir, confKey, POOL);
// also create a file in the top level directory
Path topFile = new Path(testDir, "topFile");
fs.create(topFile).close();
assertTrue("Test file didn't get created.", fs.exists(topFile));
// stop the chore
stop.stop("testing stop");
// run the chore
chore.chore();
// test that the file still exists
assertTrue("File got deleted while chore was stopped", fs.exists(topFile));
}
/**
* While cleaning a directory, all the files in the directory may be deleted, but there may be
* another file added, in which case the directory shouldn't be deleted.
* @throws IOException on failure
*/
@Test
public void testCleanerDoesNotDeleteDirectoryWithLateAddedFiles() throws IOException {
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
final Path testDir = UTIL.getDataTestDir();
final FileSystem fs = UTIL.getTestFileSystem();
String confKey = "hbase.test.cleaner.delegates";
conf.set(confKey, AlwaysDelete.class.getName());
AllValidPaths chore =
new AllValidPaths("test-file-cleaner", stop, conf, fs, testDir, confKey, POOL);
// spy on the delegate to ensure that we don't check for directories
AlwaysDelete delegate = (AlwaysDelete) chore.cleanersChain.get(0);
AlwaysDelete spy = Mockito.spy(delegate);
chore.cleanersChain.set(0, spy);
// create the directory layout in the directory to clean
final Path parent = new Path(testDir, "parent");
Path file = new Path(parent, "someFile");
fs.mkdirs(parent);
// touch a new file
fs.create(file).close();
assertTrue("Test file didn't get created.", fs.exists(file));
final Path addedFile = new Path(parent, "addedFile");
// when we attempt to delete the original file, add another file in the same directory
Mockito.doAnswer(new Answer<Boolean>() {
@Override
public Boolean answer(InvocationOnMock invocation) throws Throwable {
fs.create(addedFile).close();
CommonFSUtils.logFileSystemState(fs, testDir, LOG);
return (Boolean) invocation.callRealMethod();
}
}).when(spy).isFileDeletable(Mockito.any());
// run the chore
chore.chore();
// make sure all the directories + added file exist, but the original file is deleted
assertTrue("Added file unexpectedly deleted", fs.exists(addedFile));
assertTrue("Parent directory deleted unexpectedly", fs.exists(parent));
assertFalse("Original file unexpectedly retained", fs.exists(file));
Mockito.verify(spy, Mockito.times(1)).isFileDeletable(Mockito.any());
Mockito.reset(spy);
}
/**
* The cleaner runs in a loop, where it first checks whether all the files under a directory can be
* deleted. If they all can, then we try to delete the directory. However, a file may be added to
* that directory after the original check. This test ensures that we don't accidentally delete that
* directory and don't get spurious IOExceptions.
* <p>
* This was from HBASE-7465.
* @throws Exception on failure
*/
@Test
public void testNoExceptionFromDirectoryWithRacyChildren() throws Exception {
UTIL.cleanupTestDir();
Stoppable stop = new StoppableImplementation();
// need to use a localutil to not break the rest of the test that runs on the local FS, which
// gets hosed when we start to use a minicluster.
HBaseTestingUtil localUtil = new HBaseTestingUtil();
Configuration conf = localUtil.getConfiguration();
final Path testDir = UTIL.getDataTestDir();
final FileSystem fs = UTIL.getTestFileSystem();
LOG.debug("Writing test data to: " + testDir);
String confKey = "hbase.test.cleaner.delegates";
conf.set(confKey, AlwaysDelete.class.getName());
AllValidPaths chore =
new AllValidPaths("test-file-cleaner", stop, conf, fs, testDir, confKey, POOL);
// spy on the delegate to ensure that we don't check for directories
AlwaysDelete delegate = (AlwaysDelete) chore.cleanersChain.get(0);
AlwaysDelete spy = Mockito.spy(delegate);
chore.cleanersChain.set(0, spy);
// create the directory layout in the directory to clean
final Path parent = new Path(testDir, "parent");
Path file = new Path(parent, "someFile");
fs.mkdirs(parent);
// touch a new file
fs.create(file).close();
assertTrue("Test file didn't get created.", fs.exists(file));
final Path racyFile = new Path(parent, "addedFile");
// when we attempt to delete the original file, add another file in the same directory
Mockito.doAnswer(new Answer<Boolean>() {
@Override
public Boolean answer(InvocationOnMock invocation) throws Throwable {
fs.create(racyFile).close();
CommonFSUtils.logFileSystemState(fs, testDir, LOG);
return (Boolean) invocation.callRealMethod();
}
}).when(spy).isFileDeletable(Mockito.any());
// run the chore
chore.chore();
// make sure all the directories + added file exist, but the original file is deleted
assertTrue("Added file unexpectedly deleted", fs.exists(racyFile));
assertTrue("Parent directory deleted unexpectedly", fs.exists(parent));
assertFalse("Original file unexpectedly retained", fs.exists(file));
Mockito.verify(spy, Mockito.times(1)).isFileDeletable(Mockito.any());
}
@Test
public void testDeleteFileWithCleanerEnabled() throws Exception {
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
Path testDir = UTIL.getDataTestDir();
FileSystem fs = UTIL.getTestFileSystem();
String confKey = "hbase.test.cleaner.delegates";
conf.set(confKey, AlwaysDelete.class.getName());
AllValidPaths chore =
new AllValidPaths("test-file-cleaner", stop, conf, fs, testDir, confKey, POOL);
// Enable cleaner
chore.setEnabled(true);
// create the directory layout in the directory to clean
Path parent = new Path(testDir, "parent");
Path child = new Path(parent, "child");
Path file = new Path(child, "someFile");
fs.mkdirs(child);
// touch a new file
fs.create(file).close();
assertTrue("Test file didn't get created.", fs.exists(file));
// run the chore
chore.chore();
// verify all the files got deleted
assertFalse("File didn't get deleted", fs.exists(file));
assertFalse("Empty directory didn't get deleted", fs.exists(child));
assertFalse("Empty directory didn't get deleted", fs.exists(parent));
}
@Test
public void testDeleteFileWithCleanerDisabled() throws Exception {
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
Path testDir = UTIL.getDataTestDir();
FileSystem fs = UTIL.getTestFileSystem();
String confKey = "hbase.test.cleaner.delegates";
conf.set(confKey, AlwaysDelete.class.getName());
AllValidPaths chore =
new AllValidPaths("test-file-cleaner", stop, conf, fs, testDir, confKey, POOL);
// Disable cleaner
chore.setEnabled(false);
// create the directory layout in the directory to clean
Path parent = new Path(testDir, "parent");
Path child = new Path(parent, "child");
Path file = new Path(child, "someFile");
fs.mkdirs(child);
// touch a new file
fs.create(file).close();
assertTrue("Test file didn't get created.", fs.exists(file));
// run the chore
chore.chore();
// verify all the files exist
assertTrue("File got deleted with cleaner disabled", fs.exists(file));
assertTrue("Directory got deleted", fs.exists(child));
assertTrue("Directory got deleted", fs.exists(parent));
}
@Test
public void testOnConfigurationChange() throws Exception {
int availableProcessorNum = Runtime.getRuntime().availableProcessors();
if (availableProcessorNum == 1) { // no need to run this test
return;
}
// have at least 2 available processors/cores
int initPoolSize = availableProcessorNum / 2;
int changedPoolSize = availableProcessorNum;
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
Path testDir = UTIL.getDataTestDir();
FileSystem fs = UTIL.getTestFileSystem();
String confKey = "hbase.test.cleaner.delegates";
conf.set(confKey, AlwaysDelete.class.getName());
conf.set(CleanerChore.CHORE_POOL_SIZE, String.valueOf(initPoolSize));
AllValidPaths chore =
new AllValidPaths("test-file-cleaner", stop, conf, fs, testDir, confKey, POOL);
chore.setEnabled(true);
// Create subdirs under testDir
int dirNums = 6;
Path[] subdirs = new Path[dirNums];
for (int i = 0; i < dirNums; i++) {
subdirs[i] = new Path(testDir, "subdir-" + i);
fs.mkdirs(subdirs[i]);
}
// Under each subdirs create 6 files
for (Path subdir : subdirs) {
createFiles(fs, subdir, 6);
}
// Start chore
Thread t = new Thread(() -> chore.chore());
t.setDaemon(true);
t.start();
// Change size of chore's pool
conf.set(CleanerChore.CHORE_POOL_SIZE, String.valueOf(changedPoolSize));
POOL.onConfigurationChange(conf);
assertEquals(changedPoolSize, chore.getChorePoolSize());
// Stop chore
t.join();
}
@Test
public void testOnConfigurationChangeLogCleaner() throws Exception {
int availableProcessorNum = Runtime.getRuntime().availableProcessors();
if (availableProcessorNum == 1) { // no need to run this test
return;
}
DirScanPool pool = DirScanPool.getLogCleanerScanPool(UTIL.getConfiguration());
// have at least 2 available processors/cores
int initPoolSize = availableProcessorNum / 2;
int changedPoolSize = availableProcessorNum;
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
Path testDir = UTIL.getDataTestDir();
FileSystem fs = UTIL.getTestFileSystem();
String confKey = "hbase.test.cleaner.delegates";
conf.set(confKey, AlwaysDelete.class.getName());
conf.set(CleanerChore.LOG_CLEANER_CHORE_SIZE, String.valueOf(initPoolSize));
final AllValidPaths chore =
new AllValidPaths("test-file-cleaner", stop, conf, fs, testDir, confKey, pool);
chore.setEnabled(true);
// Create subdirs under testDir
int dirNums = 6;
Path[] subdirs = new Path[dirNums];
for (int i = 0; i < dirNums; i++) {
subdirs[i] = new Path(testDir, "subdir-" + i);
fs.mkdirs(subdirs[i]);
}
// Under each subdirs create 6 files
for (Path subdir : subdirs) {
createFiles(fs, subdir, 6);
}
// Start chore
Thread t = new Thread(new Runnable() {
@Override
public void run() {
chore.chore();
}
});
t.setDaemon(true);
t.start();
// Change size of chore's pool
conf.set(CleanerChore.LOG_CLEANER_CHORE_SIZE, String.valueOf(changedPoolSize));
pool.onConfigurationChange(conf);
assertEquals(changedPoolSize, chore.getChorePoolSize());
// Stop chore
t.join();
}
@Test
public void testMinimumNumberOfThreads() throws Exception {
Configuration conf = UTIL.getConfiguration();
String confKey = "hbase.test.cleaner.delegates";
conf.set(confKey, AlwaysDelete.class.getName());
conf.set(CleanerChore.CHORE_POOL_SIZE, "2");
int numProcs = Runtime.getRuntime().availableProcessors();
// Sanity
assertEquals(numProcs, CleanerChore.calculatePoolSize(Integer.toString(numProcs)));
// The implementation does not allow us to set more threads than we have processors
assertEquals(numProcs, CleanerChore.calculatePoolSize(Integer.toString(numProcs + 2)));
// Force us into the branch that is multiplying 0.0 against the number of processors
assertEquals(1, CleanerChore.calculatePoolSize("0.0"));
}
private void createFiles(FileSystem fs, Path parentDir, int numOfFiles) throws IOException {
for (int i = 0; i < numOfFiles; i++) {
int xMega = 1 + ThreadLocalRandom.current().nextInt(3); // size of each file is between 1~3M
try (FSDataOutputStream fsdos = fs.create(new Path(parentDir, "file-" + i))) {
for (int m = 0; m < xMega; m++) {
byte[] M = new byte[1024 * 1024];
Bytes.random(M);
fsdos.write(M);
}
}
}
}
private static class AllValidPaths extends CleanerChore<BaseHFileCleanerDelegate> {
public AllValidPaths(String name, Stoppable s, Configuration conf, FileSystem fs,
Path oldFileDir, String confkey, DirScanPool pool) {
super(name, Integer.MAX_VALUE, s, conf, fs, oldFileDir, confkey, pool);
}
// all paths are valid
@Override
protected boolean validate(Path file) {
return true;
}
}
public static class AlwaysDelete extends BaseHFileCleanerDelegate {
@Override
public boolean isFileDeletable(FileStatus fStat) {
return true;
}
}
public static class NeverDelete extends BaseHFileCleanerDelegate {
@Override
public boolean isFileDeletable(FileStatus fStat) {
return false;
}
}
}
|
|
/*
* Copyright 2001-2009 Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.quartz;
import java.util.Date;
import java.util.Calendar;
import java.util.Locale;
import java.util.TimeZone;
import static org.quartz.DateBuilder.*;
import static org.quartz.DateBuilder.MILLISECONDS_IN_DAY;
import junit.framework.TestCase;
/**
* Unit test for DateBuilder.
*/
public class DateBuilderTest extends TestCase {
public void testBasicBuilding() {
Date t = dateOf(10, 30, 0, 1, 7, 2013); // july 1 10:30:00 am
Calendar vc = Calendar.getInstance();
vc.set(Calendar.YEAR, 2013);
vc.set(Calendar.MONTH, Calendar.JULY);
vc.set(Calendar.DAY_OF_MONTH, 1);
vc.set(Calendar.HOUR_OF_DAY, 10);
vc.set(Calendar.MINUTE, 30);
vc.set(Calendar.SECOND, 0);
vc.set(Calendar.MILLISECOND, 0);
Date v = vc.getTime();
assertEquals("DateBuilder-produced date is not as expected.", t, v);
}
public void testBuilder() {
Calendar vc = Calendar.getInstance();
vc.set(Calendar.YEAR, 2013);
vc.set(Calendar.MONTH, Calendar.JULY);
vc.set(Calendar.DAY_OF_MONTH, 1);
vc.set(Calendar.HOUR_OF_DAY, 10);
vc.set(Calendar.MINUTE, 30);
vc.set(Calendar.SECOND, 0);
vc.set(Calendar.MILLISECOND, 0);
Date bd = newDate().inYear(2013).inMonth(JULY).onDay(1).atHourOfDay(10).atMinute(30).atSecond(0).build();
assertEquals("DateBuilder-produced date is not as expected.", vc.getTime(), bd);
bd = newDate().inYear(2013).inMonthOnDay(JULY, 1).atHourMinuteAndSecond(10, 30, 0).build();
assertEquals("DateBuilder-produced date is not as expected.", vc.getTime(), bd);
TimeZone tz = TimeZone.getTimeZone("GMT-4:00");
Locale lz = Locale.TAIWAN;
vc = Calendar.getInstance(tz, lz);
vc.set(Calendar.YEAR, 2013);
vc.set(Calendar.MONTH, Calendar.JUNE);
vc.set(Calendar.DAY_OF_MONTH, 1);
vc.set(Calendar.HOUR_OF_DAY, 10);
vc.set(Calendar.MINUTE, 33);
vc.set(Calendar.SECOND, 12);
vc.set(Calendar.MILLISECOND, 0);
bd = newDate().inYear(2013).inMonth(JUNE).onDay(1).atHourOfDay(10).atMinute(33).atSecond(12).inTimeZone(tz).inLocale(lz).build();
assertEquals("DateBuilder-produced date is not as expected.", vc.getTime(), bd);
bd = newDateInLocale(lz).inYear(2013).inMonth(JUNE).onDay(1).atHourOfDay(10).atMinute(33).atSecond(12).inTimeZone(tz).build();
assertEquals("DateBuilder-produced date is not as expected.", vc.getTime(), bd);
bd = newDateInTimezone(tz).inYear(2013).inMonth(JUNE).onDay(1).atHourOfDay(10).atMinute(33).atSecond(12).inLocale(lz).build();
assertEquals("DateBuilder-produced date is not as expected.", vc.getTime(), bd);
bd = newDateInTimeZoneAndLocale(tz, lz).inYear(2013).inMonth(JUNE).onDay(1).atHourOfDay(10).atMinute(33).atSecond(12).build();
assertEquals("DateBuilder-produced date is not as expected.", vc.getTime(), bd);
}
public void testEvensBuilders() {
Calendar vc = Calendar.getInstance();
vc.set(Calendar.YEAR, 2013);
vc.set(Calendar.MONTH, Calendar.JUNE);
vc.set(Calendar.DAY_OF_MONTH, 1);
vc.set(Calendar.HOUR_OF_DAY, 10);
vc.set(Calendar.MINUTE, 33);
vc.set(Calendar.SECOND, 12);
vc.set(Calendar.MILLISECOND, 0);
Calendar rd = (Calendar) vc.clone();
Date bd = newDate().inYear(2013).inMonth(JUNE).onDay(1).atHourOfDay(10).atMinute(33).atSecond(12).build();
assertEquals("DateBuilder-produced date is not as expected.", vc.getTime(), bd);
rd.set(Calendar.MILLISECOND, 13);
bd = evenSecondDateBefore(rd.getTime());
assertEquals("DateBuilder-produced date is not as expected.", vc.getTime(), bd);
vc.set(Calendar.SECOND, 13);
rd.set(Calendar.MILLISECOND, 13);
bd = evenSecondDate(rd.getTime());
assertEquals("DateBuilder-produced date is not as expected.", vc.getTime(), bd);
vc.set(Calendar.SECOND, 0);
vc.set(Calendar.MINUTE, 34);
rd.set(Calendar.SECOND, 13);
bd = evenMinuteDate(rd.getTime());
assertEquals("DateBuilder-produced date is not as expected.", vc.getTime(), bd);
vc.set(Calendar.SECOND, 0);
vc.set(Calendar.MINUTE, 33);
rd.set(Calendar.SECOND, 13);
bd = evenMinuteDateBefore(rd.getTime());
assertEquals("DateBuilder-produced date is not as expected.", vc.getTime(), bd);
vc.set(Calendar.SECOND, 0);
vc.set(Calendar.MINUTE, 0);
vc.set(Calendar.HOUR_OF_DAY, 11);
rd.set(Calendar.SECOND, 13);
bd = evenHourDate(rd.getTime());
assertEquals("DateBuilder-produced date is not as expected.", vc.getTime(), bd);
vc.set(Calendar.SECOND, 0);
vc.set(Calendar.MINUTE, 0);
vc.set(Calendar.HOUR_OF_DAY, 10);
rd.set(Calendar.SECOND, 13);
bd = evenHourDateBefore(rd.getTime());
assertEquals("DateBuilder-produced date is not as expected.", vc.getTime(), bd);
Date td = new Date();
bd = evenHourDateAfterNow();
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", 0, vc.get(Calendar.MINUTE));
assertEquals("DateBuilder-produced date is not as expected.", 0, vc.get(Calendar.SECOND));
assertEquals("DateBuilder-produced date is not as expected.", 0, vc.get(Calendar.MILLISECOND));
assertTrue("DateBuilder-produced date is not as expected.", bd.after(td));
vc.set(Calendar.SECOND, 54);
vc.set(Calendar.MINUTE, 13);
vc.set(Calendar.HOUR_OF_DAY, 8);
bd = nextGivenMinuteDate(vc.getTime(), 15);
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", 8, vc.get(Calendar.HOUR_OF_DAY));
assertEquals("DateBuilder-produced date is not as expected.", 15, vc.get(Calendar.MINUTE));
assertEquals("DateBuilder-produced date is not as expected.", 0, vc.get(Calendar.SECOND));
assertEquals("DateBuilder-produced date is not as expected.", 0, vc.get(Calendar.MILLISECOND));
}
public void testGivenBuilders() {
Calendar vc = Calendar.getInstance();
vc.set(Calendar.SECOND, 54);
vc.set(Calendar.MINUTE, 13);
vc.set(Calendar.HOUR_OF_DAY, 8);
Date bd = nextGivenMinuteDate(vc.getTime(), 45);
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", 8, vc.get(Calendar.HOUR_OF_DAY));
assertEquals("DateBuilder-produced date is not as expected.", 45, vc.get(Calendar.MINUTE));
assertEquals("DateBuilder-produced date is not as expected.", 0, vc.get(Calendar.SECOND));
assertEquals("DateBuilder-produced date is not as expected.", 0, vc.get(Calendar.MILLISECOND));
vc.set(Calendar.SECOND, 54);
vc.set(Calendar.MINUTE, 46);
vc.set(Calendar.HOUR_OF_DAY, 8);
bd = nextGivenMinuteDate(vc.getTime(), 45);
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", 9, vc.get(Calendar.HOUR_OF_DAY));
assertEquals("DateBuilder-produced date is not as expected.", 0, vc.get(Calendar.MINUTE));
assertEquals("DateBuilder-produced date is not as expected.", 0, vc.get(Calendar.SECOND));
assertEquals("DateBuilder-produced date is not as expected.", 0, vc.get(Calendar.MILLISECOND));
}
public void testAtBuilders() {
Calendar rd = Calendar.getInstance();
Calendar vc = Calendar.getInstance();
rd.setTime(new Date());
Date bd = todayAt(10, 33, 12);
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", 10, vc.get(Calendar.HOUR_OF_DAY));
assertEquals("DateBuilder-produced date is not as expected.", 33, vc.get(Calendar.MINUTE));
assertEquals("DateBuilder-produced date is not as expected.", 12, vc.get(Calendar.SECOND));
assertEquals("DateBuilder-produced date is not as expected.", 0, vc.get(Calendar.MILLISECOND));
assertEquals("DateBuilder-produced date is not as expected.", rd.get(Calendar.DAY_OF_YEAR), vc.get(Calendar.DAY_OF_YEAR));
rd.setTime(new Date());
rd.add(Calendar.MILLISECOND, (int)MILLISECONDS_IN_DAY); // increment the day (done this way on purpose, to exercise the MILLISECONDS_IN_DAY constant)
bd = tomorrowAt(10, 33, 12);
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", 10, vc.get(Calendar.HOUR_OF_DAY));
assertEquals("DateBuilder-produced date is not as expected.", 33, vc.get(Calendar.MINUTE));
assertEquals("DateBuilder-produced date is not as expected.", 12, vc.get(Calendar.SECOND));
assertEquals("DateBuilder-produced date is not as expected.", 0, vc.get(Calendar.MILLISECOND));
assertEquals("DateBuilder-produced date is not as expected.", rd.get(Calendar.DAY_OF_YEAR), vc.get(Calendar.DAY_OF_YEAR));
}
public void testTranslate() {
TimeZone tz1 = TimeZone.getTimeZone("GMT-2:00");
TimeZone tz2 = TimeZone.getTimeZone("GMT-4:00");
Calendar vc = Calendar.getInstance(tz1);
vc.set(Calendar.YEAR, 2013);
vc.set(Calendar.MONTH, Calendar.JUNE);
vc.set(Calendar.DAY_OF_MONTH, 1);
vc.set(Calendar.HOUR_OF_DAY, 10);
vc.set(Calendar.MINUTE, 33);
vc.set(Calendar.SECOND, 12);
vc.set(Calendar.MILLISECOND, 0);
vc.setTime( translateTime(vc.getTime(), tz1, tz2) );
assertEquals("DateBuilder-produced date is not as expected.", 12, vc.get(Calendar.HOUR_OF_DAY));
vc = Calendar.getInstance(tz2);
vc.set(Calendar.YEAR, 2013);
vc.set(Calendar.MONTH, Calendar.JUNE);
vc.set(Calendar.DAY_OF_MONTH, 1);
vc.set(Calendar.HOUR_OF_DAY, 10);
vc.set(Calendar.MINUTE, 33);
vc.set(Calendar.SECOND, 12);
vc.set(Calendar.MILLISECOND, 0);
vc.setTime( translateTime(vc.getTime(), tz2, tz1) );
assertEquals("DateBuilder-produced date is not as expected.", 8, vc.get(Calendar.HOUR_OF_DAY));
}
public void testMonthTranslations() {
Calendar vc = Calendar.getInstance();
Date bd = newDate().inYear(2013).inMonthOnDay(JANUARY, 1).atHourMinuteAndSecond(10, 30, 0).build();
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", Calendar.JANUARY, vc.get(Calendar.MONTH));
bd = newDate().inYear(2013).inMonthOnDay(FEBRUARY, 1).atHourMinuteAndSecond(10, 30, 0).build();
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", Calendar.FEBRUARY, vc.get(Calendar.MONTH));
bd = newDate().inYear(2013).inMonthOnDay(MARCH, 1).atHourMinuteAndSecond(10, 30, 0).build();
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", Calendar.MARCH, vc.get(Calendar.MONTH));
bd = newDate().inYear(2013).inMonthOnDay(APRIL, 1).atHourMinuteAndSecond(10, 30, 0).build();
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", Calendar.APRIL, vc.get(Calendar.MONTH));
bd = newDate().inYear(2013).inMonthOnDay(MAY, 1).atHourMinuteAndSecond(10, 30, 0).build();
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", Calendar.MAY, vc.get(Calendar.MONTH));
bd = newDate().inYear(2013).inMonthOnDay(JUNE, 1).atHourMinuteAndSecond(10, 30, 0).build();
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", Calendar.JUNE, vc.get(Calendar.MONTH));
bd = newDate().inYear(2013).inMonthOnDay(JULY, 1).atHourMinuteAndSecond(10, 30, 0).build();
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", Calendar.JULY, vc.get(Calendar.MONTH));
bd = newDate().inYear(2013).inMonthOnDay(AUGUST, 1).atHourMinuteAndSecond(10, 30, 0).build();
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", Calendar.AUGUST, vc.get(Calendar.MONTH));
bd = newDate().inYear(2013).inMonthOnDay(SEPTEMBER, 1).atHourMinuteAndSecond(10, 30, 0).build();
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", Calendar.SEPTEMBER, vc.get(Calendar.MONTH));
bd = newDate().inYear(2013).inMonthOnDay(OCTOBER, 1).atHourMinuteAndSecond(10, 30, 0).build();
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", Calendar.OCTOBER, vc.get(Calendar.MONTH));
bd = newDate().inYear(2013).inMonthOnDay(NOVEMBER, 1).atHourMinuteAndSecond(10, 30, 0).build();
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", Calendar.NOVEMBER, vc.get(Calendar.MONTH));
bd = newDate().inYear(2013).inMonthOnDay(DECEMBER, 1).atHourMinuteAndSecond(10, 30, 0).build();
vc.setTime(bd);
assertEquals("DateBuilder-produced date is not as expected.", Calendar.DECEMBER, vc.get(Calendar.MONTH));
}
}
|
|
package net.erel.maven.plugins.service.bugtracking;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
import net.erel.maven.plugins.domain.bugTracker.IssueFacade;
import net.erel.maven.plugins.domain.changelog.Action;
import net.erel.maven.plugins.utils.L18nHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.taskadapter.redmineapi.RedmineException;
import com.taskadapter.redmineapi.RedmineManager;
import com.taskadapter.redmineapi.bean.Issue;
class RedMineService implements BugTrackerService {
private String api;
private String host;
public RedMineService(String api, String host) {
this.api = api;
this.host = host;
}
private static final Logger LOGGER = LoggerFactory.getLogger(RedMineService.class);
/* (non-Javadoc)
* @see net.erel.maven.plugins.service.bugtracking.BugTrackerService#getAssignedTicketsForCurrentUser()
*/
@Override
@SuppressWarnings("unchecked")
public List<String> getAssignedTicketsForCurrentUser() {
try {
RedmineManager mgr = new RedmineManager(host, api);
Map<String, String> issueRequest = new HashMap<String, String>();
issueRequest.put("assigned_to_id", mgr.getCurrentUser().getId().toString());
List<Issue> issues = mgr.getIssues(issueRequest);
return Lists.transform(issues, new Function<Issue, String>() {
@Override
public String apply(@Nullable Issue input) {
return input.getId().toString();
}
});
} catch (RedmineException e) {
LOGGER.warn("issue with redmine, failed to get assigned ticket", e);
return Collections.EMPTY_LIST;
}
}
/* (non-Javadoc)
* @see net.erel.maven.plugins.service.bugtracking.BugTrackerService#getDescriptionForIssue(java.lang.String)
*/
@Override
public String getDescriptionForIssue(String issue) {
try {
RedmineManager mgr = new RedmineManager(host, api);
return mgr.getIssueById(Integer.valueOf(issue)).getSubject();
} catch (NumberFormatException | RedmineException e) {
LOGGER.warn("failed to get issue description");
return "";
}
}
/* (non-Javadoc)
* @see net.erel.maven.plugins.service.bugtracking.BugTrackerService#updateIssueStartDev(java.lang.String, java.lang.String, java.lang.String)
*/
@Override
public void updateIssueStartDev(String ticket, String branchUrl, String project) {
try {
RedmineManager mgr = new RedmineManager(host, api);
Issue issue = mgr.getIssueById(Integer.valueOf(ticket));
issue.setNotes(L18nHelper.t("work-started-on-branch-X-for-project-Y", branchUrl, project));
mgr.update(issue);
} catch (NumberFormatException | RedmineException e) {
LOGGER.warn("failed to update issue");
}
}
/* (non-Javadoc)
* @see net.erel.maven.plugins.service.bugtracking.BugTrackerService#updateIssueDoneDev(java.lang.String, java.lang.String)
*/
@Override
public void updateIssueDoneDev(String ticket, String project,String mrUrl) {
try {
RedmineManager mgr = new RedmineManager(host, api);
Issue issue = mgr.getIssueById(Integer.valueOf(ticket));
issue.setNotes(L18nHelper.t("task-done-pending-integration-for-projet-X-follow-merge-request-here", project,mrUrl));
issue.setAssignee(issue.getAuthor());
mgr.update(issue);
} catch (RedmineException | IllegalArgumentException e) {
LOGGER.warn("failed to update issue");
}
}
/* (non-Javadoc)
* @see net.erel.maven.plugins.service.bugtracking.BugTrackerService#populateActionWithTicketDate(net.erel.maven.plugins.domain.changelog.Action)
*/
@Override
public void populateActionWithTicketDate(Action action) {
try {
RedmineManager mgr = new RedmineManager(host, api);
Issue issue;
issue = mgr.getIssueById(Integer.parseInt(action.getIssue()));
action.setDueTo(issue.getAuthor().getFullName());
action.setDueToEmail(issue.getAuthor().getMail());
action.setType(trackerNameToChangeLogName(issue.getTracker().getName()));
action.getContent().add(issue.getSubject());
} catch (RedmineException | IllegalArgumentException e) {
LOGGER.warn("failed to populate action with ticket data", e);
}
}
/**
* quick and dirty mapping from a Redmine tracker name to a changelog action type, this should change
*
* @param trackerName the Redmine tracker name
* @return the changelog action type ("fix" or "add")
*/
private static String trackerNameToChangeLogName(String trackerName) {
if (Strings.isNullOrEmpty(trackerName))
return "fix";
else if (trackerName.contains("Incident client")) {
return "fix";
} else if (trackerName.contains("evolution")) {
return "add";
} else
return "fix";
}
/* (non-Javadoc)
* @see net.erel.maven.plugins.service.bugtracking.BugTrackerService#updateIssueReleased(net.erel.maven.plugins.domain.changelog.Action, java.lang.String, java.lang.String)
*/
@Override
public void updateIssueReleased(Action action, String version, String projectName) {
try {
RedmineManager mgr = new RedmineManager(host, api);
Issue issue;
issue = mgr.getIssueById(Integer.parseInt(action.getIssue()));
issue.setNotes(L18nHelper.t("ticket-released-in-version-X-for-project-Y", version, projectName));
mgr.update(issue);
} catch (RedmineException | IllegalArgumentException e) {
LOGGER.warn("failed to populate action with ticket data", e);
}
}
/* (non-Javadoc)
* @see net.erel.maven.plugins.service.bugtracking.BugTrackerService#getIssueById(java.lang.String)
*/
@Override
public IssueFacade getIssueById(String issueNumber) {
try {
RedmineManager mgr = new RedmineManager(host, api);
Issue issue;
issue = mgr.getIssueById(Integer.parseInt(issueNumber));
IssueFacade facade = new IssueFacade();
facade.setId(""+issue.getId());
facade.setSubject(issue.getSubject());
return facade;
} catch (RedmineException | IllegalArgumentException e) {
LOGGER.warn("failed to use Redmine API", e);
return null;
}
}
@Override
public String getTicketFromBranchName(String branch) {
Pattern pat = Pattern.compile(".*-(\\d+)-.*");
Matcher mat = pat.matcher(branch);
if(mat.matches()){
return mat.group(1);
}
else
return null;
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.analysis.FieldNameAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.function.Supplier;
public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBuilder> {
public static final String NAME = "percolate";
static final ParseField DOCUMENT_FIELD = new ParseField("document");
static final ParseField DOCUMENTS_FIELD = new ParseField("documents");
private static final ParseField NAME_FIELD = new ParseField("name");
private static final ParseField QUERY_FIELD = new ParseField("field");
private static final ParseField INDEXED_DOCUMENT_FIELD_INDEX = new ParseField("index");
private static final ParseField INDEXED_DOCUMENT_FIELD_ID = new ParseField("id");
private static final ParseField INDEXED_DOCUMENT_FIELD_ROUTING = new ParseField("routing");
private static final ParseField INDEXED_DOCUMENT_FIELD_PREFERENCE = new ParseField("preference");
private static final ParseField INDEXED_DOCUMENT_FIELD_VERSION = new ParseField("version");
private final String field;
private String name;
private final List<BytesReference> documents;
private final XContentType documentXContentType;
private final String indexedDocumentIndex;
private final String indexedDocumentId;
private final String indexedDocumentRouting;
private final String indexedDocumentPreference;
private final Long indexedDocumentVersion;
private final Supplier<BytesReference> documentSupplier;
/**
* Creates a percolator query builder instance for percolating a provided document.
*
* @param field The field that contains the percolator query
* @param document The binary blob containing document to percolate
* @param documentXContentType The content type of the binary blob containing the document to percolate
*/
public PercolateQueryBuilder(String field, BytesReference document, XContentType documentXContentType) {
this(field, Collections.singletonList(document), documentXContentType);
}
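/*
 * Hypothetical usage sketch (illustration only, field and document values assumed): percolating a
 * single ad-hoc document against queries stored in a "query" field. The document bytes are built
 * with XContentFactory, which this class already imports:
 *
 *   XContentBuilder doc = XContentFactory.jsonBuilder()
 *       .startObject().field("message", "a new bonsai tree in the office").endObject();
 *   QueryBuilder qb = new PercolateQueryBuilder("query", BytesReference.bytes(doc), XContentType.JSON);
 */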
/**
* Creates a percolator query builder instance for percolating a provided document.
*
* @param field The field that contains the percolator query
* @param documents The binary blob containing document to percolate
* @param documentXContentType The content type of the binary blob containing the document to percolate
*/
public PercolateQueryBuilder(String field, List<BytesReference> documents, XContentType documentXContentType) {
if (field == null) {
throw new IllegalArgumentException("[field] is a required argument");
}
if (documents == null) {
throw new IllegalArgumentException("[document] is a required argument");
}
this.field = field;
this.documents = documents;
this.documentXContentType = Objects.requireNonNull(documentXContentType);
indexedDocumentIndex = null;
indexedDocumentId = null;
indexedDocumentRouting = null;
indexedDocumentPreference = null;
indexedDocumentVersion = null;
this.documentSupplier = null;
}
/**
* Creates a percolator query builder instance for percolating a document in a remote index.
*
* @param field The field that contains the percolator query
* @param indexedDocumentIndex The index containing the document to percolate
* @param indexedDocumentId The id of the document to percolate
* @param indexedDocumentRouting The routing value for the document to percolate
* @param indexedDocumentPreference The preference to use when fetching the document to percolate
* @param indexedDocumentVersion The expected version of the document to percolate
*/
public PercolateQueryBuilder(String field, String indexedDocumentIndex,
String indexedDocumentId, String indexedDocumentRouting,
String indexedDocumentPreference, Long indexedDocumentVersion) {
if (field == null) {
throw new IllegalArgumentException("[field] is a required argument");
}
if (indexedDocumentIndex == null) {
throw new IllegalArgumentException("[index] is a required argument");
}
if (indexedDocumentId == null) {
throw new IllegalArgumentException("[id] is a required argument");
}
this.field = field;
this.indexedDocumentIndex = indexedDocumentIndex;
this.indexedDocumentId = indexedDocumentId;
this.indexedDocumentRouting = indexedDocumentRouting;
this.indexedDocumentPreference = indexedDocumentPreference;
this.indexedDocumentVersion = indexedDocumentVersion;
this.documents = Collections.emptyList();
this.documentXContentType = null;
this.documentSupplier = null;
}
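/*
 * Hypothetical usage sketch (illustration only, index and id values assumed): percolating a document
 * that already lives in another index; doRewrite() later fetches its source with a GetRequest before
 * the query can be executed:
 *
 *   QueryBuilder qb = new PercolateQueryBuilder("query", "my-docs-index", "1", null, null, null);
 */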
protected PercolateQueryBuilder(String field, Supplier<BytesReference> documentSupplier) {
if (field == null) {
throw new IllegalArgumentException("[field] is a required argument");
}
this.field = field;
this.documents = Collections.emptyList();
this.documentXContentType = null;
this.documentSupplier = documentSupplier;
indexedDocumentIndex = null;
indexedDocumentId = null;
indexedDocumentRouting = null;
indexedDocumentPreference = null;
indexedDocumentVersion = null;
}
/**
* Read from a stream.
*/
PercolateQueryBuilder(StreamInput in) throws IOException {
super(in);
field = in.readString();
name = in.readOptionalString();
if (in.getVersion().before(Version.V_8_0_0)) {
String documentType = in.readOptionalString();
assert documentType == null;
}
indexedDocumentIndex = in.readOptionalString();
if (in.getVersion().before(Version.V_8_0_0)) {
String indexedDocumentType = in.readOptionalString();
assert indexedDocumentType == null;
}
indexedDocumentId = in.readOptionalString();
indexedDocumentRouting = in.readOptionalString();
indexedDocumentPreference = in.readOptionalString();
if (in.readBoolean()) {
indexedDocumentVersion = in.readVLong();
} else {
indexedDocumentVersion = null;
}
documents = in.readList(StreamInput::readBytesReference);
if (documents.isEmpty() == false) {
documentXContentType = in.readEnum(XContentType.class);
} else {
documentXContentType = null;
}
documentSupplier = null;
}
/**
* Sets the name used for identification purposes in the <code>_percolator_document_slot</code> response field
* when multiple percolate queries have been specified in the main query.
*/
public PercolateQueryBuilder setName(String name) {
this.name = name;
return this;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
if (documentSupplier != null) {
throw new IllegalStateException("supplier must be null, can't serialize suppliers, missing a rewriteAndFetch?");
}
out.writeString(field);
out.writeOptionalString(name);
if (out.getVersion().before(Version.V_8_0_0)) {
// In 7x, typeless percolate queries are represented by null documentType values
out.writeOptionalString(null);
}
out.writeOptionalString(indexedDocumentIndex);
if (out.getVersion().before(Version.V_8_0_0)) {
// In 7x, typeless percolate queries are represented by null indexedDocumentType values
out.writeOptionalString(null);
}
out.writeOptionalString(indexedDocumentId);
out.writeOptionalString(indexedDocumentRouting);
out.writeOptionalString(indexedDocumentPreference);
if (indexedDocumentVersion != null) {
out.writeBoolean(true);
out.writeVLong(indexedDocumentVersion);
} else {
out.writeBoolean(false);
}
out.writeVInt(documents.size());
for (BytesReference document : documents) {
out.writeBytesReference(document);
}
if (documents.isEmpty() == false) {
out.writeEnum(documentXContentType);
}
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.field(QUERY_FIELD.getPreferredName(), field);
if (name != null) {
builder.field(NAME_FIELD.getPreferredName(), name);
}
if (documents.isEmpty() == false) {
builder.startArray(DOCUMENTS_FIELD.getPreferredName());
for (BytesReference document : documents) {
try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
LoggingDeprecationHandler.INSTANCE, document)) {
parser.nextToken();
builder.generator().copyCurrentStructure(parser);
}
}
builder.endArray();
}
if (indexedDocumentIndex != null || indexedDocumentId != null) {
if (indexedDocumentIndex != null) {
builder.field(INDEXED_DOCUMENT_FIELD_INDEX.getPreferredName(), indexedDocumentIndex);
}
if (indexedDocumentId != null) {
builder.field(INDEXED_DOCUMENT_FIELD_ID.getPreferredName(), indexedDocumentId);
}
if (indexedDocumentRouting != null) {
builder.field(INDEXED_DOCUMENT_FIELD_ROUTING.getPreferredName(), indexedDocumentRouting);
}
if (indexedDocumentPreference != null) {
builder.field(INDEXED_DOCUMENT_FIELD_PREFERENCE.getPreferredName(), indexedDocumentPreference);
}
if (indexedDocumentVersion != null) {
builder.field(INDEXED_DOCUMENT_FIELD_VERSION.getPreferredName(), indexedDocumentVersion);
}
}
printBoostAndQueryName(builder);
builder.endObject();
}
public static PercolateQueryBuilder fromXContent(XContentParser parser) throws IOException {
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String field = null;
String name = null;
String indexedDocumentIndex = null;
String indexedDocumentId = null;
String indexedDocumentRouting = null;
String indexedDocumentPreference = null;
Long indexedDocumentVersion = null;
List<BytesReference> documents = new ArrayList<>();
String queryName = null;
String currentFieldName = null;
boolean documentsSpecified = false;
boolean documentSpecified = false;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if (DOCUMENTS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
if (documentSpecified) {
throw new IllegalArgumentException("[" + PercolateQueryBuilder.NAME +
"] Either specified [document] or [documents], not both");
}
documentsSpecified = true;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.START_OBJECT) {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.copyCurrentStructure(parser);
builder.flush();
documents.add(BytesReference.bytes(builder));
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME +
"] query does not support [" + token + "]");
}
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME +
"] query does not field name [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (DOCUMENT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
if (documentsSpecified) {
throw new IllegalArgumentException("[" + PercolateQueryBuilder.NAME +
"] Either specified [document] or [documents], not both");
}
documentSpecified = true;
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.copyCurrentStructure(parser);
builder.flush();
documents.add(BytesReference.bytes(builder));
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME +
"] query does not support field name [" + currentFieldName + "]");
}
} else if (token.isValue() || token == XContentParser.Token.VALUE_NULL) {
if (QUERY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
field = parser.text();
} else if (NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
name = parser.textOrNull();
} else if (INDEXED_DOCUMENT_FIELD_INDEX.match(currentFieldName, parser.getDeprecationHandler())) {
indexedDocumentIndex = parser.text();
} else if (INDEXED_DOCUMENT_FIELD_ID.match(currentFieldName, parser.getDeprecationHandler())) {
indexedDocumentId = parser.text();
} else if (INDEXED_DOCUMENT_FIELD_ROUTING.match(currentFieldName, parser.getDeprecationHandler())) {
indexedDocumentRouting = parser.text();
} else if (INDEXED_DOCUMENT_FIELD_PREFERENCE.match(currentFieldName, parser.getDeprecationHandler())) {
indexedDocumentPreference = parser.text();
} else if (INDEXED_DOCUMENT_FIELD_VERSION.match(currentFieldName, parser.getDeprecationHandler())) {
indexedDocumentVersion = parser.longValue();
} else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
boost = parser.floatValue();
} else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME +
"] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME +
"] query does not support [" + token + "]");
}
}
PercolateQueryBuilder queryBuilder;
if (documents.isEmpty() == false) {
queryBuilder = new PercolateQueryBuilder(field, documents, XContentType.JSON);
} else if (indexedDocumentId != null) {
queryBuilder = new PercolateQueryBuilder(field, indexedDocumentIndex, indexedDocumentId, indexedDocumentRouting,
indexedDocumentPreference, indexedDocumentVersion);
} else {
throw new IllegalArgumentException("[" + PercolateQueryBuilder.NAME + "] query, nothing to percolate");
}
if (name != null) {
queryBuilder.setName(name);
}
queryBuilder.queryName(queryName);
queryBuilder.boost(boost);
return queryBuilder;
}
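/*
 * For reference, a sketch of the JSON shapes fromXContent() accepts (field names taken from the
 * ParseField constants above; index and id values are illustrative only):
 *
 *   { "percolate": { "field": "query", "document": { "message": "bonsai tree" } } }
 *   { "percolate": { "field": "query", "documents": [ { "message": "doc 1" }, { "message": "doc 2" } ] } }
 *   { "percolate": { "field": "query", "index": "my-docs-index", "id": "1" } }
 */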
@Override
protected boolean doEquals(PercolateQueryBuilder other) {
return Objects.equals(field, other.field)
&& Objects.equals(documents, other.documents)
&& Objects.equals(indexedDocumentIndex, other.indexedDocumentIndex)
&& Objects.equals(documentSupplier, other.documentSupplier)
&& Objects.equals(indexedDocumentId, other.indexedDocumentId);
}
@Override
protected int doHashCode() {
return Objects.hash(field, documents, indexedDocumentIndex, indexedDocumentId, documentSupplier);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) {
if (documents.isEmpty() == false) {
return this;
} else if (documentSupplier != null) {
final BytesReference source = documentSupplier.get();
if (source == null) {
return this; // not executed yet
} else {
PercolateQueryBuilder rewritten = new PercolateQueryBuilder(field,
Collections.singletonList(source), XContentHelper.xContentType(source));
if (name != null) {
rewritten.setName(name);
}
return rewritten;
}
}
GetRequest getRequest = new GetRequest(indexedDocumentIndex, indexedDocumentId);
getRequest.preference("_local");
getRequest.routing(indexedDocumentRouting);
getRequest.preference(indexedDocumentPreference);
if (indexedDocumentVersion != null) {
getRequest.version(indexedDocumentVersion);
}
SetOnce<BytesReference> documentSupplier = new SetOnce<>();
queryShardContext.registerAsyncAction((client, listener) -> {
client.get(getRequest, ActionListener.wrap(getResponse -> {
if (getResponse.isExists() == false) {
throw new ResourceNotFoundException(
"indexed document [{}/{}] couldn't be found", indexedDocumentIndex, indexedDocumentId
);
}
if(getResponse.isSourceEmpty()) {
throw new IllegalArgumentException(
"indexed document [" + indexedDocumentIndex + "/" + indexedDocumentId + "] source disabled"
);
}
documentSupplier.set(getResponse.getSourceAsBytesRef());
listener.onResponse(null);
}, listener::onFailure));
});
PercolateQueryBuilder rewritten = new PercolateQueryBuilder(field, documentSupplier::get);
if (name != null) {
rewritten.setName(name);
}
return rewritten;
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
// Call nowInMillis() so that this query becomes un-cacheable since we
// can't be sure that it doesn't use now or scripts
context.nowInMillis();
if (indexedDocumentIndex != null || indexedDocumentId != null || documentSupplier != null) {
throw new IllegalStateException("query builder must be rewritten first");
}
if (documents.isEmpty()) {
throw new IllegalStateException("no document to percolate");
}
MappedFieldType fieldType = context.fieldMapper(field);
if (fieldType == null) {
throw new QueryShardException(context, "field [" + field + "] does not exist");
}
if (!(fieldType instanceof PercolatorFieldMapper.FieldType)) {
throw new QueryShardException(context, "expected field [" + field +
"] to be of type [percolator], but is of type [" + fieldType.typeName() + "]");
}
final List<ParsedDocument> docs = new ArrayList<>();
final DocumentMapper docMapper;
final MapperService mapperService = context.getMapperService();
String type = mapperService.documentMapper().type();
docMapper = mapperService.documentMapper(type);
for (BytesReference document : documents) {
docs.add(docMapper.parse(new SourceToParse(context.index().getName(), type, "_temp_id", document, documentXContentType)));
}
FieldNameAnalyzer fieldNameAnalyzer = (FieldNameAnalyzer) docMapper.mappers().indexAnalyzer();
        // Need this custom impl because FieldNameAnalyzer is strict and the percolator sometimes isn't when
// 'index.percolator.map_unmapped_fields_as_string' is enabled:
Analyzer analyzer = new DelegatingAnalyzerWrapper(Analyzer.PER_FIELD_REUSE_STRATEGY) {
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
Analyzer analyzer = fieldNameAnalyzer.analyzers().get(fieldName);
if (analyzer != null) {
return analyzer;
} else {
return context.getIndexAnalyzers().getDefaultIndexAnalyzer();
}
}
};
final IndexSearcher docSearcher;
final boolean excludeNestedDocuments;
if (docs.size() > 1 || docs.get(0).docs().size() > 1) {
assert docs.size() != 1 || docMapper.hasNestedObjects();
docSearcher = createMultiDocumentSearcher(analyzer, docs);
excludeNestedDocuments = docMapper.hasNestedObjects() && docs.stream()
.map(ParsedDocument::docs)
.mapToInt(List::size)
.anyMatch(size -> size > 1);
} else {
MemoryIndex memoryIndex = MemoryIndex.fromDocument(docs.get(0).rootDoc(), analyzer, true, false);
docSearcher = memoryIndex.createSearcher();
docSearcher.setQueryCache(null);
excludeNestedDocuments = false;
}
PercolatorFieldMapper.FieldType pft = (PercolatorFieldMapper.FieldType) fieldType;
String name = this.name != null ? this.name : pft.name();
QueryShardContext percolateShardContext = wrap(context);
PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField,
percolateShardContext,
pft.mapUnmappedFieldsAsText);
return pft.percolateQuery(name, queryStore, documents, docSearcher, excludeNestedDocuments, context.indexVersionCreated());
}
public String getField() {
return field;
}
public List<BytesReference> getDocuments() {
return documents;
}
//pkg-private for testing
XContentType getXContentType() {
return documentXContentType;
}
public String getQueryName() {
return name;
}
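    // Hedged usage sketch (illustrative only, not part of the original class): builds a percolate
    // query for one ad-hoc JSON document against a hypothetical percolator field named "query",
    // mirroring the XContent helpers the parser above already uses.
    static PercolateQueryBuilder exampleSingleDocumentQuery() throws IOException {
        try (XContentBuilder doc = XContentFactory.jsonBuilder()) {
            // Build {"message": "hello percolator"} and hand it to the documents-based constructor.
            doc.startObject().field("message", "hello percolator").endObject();
            doc.flush();
            return new PercolateQueryBuilder("query",
                Collections.singletonList(BytesReference.bytes(doc)), XContentType.JSON);
        }
    }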
static IndexSearcher createMultiDocumentSearcher(Analyzer analyzer, Collection<ParsedDocument> docs) {
RAMDirectory ramDirectory = new RAMDirectory();
try (IndexWriter indexWriter = new IndexWriter(ramDirectory, new IndexWriterConfig(analyzer))) {
// Indexing in order here, so that the user provided order matches with the docid sequencing:
Iterable<ParseContext.Document> iterable = () -> docs.stream()
.map(ParsedDocument::docs)
.flatMap(Collection::stream)
.iterator();
indexWriter.addDocuments(iterable);
DirectoryReader directoryReader = DirectoryReader.open(indexWriter);
assert directoryReader.leaves().size() == 1 : "Expected single leaf, but got [" + directoryReader.leaves().size() + "]";
final IndexSearcher slowSearcher = new IndexSearcher(directoryReader);
slowSearcher.setQueryCache(null);
return slowSearcher;
} catch (IOException e) {
throw new ElasticsearchException("Failed to create index for percolator with nested document ", e);
}
}
static PercolateQuery.QueryStore createStore(MappedFieldType queryBuilderFieldType,
QueryShardContext context,
boolean mapUnmappedFieldsAsString) {
Version indexVersion = context.indexVersionCreated();
NamedWriteableRegistry registry = context.getWriteableRegistry();
return ctx -> {
LeafReader leafReader = ctx.reader();
BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(queryBuilderFieldType.name());
if (binaryDocValues == null) {
return docId -> null;
}
return docId -> {
if (binaryDocValues.advanceExact(docId)) {
BytesRef qbSource = binaryDocValues.binaryValue();
try (InputStream in = new ByteArrayInputStream(qbSource.bytes, qbSource.offset, qbSource.length)) {
try (StreamInput input = new NamedWriteableAwareStreamInput(
new InputStreamStreamInput(in, qbSource.length), registry)) {
input.setVersion(indexVersion);
// Query builder's content is stored via BinaryFieldMapper, which has a custom encoding
// to encode multiple binary values into a single binary doc values field.
                        // This is the reason we first need to read the number of values and
// then the length of the field value in bytes.
int numValues = input.readVInt();
assert numValues == 1;
int valueLength = input.readVInt();
assert valueLength > 0;
QueryBuilder queryBuilder = input.readNamedWriteable(QueryBuilder.class);
assert in.read() == -1;
return PercolatorFieldMapper.toQuery(context, mapUnmappedFieldsAsString, queryBuilder);
}
}
} else {
return null;
}
};
};
}
static QueryShardContext wrap(QueryShardContext shardContext) {
return new QueryShardContext(shardContext) {
@Override
public BitSetProducer bitsetFilter(Query query) {
return context -> {
final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context);
final IndexSearcher searcher = new IndexSearcher(topLevelContext);
searcher.setQueryCache(null);
final Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1f);
final Scorer s = weight.scorer(context);
if (s != null) {
return new BitDocIdSet(BitSet.of(s.iterator(), context.reader().maxDoc())).bits();
} else {
return null;
}
};
}
@Override
@SuppressWarnings("unchecked")
public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) {
IndexFieldData.Builder builder = fieldType.fielddataBuilder(shardContext.getFullyQualifiedIndex().getName());
IndexFieldDataCache cache = new IndexFieldDataCache.None();
CircuitBreakerService circuitBreaker = new NoneCircuitBreakerService();
return (IFD) builder.build(shardContext.getIndexSettings(), fieldType, cache, circuitBreaker,
shardContext.getMapperService());
}
};
}
}
|
|
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assume.assumeFalse;
import static org.openqa.selenium.Platform.ANDROID;
import static org.openqa.selenium.Platform.LINUX;
import static org.openqa.selenium.testing.Driver.PHANTOMJS;
import static org.openqa.selenium.testing.Driver.SAFARI;
import org.junit.Test;
import org.openqa.selenium.support.ui.ExpectedCondition;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.JUnit4TestBase;
import org.openqa.selenium.testing.SwitchToTopAfterTest;
import org.openqa.selenium.testing.TestUtilities;
import org.openqa.selenium.testing.drivers.SauceDriver;
import java.util.logging.Logger;
public class WindowTest extends JUnit4TestBase {
private static Logger log = Logger.getLogger(WindowTest.class.getName());
@Test
public void testGetsTheSizeOfTheCurrentWindow() {
assumeFalse(
"window().getSize() is not implemented for Chrome for Android. "
+ "https://code.google.com/p/chromedriver/issues/detail?id=1005",
TestUtilities.isChrome(driver) && TestUtilities.getEffectivePlatform(driver).is(ANDROID));
Dimension size = driver.manage().window().getSize();
assertThat(size.width, is(greaterThan(0)));
assertThat(size.height, is(greaterThan(0)));
}
@Test
public void testSetsTheSizeOfTheCurrentWindow() {
// Browser window cannot be resized or moved on ANDROID (and most mobile platforms
// though others aren't defined in org.openqa.selenium.Platform).
assumeFalse(TestUtilities.getEffectivePlatform(driver).is(ANDROID));
assumeFalse("https://code.google.com/p/chromedriver/issues/detail?id=1129",
SauceDriver.shouldUseSauce() && TestUtilities.isChrome(driver)
&& TestUtilities.getEffectivePlatform(driver).is(LINUX));
// resize relative to the initial size, since we don't know what it is
changeSizeBy(-20, -20);
}
@SwitchToTopAfterTest
@Test
public void testSetsTheSizeOfTheCurrentWindowFromFrame() {
// Browser window cannot be resized or moved on ANDROID (and most mobile platforms
// though others aren't defined in org.openqa.selenium.Platform).
assumeFalse(TestUtilities.getEffectivePlatform(driver).is(ANDROID));
assumeFalse("https://code.google.com/p/chromedriver/issues/detail?id=1129",
SauceDriver.shouldUseSauce() && TestUtilities.isChrome(driver)
&& TestUtilities.getEffectivePlatform(driver).is(LINUX));
driver.get(pages.framesetPage);
driver.switchTo().frame("fourth");
// resize relative to the initial size, since we don't know what it is
changeSizeBy(-20, -20);
}
@SwitchToTopAfterTest
@Test
public void testSetsTheSizeOfTheCurrentWindowFromIframe() {
// Browser window cannot be resized or moved on ANDROID (and most mobile platforms
// though others aren't defined in org.openqa.selenium.Platform).
assumeFalse(TestUtilities.getEffectivePlatform(driver).is(ANDROID));
assumeFalse("https://code.google.com/p/chromedriver/issues/detail?id=1129",
SauceDriver.shouldUseSauce() && TestUtilities.isChrome(driver)
&& TestUtilities.getEffectivePlatform(driver).is(LINUX));
driver.get(pages.iframePage);
driver.switchTo().frame("iframe1-name");
// resize relative to the initial size, since we don't know what it is
changeSizeBy(-20, -20);
}
@Test
public void testGetsThePositionOfTheCurrentWindow() {
// Window position is undefined on ANDROID (and most mobile platforms
// though others aren't defined in org.openqa.selenium.Platform).
assumeFalse(TestUtilities.getEffectivePlatform(driver).is(ANDROID));
Point position = driver.manage().window().getPosition();
assertThat(position.x, is(greaterThanOrEqualTo(0)));
assertThat(position.y, is(greaterThanOrEqualTo(0)));
}
@Test
@Ignore(value = {SAFARI, PHANTOMJS},
reason = "Safari: getPosition after setPosition doesn't match up exactly, " +
"as expected - probably due to nuances in Mac OSX window manager.")
public void testSetsThePositionOfTheCurrentWindow() throws InterruptedException {
// Browser window cannot be resized or moved on ANDROID (and most mobile platforms
// though others aren't defined in org.openqa.selenium.Platform).
assumeFalse(TestUtilities.getEffectivePlatform(driver).is(ANDROID));
assumeFalse("https://code.google.com/p/chromedriver/issues/detail?id=1129",
SauceDriver.shouldUseSauce() && TestUtilities.isChrome(driver)
&& TestUtilities.getEffectivePlatform(driver).is(LINUX));
WebDriver.Window window = driver.manage().window();
Point position = window.getPosition();
Dimension originalSize = window.getSize();
try {
// Some Linux window managers start taking liberties wrt window positions when moving the window
// off-screen. Therefore, try to stay on-screen. Hopefully you have more than 210 px,
// or this may fail.
window.setSize(new Dimension(200, 200));
Point targetPosition = new Point(position.x + 10, position.y + 10);
window.setPosition(targetPosition);
wait.until(xEqual(targetPosition));
wait.until(yEqual(targetPosition));
} finally {
window.setSize(originalSize);
}
}
@Ignore(value = {PHANTOMJS}, reason = "Not yet implemented.")
@Test
public void testCanMaximizeTheWindow() throws InterruptedException {
// Browser window cannot be resized or moved on ANDROID (and most mobile platforms
// though others aren't defined in org.openqa.selenium.Platform).
assumeFalse(TestUtilities.getEffectivePlatform(driver).is(ANDROID));
assumeNotLinuxAtSauce();
changeSizeTo(new Dimension(450, 273));
maximize();
}
@SwitchToTopAfterTest
@Ignore(value = {PHANTOMJS}, reason = "Not yet implemented.")
@Test
public void testCanMaximizeTheWindowFromFrame() throws InterruptedException {
// Browser window cannot be resized or moved on ANDROID (and most mobile platforms
// though others aren't defined in org.openqa.selenium.Platform).
assumeFalse(TestUtilities.getEffectivePlatform(driver).is(ANDROID));
assumeNotLinuxAtSauce();
driver.get(pages.framesetPage);
changeSizeTo(new Dimension(450, 274));
driver.switchTo().frame("fourth");
maximize();
}
@Ignore(value = {PHANTOMJS}, reason = "Not yet implemented.")
@SwitchToTopAfterTest
@Test
public void testCanMaximizeTheWindowFromIframe() throws InterruptedException {
// Browser window cannot be resized or moved on ANDROID (and most mobile platforms
// though others aren't defined in org.openqa.selenium.Platform).
assumeFalse(TestUtilities.getEffectivePlatform(driver).is(ANDROID));
assumeNotLinuxAtSauce();
driver.get(pages.iframePage);
changeSizeTo(new Dimension(450, 275));
driver.switchTo().frame("iframe1-name");
maximize();
}
private void changeSizeBy(int deltaX, int deltaY) {
WebDriver.Window window = driver.manage().window();
Dimension size = window.getSize();
changeSizeTo(new Dimension(size.width + deltaX, size.height + deltaY));
}
private void changeSizeTo(Dimension targetSize) {
WebDriver.Window window = driver.manage().window();
window.setSize(targetSize);
wait.until(windowSizeEqual(targetSize));
}
private void maximize() {
WebDriver.Window window = driver.manage().window();
Dimension size = window.getSize();
window.maximize();
wait.until(windowWidthToBeGreaterThan(size));
wait.until(windowHeightToBeGreaterThan(size));
}
private ExpectedCondition<Boolean> windowSizeEqual(final Dimension size) {
return new ExpectedCondition<Boolean>() {
public Boolean apply(WebDriver driver) {
Dimension newSize = driver.manage().window().getSize();
return newSize.height == size.height &&
newSize.width == size.width;
}
};
}
private ExpectedCondition<Boolean> windowWidthToBeGreaterThan(final Dimension size) {
return new ExpectedCondition<Boolean>() {
public Boolean apply(WebDriver driver) {
Dimension newSize = driver.manage().window().getSize();
log.info("waiting for width, Current dimensions are " + newSize);
if(newSize.width != size.width) {
return true;
}
return null;
}
};
}
private ExpectedCondition<Boolean> windowHeightToBeGreaterThan(final Dimension size) {
return new ExpectedCondition<Boolean>() {
@Override
public Boolean apply(WebDriver driver) {
Dimension newSize = driver.manage().window().getSize();
log.info("waiting for height, Current dimensions are " + newSize);
if(newSize.height != size.height) {
return true;
}
return null;
}
};
}
private ExpectedCondition<Boolean> xEqual(final Point targetPosition) {
return new ExpectedCondition<Boolean>() {
public Boolean apply(WebDriver driver) {
Point newPosition = driver.manage().window().getPosition();
if(newPosition.x == targetPosition.x) {
return true;
}
return null;
}
};
}
private ExpectedCondition<Boolean> yEqual(final Point targetPosition) {
return new ExpectedCondition<Boolean>() {
@Override
public Boolean apply(WebDriver driver) {
Point newPosition = driver.manage().window().getPosition();
if(newPosition.y == targetPosition.y) {
return true;
}
return null;
}
};
}
private void assumeNotLinuxAtSauce() {
    // Tests that maximize the browser window used to fail when Sauce didn't run a window manager
    // on Linux. As of 2015-07-16, they still fail although Sauce reportedly runs metacity.
    // Chrome/Linux: simply fail.
    // Firefox/Linux: FirefoxDriver finally reports a changed window size 22 seconds after replying
    // to the maximize command, but the video never shows the maximized window.
assumeFalse(TestUtilities.getEffectivePlatform(driver).is(LINUX) && SauceDriver.shouldUseSauce());
}
}
|
|
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.authenticator.testing;
import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.action.ViewActions.click;
import static androidx.test.espresso.action.ViewActions.pressBack;
import static androidx.test.espresso.assertion.ViewAssertions.matches;
import static androidx.test.espresso.intent.Intents.intended;
import static androidx.test.espresso.intent.matcher.IntentMatchers.anyIntent;
import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;
import android.app.Activity;
import android.app.Instrumentation;
import android.app.PendingIntent;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.graphics.Point;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.os.SystemClock;
import android.preference.Preference;
import android.preference.PreferenceActivity;
import android.preference.PreferenceScreen;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.View;
import android.view.ViewParent;
import android.widget.EditText;
import android.widget.ListAdapter;
import android.widget.ListView;
import android.widget.TextView;
import androidx.test.espresso.action.GeneralSwipeAction;
import androidx.test.espresso.action.Press;
import androidx.test.espresso.action.Swipe;
import com.google.android.apps.authenticator.testability.DependencyInjector;
import com.google.android.apps.authenticator.testability.StartActivityListener;
import com.google.android.apps.authenticator.testability.StartServiceListener;
import com.google.android.apps.authenticator.util.annotations.FixWhenMinSdkVersion;
import com.google.common.base.Objects;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import junit.framework.Assert;
import junit.framework.AssertionFailedError;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
/**
* A class that offers various utility methods for writing tests.
*/
// TODO: Move utilities for tests into the java/ tree.
public class TestUtilities {
public static final String APP_PACKAGE_NAME = "com.google.android.apps.authenticator2";
/**
* Timeout (milliseconds) when waiting for the results of a UI action performed by the code under
* test.
*/
public static final int UI_ACTION_EFFECT_TIMEOUT_MILLIS = 5000;
private TestUtilities() {}
public static boolean clickView(Instrumentation instr, final View view) {
boolean result =
runOnMainSyncWithTimeout(
new Callable<Boolean>() {
@Override
public Boolean call() {
return view.performClick();
}
});
    // This shouldn't be needed, but without it (or a sleep) there isn't time for the view to refresh, etc.
instr.waitForIdleSync();
return result;
}
public static boolean longClickView(Instrumentation instr, final View view) {
boolean result =
runOnMainSyncWithTimeout(
new Callable<Boolean>() {
@Override
public Boolean call() {
return view.performLongClick();
}
});
instr.waitForIdleSync();
return result;
}
/**
* Performs a click/tap on a list item at the specified position.
*
* @return {@code true} if the click/tap was consumed, {@code false} otherwise.
*/
public static boolean clickListViewItem(ListView listView, int position) {
try {
// Here we assume that accidental long-press can usually be undone by pressing back
onView(is(listView.getChildAt(position))).perform(click(pressBack()));
return true;
} catch (Exception e) {
return false;
}
}
/** Sets the text of the provided {@link TextView} widget on the UI thread. */
public static void setText(Instrumentation instr, final TextView view, final String text) {
runOnMainSyncWithTimeout(
new Callable<Void>() {
@Override
public Void call() {
view.setText(text);
return null;
}
});
instr.waitForIdleSync();
Assert.assertEquals(text, view.getText().toString());
}
  /**
   * Sends a string to an EditText box.
*
* @return the resulting string read from the editText - this should equal text.
*/
public static String enterText(
Instrumentation instr, final EditText editText, final String text) {
runOnMainSyncWithTimeout(
new Callable<Void>() {
@Override
public Void call() {
editText.requestFocus();
return null;
}
});
// TODO: Decide on using touch mode and how to do it consistently. e.g. the above could be
// replaced by "TouchUtils.tapView(this, editText);"
instr.sendStringSync(text);
return editText.getText().toString();
}
/** Taps the specified preference displayed by the provided Activity. */
@FixWhenMinSdkVersion(11)
@SuppressWarnings("deprecation")
public static void tapPreference(PreferenceActivity activity, Preference preference) {
// IMPLEMENTATION NOTE: There's no obvious way to find out which View corresponds to the
// preference because the Preference list in the adapter is flattened, whereas the View
// hierarchy in the ListView is not.
// Thus, we go for the Reflection-based invocation of Preference.performClick() which is as
// close to the invocation stack of a normal tap as it gets.
// Only perform the click if the preference is in the adapter to catch cases where the
// preference is not part of the PreferenceActivity for some reason.
ListView listView = activity.getListView();
ListAdapter listAdapter = listView.getAdapter();
for (int i = 0, len = listAdapter.getCount(); i < len; i++) {
if (listAdapter.getItem(i) == preference) {
invokePreferencePerformClickOnMainThread(preference, activity.getPreferenceScreen());
return;
}
}
throw new IllegalArgumentException("Preference " + preference + " not in list");
}
private static void invokePreferencePerformClickOnMainThread(
final Preference preference, final PreferenceScreen preferenceScreen) {
runOnMainSyncWithTimeout(
new Callable<Void>() {
@Override
public Void call() {
invokePreferencePerformClick(preference, preferenceScreen);
return null;
}
});
}
private static void invokePreferencePerformClick(
Preference preference, PreferenceScreen preferenceScreen) {
try {
Method performClickMethod =
Preference.class.getDeclaredMethod("performClick", PreferenceScreen.class);
performClickMethod.setAccessible(true);
performClickMethod.invoke(preference, preferenceScreen);
} catch (NoSuchMethodException e) {
throw new RuntimeException("Preference.performClickMethod method not found", e);
} catch (InvocationTargetException e) {
throw new RuntimeException("Preference.performClickMethod failed", e);
} catch (IllegalAccessException e) {
throw new RuntimeException("Failed to access Preference.performClickMethod", e);
}
}
/** Waits until the window which contains the provided view has focus. */
public static void waitForWindowFocus(View view) throws InterruptedException, TimeoutException {
long deadline = SystemClock.uptimeMillis() + UI_ACTION_EFFECT_TIMEOUT_MILLIS;
while (!view.hasWindowFocus()) {
long millisTillDeadline = deadline - SystemClock.uptimeMillis();
if (millisTillDeadline < 0) {
throw new TimeoutException("Timed out while waiting for window focus");
}
Thread.sleep(50);
}
}
/** Waits until the {@link Activity} is finishing. */
public static void waitForActivityFinishing(Activity activity)
throws InterruptedException, TimeoutException {
long deadline = SystemClock.uptimeMillis() + UI_ACTION_EFFECT_TIMEOUT_MILLIS;
while (!activity.isFinishing()) {
long millisTillDeadline = deadline - SystemClock.uptimeMillis();
if (millisTillDeadline < 0) {
throw new TimeoutException("Timed out while waiting for activity to start finishing");
}
Thread.sleep(50);
}
}
/**
* Invokes the {@link Activity}'s {@code onBackPressed()} on the UI thread and blocks (with a
* timeout) the calling thread until the invocation completes. If the calling thread is the UI
   * thread, {@code onBackPressed} is invoked directly and without a timeout.
*/
public static void invokeActivityOnBackPressedOnUiThread(final Activity activity) {
runOnMainSyncWithTimeout(
new Callable<Void>() {
@Override
public Void call() {
activity.onBackPressed();
return null;
}
});
}
/**
* Invokes the {@link Activity}'s {@code finish()} on the UI thread and blocks (with a timeout)
* the calling thread until the invocation completes. If the calling thread is the UI thread, the
* {@code finish} is invoked directly and without a timeout.
*/
public static void invokeFinishActivityOnUiThread(final Activity activity) {
runOnMainSyncWithTimeout(
new Callable<Void>() {
@Override
public Void call() {
activity.finish();
return null;
}
});
}
/**
* Invokes the {@link Activity}'s {@code onActivityResult} on the UI thread and blocks (with a
* timeout) the calling thread until the invocation completes. If the calling thread is the UI
   * thread, {@code onActivityResult} is invoked directly and without a timeout.
*/
public static void invokeOnActivityResultOnUiThread(
final Activity activity, final int requestCode, final int resultCode, final Intent intent) {
runOnMainSyncWithTimeout(
new Callable<Void>() {
@Override
public Void call() throws Exception {
// The method has to be invoked via Reflection because it's protected rather than
// public.
Method method =
Activity.class.getDeclaredMethod(
"onActivityResult", int.class, int.class, Intent.class);
method.setAccessible(true);
method.invoke(activity, requestCode, resultCode, intent);
return null;
}
});
}
private static boolean isViewAndAllItsParentsVisible(View view) {
if (view.getVisibility() != View.VISIBLE) {
return false;
}
ViewParent parent = view.getParent();
if (!(parent instanceof View)) {
      // This View is the root of the View hierarchy, and it's visible (checked above)
return true;
}
    // This View itself is actually visible only if all of its parents are visible.
return isViewAndAllItsParentsVisible((View) parent);
}
private static boolean isViewOrAnyParentVisibilityGone(View view) {
if (view.getVisibility() == View.GONE) {
return true;
}
ViewParent parent = view.getParent();
if (!(parent instanceof View)) {
// This View is the root of the View hierarchy, and its visibility is not GONE (checked above)
return false;
}
    // This View is effectively gone if any of its parents' visibility is GONE.
return isViewOrAnyParentVisibilityGone((View) parent);
}
/** Asserts that the provided {@link View} and all its parents are visible. */
public static void assertViewAndAllItsParentsVisible(View view) {
Assert.assertTrue(isViewAndAllItsParentsVisible(view));
}
  /** Asserts that the provided {@link View} or any of its parents has visibility set to GONE. */
public static void assertViewOrAnyParentVisibilityGone(View view) {
Assert.assertTrue(isViewOrAnyParentVisibilityGone(view));
}
/**
* Asserts that the provided {@link View} is on the screen and is visible (which means its parent
* and the parent of its parent and so forth are visible too).
*/
public static void assertViewVisibleOnScreen(View view) {
onView(equalTo(view)).check(matches(isDisplayed()));
assertViewAndAllItsParentsVisible(view);
}
/**
* Opens the options menu of the provided {@link Activity} and invokes the menu item with the
* provided ID.
*
* <p>Note: This method cannot be invoked on the main thread.
*/
public static void openOptionsMenuAndInvokeItem(
Instrumentation instrumentation, final Activity activity, final int itemId) {
if (!instrumentation.invokeMenuActionSync(activity, itemId, 0)) {
throw new RuntimeException("Failed to invoke options menu item ID " + itemId);
}
instrumentation.waitForIdleSync();
}
/**
* Opens the context menu for the provided {@link View} and invokes the menu item with the
* provided ID.
*
* <p>Note: This method cannot be invoked on the main thread.
*/
public static void openContextMenuAndInvokeItem(
Instrumentation instrumentation, final Activity activity, final View view, final int itemId) {
// IMPLEMENTATION NOTE: Instrumentation.invokeContextMenuAction would've been much simpler, but
// it requires the View to be focused which is hard to achieve in touch mode.
runOnMainSyncWithTimeout(
new Callable<Void>() {
@Override
public Void call() {
            // Use performLongClick instead of showContextMenu to exercise more of the code path that
            // is invoked when the user normally opens a context menu.
if (!view.performLongClick()) {
throw new RuntimeException("Failed to perform long click");
}
if (!activity.getWindow().performContextMenuIdentifierAction(itemId, 0)) {
throw new RuntimeException("Failed perform to context menu action");
}
return null;
}
});
instrumentation.waitForIdleSync();
}
/**
* Asserts that the provided {@link Activity} displayed a dialog with the provided ID at some
* point in the past. Note that this does not necessarily mean that the dialog is still being
* displayed.
*
* <p><b>Note:</b> this method resets the "was displayed" state of the dialog. This means that a
* consecutive invocation of this method for the same dialog ID will fail unless the dialog was
* displayed again prior to the invocation of this method.
*/
@SuppressWarnings("deprecation") // TODO: fix by using a fragment instead
public static void assertDialogWasDisplayed(Activity activity, int dialogId) {
    // IMPLEMENTATION NOTE: The code below relies on the fact that, if a dialog with the ID was
    // ever displayed, then dismissDialog will succeed, whereas if the dialog with the ID has
// never been shown, then dismissDialog throws an IllegalArgumentException.
try {
activity.dismissDialog(dialogId);
// Reset the "was displayed" state
activity.removeDialog(dialogId);
} catch (IllegalArgumentException e) {
Assert.fail("No dialog with ID " + dialogId + " was ever displayed");
}
}
/** Asserts that the provided {@link Activity} has not displayed a dialog with the provided ID. */
@SuppressWarnings("deprecation") // TODO: fix by using a fragment instead
public static void assertDialogWasNotDisplayed(Activity activity, int dialogId) {
    // IMPLEMENTATION NOTE: The code below relies on the fact that, if a dialog with the ID was
    // ever displayed, then dismissDialog will succeed, whereas if the dialog with the ID has
// never been shown, then dismissDialog throws an IllegalArgumentException.
try {
activity.dismissDialog(dialogId);
Assert.fail("Dialog with ID " + dialogId + " was displayed");
} catch (IllegalArgumentException expected) {
// Expected
}
}
/**
* Taps the negative button of a currently displayed dialog. This method assumes that a button of
* the dialog is currently selected.
*
* @see #tapDialogPositiveButton(Instrumentation)
*/
public static void tapDialogNegativeButton(Instrumentation instrumentation) {
selectDialogButton(instrumentation);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_LEFT);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_CENTER);
}
/**
* Taps the positive button of a currently displayed dialog. This method assumes that a button of
* the dialog is currently selected.
*
* @see #tapDialogNegativeButton(Instrumentation)
*/
@FixWhenMinSdkVersion(14)
public static void tapDialogPositiveButton(Instrumentation instrumentation) {
selectDialogButton(instrumentation);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_RIGHT);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_CENTER);
}
/**
* Selects a button at the bottom of a dialog. This assumes that a dialog is currently displayed
* in the foreground.
*/
private static void selectDialogButton(Instrumentation instrumentation) {
// If the dialog contains too much text it will scroll and the buttons at the bottom will only
// get selected once it scrolls to the very bottom.
// So far, 6 x DPAD_DOWN seems to do the trick for our app...
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_DOWN);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_DOWN);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_DOWN);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_DOWN);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_DOWN);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_DOWN);
}
/**
* Taps the negative button of a currently displayed 3 button dialog. This method assumes that a
* button of the dialog is currently selected.
*/
public static void tapNegativeButtonIn3ButtonDialog(Instrumentation instrumentation) {
selectDialogButton(instrumentation);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_LEFT);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_LEFT);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_CENTER);
}
/**
* Taps the neutral button of a currently displayed 3 button dialog. This method assumes that a
* button of the dialog is currently selected.
*/
public static void tapNeutralButtonIn3ButtonDialog(Instrumentation instrumentation) {
selectDialogButton(instrumentation);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_RIGHT);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_CENTER);
}
/**
* Taps the positive button of a currently displayed 3 button dialog. This method assumes that a
* button of the dialog is currently selected.
*/
public static void tapPositiveButtonIn3ButtonDialog(Instrumentation instrumentation) {
selectDialogButton(instrumentation);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_RIGHT);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_RIGHT);
instrumentation.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_CENTER);
}
/**
* Configures the {@link DependencyInjector} with a {@link StartActivityListener} that prevents
* activity launches.
*/
public static void withLaunchPreventingStartActivityListenerInDependencyResolver() {
StartActivityListener mockListener = Mockito.mock(StartActivityListener.class);
doReturn(true)
.when(mockListener)
.onStartActivityInvoked(Mockito.anyObject(), Mockito.anyObject());
DependencyInjector.setStartActivityListener(mockListener);
}
/**
* Configures the {@link DependencyInjector} with a {@link StartServiceListener} that prevents
* service launches.
*/
public static void withLaunchPreventingStartServiceListenerInDependencyResolver() {
StartServiceListener mockListener = Mockito.mock(StartServiceListener.class);
doReturn(true)
.when(mockListener)
.onStartServiceInvoked(Mockito.anyObject(), Mockito.anyObject());
DependencyInjector.setStartServiceListener(mockListener);
}
/**
* Verifies (with a timeout of {@link #UI_ACTION_EFFECT_TIMEOUT_MILLIS}) that an activity launch
* has been attempted and returns the {@link Intent} with which the attempt occurred.
*
* <p><b>NOTE: This method assumes that the {@link DependencyInjector} was configured using {@link
* #withLaunchPreventingStartActivityListenerInDependencyResolver()}.</b>
*/
public static Intent verifyWithTimeoutThatStartActivityAttemptedExactlyOnce() {
StartActivityListener mockListener = DependencyInjector.getStartActivityListener();
ArgumentCaptor<Intent> intentCaptor = ArgumentCaptor.forClass(Intent.class);
verify(mockListener, timeout(UI_ACTION_EFFECT_TIMEOUT_MILLIS))
.onStartActivityInvoked(Mockito.anyObject(), intentCaptor.capture());
return intentCaptor.getValue();
}
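  // Hedged usage sketch (illustrative only, not part of the original utility set): a typical test
  // installs the launch-preventing listener, runs the code under test, then inspects the captured
  // launch Intent. The Runnable parameter and the ACTION_VIEW expectation are hypothetical.
  private static void exampleVerifyActivityLaunchFlow(Runnable codeUnderTest) {
    withLaunchPreventingStartActivityListenerInDependencyResolver();
    codeUnderTest.run();
    Intent launchIntent = verifyWithTimeoutThatStartActivityAttemptedExactlyOnce();
    Assert.assertEquals(Intent.ACTION_VIEW, launchIntent.getAction());
  }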
public static void assertLessThanOrEquals(long expected, long actual) {
if (actual > expected) {
Assert.fail(actual + " > " + expected);
}
}
public static void assertMoreThanOrEquals(long expected, long actual) {
if (actual < expected) {
Assert.fail(actual + " < " + expected);
}
}
/*
   * Returns the x and y coordinates of the center of the view, in pixels.
*/
public static Point getCenterOfViewOnScreen(View view) {
int[] location = new int[2];
view.getLocationOnScreen(location);
int centerX = location[0] + view.getWidth() / 2;
int centerY = location[1] + view.getHeight() / 2;
return new Point(centerX, centerY);
}
/*
   * Returns the x pixel coordinate of the right edge of the view on screen.
*/
public static int getRightXofViewOnScreen(View view) {
int[] location = new int[2];
view.getLocationOnScreen(location);
int width = view.getWidth();
return location[0] + width;
}
/*
   * Returns the x pixel coordinate of the left edge of the view on screen.
*/
public static int getLeftXofViewOnScreen(View view) {
int[] location = new int[2];
view.getLocationOnScreen(location);
return location[0];
}
/*
* Drags from the center of the view to the toX value.
   * This method exists in TouchUtil; however, it has a bug that causes it to work
   * when dragging on the left side but not on the right side, so we
   * had to recreate it here.
*/
public static int dragViewToX(View v, int gravity, int toX) {
if (gravity != Gravity.CENTER) {
throw new IllegalArgumentException("Can only handle Gravity.CENTER.");
}
Point point = getCenterOfViewOnScreen(v);
final int fromX = point.x;
final int fromY = point.y;
int deltaX = Math.abs(fromX - toX);
onView(equalTo(v))
.perform(
new GeneralSwipeAction(
Swipe.SLOW,
(view) -> {
return new float[] {fromX, fromY};
},
(view) -> {
return new float[] {toX, fromY};
},
Press.FINGER));
return deltaX;
}
/**
* Finds an {@link Intent} whose component name points to the specified class.
*
* @return first matching {@code Intent} or {@code null} if no match found.
*/
public static Intent findIntentByComponentClass(Collection<Intent> intents, Class<?> cls) {
for (Intent intent : intents) {
ComponentName componentName = intent.getComponent();
if ((componentName != null) && (cls.getName().equals(componentName.getClassName()))) {
return intent;
}
}
return null;
}
private static Object jsonValueToJavaValue(Object value) throws JSONException {
if ((value == null) || (value == JSONObject.NULL)) {
return null;
} else if (value instanceof JSONObject) {
return jsonObjectToMap((JSONObject) value);
} else if (value instanceof JSONArray) {
return jsonArrayToList((JSONArray) value);
} else {
return value;
}
}
private static Map<String, Object> jsonObjectToMap(JSONObject jsonObject) throws JSONException {
Map<String, Object> result = Maps.newHashMap();
JSONArray names = jsonObject.names();
for (int i = 0, len = ((names != null) ? names.length() : 0); i < len; i++) {
String name = names.getString(i);
Object value = jsonObject.get(name);
result.put(name, jsonValueToJavaValue(value));
}
return result;
}
private static List<Object> jsonArrayToList(JSONArray jsonArray) throws JSONException {
List<Object> result = Lists.newArrayList();
for (int i = 0, len = (jsonArray != null) ? jsonArray.length() : 0; i < len; i++) {
result.add(jsonValueToJavaValue(jsonArray.get(i)));
}
return result;
}
/** Gets the result code that the provided {@link Activity} is currently set to return. */
public static int getActivityResultCode(Activity activity) {
    // We need to use Reflection because the result code is stored in the mResultCode field with package
// private visibility.
try {
Field field = Activity.class.getDeclaredField("mResultCode");
field.setAccessible(true);
// The field is guarded (for concurrency) by the Activity instance.
synchronized (activity) {
return field.getInt(activity);
}
} catch (Exception e) {
throw new RuntimeException("Failed to get Acitivity.mResultCode for " + activity, e);
}
}
/** Gets the result data that the provided {@link Activity} is currently set to return. */
public static Intent getActivityResultData(Activity activity) {
    // We need to use Reflection because the result data is stored in the mResultData field with package
// private visibility.
try {
Field field = Activity.class.getDeclaredField("mResultData");
field.setAccessible(true);
// The field is guarded (for concurrency) by the Activity instance.
synchronized (activity) {
return (Intent) field.get(activity);
}
} catch (Exception e) {
throw new RuntimeException("Failed to get Acitivity.mResultData for " + activity, e);
}
}
/** Asserts that the actual value is in the expected range (inclusive). */
public static void assertInRangeInclusive(
long actual, long expectedMinValue, long expectedMaxValue) {
if ((actual < expectedMinValue) || (actual > expectedMaxValue)) {
Assert.fail(actual + " not in [" + expectedMinValue + ", " + expectedMaxValue + "]");
}
}
/**
* Invokes the provided {@link Callable} on the main thread and blocks until the operation
* completes or times out. If this method is invoked on the main thread, the {@code Callable} is
* invoked immediately and no timeout is enforced.
*
* <p>Exceptions thrown by the {@code Callable} are rethrown by this method. Checked exceptions
* are rethrown as unchecked exceptions.
*
* @return result returned by the {@code Callable}.
*/
public static <V> V runOnMainSyncWithTimeout(Callable<V> callable) {
try {
return runOnMainSyncWithTimeoutAndWithCheckedExceptionsExpected(callable);
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new RuntimeException("Unexpected checked exception", e);
}
}
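  // Hedged usage sketch (illustrative only, not part of the original utility set): reads a
  // TextView's text from the instrumentation thread via runOnMainSyncWithTimeout. The TextView
  // parameter is a hypothetical example argument.
  private static String exampleReadTextOnMainThread(final TextView view) {
    return runOnMainSyncWithTimeout(
        new Callable<String>() {
          @Override
          public String call() {
            // Runs on the UI thread; the caller blocks until completion or timeout.
            return view.getText().toString();
          }
        });
  }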
/**
* Invokes the provided {@link Callable} on the main thread and blocks until the operation
* completes or times out. If this method is invoked on the main thread, the {@code Callable} is
* invoked immediately and no timeout is enforced.
*
* <p>Exceptions thrown by the {@code Callable} are rethrown by this method.
*
* @return result returned by the {@code Callable}.
*/
private static <V> V runOnMainSyncWithTimeoutAndWithCheckedExceptionsExpected(
Callable<V> callable) throws Exception {
Looper mainLooper = Looper.getMainLooper();
if (mainLooper.getThread() == Thread.currentThread()) {
// This method is being invoked on the main thread -- invoke the Callable inline to avoid
// a deadlock.
return callable.call();
} else {
FutureTask<V> task = new FutureTask<V>(callable);
new Handler(mainLooper).post(task);
try {
return task.get(UI_ACTION_EFFECT_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);
} catch (ExecutionException e) {
Throwable cause = e.getCause();
if (cause instanceof Exception) {
throw (Exception) cause;
} else {
throw new RuntimeException("Execution on main thread failed", e);
}
}
}
}
/**
   * Returns {@code true} if any {@link Intent}s are still floating around after any validated ones
   * have been consumed, {@code false} otherwise.
*/
public static boolean isStrayIntentRemaining() {
try {
intended(anyIntent());
return true;
} catch (AssertionFailedError expected) {
return false;
}
}
/** Creates a fake PendingIntent that can be used as a test value */
public static PendingIntent createFakePendingIntent() {
try {
Constructor<PendingIntent> constructor =
PendingIntent.class.getDeclaredConstructor(IBinder.class);
constructor.setAccessible(true);
return constructor.newInstance(mock(IBinder.class));
} catch (IllegalArgumentException e) {
throw e;
} catch (InstantiationException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
} catch (InvocationTargetException e) {
throw new RuntimeException(e);
} catch (NoSuchMethodException e) {
throw new RuntimeException(e);
}
}
/**
* Intent that can be compared to another Intent (base Intent doesn't override equals method)
*
* <p>Reason: (new Intent("MY_ACTION)).equals(new Intent("MY_ACTION")) returns false. This is
* really annoying for unit tests for testing expecting method calls.
*
* <p>A ComparableIntent can be used instead of an Intent when calling:
* Mockito.verify(myMock).myFunction(new ComparableIntent("MY_ACTION")). This avoids the
* shenanigan to use an ArgumentCaptor.
*
* <p>Warning: this breaks the usual contract of equals that A.equals(B) means B.equals(A), so
* this should be used just for testing.
*/
public static class ComparableIntent extends Intent {
public ComparableIntent() {
super();
}
public ComparableIntent(String action) {
super(action);
}
public ComparableIntent(Context packageContext, Class<?> cls) {
super(packageContext, cls);
}
@Override
public int hashCode() {
return Objects.hashCode(filterHashCode(), getFlags(), getExtras());
}
@Override
public boolean equals(Object o) {
if (o instanceof Intent) {
return filterEquals((Intent) o) && compareFlags((Intent) o) && compareExtras((Intent) o);
}
return false;
}
private boolean compareFlags(Intent other) {
return other != null && this.getFlags() == other.getFlags();
}
private boolean compareExtras(Intent other) {
if (other == null) {
return false;
}
Bundle myExtras = this.getExtras();
Bundle theirExtras = other.getExtras();
      if (myExtras == null && theirExtras == null) {
        return true;
      }
      if (myExtras == null || theirExtras == null) {
        return false;
      }
      if (myExtras.size() != theirExtras.size()) {
        return false;
      }
for (String key : myExtras.keySet()) {
        // Null-safe comparison: extras may legitimately contain null values.
        if (!theirExtras.containsKey(key) || !Objects.equal(myExtras.get(key), theirExtras.get(key))) {
return false;
}
}
return true;
}
}
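  // Hedged usage sketch (illustrative only): with ComparableIntent, an expected startActivity call
  // can be verified by equality, without an ArgumentCaptor. The mock Context and the "MY_ACTION"
  // action string are hypothetical example values.
  private static void exampleVerifyIntentByEquality(Context mockContext) {
    verify(mockContext).startActivity(new ComparableIntent("MY_ACTION"));
  }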
}
|
|
package com.planet_ink.coffee_mud.Common.interfaces;
import java.util.*;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
/*
Copyright 2011-2016 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* A class defining the characteristics of a clan government,
* and its membership.
 * @author Bo Zimmerman
*/
public interface ClanGovernment extends Modifiable, CMCommon
{
/**
	 * Gets the ID.
	 *
	 * @return the ID
*/
public int getID();
/**
	 * Sets the ID.
	 *
	 * @param iD the new ID
*/
public void setID(int iD);
/**
* Gets the name.
*
* @return the name
*/
public String getName();
/**
* Sets the name.
*
* @param name the new name
*/
public void setName(String name);
/**
* Gets the category.
*
* @return the category
*/
public String getCategory();
/**
* Sets the category.
*
* @param category the new category
*/
public void setCategory(String category);
/**
* Gets the auto role.
*
* @return the auto role
*/
public int getAutoRole();
/**
* Sets the auto role.
*
* @param autoRole the new auto role
*/
public void setAutoRole(int autoRole);
/**
* Gets the accept pos.
*
* @return the accept pos
*/
public int getAcceptPos();
/**
* Sets the accept pos.
*
* @param acceptPos the new accept pos
*/
public void setAcceptPos(int acceptPos);
/**
* Gets the short desc.
*
* @return the short desc
*/
public String getShortDesc();
/**
* Sets the short desc.
*
* @param shortDesc the new short desc
*/
public void setShortDesc(String shortDesc);
/**
* Gets the long desc.
*
* @return the long desc
*/
public String getLongDesc();
/**
* Sets the long desc.
*
* @param longDesc the new long desc
*/
public void setLongDesc(String longDesc);
/**
* Gets the required mask str.
*
* @return the required mask str
*/
public String getRequiredMaskStr();
/**
* Sets the required mask str.
*
* @param requiredMaskStr the new required mask str
*/
public void setRequiredMaskStr(String requiredMaskStr);
/**
* Gets the Scriptable parm when joining/creating a clan.
*
* @return the script for joining/creating a clan
*/
public String getEntryScript();
/**
* Sets the Scriptable parm when joining/creating a clan.
*
* @param scriptParm the Scriptable parm
*/
public void setEntryScript(String scriptParm);
/**
* Gets the Scriptable parm when resigning/exiling a clan.
*
* @return the script for resigning/exiling a clan
*/
public String getExitScript();
/**
* Sets the Scriptable parm when resigning/exiling a clan.
*
* @param scriptParm the Scriptable parm
*/
public void setExitScript(String scriptParm);
/**
* Checks if is public.
*
* @return true, if is public
*/
public boolean isPublic();
/**
* Sets the public.
*
* @param isPublic the new public
*/
public void setPublic(boolean isPublic);
/**
* Checks if is family only.
*
* @return true, if is family only
*/
public boolean isFamilyOnly();
/**
* Sets the family only.
*
* @param isFamilyOnly the new family only
*/
public void setFamilyOnly(boolean isFamilyOnly);
/**
* Returns true if this clan is rivalrous with other rivalrous clans,
* meaning that pvp is enabled between them, and war can be declared
* between them.
* @return true or false
*/
public boolean isRivalrous();
/**
* Set to true if this clan is rivalrous with other rivalrous clans,
	 * meaning that pvp is enabled between them, and war can be declared
	 * between them.
	 * @param isRivalrous true or false
*/
public void setRivalrous(boolean isRivalrous);
/**
* Gets the override min members.
*
* @return the override min members
*/
public Integer getOverrideMinMembers();
/**
* Sets the override min members.
*
* @param overrideMinMembers the new override min members
*/
public void setOverrideMinMembers(Integer overrideMinMembers);
/**
* Checks if is conquest enabled.
*
* @return true, if is conquest enabled
*/
public boolean isConquestEnabled();
/**
* Sets the conquest enabled.
*
* @param conquestEnabled the new conquest enabled
*/
public void setConquestEnabled(boolean conquestEnabled);
/**
* Checks if is conquest item loyalty.
*
* @return true, if is conquest item loyalty
*/
public boolean isConquestItemLoyalty();
/**
* Sets the conquest item loyalty.
*
* @param conquestItemLoyalty the new conquest item loyalty
*/
public void setConquestItemLoyalty(boolean conquestItemLoyalty);
/**
* Checks if is conquest by worship.
*
* @return true, if is conquest by worship
*/
public boolean isConquestByWorship();
/**
* Sets the conquest by worship.
*
* @param conquestByWorship the new conquest by worship
*/
public void setConquestByWorship(boolean conquestByWorship);
/**
* Gets the max vote days.
*
* @return the max vote days
*/
public int getMaxVoteDays();
/**
* Sets the max vote days.
*
* @param maxVoteDays the new max vote days
*/
public void setMaxVoteDays(int maxVoteDays);
/**
* Gets the vote quorum pct.
*
* @return the vote quorum pct
*/
public int getVoteQuorumPct();
/**
* Sets the vote quorum pct.
*
* @param voteQuorumPct the new vote quorum pct
*/
public void setVoteQuorumPct(int voteQuorumPct);
/**
* Gets the xp calculation formula.
*
* @return the xp calculation formula
*/
public String getXpCalculationFormulaStr();
/**
* Sets the xp calculation formula.
*
* @param xpCalculationFormulaStr the new xp calculation formula
*/
public void setXpCalculationFormulaStr(String xpCalculationFormulaStr);
/**
* Returns the compiled xp calculation formula
*
* @return the compiled xp calculation formula
*/
public LinkedList<CMath.CompiledOperation> getXPCalculationFormula();
/**
* Checks if is default.
*
* @return true, if is default
*/
public boolean isDefault();
/**
* Sets the default.
*
* @param isDefault the new default
*/
public void setDefault(boolean isDefault);
/**
* Gets the positions.
*
* @return the positions
*/
public ClanPosition[] getPositions();
/**
* Sets the positions.
*
* @param positions the new positions
*/
public void setPositions(ClanPosition[] positions);
/**
* Gets the auto promote by.
*
* @return the auto promote by
*/
public Clan.AutoPromoteFlag getAutoPromoteBy();
/**
* Sets the auto promote by.
*
* @param autoPromoteBy the new auto promote by
*/
public void setAutoPromoteBy(Clan.AutoPromoteFlag autoPromoteBy);
/**
* Gets the level progression.
*
* @return the level progression
*/
public int[] getLevelProgression();
/**
* Sets the level progression.
*
* @param levelProgression the new level progression
*/
public void setLevelProgression(int[] levelProgression);
/**
* Gets the help str.
*
* @return the help str
*/
public String getHelpStr();
/**
* Adds the position.
*
* @return the clan position
*/
public ClanPosition addPosition();
/**
* Del position.
*
* @param pos the pos
*/
public void delPosition(ClanPosition pos);
/**
* Gets the position.
*
* @param pos the pos
* @return the position
*/
public ClanPosition getPosition(String pos);
/**
	 * Returns the clan position with the given role id.
	 * @param roleID the role id to look for
	 * @return the position the role id maps to
*/
public ClanPosition findPositionRole(Integer roleID);
/**
	 * Returns the clan position based on its role id, or on some part
	 * of its id or name.
	 * @param pos the role id, id, or name
	 * @return the clan position that pos maps to
*/
public ClanPosition findPositionRole(String pos);
/**
* Return the list of abilities owned by someone
* who is part of a clan of the given level.
	 * @param mob the clan member mob whose abilities are being listed
* @param clan the clan the abilities come from
* @param level clan level
* @return list of abilities
*/
public SearchIDList<Ability> getClanLevelAbilities(MOB mob, Clan clan, Integer level);
/**
* Return the list of effects owned by someone
* who is part of a clan of the given level.
* @param mob the mob affected
* @param clan the clan the effects come from
* @param level clan level
* @return list of abilities
*/
public ChameleonList<Ability> getClanLevelEffects(MOB mob, Clan clan, Integer level);
}
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.java.codeInsight.daemon.lambda;
import com.intellij.codeInsight.ExpectedTypeInfo;
import com.intellij.codeInsight.ExpectedTypesProvider;
import com.intellij.codeInsight.daemon.LightDaemonAnalyzerTestCase;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.resolve.DefaultParameterTypeInferencePolicy;
import com.intellij.psi.infos.CandidateInfo;
import com.intellij.psi.infos.MethodCandidateInfo;
import com.intellij.psi.util.PsiTreeUtil;
import org.jetbrains.annotations.NonNls;
import java.util.Collection;
public class Java8ExpressionsCheckTest extends LightDaemonAnalyzerTestCase {
@NonNls static final String BASE_PATH = "/codeInsight/daemonCodeAnalyzer/lambda/expressions";
public void testSecondConflictResolutionOnSameMethodCall() {
doTestAllMethodCallExpressions();
}
public void testNestedLambdaAdditionalConstraints() {
doTestAllMethodCallExpressions();
}
public void testAvoidClassRefCachingDuringInference() {
doTestAllMethodCallExpressions();
}
public void testInfinitiveParameterBoundsCheck() {
doTestAllMethodCallExpressions();
}
public void testProoveThatInferenceInsideLambdaBodyDontInfluenceOuterCallInference() {
doTestAllMethodCallExpressions();
}
public void testDontCollectUnhandledReferencesInsideLambdaBody() {
doTestAllMethodCallExpressions();
}
public void testCachedUnresolvedMethods() {
doTestCachedUnresolved();
}
public void testCacheUnresolvedMethods2() {
doTestCachedUnresolved();
}
public void testCacheUnresolvedMethods3() {
doTestCachedUnresolved();
}
public void testCacheUnresolvedMethods4() {
doTestCachedUnresolved();
}
public void testCacheUnresolvedMethods5() {
doTestCachedUnresolved();
}
public void testMethodOverloadsInsideLambdaHierarchy() {
doTestAllMethodCallExpressions();
}
public void testObjectOverloadsWithDiamondsOverMultipleConstructors() {
doTestAllMethodCallExpressions();
}
public void testLambdaParameterTypeSideEffects() {
configureByFile(BASE_PATH + "/" + getTestName(false) + ".java");
Collection<PsiParameter> parameters = PsiTreeUtil.findChildrenOfType(getFile(), PsiParameter.class);
for (PsiParameter parameter : parameters) {
if (parameter.getTypeElement() == null) { //lambda parameter
assertNotNull(parameter.getType());
Collection<PsiCallExpression> expressions = PsiTreeUtil.findChildrenOfType(getFile(), PsiCallExpression.class);
for (PsiCallExpression expression : expressions) {
assertNotNull(expression.getText(), expression.resolveMethod());
}
getPsiManager().dropResolveCaches();
}
}
}
public void testCachingOfResultsDuringCandidatesIteration() {
configureByFile(BASE_PATH + "/" + getTestName(false) + ".java");
final Collection<PsiMethodCallExpression> methodCallExpressions = PsiTreeUtil.findChildrenOfType(getFile(), PsiMethodCallExpression.class);
final PsiResolveHelper helper = JavaPsiFacade.getInstance(getProject()).getResolveHelper();
for (PsiMethodCallExpression expression : methodCallExpressions) {
CandidateInfo[] candidates = helper.getReferencedMethodCandidates(expression, false, true);
PsiExpressionList argumentList = expression.getArgumentList();
PsiExpression[] args = argumentList.getExpressions();
for (JavaResolveResult result : candidates) {
if (result instanceof MethodCandidateInfo) {
final MethodCandidateInfo info = (MethodCandidateInfo)result;
MethodCandidateInfo.ourOverloadGuard
.doPreventingRecursion(argumentList, false, () -> info.inferTypeArguments(DefaultParameterTypeInferencePolicy.INSTANCE, args, true));
}
}
PsiMethodCallExpression parentCall = PsiTreeUtil.getParentOfType(expression, PsiMethodCallExpression.class, true);
if (parentCall != null) {
JavaResolveResult result = parentCall.getMethodExpression().advancedResolve(false);
if (result instanceof MethodCandidateInfo) {
assertNull(((MethodCandidateInfo)result).getInferenceErrorMessage());
}
}
}
}
public void testNonCachingFolding() {
final String filePath = BASE_PATH + "/" + getTestName(false) + ".java";
configureByFile(filePath);
PsiNewExpression newWithAnonym =
PsiTreeUtil.getParentOfType(getFile().findElementAt(getEditor().getCaretModel().getOffset()), PsiNewExpression.class);
ExpectedTypeInfo[] types = ExpectedTypesProvider.getExpectedTypes(newWithAnonym, false);
assertNotNull(types);
doTestConfiguredFile(false, false, filePath);
}
public void testRejectCachedTopLevelSessionIfItCorrespondsToTheWrongOverload() {
final String filePath = BASE_PATH + "/" + getTestName(false) + ".java";
configureByFile(filePath);
PsiMethodCallExpression methodCall =
PsiTreeUtil.getParentOfType(getFile().findElementAt(getEditor().getCaretModel().getOffset()), PsiMethodCallExpression.class);
assertNotNull(methodCall);
final PsiResolveHelper helper = JavaPsiFacade.getInstance(methodCall.getProject()).getResolveHelper();
CandidateInfo[] candidates = helper.getReferencedMethodCandidates(methodCall, false, true);
for (CandidateInfo candidate : candidates) {
if (candidate instanceof MethodCandidateInfo) {
//try to cache top level session
candidate.getSubstitutor();
}
}
doTestConfiguredFile(false, false, filePath);
}
public void testCheckedExceptionConstraintToTopLevel() {
doTestCachedUnresolved();
}
private void doTestCachedUnresolved() {
configureByFile(BASE_PATH + "/" + getTestName(false) + ".java");
PsiMethodCallExpression callExpression =
PsiTreeUtil.getParentOfType(getFile().findElementAt(getEditor().getCaretModel().getOffset()), PsiMethodCallExpression.class);
assertNotNull(callExpression);
assertNotNull(callExpression.getType());
final Collection<PsiCallExpression> methodCallExpressions = PsiTreeUtil.findChildrenOfType(getFile(), PsiCallExpression.class);
for (PsiCallExpression expression : methodCallExpressions) {
assertNotNull("Failed inference for: " + expression.getText(), expression.getType());
}
}
public void testIDEA140035() {
doTestAllMethodCallExpressions();
final Collection<PsiParameter> parameterLists = PsiTreeUtil.findChildrenOfType(getFile(), PsiParameter.class);
for (PsiParameter parameter : parameterLists) {
if (parameter.getTypeElement() != null) continue;
getPsiManager().dropResolveCaches();
final PsiType type = parameter.getType();
assertFalse("Failed inference for: " + parameter.getParent().getText(), type instanceof PsiLambdaParameterType);
}
}
public void testAdditionalConstraintsBasedOnLambdaResolution() {
doTestAllMethodCallExpressions();
}
public void testAdditionalConstraintsBasedOnLambdaResolutionForNestedLambdas() {
doTestAllMethodCallExpressions();
}
public void testOverloadResolutionInsideLambdaInsideNestedCall() {
doTestAllMethodCallExpressions();
}
private void doTestAllMethodCallExpressions() {
configureByFile(BASE_PATH + "/" + getTestName(false) + ".java");
final Collection<PsiCallExpression> methodCallExpressions = PsiTreeUtil.findChildrenOfType(getFile(), PsiCallExpression.class);
for (PsiCallExpression expression : methodCallExpressions) {
getPsiManager().dropResolveCaches();
if (expression instanceof PsiMethodCallExpression) {
assertNotNull("Failed to resolve: " + expression.getText(), expression.resolveMethod());
}
assertNotNull("Failed inference for: " + expression.getText(), expression.getType());
}
final Collection<PsiNewExpression> parameterLists = PsiTreeUtil.findChildrenOfType(getFile(), PsiNewExpression.class);
for (PsiNewExpression newExpression : parameterLists) {
getPsiManager().dropResolveCaches();
final PsiType[] arguments = newExpression.getTypeArguments();
String failMessage = "Failed inference for: " + newExpression.getParent().getText();
assertNotNull(failMessage, arguments);
PsiDiamondType diamondType = PsiDiamondType.getDiamondType(newExpression);
if (diamondType != null) {
JavaResolveResult staticFactory = diamondType.getStaticFactory();
assertNotNull(staticFactory);
assertTrue(staticFactory instanceof MethodCandidateInfo);
assertNull(failMessage, ((MethodCandidateInfo)staticFactory).getInferenceErrorMessage());
}
}
}
}
|
|
package org.apache.maven.plugin.javadoc;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.mortbay.jetty.Connector;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.bio.SocketConnector;
import org.mortbay.jetty.security.B64Code;
import org.mortbay.jetty.servlet.Context;
import org.mortbay.jetty.servlet.ServletHolder;
import org.mortbay.proxy.AsyncProxyServlet;
/**
* A Proxy server.
*
* @author <a href="mailto:vincent.siveton@gmail.com">Vincent Siveton</a>
* @version $Id$
* @since 2.6
*/
class ProxyServer
{
private Server proxyServer;
/**
* @param proxyServlet the wanted auth proxy servlet
*/
public ProxyServer( AuthAsyncProxyServlet proxyServlet )
{
this( null, 0, proxyServlet );
}
/**
* @param hostName the server name
* @param port the server port
* @param proxyServlet the wanted auth proxy servlet
*/
public ProxyServer( String hostName, int port, AuthAsyncProxyServlet proxyServlet )
{
proxyServer = new Server();
proxyServer.addConnector( getDefaultConnector( hostName, port ) );
Context context = new Context( proxyServer, "/", 0 );
context.addServlet( new ServletHolder( proxyServlet ), "/" );
}
/**
* @return the host name
*/
public String getHostName()
{
Connector connector = proxyServer.getConnectors()[0];
return connector.getHost();
}
/**
* @return the host port
*/
public int getPort()
{
Connector connector = proxyServer.getConnectors()[0];
return ( connector.getLocalPort() <= 0 ? connector.getPort() : connector.getLocalPort() );
}
/**
* @throws Exception if any
*/
public void start()
throws Exception
{
if ( proxyServer != null )
{
proxyServer.start();
}
}
/**
* @throws Exception if any
*/
public void stop()
throws Exception
{
if ( proxyServer != null )
{
proxyServer.stop();
}
proxyServer = null;
}
private Connector getDefaultConnector( String hostName, int port )
{
Connector connector = new SocketConnector();
if ( hostName != null )
{
connector.setHost( hostName );
}
else
{
try
{
connector.setHost( InetAddress.getLocalHost().getCanonicalHostName() );
}
catch ( UnknownHostException e )
{
// nop
}
}
if ( port > 0 )
{
connector.setPort( port );
}
return connector;
}
/**
* A proxy servlet with authentication support.
*/
static class AuthAsyncProxyServlet
extends AsyncProxyServlet
{
private Map<String, String> authentications;
private long sleepTime = 0;
/**
* Constructor for non authentication servlet.
*/
public AuthAsyncProxyServlet()
{
super();
}
/**
* Constructor for authentication servlet.
*
* @param authentications a map of user/password
*/
public AuthAsyncProxyServlet( Map<String, String> authentications )
{
this();
this.authentications = authentications;
}
/**
* Constructor for authentication servlet.
*
* @param authentications a map of user/password
* @param sleepTime a positive time to sleep the service thread (for timeout)
*/
public AuthAsyncProxyServlet( Map<String, String> authentications, long sleepTime )
{
this();
this.authentications = authentications;
this.sleepTime = sleepTime;
}
/** {@inheritDoc} */
public void service( ServletRequest req, ServletResponse res )
throws ServletException, IOException
{
final HttpServletRequest request = (HttpServletRequest) req;
final HttpServletResponse response = (HttpServletResponse) res;
if ( this.authentications != null && !this.authentications.isEmpty() )
{
String proxyAuthorization = request.getHeader( "Proxy-Authorization" );
if ( proxyAuthorization != null && proxyAuthorization.startsWith( "Basic " ) )
{
String proxyAuth = proxyAuthorization.substring( 6 );
String authorization = B64Code.decode( proxyAuth );
String[] authTokens = authorization.split( ":" );
String user = authTokens[0];
String password = authTokens[1];
if ( this.authentications.get( user ) == null )
{
throw new IllegalArgumentException( user + " not found in the map!" );
}
if ( sleepTime > 0 )
{
try
{
Thread.sleep( sleepTime );
}
catch ( InterruptedException e )
{
// nop
}
}
String authPass = this.authentications.get(user);
if ( password.equals( authPass ) )
{
// could throw exceptions...
super.service( req, res );
return;
}
}
// Proxy-Authenticate Basic realm="CCProxy Authorization"
response.addHeader( "Proxy-Authenticate", "Basic realm=\"Jetty Proxy Authorization\"" );
response.setStatus( HttpServletResponse.SC_PROXY_AUTHENTICATION_REQUIRED );
return;
}
super.service( req, res );
}
}
}
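// Usage sketch (illustrative only; every call below refers to members defined above):
//
//   Map<String, String> authentications = new HashMap<String, String>();
//   authentications.put( "foo", "bar" );
//   ProxyServer proxy = new ProxyServer( new AuthAsyncProxyServlet( authentications ) );
//   proxy.start();
//   try
//   {
//       // point the client under test at proxy.getHostName():proxy.getPort() and send
//       // a "Proxy-Authorization: Basic <base64 of foo:bar>" header
//   }
//   finally
//   {
//       proxy.stop();
//   }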
|
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.actions;
import com.intellij.execution.Location;
import com.intellij.execution.PsiLocation;
import com.intellij.execution.RunManager;
import com.intellij.execution.RunnerAndConfigurationSettings;
import com.intellij.execution.configurations.ConfigurationType;
import com.intellij.execution.configurations.ConfigurationTypeUtil;
import com.intellij.execution.configurations.RunConfiguration;
import com.intellij.execution.junit.RuntimeConfigurationProducer;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.LangDataKeys;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import org.jetbrains.annotations.Nullable;
import java.awt.*;
import java.util.List;
/**
* Context for creating run configurations from a location in the source code.
*
* @see RunConfigurationProducer
*/
public class ConfigurationContext {
private static final Logger LOG = Logger.getInstance("#com.intellij.execution.actions.ConfigurationContext");
private final Location<PsiElement> myLocation;
private RunnerAndConfigurationSettings myConfiguration;
private Ref<RunnerAndConfigurationSettings> myExistingConfiguration;
private final Module myModule;
private final RunConfiguration myRuntimeConfiguration;
private final Component myContextComponent;
public static Key<ConfigurationContext> SHARED_CONTEXT = Key.create("SHARED_CONTEXT");
private List<RuntimeConfigurationProducer> myPreferredProducers;
private List<ConfigurationFromContext> myConfigurationsFromContext;
public static ConfigurationContext getFromContext(DataContext dataContext) {
final ConfigurationContext context = new ConfigurationContext(dataContext);
final DataManager dataManager = DataManager.getInstance();
ConfigurationContext sharedContext = dataManager.loadFromDataContext(dataContext, SHARED_CONTEXT);
if (sharedContext == null ||
sharedContext.getLocation() == null ||
context.getLocation() == null ||
!Comparing.equal(sharedContext.getLocation().getPsiElement(), context.getLocation().getPsiElement())) {
sharedContext = context;
dataManager.saveInDataContext(dataContext, SHARED_CONTEXT, sharedContext);
}
return sharedContext;
}
private ConfigurationContext(final DataContext dataContext) {
myRuntimeConfiguration = RunConfiguration.DATA_KEY.getData(dataContext);
myContextComponent = PlatformDataKeys.CONTEXT_COMPONENT.getData(dataContext);
myModule = LangDataKeys.MODULE.getData(dataContext);
@SuppressWarnings({"unchecked"})
final Location<PsiElement> location = (Location<PsiElement>)Location.DATA_KEY.getData(dataContext);
if (location != null) {
myLocation = location;
return;
}
final Project project = CommonDataKeys.PROJECT.getData(dataContext);
if (project == null) {
myLocation = null;
return;
}
final PsiElement element = getSelectedPsiElement(dataContext, project);
if (element == null) {
myLocation = null;
return;
}
myLocation = new PsiLocation<PsiElement>(project, myModule, element);
}
public ConfigurationContext(PsiElement element) {
myModule = ModuleUtilCore.findModuleForPsiElement(element);
myLocation = new PsiLocation<PsiElement>(element.getProject(), myModule, element);
myRuntimeConfiguration = null;
myContextComponent = null;
}
/**
* Returns the configuration created from this context.
*
* @return the configuration, or null if none of the producers were able to create a configuration from this context.
*/
@Nullable
public RunnerAndConfigurationSettings getConfiguration() {
if (myConfiguration == null) createConfiguration();
return myConfiguration;
}
private void createConfiguration() {
LOG.assertTrue(myConfiguration == null);
final Location location = getLocation();
myConfiguration = location != null && !DumbService.isDumb(location.getProject()) ?
PreferredProducerFind.createConfiguration(location, this) :
null;
}
public void setConfiguration(RunnerAndConfigurationSettings configuration) {
myConfiguration = configuration;
}
@Deprecated
@Nullable
public RunnerAndConfigurationSettings updateConfiguration(final RuntimeConfigurationProducer producer) {
myConfiguration = producer.getConfiguration();
return myConfiguration;
}
/**
* Returns the source code location for this context.
*
* @return the source code location, or null if no source code fragment is currently selected.
*/
@Nullable
public Location getLocation() {
return myLocation;
}
/**
* Returns the PSI element at caret for this context.
*
* @return the PSI element, or null if no source code fragment is currently selected.
*/
@Nullable
public PsiElement getPsiLocation() {
return myLocation != null ? myLocation.getPsiElement() : null;
}
/**
* Finds an existing run configuration matching the context.
*
* @return an existing configuration, or null if none was found.
*/
@Nullable
public RunnerAndConfigurationSettings findExisting() {
if (myExistingConfiguration != null) return myExistingConfiguration.get();
myExistingConfiguration = new Ref<RunnerAndConfigurationSettings>();
if (myLocation == null) {
return null;
}
final PsiElement psiElement = myLocation.getPsiElement();
if (!psiElement.isValid()) {
return null;
}
final List<RuntimeConfigurationProducer> producers = findPreferredProducers();
if (myRuntimeConfiguration != null) {
if (producers != null) {
for (RuntimeConfigurationProducer producer : producers) {
final RunnerAndConfigurationSettings configuration = producer.findExistingConfiguration(myLocation, this);
if (configuration != null && configuration.getConfiguration() == myRuntimeConfiguration) {
myExistingConfiguration.set(configuration);
}
}
}
for (RunConfigurationProducer producer : RunConfigurationProducer.getProducers(getProject())) {
RunnerAndConfigurationSettings configuration = producer.findExistingConfiguration(this);
if (configuration != null && configuration.getConfiguration() == myRuntimeConfiguration) {
myExistingConfiguration.set(configuration);
}
}
}
if (producers != null) {
for (RuntimeConfigurationProducer producer : producers) {
final RunnerAndConfigurationSettings configuration = producer.findExistingConfiguration(myLocation, this);
if (configuration != null) {
myExistingConfiguration.set(configuration);
}
}
}
for (RunConfigurationProducer producer : RunConfigurationProducer.getProducers(getProject())) {
RunnerAndConfigurationSettings configuration = producer.findExistingConfiguration(this);
if (configuration != null) {
myExistingConfiguration.set(configuration);
}
}
return myExistingConfiguration.get();
}
@Nullable
private static PsiElement getSelectedPsiElement(final DataContext dataContext, final Project project) {
PsiElement element = null;
final Editor editor = CommonDataKeys.EDITOR.getData(dataContext);
if (editor != null){
final PsiFile psiFile = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
if (psiFile != null) {
final int offset = editor.getCaretModel().getOffset();
element = psiFile.findElementAt(offset);
if (element == null && offset > 0 && offset == psiFile.getTextLength()) {
element = psiFile.findElementAt(offset-1);
}
}
}
if (element == null) {
final PsiElement[] elements = LangDataKeys.PSI_ELEMENT_ARRAY.getData(dataContext);
element = elements != null && elements.length > 0 ? elements[0] : null;
}
if (element == null) {
final VirtualFile[] files = CommonDataKeys.VIRTUAL_FILE_ARRAY.getData(dataContext);
if (files != null && files.length > 0) {
element = PsiManager.getInstance(project).findFile(files[0]);
}
}
return element;
}
public RunManager getRunManager() {
return RunManager.getInstance(getProject());
}
public Project getProject() {
return myLocation.getProject();
}
public Module getModule() {
return myModule;
}
public DataContext getDataContext() {
return DataManager.getInstance().getDataContext(myContextComponent);
}
/**
* Returns original {@link RunConfiguration} from this context.
* For example, it could be a test framework run configuration that was launched and
* produced a test result tree on which a right-click action was then performed.
*
* @param type {@link ConfigurationType} instance to filter original runtime configuration by its type
* @return {@link RunConfiguration} instance, it could be null
*/
@Nullable
public RunConfiguration getOriginalConfiguration(@Nullable ConfigurationType type) {
if (type == null) {
return myRuntimeConfiguration;
}
if (myRuntimeConfiguration != null
&& ConfigurationTypeUtil.equals(myRuntimeConfiguration.getType(), type)) {
return myRuntimeConfiguration;
}
return null;
}
@Deprecated
@Nullable
public List<RuntimeConfigurationProducer> findPreferredProducers() {
if (myPreferredProducers == null) {
myPreferredProducers = PreferredProducerFind.findPreferredProducers(myLocation, this, true);
}
return myPreferredProducers;
}
@Nullable
public List<ConfigurationFromContext> getConfigurationsFromContext() {
if (myConfigurationsFromContext == null) {
myConfigurationsFromContext = PreferredProducerFind.getConfigurationsFromContext(myLocation, this, true);
}
return myConfigurationsFromContext;
}
}
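// Usage sketch (illustrative only; "dataContext" is an assumed DataContext from an action event):
//
//   ConfigurationContext context = ConfigurationContext.getFromContext(dataContext);
//   RunnerAndConfigurationSettings settings = context.findExisting();
//   if (settings == null) {
//     settings = context.getConfiguration(); // may still be null if no producer applies
//   }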
|
|
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
/**
* State and utility processing {@link HRegion#getClosestRowBefore(byte[], byte[])}.
* Like {@link ScanQueryMatcher} and {@link ScanDeleteTracker} but does not
* implement the {@link DeleteTracker} interface since state spans rows (There
* is no update nor reset method).
*/
@InterfaceAudience.Private
class GetClosestRowBeforeTracker {
private final KeyValue targetkey;
// Any cell w/ a ts older than this is expired.
private final long now;
private final long oldestUnexpiredTs;
private Cell candidate = null;
private final CellComparator cellComparator;
// Flag for whether we're doing getclosest on a metaregion.
private final boolean metaregion;
// Offset and length into targetkey demarking table name (if in a metaregion).
private final int rowoffset;
private final int tablenamePlusDelimiterLength;
// Deletes keyed by row. Comparator compares on row portion of KeyValue only.
private final NavigableMap<Cell, NavigableSet<Cell>> deletes;
/**
* @param c
* @param kv Presume first on row: i.e. empty column, maximum timestamp and
* a type of Type.Maximum
* @param ttl Time to live in ms for this Store
* @param metaregion True if this is hbase:meta or -ROOT- region.
*/
GetClosestRowBeforeTracker(final CellComparator c, final KeyValue kv,
final long ttl, final boolean metaregion) {
super();
this.metaregion = metaregion;
this.targetkey = kv;
// If we are in a metaregion, then our table name is the prefix on the
// targetkey.
this.rowoffset = kv.getRowOffset();
int l = -1;
if (metaregion) {
l = Bytes.searchDelimiterIndex(kv.getRowArray(), rowoffset, kv.getRowLength(),
HConstants.DELIMITER) - this.rowoffset;
}
this.tablenamePlusDelimiterLength = metaregion? l + 1: -1;
this.now = System.currentTimeMillis();
this.oldestUnexpiredTs = now - ttl;
this.cellComparator = c;
this.deletes = new TreeMap<Cell, NavigableSet<Cell>>(new CellComparator.RowComparator());
}
/*
* Add the specified KeyValue to the list of deletes.
* @param kv
*/
private void addDelete(final Cell kv) {
NavigableSet<Cell> rowdeletes = this.deletes.get(kv);
if (rowdeletes == null) {
rowdeletes = new TreeSet<Cell>(this.cellComparator);
this.deletes.put(kv, rowdeletes);
}
rowdeletes.add(kv);
}
/*
* Adds the candidate if it is nearer the target than the previous candidate.
* @param kv the candidate cell
* @return True if the candidate was updated.
*/
private boolean addCandidate(final Cell kv) {
if (!isDeleted(kv) && isBetterCandidate(kv)) {
this.candidate = kv;
return true;
}
return false;
}
boolean isBetterCandidate(final Cell contender) {
return this.candidate == null ||
(this.cellComparator.compareRows(this.candidate, contender) < 0 &&
this.cellComparator.compareRows(contender, this.targetkey) <= 0);
}
/*
* Check if specified KeyValue buffer has been deleted by a previously
* seen delete.
* @param kv
* @return true if the specified KeyValue is deleted, false if not
*/
private boolean isDeleted(final Cell kv) {
if (this.deletes.isEmpty()) return false;
NavigableSet<Cell> rowdeletes = this.deletes.get(kv);
if (rowdeletes == null || rowdeletes.isEmpty()) return false;
return isDeleted(kv, rowdeletes);
}
/**
* Check if the specified KeyValue buffer has been deleted by a previously
* seen delete.
* @param kv
* @param ds
* @return True if the specified KeyValue is deleted, false if not
*/
public boolean isDeleted(final Cell kv, final NavigableSet<Cell> ds) {
if (deletes == null || deletes.isEmpty()) return false;
for (Cell d: ds) {
long kvts = kv.getTimestamp();
long dts = d.getTimestamp();
if (CellUtil.isDeleteFamily(d)) {
if (kvts <= dts) return true;
continue;
}
// Check column
int ret = CellComparator.compareQualifiers(kv, d);
if (ret <= -1) {
// This delete is for an earlier column.
continue;
} else if (ret >= 1) {
// Beyond this kv.
break;
}
// Check Timestamp
if (kvts > dts) return false;
// Check Type
switch (KeyValue.Type.codeToType(d.getTypeByte())) {
case Delete: return kvts == dts;
case DeleteColumn: return true;
default: continue;
}
}
return false;
}
/**
* @param cell
* @return true if the cell is expired
*/
public boolean isExpired(final Cell cell) {
return cell.getTimestamp() < this.oldestUnexpiredTs ||
HStore.isCellTTLExpired(cell, this.oldestUnexpiredTs, this.now);
}
/*
* Handle keys whose values hold deletes.
* Add to the set of deletes and then if the candidate keys contain any that
* might match, then check for a match and remove it. Implies candidates
* is made with a Comparator that ignores key type.
* @param kv
* @return True if the current candidate was removed.
*/
boolean handleDeletes(final Cell kv) {
addDelete(kv);
boolean deleted = false;
if (!hasCandidate()) return deleted;
if (isDeleted(this.candidate)) {
this.candidate = null;
deleted = true;
}
return deleted;
}
/**
* Do right thing with passed key, add to deletes or add to candidates.
* @param kv
* @return True if we added a candidate
*/
boolean handle(final Cell kv) {
if (CellUtil.isDelete(kv)) {
handleDeletes(kv);
return false;
}
return addCandidate(kv);
}
/**
* @return True if has candidate
*/
public boolean hasCandidate() {
return this.candidate != null;
}
/**
* @return Best candidate or null.
*/
public Cell getCandidate() {
return this.candidate;
}
public KeyValue getTargetKey() {
return this.targetkey;
}
/**
* @param kv Current kv
* @param firstOnRow the first-on-row kv of the current row.
* @return True if we went too far, past the target key.
*/
boolean isTooFar(final Cell kv, final Cell firstOnRow) {
return this.cellComparator.compareRows(kv, firstOnRow) > 0;
}
boolean isTargetTable(final Cell kv) {
if (!metaregion) return true;
// Compare start of keys row. Compare including delimiter. Saves having
// to calculate where tablename ends in the candidate kv.
return Bytes.compareTo(this.targetkey.getRowArray(), this.rowoffset,
this.tablenamePlusDelimiterLength,
kv.getRowArray(), kv.getRowOffset(), this.tablenamePlusDelimiterLength) == 0;
}
}
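// Usage sketch (illustrative only; "comparator", "firstOnTargetRow", "ttl" and "cells"
// are assumed inputs supplied by the caller):
//
//   GetClosestRowBeforeTracker tracker =
//       new GetClosestRowBeforeTracker(comparator, firstOnTargetRow, ttl, false);
//   for (Cell cell : cells) {
//     if (tracker.isExpired(cell)) continue;
//     tracker.handle(cell); // routes deletes vs. candidates internally
//   }
//   Cell best = tracker.hasCandidate() ? tracker.getCandidate() : null;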
|
|
/*
* #%L
* GwtMaterial
* %%
* Copyright (C) 2015 - 2017 GwtMaterialDesign
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package gwt.material.design.client.base;
import com.google.gwt.dom.client.Style;
import gwt.material.design.client.async.AsyncWidgetCallback;
import gwt.material.design.client.async.IsAsyncWidget;
import gwt.material.design.client.async.loader.AsyncDisplayLoader;
import gwt.material.design.client.async.loader.DefaultButtonLoader;
import gwt.material.design.client.async.mixin.AsyncWidgetMixin;
import gwt.material.design.client.constants.*;
import gwt.material.design.client.ui.MaterialIcon;
/**
* @author Ben Dol
*/
public abstract class AbstractIconButton extends AbstractButton implements HasIcon, IsAsyncWidget<AbstractIconButton, String> {
protected AsyncWidgetMixin<AbstractIconButton, String> asyncWidgetMixin;
protected MaterialIcon icon = new MaterialIcon();
public AbstractIconButton() {
super();
setAsyncDisplayLoader(new DefaultButtonLoader(this));
setIconPosition(IconPosition.LEFT);
}
public AbstractIconButton(ButtonType type, String text, MaterialIcon icon) {
this(type, text);
this.icon = icon;
ensureIconAttached();
}
public AbstractIconButton(ButtonType type, String text) {
this();
setType(type);
setText(text);
setIconPosition(IconPosition.LEFT);
}
public AbstractIconButton(IconType iconType) {
this();
setIconType(iconType);
setIconPosition(IconPosition.LEFT);
}
public AbstractIconButton(ButtonType type) {
this();
setType(type);
setIconPosition(IconPosition.LEFT);
}
public AbstractIconButton(String... initialClass) {
this();
setInitialClasses(initialClass);
}
@Override
protected void onLoad() {
super.onLoad();
registerHandler(addClickHandler(event -> {
if (isAsynchronous()) {
load(getAsyncCallback());
}
}));
}
@Override
public MaterialIcon getIcon() {
return icon;
}
@Override
public void setIconType(IconType iconType) {
icon.setIconType(iconType);
ensureIconAttached();
}
@Override
public void setIconPosition(IconPosition position) {
icon.setIconPosition(position);
}
@Override
public void setIconSize(IconSize size) {
icon.setIconSize(size);
}
@Override
public void setIconFontSize(double size, Style.Unit unit) {
icon.setIconFontSize(size, unit);
}
@Override
public void setIconColor(Color iconColor) {
icon.setIconColor(iconColor);
}
@Override
public Color getIconColor() {
return icon.getIconColor();
}
@Override
public void setIconPrefix(boolean prefix) {
icon.setIconPrefix(prefix);
}
@Override
public boolean isIconPrefix() {
return icon.isIconPrefix();
}
@Override
public void setCustomIconType(String iconType) {
icon.setCustomIconType(iconType);
ensureIconAttached();
}
@Override
public String getCustomIconType() {
return icon.getCustomIconType();
}
/**
* Ensure the icon is attached in slot 0.
*/
protected void ensureIconAttached() {
if (icon != null && !icon.isAttached()) {
insert(icon, 0);
}
}
@Override
public void setAsynchronous(boolean asynchronous) {
getAsyncWidgetMixin().setAsynchronous(asynchronous);
}
@Override
public boolean isAsynchronous() {
return getAsyncWidgetMixin().isAsynchronous();
}
@Override
public void load(AsyncWidgetCallback<AbstractIconButton, String> asyncCallback) {
getAsyncWidgetMixin().load(asyncCallback);
}
@Override
public void setLoaded(boolean loaded) {
getAsyncWidgetMixin().setLoaded(loaded);
}
@Override
public boolean isLoaded() {
return getAsyncWidgetMixin().isLoaded();
}
@Override
public void setAsyncCallback(AsyncWidgetCallback<AbstractIconButton, String> asyncCallback) {
getAsyncWidgetMixin().setAsyncCallback(asyncCallback);
}
@Override
public AsyncWidgetCallback<AbstractIconButton, String> getAsyncCallback() {
return getAsyncWidgetMixin().getAsyncCallback();
}
@Override
public void setAsyncDisplayLoader(AsyncDisplayLoader displayLoader) {
getAsyncWidgetMixin().setAsyncDisplayLoader(displayLoader);
}
@Override
public AsyncDisplayLoader getAsyncDisplayLoader() {
return getAsyncWidgetMixin().getAsyncDisplayLoader();
}
protected AsyncWidgetMixin<AbstractIconButton, String> getAsyncWidgetMixin() {
if (asyncWidgetMixin == null) {
asyncWidgetMixin = new AsyncWidgetMixin<>(this);
}
return asyncWidgetMixin;
}
}
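// Usage sketch (illustrative only; "button" is assumed to be an instance of a concrete
// subclass, and the callback body is elided):
//
//   button.setAsynchronous(true);
//   button.setAsyncCallback(new AsyncWidgetCallback<AbstractIconButton, String>() { ... });
//   // a click now calls load(getAsyncCallback()); the configured AsyncDisplayLoader
//   // (DefaultButtonLoader by default) handles the loading display.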
|
|
/**
*============================================================================
* Copyright The Ohio State University Research Foundation, The University of Chicago -
* Argonne National Laboratory, Emory University, SemanticBits LLC, and
* Ekagra Software Technologies Ltd.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cagrid-core/LICENSE.txt for details.
*============================================================================
**/
package gov.nih.nci.cagrid.data.codegen;
import gov.nih.nci.cagrid.data.ExtensionDataUtils;
import gov.nih.nci.cagrid.data.MetadataConstants;
import gov.nih.nci.cagrid.data.codegen.templates.StubCQLQueryProcessorTemplate;
import gov.nih.nci.cagrid.data.extension.Data;
import gov.nih.nci.cagrid.data.extension.ModelInformation;
import gov.nih.nci.cagrid.data.extension.ModelSourceType;
import gov.nih.nci.cagrid.data.extension.ServiceFeatures;
import gov.nih.nci.cagrid.data.style.ServiceStyleContainer;
import gov.nih.nci.cagrid.data.style.ServiceStyleLoader;
import gov.nih.nci.cagrid.data.style.StyleCodegenPreProcessor;
import gov.nih.nci.cagrid.introduce.IntroduceConstants;
import gov.nih.nci.cagrid.introduce.beans.extension.ExtensionTypeExtensionData;
import gov.nih.nci.cagrid.introduce.beans.extension.ServiceExtensionDescriptionType;
import gov.nih.nci.cagrid.introduce.beans.resource.ResourcePropertyType;
import gov.nih.nci.cagrid.introduce.beans.service.ServiceType;
import gov.nih.nci.cagrid.introduce.common.CommonTools;
import gov.nih.nci.cagrid.introduce.common.ServiceInformation;
import gov.nih.nci.cagrid.introduce.extension.CodegenExtensionException;
import gov.nih.nci.cagrid.introduce.extension.CodegenExtensionPreProcessor;
import gov.nih.nci.cagrid.introduce.extension.ExtensionTools;
import gov.nih.nci.cagrid.metadata.MetadataUtils;
import gov.nih.nci.cagrid.metadata.dataservice.DomainModel;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* DataServiceCodegenPreProcessor
* Preprocessor for data service codegen
* operations.
*
* @author <A HREF="MAILTO:ervin@bmi.osu.edu">David W. Ervin</A>
* @created May 11, 2006
* @version $Id: DataServiceCodegenPreProcessor.java,v 1.7 2009-05-29 19:57:47 dervin Exp $
*/
public class DataServiceCodegenPreProcessor implements CodegenExtensionPreProcessor {
private static final Log LOG = LogFactory.getLog(DataServiceCodegenPreProcessor.class);
public void preCodegen(ServiceExtensionDescriptionType desc, ServiceInformation info)
throws CodegenExtensionException {
// check for and potentially create the stub query processor java file
if (!stubQueryProcessorExists(info)) {
createStubQueryProcessor(info);
}
addDomainModelResourceProperty(desc, info);
addQueryLanguageSupportResourceProperty(info);
addInstanceCountResourceProperty(info);
// execute the service style's pre codegen processor
ExtensionTypeExtensionData extData = ExtensionTools.getExtensionData(desc, info);
Data data = null;
try {
data = ExtensionDataUtils.getExtensionData(extData);
} catch (Exception ex) {
throw new CodegenExtensionException("Error getting extension data: " + ex.getMessage(), ex);
}
ServiceFeatures features = data.getServiceFeatures();
if (features != null && features.getServiceStyle() != null) {
try {
ServiceStyleContainer container = ServiceStyleLoader.getStyle(features.getServiceStyle().getName());
if (container == null) {
throw new CodegenExtensionException(
"Could not load service style " + features.getServiceStyle().getName());
}
StyleCodegenPreProcessor preProcessor = container.loadCodegenPreProcessor();
if (preProcessor != null) {
preProcessor.codegenPreProcessStyle(desc, info);
}
} catch (Exception ex) {
throw new CodegenExtensionException(
"Error executing style codegen post processor: " + ex.getMessage(), ex);
}
}
}
private boolean stubQueryProcessorExists(ServiceInformation info) {
String stubName = ExtensionDataUtils.getQueryProcessorStubClassName(info);
File stubJavaFile = new File(info.getBaseDirectory().getAbsolutePath()
+ File.separator + "src" + File.separator
+ stubName.replace('.', File.separatorChar) + ".java");
return stubJavaFile.exists();
}
private void createStubQueryProcessor(ServiceInformation info) throws CodegenExtensionException {
String stubName = ExtensionDataUtils.getQueryProcessorStubClassName(info);
File stubJavaFile = new File(info.getBaseDirectory().getAbsolutePath()
+ File.separator + "src" + File.separator
+ stubName.replace('.', File.separatorChar) + ".java");
stubJavaFile.getParentFile().mkdirs();
StubCQLQueryProcessorTemplate stubTemplate = new StubCQLQueryProcessorTemplate();
String stubJavaCode = stubTemplate.generate(info);
try {
FileWriter writer = new FileWriter(stubJavaFile);
writer.write(stubJavaCode);
writer.close();
} catch (IOException ex) {
throw new CodegenExtensionException("Error creating stub query processor: "
+ ex.getMessage(), ex);
}
}
private ModelInformation getModelInformation(
ServiceExtensionDescriptionType desc, ServiceInformation info) throws Exception {
ExtensionTypeExtensionData extData = ExtensionTools.getExtensionData(desc, info);
Data data = ExtensionDataUtils.getExtensionData(extData);
ModelInformation modelInfo = data.getModelInformation();
if (modelInfo == null) {
LOG.warn("NO MODEL INFORMATION FOUND, USING DEFAULTS");
modelInfo = new ModelInformation();
modelInfo.setSource(ModelSourceType.none);
data.setModelInformation(modelInfo);
ExtensionDataUtils.storeExtensionData(extData, data);
}
return modelInfo;
}
private void addDomainModelResourceProperty(ServiceExtensionDescriptionType desc, ServiceInformation info)
throws CodegenExtensionException {
// determine the name for the domain model document
String localDomainModelFilename = getDestinationDomainModelFilename(info);
// find the service's etc directory, where the domain model goes on the file system
String domainModelFile = new File(info.getBaseDirectory(),
"etc" + File.separator + localDomainModelFilename).getAbsolutePath();
// get the model information
ModelInformation modelInfo = null;
try {
modelInfo = getModelInformation(desc, info);
} catch (Exception ex) {
throw new CodegenExtensionException("Error loading Model Information from extension data", ex);
}
// get the resource property for the domain model
ResourcePropertyType dmResourceProp = getDomainModelResourceProp(info);
LOG.debug("Domain model source determined to be " + modelInfo.getSource());
if (ModelSourceType.preBuilt.equals(modelInfo.getSource())) {
// the model is already in the service's etc dir with the name specified
// in the resource property. Make sure the resource property has
// the filename and populate from file flag set
dmResourceProp.setPopulateFromFile(true);
dmResourceProp.setFileLocation(localDomainModelFilename);
} else if (ModelSourceType.mms.equals(modelInfo.getSource())) {
// the domain model is to be generated from the MMS
LOG.info("Generating domain model from MMS");
// set the domain model file name
dmResourceProp.setFileLocation(localDomainModelFilename);
// generate the domain model document
generateDomainModel(modelInfo, info, domainModelFile);
}
// if the domain model XML file doesn't exist, don't try to
// populate the domain model metadata on service startup
File dmFile = new File(domainModelFile);
dmResourceProp.setPopulateFromFile(dmFile.exists());
}
/**
* Gets the local part of the filename from which the Domain Model resource
* property (metadata) will be populated at runtime. This value is configured
* by the user when selecting a pre-built domain model in the data service
* creation GUI in Introduce, or the default (domainModel.xml) value
* if a domain model is to be generated from the caDSR
*
* @param info
* The service information
* @return
* The local domain model xml file name
*/
private String getDestinationDomainModelFilename(ServiceInformation info) {
ResourcePropertyType domainModelResourceProperty = getDomainModelResourceProp(info);
String filename = domainModelResourceProperty.getFileLocation();
if (filename == null || filename.trim().length() == 0) {
filename = "domainModel.xml";
}
return filename;
}
private void generateDomainModel(ModelInformation modelInfo,
ServiceInformation info, String domainModelFile) throws CodegenExtensionException {
if (modelInfo != null) {
DomainModel model = null;
try {
model = DomainModelCreationUtil.createDomainModel(modelInfo);
} catch (Exception ex) {
throw new CodegenExtensionException("Error creating domain model: " + ex.getMessage(), ex);
}
System.out.println("Created data service Domain Model");
LOG.info("Created data service Domain Model");
// get the data service's description
ServiceType dataService = null;
String serviceName = info.getIntroduceServiceProperties().getProperty(
IntroduceConstants.INTRODUCE_SKELETON_SERVICE_NAME);
ServiceType[] services = info.getServices().getService();
for (int i = 0; i < services.length; i++) {
if (services[i].getName().equals(serviceName)) {
dataService = services[i];
break;
}
}
if (dataService == null) {
// this REALLY should never happen...
throw new CodegenExtensionException("No data service found in service information!!");
}
LOG.debug("Serializing domain model to file " + domainModelFile);
try {
FileWriter domainModelFileWriter = new FileWriter(domainModelFile);
MetadataUtils.serializeDomainModel(model, domainModelFileWriter);
domainModelFileWriter.flush();
domainModelFileWriter.close();
LOG.debug("Serialized domain model to file " + domainModelFile);
} catch (Exception ex) {
throw new CodegenExtensionException("Error serializing the domain model to disk: "
+ ex.getMessage(), ex);
}
}
}
private ResourcePropertyType getDomainModelResourceProp(ServiceInformation info) {
ServiceType baseService = info.getServices().getService(0);
ResourcePropertyType[] typedProps = CommonTools.getResourcePropertiesOfType(
info.getServices().getService(0), MetadataConstants.DOMAIN_MODEL_QNAME);
if (typedProps == null || typedProps.length == 0) {
ResourcePropertyType dmProp = new ResourcePropertyType();
dmProp.setQName(MetadataConstants.DOMAIN_MODEL_QNAME);
dmProp.setRegister(true);
CommonTools.addResourcePropety(baseService, dmProp);
LOG.debug("Created new resource property for domain model");
return dmProp;
} else {
LOG.debug("Found existing domain model resource property");
return typedProps[0];
}
}
private void addQueryLanguageSupportResourceProperty(ServiceInformation info) {
ServiceType baseService = info.getServices().getService(0);
if (CommonTools.getResourcePropertiesOfType(baseService, MetadataConstants.QUERY_LANGUAGE_SUPPORT_QNAME).length == 0) {
ResourcePropertyType supportRp = new ResourcePropertyType();
supportRp.setRegister(true);
supportRp.setDescription(MetadataConstants.QUERY_LANGUAGE_SUPPORT_DESCRIPTION);
supportRp.setQName(MetadataConstants.QUERY_LANGUAGE_SUPPORT_QNAME);
CommonTools.addResourcePropety(baseService, supportRp);
}
}
private void addInstanceCountResourceProperty(ServiceInformation info) {
ServiceType baseService = info.getServices().getService(0);
if (CommonTools.getResourcePropertiesOfType(baseService, MetadataConstants.DATA_INSTANCE_QNAME).length == 0) {
ResourcePropertyType countRp = new ResourcePropertyType();
countRp.setRegister(true);
countRp.setDescription(MetadataConstants.DATA_INSTANCE_DESCRIPTION);
countRp.setQName(MetadataConstants.DATA_INSTANCE_QNAME);
CommonTools.addResourcePropety(baseService, countRp);
}
}
}
|
|
package com.planet_ink.coffee_mud.Commands;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
import java.util.concurrent.TimeUnit;
/*
Copyright 2000-2014 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings("rawtypes")
public class Affect extends StdCommand
{
private final String[] access=_i(new String[]{"AFFECTS","AFFECT","AFF","AF"});
@Override public String[] getAccessWords(){return access;}
// private final static Class[][] internalParameters=new Class[][]{{Physical.class}};
public String getMOBState(final MOB mob)
{
final StringBuffer msg=new StringBuffer("");
if((mob.playerStats()!=null)&&(mob.soulMate()==null)&&(mob.playerStats().getHygiene()>=PlayerStats.HYGIENE_DELIMIT))
{
if(CMSecurity.isASysOp(mob))
mob.playerStats().setHygiene(0);
else
{
final int x=(int)(mob.playerStats().getHygiene()/PlayerStats.HYGIENE_DELIMIT);
if(x<=1) msg.append(_("^!You could use a bath.^?\n\r"));
else
if(x<=3) msg.append(_("^!You could really use a bath.^?\n\r"));
else
if(x<=7) msg.append(_("^!You need to bathe, soon.^?\n\r"));
else
if(x<15) msg.append(_("^!You desperately need to bathe.^?\n\r"));
else msg.append(_("^!Your stench is horrendous! Bathe dammit!^?\n\r"));
}
}
if(CMLib.flags().isBound(mob))
msg.append(_("^!You are bound.^?\n\r"));
// don't do falling -- the flag doubles for drowning/treading water anyway.
//if(CMLib.flags().isFalling(mob))
// msg.append(_("^!You are falling!!!^?\n\r"));
//else
if(CMLib.flags().isSleeping(mob))
msg.append(_("^!You are sleeping.^?\n\r"));
else
if(CMLib.flags().isSitting(mob))
msg.append(_("^!You are resting.^?\n\r"));
else
if(CMLib.flags().isSwimmingInWater(mob))
msg.append(_("^!You are swimming.^?\n\r"));
else
if(CMLib.flags().isClimbing(mob))
msg.append(_("^!You are climbing.^?\n\r"));
else
if(CMLib.flags().isFlying(mob))
msg.append(_("^!You are flying.^?\n\r"));
else
msg.append(_("^!You are standing.^?\n\r"));
if(mob.riding()!=null)
msg.append(_("^!You are @x1 @x2.^?\n\r",mob.riding().stateString(mob),mob.riding().name()));
if(CMath.bset(mob.getBitmap(),MOB.ATT_PLAYERKILL))
msg.append(_("^!Your playerkill flag is on.^?\n\r"));
if(CMLib.flags().isInvisible(mob))
msg.append(_("^!You are invisible.^?\n\r"));
if(CMLib.flags().isHidden(mob))
msg.append(_("^!You are hidden.^?\n\r"));// ("+CMLib.flags().getHideScore(mob)+").^?\n\r");
if(CMLib.flags().isSneaking(mob))
msg.append(_("^!You are sneaking.^?\n\r"));
if(CMath.bset(mob.getBitmap(),MOB.ATT_QUIET))
msg.append(_("^!You are in QUIET mode.^?\n\r"));
if(mob.curState().getFatigue()>CharState.FATIGUED_MILLIS)
msg.append(_("^!You are fatigued.^?\n\r"));
if(mob.curState().getHunger()<1)
msg.append(_("^!You are hungry.^?\n\r"));
if(mob.curState().getThirst()<1)
msg.append(_("^!You are thirsty.^?\n\r"));
return msg.toString();
}
public String getAffects(Session S, Physical P, boolean xtra, boolean autosAlso)
{
final StringBuffer msg=new StringBuffer("");
final int NUM_COLS=2;
final int COL_LEN=ListingLibrary.ColFixer.fixColWidth(36.0,S);
int colnum=NUM_COLS;
final MOB mob=(S!=null)?S.mob():null;
for(final Enumeration<Ability> a=P.effects();a.hasMoreElements();)
{
final Ability A=a.nextElement();
if(A==null) continue;
String disp=A.displayText();
if(autosAlso && disp.length()==0)
disp=A.ID()+"+"+A.proficiency();
if(disp.length()>0)
{
if(disp.startsWith("(")&&disp.endsWith(")"))
{
long tr=A.expirationDate();
if(A.invoker()!=null) tr=tr-(System.currentTimeMillis()-A.invoker().lastTickedDateTime());
if(tr<Ability.TICKS_ALMOST_FOREVER)
disp+=" ^.^N"+CMLib.time().date2EllapsedTime(tr, TimeUnit.SECONDS, true);
}
if(xtra)
disp+=", BY="+((A.invoker()==null)?"N/A":A.invoker().Name());
String[] disps={disp};
if(CMStrings.lengthMinusColors(disp)>(COL_LEN*NUM_COLS))
{
String s=CMLib.coffeeFilter().fullOutFilter(S,mob,null,null,null,disp,true);
s=CMStrings.replaceAll(s,"\r","");
final List<String> V=CMParms.parseAny(s,'\n',true);
disps=new String[V.size()];
for(int d=0;d<V.size();d++)
disps[d]=V.get(d);
colnum=NUM_COLS;
}
for (final String disp2 : disps)
{
disp=disp2;
if(((++colnum)>=NUM_COLS)||(CMStrings.lengthMinusColors(disp)>COL_LEN))
{
msg.append("\n\r");
colnum=0;
}
msg.append("^S"+CMStrings.padRightPreserve("^<HELPNAME NAME='"+CMStrings.removeColors(A.name())+"'^>"+disp+"^</HELPNAME^>",COL_LEN));
if(CMStrings.lengthMinusColors(disp)>COL_LEN) colnum=99;
}
}
}
msg.append("^N\n\r");
return msg.toString();
}
@Override
public boolean execute(MOB mob, Vector commands, int metaFlags)
throws java.io.IOException
{
final Session S=mob.session();
if(S!=null)
{
if(CMSecurity.isAllowed(mob, mob.location(),CMSecurity.SecFlag.CMDMOBS))
{
final String name=CMParms.combine(commands,1);
if(name.length()>0)
{
Physical P=null;
if((name.equalsIgnoreCase("here")||(name.equalsIgnoreCase("room"))))
P=CMLib.map().roomLocation(mob);
else
if((name.equalsIgnoreCase("area")||(name.equalsIgnoreCase("zone"))))
P=CMLib.map().areaLocation(mob);
else
P=mob.location().fetchFromMOBRoomFavorsItems(mob,null,name,Wearable.FILTER_ANY);
if(P==null)
S.colorOnlyPrint(_("You don't see @x1 here.\n\r^N",name));
else
{
if(S==mob.session())
S.colorOnlyPrint(_(" \n\r^!@x1 is affected by: ^?",P.name()));
final String msg=getAffects(S,P,true,CMath.bset(mob.getBitmap(),MOB.ATT_SYSOPMSGS));
if(msg.length()<5)
S.colorOnlyPrintln(_("Nothing!\n\r^N"));
else
S.colorOnlyPrintln(msg);
}
return false;
}
}
if(S==mob.session())
S.colorOnlyPrintln("\n\r"+getMOBState(mob)+"\n\r");
if(S==mob.session())
S.colorOnlyPrint(_("^!You are affected by: ^?"));
final String msg=getAffects(S,mob,CMath.bset(mob.getBitmap(),MOB.ATT_SYSOPMSGS),CMath.bset(mob.getBitmap(),MOB.ATT_SYSOPMSGS));
if(msg.length()<5)
S.colorOnlyPrintln(_("Nothing!\n\r^N"));
else
S.colorOnlyPrintln(msg);
}
return false;
}
@Override public boolean canBeOrdered(){return true;}
@Override
public Object executeInternal(MOB mob, int metaFlags, Object... args) throws java.io.IOException
{
//if(!super.checkArguments(internalParameters, args)) return Boolean.FALSE.toString();
Physical target=mob;
Session S=(mob!=null)?mob.session():null;
for(final Object o : args)
{
if(o instanceof Physical)
{
if(o instanceof MOB)
S=((MOB)o).session();
target=(Physical)o;
}
}
return getAffects(S,target,false,false);
}
}
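// Usage sketch (illustrative only), based on execute() above: in-game a player can type
//
//   affect            - shows the player's own state and effects
//   affect here       - (requires CMDMOBS) shows effects on the current room
//   affect <target>   - (requires CMDMOBS) shows effects on a named mob or item in the room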
|
|
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.deeplearning4j.models.word2vec;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.LineIterator;
import org.deeplearning4j.text.sentenceiterator.CollectionSentenceIterator;
import org.junit.Rule;
import org.junit.rules.Timeout;
import org.nd4j.shade.guava.primitives.Doubles;
import org.nd4j.shade.guava.primitives.Ints;
import lombok.val;
import net.didion.jwnl.data.Word;
import org.apache.commons.io.FileUtils;
import org.deeplearning4j.BaseDL4JTest;
import org.deeplearning4j.models.embeddings.inmemory.InMemoryLookupTable;
import org.deeplearning4j.models.embeddings.loader.VectorsConfiguration;
import org.deeplearning4j.models.word2vec.wordstore.inmemory.AbstractCache;
import org.deeplearning4j.nn.modelimport.keras.utils.KerasModelUtils;
import org.nd4j.linalg.io.ClassPathResource;
import org.deeplearning4j.models.embeddings.learning.impl.elements.CBOW;
import org.deeplearning4j.models.embeddings.learning.impl.elements.SkipGram;
import org.deeplearning4j.models.embeddings.loader.WordVectorSerializer;
import org.deeplearning4j.models.embeddings.reader.impl.BasicModelUtils;
import org.deeplearning4j.models.embeddings.reader.impl.FlatModelUtils;
import org.deeplearning4j.models.embeddings.wordvectors.WordVectors;
import org.deeplearning4j.text.sentenceiterator.BasicLineIterator;
import org.deeplearning4j.text.sentenceiterator.SentenceIterator;
import org.deeplearning4j.text.sentenceiterator.UimaSentenceIterator;
import org.deeplearning4j.text.tokenization.tokenizer.preprocessor.CommonPreprocessor;
import org.deeplearning4j.text.tokenization.tokenizerfactory.DefaultTokenizerFactory;
import org.deeplearning4j.text.tokenization.tokenizerfactory.TokenizerFactory;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.ops.transforms.Transforms;
import org.nd4j.resources.Resources;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.*;
import static org.junit.Assert.*;
/**
* @author jeffreytang
*/
public class Word2VecTests extends BaseDL4JTest {
private static final Logger log = LoggerFactory.getLogger(Word2VecTests.class);
private File inputFile;
private File inputFile2;
private String pathToWriteto;
private WordVectors googleModel;
@Rule
public Timeout timeout = Timeout.seconds(300);
@Before
public void before() throws Exception {
File googleModelTextFile = new ClassPathResource("word2vecserialization/google_news_30.txt").getFile();
googleModel = WordVectorSerializer.readWord2VecModel(googleModelTextFile);
inputFile = Resources.asFile("big/raw_sentences.txt");
inputFile2 = Resources.asFile("big/raw_sentences_2.txt");
File ptwt = new File(System.getProperty("java.io.tmpdir"), "testing_word2vec_serialization.txt");
pathToWriteto = ptwt.getAbsolutePath();
FileUtils.deleteDirectory(new File("word2vec-index"));
}
@Test
public void testGoogleModelLoaded() throws Exception {
assertEquals(googleModel.vocab().numWords(), 30);
assertTrue(googleModel.hasWord("Morgan_Freeman"));
double[] wordVector = googleModel.getWordVector("Morgan_Freeman");
assertTrue(wordVector.length == 300);
assertEquals(Doubles.asList(wordVector).get(0), 0.044423, 1e-3);
}
@Test
public void testSimilarity() throws Exception {
testGoogleModelLoaded();
assertEquals(googleModel.similarity("Benkovic", "Boeremag_trialists"), 0.1204, 1e-2);
assertEquals(googleModel.similarity("Benkovic", "Gopie"), 0.3350, 1e-2);
assertEquals(googleModel.similarity("Benkovic", "Youku.com"), 0.0116, 1e-2);
}
@Test
public void testWordsNearest() throws Exception {
testGoogleModelLoaded();
List<Object> lst = Arrays.asList(googleModel.wordsNearest("Benkovic", 10).toArray());
assertTrue(lst.contains("Gopie"));
assertTrue(lst.contains("JIM_HOOK_Senior"));
/*
assertEquals(lst.get(0), "Gopie");
assertEquals(lst.get(1), "JIM_HOOK_Senior");
*/
}
@Test
public void testUIMAIterator() throws Exception {
SentenceIterator iter = UimaSentenceIterator.createWithPath(inputFile.getAbsolutePath());
assertEquals(iter.nextSentence(), "No , he says now .");
}
@Test
@Ignore // no adagrad these days
public void testWord2VecAdaGrad() throws Exception {
SentenceIterator iter = new BasicLineIterator(inputFile.getAbsolutePath());
TokenizerFactory t = new DefaultTokenizerFactory();
t.setTokenPreProcessor(new CommonPreprocessor());
Word2Vec vec = new Word2Vec.Builder().minWordFrequency(5).iterations(5).learningRate(0.025).layerSize(100)
.seed(42).batchSize(13500).sampling(0).negativeSample(0)
//.epochs(10)
.windowSize(5).modelUtils(new BasicModelUtils<VocabWord>()).useAdaGrad(false)
.useHierarchicSoftmax(true).iterate(iter).workers(4).tokenizerFactory(t).build();
vec.fit();
Collection<String> lst = vec.wordsNearest("day", 10);
log.info(Arrays.toString(lst.toArray()));
// assertEquals(10, lst.size());
double sim = vec.similarity("day", "night");
log.info("Day/night similarity: " + sim);
assertTrue(lst.contains("week"));
assertTrue(lst.contains("night"));
assertTrue(lst.contains("year"));
}
@Test
public void testWord2VecCBOW() throws Exception {
String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {
skipUnlessIntegrationTests(); //AB 2020/02/06 Skip CUDA except for integration tests due to very slow test speed - > 5 minutes on Titan X
}
SentenceIterator iter = new BasicLineIterator(inputFile.getAbsolutePath());
TokenizerFactory t = new DefaultTokenizerFactory();
t.setTokenPreProcessor(new CommonPreprocessor());
Word2Vec vec = new Word2Vec.Builder().minWordFrequency(1).iterations(1).learningRate(0.025).layerSize(150)
.seed(42).sampling(0).negativeSample(0).useHierarchicSoftmax(true).windowSize(5)
.modelUtils(new BasicModelUtils<VocabWord>()).useAdaGrad(false).iterate(iter).workers(4)
.tokenizerFactory(t).elementsLearningAlgorithm(new CBOW<VocabWord>()).build();
vec.fit();
Collection<String> lst = vec.wordsNearest("day", 10);
log.info(Arrays.toString(lst.toArray()));
// assertEquals(10, lst.size());
double sim = vec.similarity("day", "night");
log.info("Day/night similarity: " + sim);
assertTrue(lst.contains("week"));
assertTrue(lst.contains("night"));
assertTrue(lst.contains("year"));
assertTrue(sim > 0.65f);
}
@Test
public void testWord2VecMultiEpoch() throws Exception {
String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {
skipUnlessIntegrationTests(); //AB 2020/02/06 Skip CUDA except for integration tests due to very slow test speed - > 5 minutes on Titan X
}
SentenceIterator iter;
if(isIntegrationTests()){
iter = new BasicLineIterator(inputFile.getAbsolutePath());
} else {
iter = new CollectionSentenceIterator(firstNLines(inputFile, 50000));
}
TokenizerFactory t = new DefaultTokenizerFactory();
t.setTokenPreProcessor(new CommonPreprocessor());
Word2Vec vec = new Word2Vec.Builder().minWordFrequency(1).iterations(5).learningRate(0.025).layerSize(150)
.seed(42).sampling(0).negativeSample(0).useHierarchicSoftmax(true).windowSize(5).epochs(3)
.modelUtils(new BasicModelUtils<VocabWord>()).useAdaGrad(false).iterate(iter).workers(8)
.tokenizerFactory(t).elementsLearningAlgorithm(new CBOW<VocabWord>()).build();
vec.fit();
Collection<String> lst = vec.wordsNearest("day", 10);
log.info(Arrays.toString(lst.toArray()));
// assertEquals(10, lst.size());
double sim = vec.similarity("day", "night");
log.info("Day/night similarity: " + sim);
assertTrue(lst.contains("week"));
assertTrue(lst.contains("night"));
assertTrue(lst.contains("year"));
}
@Test
public void reproducibleResults_ForMultipleRuns() throws Exception {
String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {
skipUnlessIntegrationTests(); //AB 2020/02/06 Skip CUDA except for integration tests due to very slow test speed - > 5 minutes on Titan X
}
log.info("reproducibleResults_ForMultipleRuns");
val shakespear = new ClassPathResource("big/rnj.txt");
val basic = new ClassPathResource("big/rnj.txt");
SentenceIterator iter = new BasicLineIterator(inputFile);
TokenizerFactory t = new DefaultTokenizerFactory();
t.setTokenPreProcessor(new CommonPreprocessor());
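        // Note on the reproducibility setup below: both models use the same fixed seed, identical
        // hyperparameters and a single worker thread, so vocabulary order, Huffman points/codes and
        // the syn0 weight table are expected to come out identical across the two runs.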
Word2Vec vec1 = new Word2Vec.Builder().minWordFrequency(1).iterations(1).batchSize(8192).layerSize(100)
.stopWords(new ArrayList<String>()).seed(42).learningRate(0.025).minLearningRate(0.001)
.sampling(0).elementsLearningAlgorithm(new SkipGram<VocabWord>())
.epochs(1).windowSize(5).allowParallelTokenization(true)
.workers(1)
.useHierarchicSoftmax(true)
.modelUtils(new BasicModelUtils<VocabWord>()).iterate(iter).tokenizerFactory(t).build();
Word2Vec vec2 = new Word2Vec.Builder().minWordFrequency(1).iterations(1).batchSize(8192).layerSize(100)
.stopWords(new ArrayList<String>()).seed(42).learningRate(0.025).minLearningRate(0.001)
.sampling(0).elementsLearningAlgorithm(new SkipGram<VocabWord>())
.epochs(1).windowSize(5).allowParallelTokenization(true)
.workers(1)
.useHierarchicSoftmax(true)
.modelUtils(new BasicModelUtils<VocabWord>()).iterate(iter).tokenizerFactory(t).build();
vec1.fit();
iter.reset();
vec2.fit();
for (int e = 0; e < vec1.getVocab().numWords(); e++) {
val w1 = vec1.getVocab().elementAtIndex(e);
val w2 = vec2.getVocab().elementAtIndex(e);
assertNotNull(w1);
assertNotNull(w2);
assertEquals(w1.getLabel(), w2.getLabel());
assertArrayEquals("Failed for token [" + w1.getLabel() + "] at index [" + e + "]", Ints.toArray(w1.getPoints()), Ints.toArray(w2.getPoints()));
assertArrayEquals("Failed for token [" + w1.getLabel() + "] at index [" + e + "]", Ints.toArray(w1.getCodes()), Ints.toArray(w2.getCodes()));
}
val syn0_from_vec1 = ((InMemoryLookupTable<VocabWord>) vec1.getLookupTable()).getSyn0();
val syn0_from_vec2 = ((InMemoryLookupTable<VocabWord>) vec2.getLookupTable()).getSyn0();
assertEquals(syn0_from_vec1, syn0_from_vec2);
log.info("Day/night similarity: {}", vec1.similarity("day", "night"));
val result = vec1.wordsNearest("day", 10);
printWords("day", result, vec1);
}
@Test
public void testRunWord2Vec() throws Exception {
String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {
skipUnlessIntegrationTests(); //AB 2020/02/06 Skip CUDA except for integration tests due to very slow test speed - > 5 minutes on Titan X
}
// Strip white space before and after for each line
/*val shakespear = new ClassPathResource("big/rnj.txt");
SentenceIterator iter = new BasicLineIterator(shakespear.getFile());*/
SentenceIterator iter = new BasicLineIterator(inputFile.getAbsolutePath());
// Split on white spaces in the line to get words
TokenizerFactory t = new DefaultTokenizerFactory();
t.setTokenPreProcessor(new CommonPreprocessor());
Word2Vec vec = new Word2Vec.Builder().minWordFrequency(1).iterations(1).batchSize(8192).layerSize(100)
.stopWords(new ArrayList<String>()).seed(42).learningRate(0.025).minLearningRate(0.001)
.sampling(0).elementsLearningAlgorithm(new SkipGram<VocabWord>())
//.negativeSample(10)
.epochs(1).windowSize(5).allowParallelTokenization(true)
.workers(6)
.usePreciseMode(true)
.modelUtils(new BasicModelUtils<VocabWord>()).iterate(iter).tokenizerFactory(t).build();
assertEquals(new ArrayList<String>(), vec.getStopWords());
vec.fit();
File tempFile = File.createTempFile("temp", "temp");
tempFile.deleteOnExit();
WordVectorSerializer.writeFullModel(vec, tempFile.getAbsolutePath());
Collection<String> lst = vec.wordsNearest("day", 10);
//log.info(Arrays.toString(lst.toArray()));
printWords("day", lst, vec);
assertEquals(10, lst.size());
double sim = vec.similarity("day", "night");
log.info("Day/night similarity: " + sim);
assertTrue(sim < 1.0);
assertTrue(sim > 0.4);
assertTrue(lst.contains("week"));
assertTrue(lst.contains("night"));
assertTrue(lst.contains("year"));
assertFalse(lst.contains(null));
lst = vec.wordsNearest("day", 10);
//log.info(Arrays.toString(lst.toArray()));
printWords("day", lst, vec);
assertTrue(lst.contains("week"));
assertTrue(lst.contains("night"));
assertTrue(lst.contains("year"));
new File("cache.ser").delete();
ArrayList<String> labels = new ArrayList<>();
labels.add("day");
labels.add("night");
labels.add("week");
INDArray matrix = vec.getWordVectors(labels);
assertEquals(matrix.getRow(0, true), vec.getWordVectorMatrix("day"));
assertEquals(matrix.getRow(1, true), vec.getWordVectorMatrix("night"));
assertEquals(matrix.getRow(2, true), vec.getWordVectorMatrix("week"));
WordVectorSerializer.writeWordVectors(vec, pathToWriteto);
}
/**
* Adding test for cosine similarity, to track changes in Transforms.cosineSim()
*/
@Test
public void testCosineSim() {
double[] array1 = new double[] {1.01, 0.91, 0.81, 0.71};
double[] array2 = new double[] {1.01, 0.91, 0.81, 0.71};
double[] array3 = new double[] {1.0, 0.9, 0.8, 0.7};
double sim12 = Transforms.cosineSim(Nd4j.create(array1), Nd4j.create(array2));
double sim23 = Transforms.cosineSim(Nd4j.create(array2), Nd4j.create(array3));
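        // cosineSim(a, b) = dot(a, b) / (norm(a) * norm(b)); identical vectors give exactly 1.0, and the
        // slightly offset pair is still nearly parallel, which the 0.99 +/- 0.01 assertion reflects.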
log.info("Arrays 1/2 cosineSim: " + sim12);
log.info("Arrays 2/3 cosineSim: " + sim23);
log.info("Arrays 1/2 dot: " + Nd4j.getBlasWrapper().dot(Nd4j.create(array1), Nd4j.create(array2)));
log.info("Arrays 2/3 dot: " + Nd4j.getBlasWrapper().dot(Nd4j.create(array2), Nd4j.create(array3)));
assertEquals(1.0d, sim12, 0.01d);
assertEquals(0.99d, sim23, 0.01d);
}
@Test
public void testLoadingWordVectors() throws Exception {
String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {
skipUnlessIntegrationTests(); //AB 2020/02/06 Skip CUDA except for integration tests due to very slow test speed - > 5 minutes on Titan X
}
File modelFile = new File(pathToWriteto);
if (!modelFile.exists()) {
testRunWord2Vec();
}
WordVectors wordVectors = WordVectorSerializer.loadTxtVectors(modelFile);
Collection<String> lst = wordVectors.wordsNearest("day", 10);
System.out.println(Arrays.toString(lst.toArray()));
}
@Ignore
@Test
public void testWord2VecGoogleModelUptraining() throws Exception {
long time1 = System.currentTimeMillis();
Word2Vec vec = WordVectorSerializer.readWord2VecModel(
new File("C:\\Users\\raver\\Downloads\\GoogleNews-vectors-negative300.bin.gz"), false);
long time2 = System.currentTimeMillis();
log.info("Model loaded in {} msec", time2 - time1);
SentenceIterator iter = new BasicLineIterator(inputFile.getAbsolutePath());
// Split on white spaces in the line to get words
TokenizerFactory t = new DefaultTokenizerFactory();
t.setTokenPreProcessor(new CommonPreprocessor());
vec.setTokenizerFactory(t);
vec.setSentenceIterator(iter);
vec.getConfiguration().setUseHierarchicSoftmax(false);
vec.getConfiguration().setNegative(5.0);
vec.setElementsLearningAlgorithm(new CBOW<VocabWord>());
vec.fit();
}
@Test
public void testW2VnegativeOnRestore() throws Exception {
String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {
skipUnlessIntegrationTests(); //AB 2020/02/06 Skip CUDA except for integration tests due to very slow test speed - > 5 minutes on Titan X
}
// Strip white space before and after for each line
SentenceIterator iter;
if(isIntegrationTests()){
iter = new BasicLineIterator(inputFile.getAbsolutePath());
} else {
iter = new CollectionSentenceIterator(firstNLines(inputFile, 300));
}
// Split on white spaces in the line to get words
TokenizerFactory t = new DefaultTokenizerFactory();
t.setTokenPreProcessor(new CommonPreprocessor());
Word2Vec vec = new Word2Vec.Builder().minWordFrequency(1).iterations(3).batchSize(8192).layerSize(100)
.stopWords(new ArrayList<String>()).seed(42).learningRate(0.025).minLearningRate(0.001)
.sampling(0).elementsLearningAlgorithm(new SkipGram<VocabWord>()).negativeSample(10).epochs(1)
.windowSize(5).useHierarchicSoftmax(false).allowParallelTokenization(true)
.modelUtils(new FlatModelUtils<VocabWord>()).iterate(iter).tokenizerFactory(t).build();
assertEquals(false, vec.getConfiguration().isUseHierarchicSoftmax());
log.info("Fit 1");
vec.fit();
File tmpFile = File.createTempFile("temp", "file");
tmpFile.deleteOnExit();
WordVectorSerializer.writeWord2VecModel(vec, tmpFile);
iter.reset();
Word2Vec restoredVec = WordVectorSerializer.readWord2VecModel(tmpFile, true);
restoredVec.setTokenizerFactory(t);
restoredVec.setSentenceIterator(iter);
assertEquals(false, restoredVec.getConfiguration().isUseHierarchicSoftmax());
assertTrue(restoredVec.getModelUtils() instanceof FlatModelUtils);
assertTrue(restoredVec.getConfiguration().isAllowParallelTokenization());
log.info("Fit 2");
restoredVec.fit();
iter.reset();
restoredVec = WordVectorSerializer.readWord2VecModel(tmpFile, false);
restoredVec.setTokenizerFactory(t);
restoredVec.setSentenceIterator(iter);
assertEquals(false, restoredVec.getConfiguration().isUseHierarchicSoftmax());
assertTrue(restoredVec.getModelUtils() instanceof BasicModelUtils);
log.info("Fit 3");
restoredVec.fit();
}
@Test
public void testUnknown1() throws Exception {
String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {
skipUnlessIntegrationTests(); //AB 2020/02/06 Skip CUDA except for integration tests due to very slow test speed - > 5 minutes on Titan X
}
// Strip white space before and after for each line
SentenceIterator iter = new BasicLineIterator(inputFile.getAbsolutePath());
// Split on white spaces in the line to get words
TokenizerFactory t = new DefaultTokenizerFactory();
t.setTokenPreProcessor(new CommonPreprocessor());
Word2Vec vec = new Word2Vec.Builder().minWordFrequency(10).useUnknown(true)
.unknownElement(new VocabWord(1.0, "PEWPEW")).iterations(1).layerSize(100)
.stopWords(new ArrayList<String>()).seed(42).learningRate(0.025).minLearningRate(0.001)
.sampling(0).elementsLearningAlgorithm(new CBOW<VocabWord>()).epochs(1).windowSize(5)
.useHierarchicSoftmax(true).allowParallelTokenization(true)
.modelUtils(new FlatModelUtils<VocabWord>()).iterate(iter).tokenizerFactory(t).build();
vec.fit();
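        // With useUnknown(true) every out-of-vocabulary token is expected to resolve to the configured
        // unknown element ("PEWPEW"), both before and after serialization; the junk-word lookups at the
        // end of this test rely on exactly that behaviour.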
assertTrue(vec.hasWord("PEWPEW"));
assertTrue(vec.getVocab().containsWord("PEWPEW"));
INDArray unk = vec.getWordVectorMatrix("PEWPEW");
assertNotEquals(null, unk);
File tempFile = File.createTempFile("temp", "file");
tempFile.deleteOnExit();
WordVectorSerializer.writeWord2VecModel(vec, tempFile);
log.info("Original configuration: {}", vec.getConfiguration());
Word2Vec restored = WordVectorSerializer.readWord2VecModel(tempFile);
assertTrue(restored.hasWord("PEWPEW"));
assertTrue(restored.getVocab().containsWord("PEWPEW"));
INDArray unk_restored = restored.getWordVectorMatrix("PEWPEW");
assertEquals(unk, unk_restored);
// now we're getting some junk word
INDArray random = vec.getWordVectorMatrix("hhsd7d7sdnnmxc_SDsda");
INDArray randomRestored = restored.getWordVectorMatrix("hhsd7d7sdnnmxc_SDsda");
log.info("Restored configuration: {}", restored.getConfiguration());
assertEquals(unk, random);
assertEquals(unk, randomRestored);
}
@Test
public void orderIsCorrect_WhenParallelized() throws Exception {
// Strip white space before and after for each line
SentenceIterator iter;
if(isIntegrationTests()){
iter = new BasicLineIterator(inputFile.getAbsolutePath());
} else {
iter = new CollectionSentenceIterator(firstNLines(inputFile, 300));
}
// Split on white spaces in the line to get words
TokenizerFactory t = new DefaultTokenizerFactory();
t.setTokenPreProcessor(new CommonPreprocessor());
Word2Vec vec = new Word2Vec.Builder().minWordFrequency(1).iterations(3).batchSize(64).layerSize(100)
.stopWords(new ArrayList<String>()).seed(42).learningRate(0.025).minLearningRate(0.001)
.sampling(0).elementsLearningAlgorithm(new SkipGram<VocabWord>())
//.negativeSample(10)
.epochs(1).windowSize(5).allowParallelTokenization(true)
.workers(1)
.modelUtils(new BasicModelUtils<VocabWord>()).iterate(iter).tokenizerFactory(t).build();
vec.fit();
System.out.println(vec.getVocab().numWords());
val words = vec.getVocab().words();
assertTrue(words.size() > 0);
// for (val word : words) {
// System.out.println(word);
// }
}
@Test
public void testJSONSerialization() {
Word2Vec word2Vec = new Word2Vec.Builder()
.layerSize(1000)
.limitVocabularySize(1000)
.elementsLearningAlgorithm(CBOW.class.getCanonicalName())
.allowParallelTokenization(true)
.modelUtils(new FlatModelUtils<VocabWord>())
.usePreciseMode(true)
.batchSize(1024)
.windowSize(23)
.minWordFrequency(24)
.iterations(54)
.seed(45)
.learningRate(0.08)
.epochs(45)
.stopWords(Collections.singletonList("NOT"))
.sampling(44)
.workers(45)
.negativeSample(56)
.useAdaGrad(true)
.useHierarchicSoftmax(false)
.minLearningRate(0.002)
.resetModel(true)
.useUnknown(true)
.enableScavenger(true)
.usePreciseWeightInit(true)
.build();
AbstractCache<VocabWord> cache = new AbstractCache.Builder<VocabWord>().build();
val words = new VocabWord[3];
words[0] = new VocabWord(1.0, "word");
words[1] = new VocabWord(2.0, "test");
words[2] = new VocabWord(3.0, "tester");
for (int i = 0; i < words.length; ++i) {
cache.addToken(words[i]);
cache.addWordToIndex(i, words[i].getLabel());
}
word2Vec.setVocab(cache);
String json = null;
Word2Vec unserialized = null;
try {
json = word2Vec.toJson();
log.info("{}", json.toString());
unserialized = Word2Vec.fromJson(json);
}
catch (Exception e) {
e.printStackTrace();
fail();
}
assertEquals(cache.totalWordOccurrences(),((Word2Vec) unserialized).getVocab().totalWordOccurrences());
assertEquals(cache.totalNumberOfDocs(), ((Word2Vec) unserialized).getVocab().totalNumberOfDocs());
for (int i = 0; i < words.length; ++i) {
val cached = cache.wordAtIndex(i);
val restored = ((Word2Vec) unserialized).getVocab().wordAtIndex(i);
assertNotNull(cached);
assertEquals(cached, restored);
}
}
@Test
public void testWord2VecConfigurationConsistency() {
VectorsConfiguration configuration = new VectorsConfiguration();
assertEquals(configuration.getLayersSize(), 200);
assertEquals(configuration.getLayersSize(), 200);
        assertNull(configuration.getElementsLearningAlgorithm());
assertEquals(configuration.isAllowParallelTokenization(), false);
assertEquals(configuration.isPreciseMode(), false);
assertEquals(configuration.getBatchSize(), 512);
        assertNull(configuration.getModelUtils());
assertTrue(!configuration.isPreciseMode());
assertEquals(configuration.getBatchSize(), 512);
assertEquals(configuration.getWindow(), 5);
assertEquals(configuration.getMinWordFrequency(), 5);
assertEquals(configuration.getIterations(), 1);
assertEquals(configuration.getSeed(), 0);
assertEquals(configuration.getLearningRate(), 0.025, 1e-5f);
assertEquals(configuration.getEpochs(), 1);
assertTrue(configuration.getStopList().isEmpty());
assertEquals(configuration.getSampling(), 0.0, 1e-5f);
assertEquals(configuration.getNegative(), 0, 1e-5f);
assertTrue(!configuration.isUseAdaGrad());
assertTrue(configuration.isUseHierarchicSoftmax());
assertEquals(configuration.getMinLearningRate(), 1.0E-4, 1e-5f);
assertTrue(!configuration.isUseUnknown());
Word2Vec word2Vec = new Word2Vec.Builder(configuration)
.layerSize(1000)
.limitVocabularySize(1000)
.elementsLearningAlgorithm(CBOW.class.getCanonicalName())
.allowParallelTokenization(true)
.modelUtils(new FlatModelUtils<VocabWord>())
.usePreciseMode(true)
.batchSize(1024)
.windowSize(23)
.minWordFrequency(24)
.iterations(54)
.seed(45)
.learningRate(0.08)
.epochs(45)
.stopWords(Collections.singletonList("NOT"))
.sampling(44)
.workers(45)
.negativeSample(56)
.useAdaGrad(true)
.useHierarchicSoftmax(false)
.minLearningRate(0.002)
.resetModel(true)
.useUnknown(true)
.enableScavenger(true)
.usePreciseWeightInit(true)
.build();
assertEquals(word2Vec.getConfiguration().getLayersSize(), word2Vec.getLayerSize());
assertEquals(word2Vec.getConfiguration().getLayersSize(), 1000);
assertEquals(word2Vec.getConfiguration().getElementsLearningAlgorithm(), CBOW.class.getCanonicalName());
assertEquals(word2Vec.getConfiguration().isAllowParallelTokenization(), true);
assertEquals(word2Vec.getConfiguration().isPreciseMode(), true);
assertEquals(word2Vec.getConfiguration().getBatchSize(), 1024);
String modelUtilsName = word2Vec.getConfiguration().getModelUtils();
assertEquals(modelUtilsName, FlatModelUtils.class.getCanonicalName());
assertTrue(word2Vec.getConfiguration().isPreciseMode());
assertEquals(word2Vec.getConfiguration().getBatchSize(), 1024);
assertEquals(word2Vec.getConfiguration().getWindow(), 23);
assertEquals(word2Vec.getConfiguration().getMinWordFrequency(), 24);
assertEquals(word2Vec.getConfiguration().getIterations(), 54);
assertEquals(word2Vec.getConfiguration().getSeed(), 45);
assertEquals(word2Vec.getConfiguration().getLearningRate(), 0.08, 1e-5f);
assertEquals(word2Vec.getConfiguration().getEpochs(), 45);
assertEquals(word2Vec.getConfiguration().getStopList().size(), 1);
assertEquals(configuration.getSampling(), 44.0, 1e-5f);
assertEquals(configuration.getNegative(), 56.0, 1e-5f);
assertTrue(configuration.isUseAdaGrad());
assertTrue(!configuration.isUseHierarchicSoftmax());
assertEquals(configuration.getMinLearningRate(), 0.002, 1e-5f);
assertTrue(configuration.isUseUnknown());
}
@Test
public void testWordVectorsPartiallyAbsentLabels() throws Exception {
String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {
skipUnlessIntegrationTests(); //AB 2020/02/06 Skip CUDA except for integration tests due to very slow test speed - > 5 minutes on Titan X
}
SentenceIterator iter = new BasicLineIterator(inputFile.getAbsolutePath());
// Split on white spaces in the line to get words
TokenizerFactory t = new DefaultTokenizerFactory();
t.setTokenPreProcessor(new CommonPreprocessor());
Word2Vec vec = new Word2Vec.Builder().minWordFrequency(10).useUnknown(true)
.iterations(1).layerSize(100)
.stopWords(new ArrayList<String>()).seed(42).learningRate(0.025).minLearningRate(0.001)
.sampling(0).elementsLearningAlgorithm(new CBOW<VocabWord>()).epochs(1).windowSize(5)
.useHierarchicSoftmax(true).allowParallelTokenization(true)
.useUnknown(false)
.modelUtils(new FlatModelUtils<VocabWord>()).iterate(iter).tokenizerFactory(t).build();
vec.fit();
ArrayList<String> labels = new ArrayList<>();
labels.add("fewfew");
labels.add("day");
labels.add("night");
labels.add("week");
INDArray matrix = vec.getWordVectors(labels);
assertEquals(3, matrix.rows());
assertEquals(matrix.getRow(0, true), vec.getWordVectorMatrix("day"));
assertEquals(matrix.getRow(1, true), vec.getWordVectorMatrix("night"));
assertEquals(matrix.getRow(2, true), vec.getWordVectorMatrix("week"));
}
@Test
public void testWordVectorsAbsentLabels() throws Exception {
String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {
skipUnlessIntegrationTests(); //AB 2020/02/06 Skip CUDA except for integration tests due to very slow test speed - > 5 minutes on Titan X
}
SentenceIterator iter = new BasicLineIterator(inputFile.getAbsolutePath());
// Split on white spaces in the line to get words
TokenizerFactory t = new DefaultTokenizerFactory();
t.setTokenPreProcessor(new CommonPreprocessor());
Word2Vec vec = new Word2Vec.Builder().minWordFrequency(10).useUnknown(true)
.iterations(1).layerSize(100)
.stopWords(new ArrayList<String>()).seed(42).learningRate(0.025).minLearningRate(0.001)
.sampling(0).elementsLearningAlgorithm(new CBOW<VocabWord>()).epochs(1).windowSize(5)
.useHierarchicSoftmax(true).allowParallelTokenization(true)
.useUnknown(false)
.modelUtils(new FlatModelUtils<VocabWord>()).iterate(iter).tokenizerFactory(t).build();
vec.fit();
ArrayList<String> labels = new ArrayList<>();
labels.add("fewfew");
INDArray matrix = vec.getWordVectors(labels);
assertTrue(matrix.isEmpty());
}
@Test
public void testWordVectorsAbsentLabels_WithUnknown() throws Exception {
String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {
skipUnlessIntegrationTests(); //AB 2020/02/06 Skip CUDA except for integration tests due to very slow test speed - > 5 minutes on Titan X
}
SentenceIterator iter = new BasicLineIterator(inputFile.getAbsolutePath());
// Split on white spaces in the line to get words
TokenizerFactory t = new DefaultTokenizerFactory();
t.setTokenPreProcessor(new CommonPreprocessor());
Word2Vec vec = new Word2Vec.Builder().minWordFrequency(1).iterations(1).batchSize(8192).layerSize(100)
.stopWords(new ArrayList<String>()).seed(42).learningRate(0.025).minLearningRate(0.001)
.sampling(0).elementsLearningAlgorithm(new SkipGram<VocabWord>())
//.negativeSample(10)
.epochs(1).windowSize(5).allowParallelTokenization(true)
.workers(4)
.modelUtils(new BasicModelUtils<VocabWord>()).iterate(iter).tokenizerFactory(t)
                        .useUnknown(true).unknownElement(new VocabWord(1, "UNKNOWN")).build();
vec.fit();
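        // The row-wise comparison below expects each returned row to match the UNKNOWN vector, i.e.
        // both requested labels are resolved through the unknown-element fallback of getWordVectors().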
ArrayList<String> labels = new ArrayList<>();
labels.add("bus");
labels.add("car");
INDArray matrix = vec.getWordVectors(labels);
for (int i = 0; i < labels.size(); ++i)
assertEquals(matrix.getRow(i, true), vec.getWordVectorMatrix("UNKNOWN"));
}
@Test
public void weightsNotUpdated_WhenLocked() throws Exception {
boolean isIntegration = isIntegrationTests();
SentenceIterator iter;
SentenceIterator iter2;
if(isIntegration){
iter = new BasicLineIterator(inputFile);
iter2 = new BasicLineIterator(inputFile2.getAbsolutePath());
} else {
iter = new CollectionSentenceIterator(firstNLines(inputFile, 300));
iter2 = new CollectionSentenceIterator(firstNLines(inputFile2, 300));
}
Word2Vec vec1 = new Word2Vec.Builder().minWordFrequency(1).iterations(3).batchSize(64).layerSize(100)
.stopWords(new ArrayList<String>()).seed(42).learningRate(0.025).minLearningRate(0.001)
.sampling(0).elementsLearningAlgorithm(new SkipGram<VocabWord>())
.epochs(1).windowSize(5).allowParallelTokenization(true)
.workers(1)
.iterate(iter)
.modelUtils(new BasicModelUtils<VocabWord>()).build();
vec1.fit();
Word2Vec vec2 = new Word2Vec.Builder().minWordFrequency(1).iterations(3).batchSize(32).layerSize(100)
.stopWords(new ArrayList<String>()).seed(32).learningRate(0.021).minLearningRate(0.001)
.sampling(0).elementsLearningAlgorithm(new SkipGram<VocabWord>())
.epochs(1).windowSize(5).allowParallelTokenization(true)
.workers(1)
.iterate(iter2)
.intersectModel(vec1, true)
.modelUtils(new BasicModelUtils<VocabWord>()).build();
vec2.fit();
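        // vec2 was built with intersectModel(vec1, true); the test name and the equality checks below
        // express the expectation that the vectors taken over from vec1 stay locked (unchanged) while
        // vec2 trains on the second corpus.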
assertEquals(vec1.getWordVectorMatrix("put"), vec2.getWordVectorMatrix("put"));
assertEquals(vec1.getWordVectorMatrix("part"), vec2.getWordVectorMatrix("part"));
assertEquals(vec1.getWordVectorMatrix("made"), vec2.getWordVectorMatrix("made"));
assertEquals(vec1.getWordVectorMatrix("money"), vec2.getWordVectorMatrix("money"));
}
@Test
public void weightsNotUpdated_WhenLocked_CBOW() throws Exception {
String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {
skipUnlessIntegrationTests(); //AB 2020/02/06 Skip CUDA except for integration tests due to very slow test speed - > 5 minutes on Titan X
}
SentenceIterator iter = new BasicLineIterator(inputFile.getAbsolutePath());
Word2Vec vec1 = new Word2Vec.Builder().minWordFrequency(1).iterations(1).batchSize(8192).layerSize(100)
.stopWords(new ArrayList<String>()).seed(42).learningRate(0.025).minLearningRate(0.001)
.sampling(0).elementsLearningAlgorithm(new CBOW<VocabWord>())
.epochs(1).windowSize(5).allowParallelTokenization(true)
.workers(1)
.iterate(iter)
.modelUtils(new BasicModelUtils<VocabWord>()).build();
vec1.fit();
log.info("Fit 1 finished");
iter = new BasicLineIterator(inputFile2.getAbsolutePath());
Word2Vec vec2 = new Word2Vec.Builder().minWordFrequency(1).iterations(1).batchSize(8192).layerSize(100)
.stopWords(new ArrayList<String>()).seed(32).learningRate(0.021).minLearningRate(0.001)
.sampling(0).elementsLearningAlgorithm(new CBOW<VocabWord>())
.epochs(1).windowSize(5).allowParallelTokenization(true)
.workers(1)
.iterate(iter)
.intersectModel(vec1, true)
.modelUtils(new BasicModelUtils<VocabWord>()).build();
vec2.fit();
log.info("Fit 2 finished");
assertEquals(vec1.getWordVectorMatrix("put"), vec2.getWordVectorMatrix("put"));
assertEquals(vec1.getWordVectorMatrix("part"), vec2.getWordVectorMatrix("part"));
assertEquals(vec1.getWordVectorMatrix("made"), vec2.getWordVectorMatrix("made"));
assertEquals(vec1.getWordVectorMatrix("money"), vec2.getWordVectorMatrix("money"));
}
@Test
public void testWordsNearestSum() throws IOException {
String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {
skipUnlessIntegrationTests(); //AB 2020/02/06 Skip CUDA except for integration tests due to very slow test speed - > 5 minutes on Titan X
}
log.info("Load & Vectorize Sentences....");
SentenceIterator iter = new BasicLineIterator(inputFile);
TokenizerFactory t = new DefaultTokenizerFactory();
t.setTokenPreProcessor(new CommonPreprocessor());
log.info("Building model....");
Word2Vec vec = new Word2Vec.Builder()
.minWordFrequency(5)
.iterations(1)
.layerSize(100)
.seed(42)
.windowSize(5)
.iterate(iter)
.tokenizerFactory(t)
.build();
log.info("Fitting Word2Vec model....");
vec.fit();
log.info("Writing word vectors to text file....");
log.info("Closest Words:");
Collection<String> lst = vec.wordsNearestSum("day", 10);
log.info("10 Words closest to 'day': {}", lst);
assertTrue(lst.contains("week"));
assertTrue(lst.contains("night"));
assertTrue(lst.contains("year"));
assertTrue(lst.contains("years"));
assertTrue(lst.contains("time"));
}
private static void printWords(String target, Collection<String> list, Word2Vec vec) {
System.out.println("Words close to [" + target + "]:");
for (String word : list) {
double sim = vec.similarity(target, word);
System.out.print("'" + word + "': [" + sim + "]");
}
System.out.print("\n");
}
public static List<String> firstNLines(File f, int n){
List<String> lines = new ArrayList<>();
try(InputStream is = new BufferedInputStream(new FileInputStream(f))){
LineIterator lineIter = IOUtils.lineIterator(is, StandardCharsets.UTF_8);
try{
for( int i=0; i<n && lineIter.hasNext(); i++ ){
lines.add(lineIter.next());
}
} finally {
lineIter.close();
}
return lines;
} catch (IOException e){
throw new RuntimeException(e);
}
}
}
|
|
/*
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.rxjava3.internal.operators.flowable;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
import java.util.*;
import org.junit.Test;
import org.mockito.InOrder;
import org.reactivestreams.Subscriber;
import io.reactivex.rxjava3.core.*;
import io.reactivex.rxjava3.exceptions.TestException;
import io.reactivex.rxjava3.functions.*;
import io.reactivex.rxjava3.internal.functions.Functions;
import io.reactivex.rxjava3.internal.subscriptions.BooleanSubscription;
import io.reactivex.rxjava3.internal.util.CrashingMappedIterable;
import io.reactivex.rxjava3.plugins.RxJavaPlugins;
import io.reactivex.rxjava3.processors.PublishProcessor;
import io.reactivex.rxjava3.subscribers.TestSubscriber;
import io.reactivex.rxjava3.testsupport.*;
public class FlowableWithLatestFromTest extends RxJavaTest {
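    // COMBINER packs the main value into the high byte and the latest "other" value into the low
    // byte, so the expected values in the tests below read as (main << 8) + other.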
static final BiFunction<Integer, Integer, Integer> COMBINER = new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return (t1 << 8) + t2;
}
};
static final BiFunction<Integer, Integer, Integer> COMBINER_ERROR = new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
throw new TestException("Forced failure");
}
};
@Test
public void simple() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
InOrder inOrder = inOrder(subscriber);
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
result.subscribe(subscriber);
source.onNext(1);
inOrder.verify(subscriber, never()).onNext(anyInt());
other.onNext(1);
inOrder.verify(subscriber, never()).onNext(anyInt());
source.onNext(2);
inOrder.verify(subscriber).onNext((2 << 8) + 1);
other.onNext(2);
inOrder.verify(subscriber, never()).onNext(anyInt());
other.onComplete();
inOrder.verify(subscriber, never()).onComplete();
source.onNext(3);
inOrder.verify(subscriber).onNext((3 << 8) + 2);
source.onComplete();
inOrder.verify(subscriber).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void emptySource() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
result.subscribe(ts);
assertTrue(source.hasSubscribers());
assertTrue(other.hasSubscribers());
other.onNext(1);
source.onComplete();
ts.assertNoErrors();
ts.assertTerminated();
ts.assertNoValues();
assertFalse(source.hasSubscribers());
assertFalse(other.hasSubscribers());
}
@Test
public void emptyOther() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
result.subscribe(ts);
assertTrue(source.hasSubscribers());
assertTrue(other.hasSubscribers());
source.onNext(1);
source.onComplete();
ts.assertNoErrors();
ts.assertTerminated();
ts.assertNoValues();
assertFalse(source.hasSubscribers());
assertFalse(other.hasSubscribers());
}
@Test
public void unsubscription() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriber<Integer> ts = new TestSubscriber<>();
result.subscribe(ts);
assertTrue(source.hasSubscribers());
assertTrue(other.hasSubscribers());
other.onNext(1);
source.onNext(1);
ts.cancel();
ts.assertValue((1 << 8) + 1);
ts.assertNoErrors();
ts.assertNotComplete();
assertFalse(source.hasSubscribers());
assertFalse(other.hasSubscribers());
}
@Test
public void sourceThrows() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
result.subscribe(ts);
assertTrue(source.hasSubscribers());
assertTrue(other.hasSubscribers());
other.onNext(1);
source.onNext(1);
source.onError(new TestException());
ts.assertTerminated();
ts.assertValue((1 << 8) + 1);
ts.assertError(TestException.class);
ts.assertNotComplete();
assertFalse(source.hasSubscribers());
assertFalse(other.hasSubscribers());
}
@Test
public void otherThrows() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
result.subscribe(ts);
assertTrue(source.hasSubscribers());
assertTrue(other.hasSubscribers());
other.onNext(1);
source.onNext(1);
other.onError(new TestException());
ts.assertTerminated();
ts.assertValue((1 << 8) + 1);
ts.assertNotComplete();
ts.assertError(TestException.class);
assertFalse(source.hasSubscribers());
assertFalse(other.hasSubscribers());
}
@Test
public void functionThrows() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER_ERROR);
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
result.subscribe(ts);
assertTrue(source.hasSubscribers());
assertTrue(other.hasSubscribers());
other.onNext(1);
source.onNext(1);
ts.assertTerminated();
ts.assertNotComplete();
ts.assertNoValues();
ts.assertError(TestException.class);
assertFalse(source.hasSubscribers());
assertFalse(other.hasSubscribers());
}
@Test
public void noDownstreamUnsubscribe() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriber<Integer> ts = new TestSubscriber<>();
result.subscribe(ts);
source.onComplete();
assertFalse(ts.isCancelled());
}
@Test
public void backpressure() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriber<Integer> ts = new TestSubscriber<>(0L);
result.subscribe(ts);
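        // withLatestFrom does not buffer main-source values: items arriving before the other source
        // has emitted are simply dropped (the first combined value below is (2 << 8) + 1, not
        // (1 << 8) + 1), and emissions are further gated by the downstream requests.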
assertTrue("Other has no observers!", other.hasSubscribers());
ts.request(1);
source.onNext(1);
assertTrue("Other has no observers!", other.hasSubscribers());
ts.assertNoValues();
other.onNext(1);
source.onNext(2);
ts.assertValue((2 << 8) + 1);
ts.request(5);
source.onNext(3);
source.onNext(4);
source.onNext(5);
source.onNext(6);
source.onNext(7);
ts.assertValues(
(2 << 8) + 1, (3 << 8) + 1, (4 << 8) + 1, (5 << 8) + 1,
(6 << 8) + 1, (7 << 8) + 1
);
ts.cancel();
assertFalse("Other has observers!", other.hasSubscribers());
ts.assertNoErrors();
}
static final Function<Object[], String> toArray = new Function<Object[], String>() {
@Override
public String apply(Object[] args) {
return Arrays.toString(args);
}
};
@Test
public void manySources() {
PublishProcessor<String> pp1 = PublishProcessor.create();
PublishProcessor<String> pp2 = PublishProcessor.create();
PublishProcessor<String> pp3 = PublishProcessor.create();
PublishProcessor<String> main = PublishProcessor.create();
TestSubscriber<String> ts = new TestSubscriber<>();
main.withLatestFrom(new Flowable[] { pp1, pp2, pp3 }, toArray)
.subscribe(ts);
main.onNext("1");
ts.assertNoValues();
pp1.onNext("a");
ts.assertNoValues();
pp2.onNext("A");
ts.assertNoValues();
pp3.onNext("=");
ts.assertNoValues();
main.onNext("2");
ts.assertValues("[2, a, A, =]");
pp2.onNext("B");
ts.assertValues("[2, a, A, =]");
pp3.onComplete();
ts.assertValues("[2, a, A, =]");
pp1.onNext("b");
main.onNext("3");
ts.assertValues("[2, a, A, =]", "[3, b, B, =]");
main.onComplete();
ts.assertValues("[2, a, A, =]", "[3, b, B, =]");
ts.assertNoErrors();
ts.assertComplete();
assertFalse("ps1 has subscribers?", pp1.hasSubscribers());
assertFalse("ps2 has subscribers?", pp2.hasSubscribers());
assertFalse("ps3 has subscribers?", pp3.hasSubscribers());
}
@Test
public void manySourcesIterable() {
PublishProcessor<String> pp1 = PublishProcessor.create();
PublishProcessor<String> pp2 = PublishProcessor.create();
PublishProcessor<String> pp3 = PublishProcessor.create();
PublishProcessor<String> main = PublishProcessor.create();
TestSubscriber<String> ts = new TestSubscriber<>();
main.withLatestFrom(Arrays.<Flowable<?>>asList(pp1, pp2, pp3), toArray)
.subscribe(ts);
main.onNext("1");
ts.assertNoValues();
pp1.onNext("a");
ts.assertNoValues();
pp2.onNext("A");
ts.assertNoValues();
pp3.onNext("=");
ts.assertNoValues();
main.onNext("2");
ts.assertValues("[2, a, A, =]");
pp2.onNext("B");
ts.assertValues("[2, a, A, =]");
pp3.onComplete();
ts.assertValues("[2, a, A, =]");
pp1.onNext("b");
main.onNext("3");
ts.assertValues("[2, a, A, =]", "[3, b, B, =]");
main.onComplete();
ts.assertValues("[2, a, A, =]", "[3, b, B, =]");
ts.assertNoErrors();
ts.assertComplete();
assertFalse("ps1 has subscribers?", pp1.hasSubscribers());
assertFalse("ps2 has subscribers?", pp2.hasSubscribers());
assertFalse("ps3 has subscribers?", pp3.hasSubscribers());
}
@Test
public void manySourcesIterableSweep() {
for (String val : new String[] { "1" /*, null*/ }) {
int n = 35;
for (int i = 0; i < n; i++) {
List<Flowable<?>> sources = new ArrayList<>();
List<String> expected = new ArrayList<>();
expected.add(val);
for (int j = 0; j < i; j++) {
sources.add(Flowable.just(val));
expected.add(String.valueOf(val));
}
TestSubscriber<String> ts = new TestSubscriber<>();
PublishProcessor<String> main = PublishProcessor.create();
main.withLatestFrom(sources, toArray).subscribe(ts);
ts.assertNoValues();
main.onNext(val);
main.onComplete();
ts.assertValue(expected.toString());
ts.assertNoErrors();
ts.assertComplete();
}
}
}
@Test
public void backpressureNoSignal() {
PublishProcessor<String> pp1 = PublishProcessor.create();
PublishProcessor<String> pp2 = PublishProcessor.create();
TestSubscriber<String> ts = new TestSubscriber<>(0);
Flowable.range(1, 10).withLatestFrom(new Flowable<?>[] { pp1, pp2 }, toArray)
.subscribe(ts);
ts.assertNoValues();
ts.request(1);
ts.assertNoValues();
ts.assertNoErrors();
ts.assertComplete();
assertFalse("ps1 has subscribers?", pp1.hasSubscribers());
assertFalse("ps2 has subscribers?", pp2.hasSubscribers());
}
@Test
public void backpressureWithSignal() {
PublishProcessor<String> pp1 = PublishProcessor.create();
PublishProcessor<String> pp2 = PublishProcessor.create();
TestSubscriber<String> ts = new TestSubscriber<>(0);
Flowable.range(1, 3).withLatestFrom(new Flowable<?>[] { pp1, pp2 }, toArray)
.subscribe(ts);
ts.assertNoValues();
pp1.onNext("1");
pp2.onNext("1");
ts.request(1);
ts.assertValue("[1, 1, 1]");
ts.request(1);
ts.assertValues("[1, 1, 1]", "[2, 1, 1]");
ts.request(1);
ts.assertValues("[1, 1, 1]", "[2, 1, 1]", "[3, 1, 1]");
ts.assertNoErrors();
ts.assertComplete();
assertFalse("ps1 has subscribers?", pp1.hasSubscribers());
assertFalse("ps2 has subscribers?", pp2.hasSubscribers());
}
@Test
public void withEmpty() {
TestSubscriber<String> ts = new TestSubscriber<>(0);
Flowable.range(1, 3).withLatestFrom(
new Flowable<?>[] { Flowable.just(1), Flowable.empty() }, toArray)
.subscribe(ts);
ts.assertNoValues();
ts.assertNoErrors();
ts.assertComplete();
}
@Test
public void withError() {
TestSubscriber<String> ts = new TestSubscriber<>(0);
Flowable.range(1, 3).withLatestFrom(
new Flowable<?>[] { Flowable.just(1), Flowable.error(new TestException()) }, toArray)
.subscribe(ts);
ts.assertNoValues();
ts.assertError(TestException.class);
ts.assertNotComplete();
}
@Test
public void withMainError() {
TestSubscriber<String> ts = new TestSubscriber<>(0);
Flowable.error(new TestException()).withLatestFrom(
new Flowable<?>[] { Flowable.just(1), Flowable.just(1) }, toArray)
.subscribe(ts);
ts.assertNoValues();
ts.assertError(TestException.class);
ts.assertNotComplete();
}
@Test
public void with2Others() {
Flowable<Integer> just = Flowable.just(1);
TestSubscriber<List<Integer>> ts = new TestSubscriber<>();
just.withLatestFrom(just, just, new Function3<Integer, Integer, Integer, List<Integer>>() {
@Override
public List<Integer> apply(Integer a, Integer b, Integer c) {
return Arrays.asList(a, b, c);
}
})
.subscribe(ts);
ts.assertValue(Arrays.asList(1, 1, 1));
ts.assertNoErrors();
ts.assertComplete();
}
@Test
public void with3Others() {
Flowable<Integer> just = Flowable.just(1);
TestSubscriber<List<Integer>> ts = new TestSubscriber<>();
just.withLatestFrom(just, just, just, new Function4<Integer, Integer, Integer, Integer, List<Integer>>() {
@Override
public List<Integer> apply(Integer a, Integer b, Integer c, Integer d) {
return Arrays.asList(a, b, c, d);
}
})
.subscribe(ts);
ts.assertValue(Arrays.asList(1, 1, 1, 1));
ts.assertNoErrors();
ts.assertComplete();
}
@Test
public void with4Others() {
Flowable<Integer> just = Flowable.just(1);
TestSubscriber<List<Integer>> ts = new TestSubscriber<>();
just.withLatestFrom(just, just, just, just, new Function5<Integer, Integer, Integer, Integer, Integer, List<Integer>>() {
@Override
public List<Integer> apply(Integer a, Integer b, Integer c, Integer d, Integer e) {
return Arrays.asList(a, b, c, d, e);
}
})
.subscribe(ts);
ts.assertValue(Arrays.asList(1, 1, 1, 1, 1));
ts.assertNoErrors();
ts.assertComplete();
}
@Test
public void dispose() {
TestHelper.checkDisposed(Flowable.just(1).withLatestFrom(Flowable.just(2), new BiFunction<Integer, Integer, Object>() {
@Override
public Object apply(Integer a, Integer b) throws Exception {
return a;
}
}));
TestHelper.checkDisposed(Flowable.just(1).withLatestFrom(Flowable.just(2), Flowable.just(3), new Function3<Integer, Integer, Integer, Object>() {
@Override
public Object apply(Integer a, Integer b, Integer c) throws Exception {
return a;
}
}));
}
@Test
public void manyIteratorThrows() {
Flowable.just(1)
.withLatestFrom(new CrashingMappedIterable<>(1, 100, 100, new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer v) throws Exception {
return Flowable.just(2);
}
}), new Function<Object[], Object>() {
@Override
public Object apply(Object[] a) throws Exception {
return a;
}
})
.to(TestHelper.testConsumer())
.assertFailureAndMessage(TestException.class, "iterator()");
}
@Test
public void manyCombinerThrows() {
Flowable.just(1).withLatestFrom(Flowable.just(2), Flowable.just(3), new Function3<Integer, Integer, Integer, Object>() {
@Override
public Object apply(Integer a, Integer b, Integer c) throws Exception {
throw new TestException();
}
})
.test()
.assertFailure(TestException.class);
}
@Test
public void manyErrors() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
new Flowable<Integer>() {
@Override
protected void subscribeActual(Subscriber<? super Integer> subscriber) {
subscriber.onSubscribe(new BooleanSubscription());
subscriber.onError(new TestException("First"));
subscriber.onNext(1);
subscriber.onError(new TestException("Second"));
subscriber.onComplete();
}
}.withLatestFrom(Flowable.just(2), Flowable.just(3), new Function3<Integer, Integer, Integer, Object>() {
@Override
public Object apply(Integer a, Integer b, Integer c) throws Exception {
return a;
}
})
.to(TestHelper.testConsumer())
.assertFailureAndMessage(TestException.class, "First");
TestHelper.assertUndeliverable(errors, 0, TestException.class, "Second");
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void otherErrors() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
Flowable.just(1)
.withLatestFrom(new Flowable<Integer>() {
@Override
protected void subscribeActual(Subscriber<? super Integer> s) {
s.onSubscribe(new BooleanSubscription());
s.onError(new TestException("First"));
s.onError(new TestException("Second"));
}
}, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer a, Integer b) throws Exception {
return a + b;
}
})
.to(TestHelper.<Integer>testConsumer())
.assertFailureAndMessage(TestException.class, "First");
TestHelper.assertUndeliverable(errors, 0, TestException.class, "Second");
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void combineToNull1() {
Flowable.just(1)
.withLatestFrom(Flowable.just(2), new BiFunction<Integer, Integer, Object>() {
@Override
public Object apply(Integer a, Integer b) throws Exception {
return null;
}
})
.test()
.assertFailure(NullPointerException.class);
}
@Test
public void combineToNull2() {
Flowable.just(1)
.withLatestFrom(Arrays.asList(Flowable.just(2), Flowable.just(3)), new Function<Object[], Object>() {
@Override
public Object apply(Object[] o) throws Exception {
return null;
}
})
.test()
.assertFailure(NullPointerException.class);
}
@Test
public void zeroOtherCombinerReturnsNull() {
Flowable.just(1)
.withLatestFrom(new Flowable[0], Functions.justFunction(null))
.to(TestHelper.testConsumer())
.assertFailureAndMessage(NullPointerException.class, "The combiner returned a null value");
}
@Test
public void singleRequestNotForgottenWhenNoData() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriber<Integer> ts = new TestSubscriber<>(0L);
result.subscribe(ts);
ts.request(1);
source.onNext(1);
ts.assertNoValues();
other.onNext(1);
ts.assertNoValues();
source.onNext(2);
ts.assertValue((2 << 8) + 1);
}
@Test
public void coldSourceConsumedWithoutOther() {
Flowable.range(1, 10).withLatestFrom(Flowable.never(),
new BiFunction<Integer, Object, Object>() {
@Override
public Object apply(Integer a, Object b) throws Exception {
return a;
}
})
.test(1)
.assertResult();
}
@Test
public void coldSourceConsumedWithoutManyOthers() {
Flowable.range(1, 10).withLatestFrom(Flowable.never(), Flowable.never(), Flowable.never(),
new Function4<Integer, Object, Object, Object, Object>() {
@Override
public Object apply(Integer a, Object b, Object c, Object d) throws Exception {
return a;
}
})
.test(1)
.assertResult();
}
@Test
public void otherOnSubscribeRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final PublishProcessor<Integer> pp0 = PublishProcessor.create();
final PublishProcessor<Integer> pp1 = PublishProcessor.create();
final PublishProcessor<Integer> pp2 = PublishProcessor.create();
final PublishProcessor<Integer> pp3 = PublishProcessor.create();
final Flowable<Object> source = pp0.withLatestFrom(pp1, pp2, pp3, new Function4<Object, Integer, Integer, Integer, Object>() {
@Override
public Object apply(Object a, Integer b, Integer c, Integer d)
throws Exception {
return a;
}
});
final TestSubscriber<Object> ts = new TestSubscriber<>();
Runnable r1 = new Runnable() {
@Override
public void run() {
source.subscribe(ts);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
ts.cancel();
}
};
TestHelper.race(r1, r2);
ts.assertEmpty();
assertFalse(pp0.hasSubscribers());
assertFalse(pp1.hasSubscribers());
assertFalse(pp2.hasSubscribers());
assertFalse(pp3.hasSubscribers());
}
}
@Test
public void otherCompleteRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final PublishProcessor<Integer> pp0 = PublishProcessor.create();
final PublishProcessor<Integer> pp1 = PublishProcessor.create();
final PublishProcessor<Integer> pp2 = PublishProcessor.create();
final PublishProcessor<Integer> pp3 = PublishProcessor.create();
final Flowable<Object> source = pp0.withLatestFrom(pp1, pp2, pp3, new Function4<Object, Integer, Integer, Integer, Object>() {
@Override
public Object apply(Object a, Integer b, Integer c, Integer d)
throws Exception {
return a;
}
});
final TestSubscriber<Object> ts = new TestSubscriber<>();
Runnable r1 = new Runnable() {
@Override
public void run() {
source.subscribe(ts);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
pp1.onComplete();
}
};
TestHelper.race(r1, r2);
ts.assertResult();
assertFalse(pp0.hasSubscribers());
assertFalse(pp1.hasSubscribers());
assertFalse(pp2.hasSubscribers());
assertFalse(pp3.hasSubscribers());
}
}
}
|
|
/*
* Copyright 2006-2021 Prowide
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.prowidesoftware.swift.model.mt.mt7xx;
import com.prowidesoftware.Generated;
import java.io.Serializable;
import org.apache.commons.lang3.StringUtils;
import com.prowidesoftware.swift.model.*;
import com.prowidesoftware.swift.model.field.*;
import com.prowidesoftware.swift.model.mt.AbstractMT;
import com.prowidesoftware.swift.utils.Lib;
import java.io.File;
import java.io.InputStream;
import java.io.IOException;
/**
* MT 747 - Amendment to an Authorisation to Reimburse.
*
* <p>
* SWIFT MT747 (ISO 15022) message structure:
*
<div class="scheme"><ul>
<li class="field">Field 20 (M)</li>
<li class="field">Field 21 (O)</li>
<li class="field">Field 30 (M)</li>
<li class="field">Field 31 E (O)</li>
<li class="field">Field 32 B (O)</li>
<li class="field">Field 33 B (O)</li>
<li class="field">Field 34 B (O)</li>
<li class="field">Field 39 A (O)</li>
<li class="field">Field 39 C (O)</li>
<li class="field">Field 72 Z (O)</li>
<li class="field">Field 77 (O)</li>
</ul></div>
*
* <p>
* This source code is specific to release <strong>SRU 2021</strong>
* <p>
* For additional resources check <a href="https://www.prowidesoftware.com/resources">https://www.prowidesoftware.com/resources</a>
*/
@Generated
public class MT747 extends AbstractMT implements Serializable {
/**
     * Constant identifying the SRU to which this class belongs.
*/
public static final int SRU = 2021;
private static final long serialVersionUID = 1L;
private static final transient java.util.logging.Logger log = java.util.logging.Logger.getLogger(MT747.class.getName());
/**
     * Constant for MT name; this is part of the class name, after "MT".
*/
public static final String NAME = "747";
/**
* Creates an MT747 initialized with the parameter SwiftMessage.
* @param m swift message with the MT747 content
*/
public MT747(final SwiftMessage m) {
super(m);
sanityCheck(m);
}
/**
* Creates an MT747 initialized with the parameter MtSwiftMessage.
     * @param m swift message with the MT747 content, the parameter cannot be null
* @see #MT747(String)
*/
public MT747(final MtSwiftMessage m) {
this(m.message());
}
/**
* Creates an MT747 initialized with the parameter MtSwiftMessage.
*
* @param m swift message with the MT747 content
* @return the created object or null if the parameter is null
* @see #MT747(String)
* @since 7.7
*/
public static MT747 parse(final MtSwiftMessage m) {
if (m == null) {
return null;
}
return new MT747(m);
}
/**
* Creates and initializes a new MT747 input message setting TEST BICS as sender and receiver.
* All mandatory header attributes are completed with default values.
*
* @since 7.6
*/
public MT747() {
this(BIC.TEST8, BIC.TEST8);
}
/**
* Creates and initializes a new MT747 input message from sender to receiver.
* All mandatory header attributes are completed with default values.
* In particular the sender and receiver addresses will be filled with proper default LT identifier
 * and branch codes if not provided.
*
* @param sender the sender address as a bic8, bic11 or full logical terminal consisting of 12 characters
* @param receiver the receiver address as a bic8, bic11 or full logical terminal consisting of 12 characters
* @since 7.7
*/
public MT747(final String sender, final String receiver) {
super(747, sender, receiver);
}
/**
* Creates a new MT747 by parsing a String with the message content in its swift FIN format.
* If the fin parameter is null or the message cannot be parsed, the internal message object
* will be initialized (blocks will be created) but empty.
* If the string contains multiple messages, only the first one will be parsed.
*
* @param fin a string with the MT message in its FIN swift format
* @since 7.7
*/
public MT747(final String fin) {
super();
if (fin != null) {
final SwiftMessage parsed = read(fin);
if (parsed != null) {
super.m = parsed;
sanityCheck(parsed);
}
}
}
private void sanityCheck(final SwiftMessage param) {
if (param.isServiceMessage()) {
log.warning("Creating an MT747 object from FIN content with a Service Message. Check if the MT747 you are intended to read is prepended with and ACK.");
} else if (!StringUtils.equals(param.getType(), "747")) {
log.warning("Creating an MT747 object from FIN content with message type "+param.getType());
}
}
/**
* Creates a new MT747 by parsing a String with the message content in its swift FIN format.
* If the fin parameter cannot be parsed, the returned MT747 will have its internal message object
* initialized (blocks will be created) but empty.
* If the string contains multiple messages, only the first one will be parsed.
*
* @param fin a string with the MT message in its FIN swift format. <em>fin may be null in which case this method returns null</em>
* @return a new instance of MT747 or null if fin is null
* @since 7.7
*/
public static MT747 parse(final String fin) {
if (fin == null) {
return null;
}
return new MT747(fin);
}
/**
 * Creates a new MT747 by parsing an input stream with the message content in its swift FIN format, using "UTF-8" as encoding.
* If the message content is null or cannot be parsed, the internal message object
* will be initialized (blocks will be created) but empty.
* If the stream contains multiple messages, only the first one will be parsed.
*
* @param stream an input stream in UTF-8 encoding with the MT message in its FIN swift format.
* @throws IOException if the stream data cannot be read
* @since 7.7
*/
public MT747(final InputStream stream) throws IOException {
this(Lib.readStream(stream));
}
/**
 * Creates a new MT747 by parsing an input stream with the message content in its swift FIN format, using "UTF-8" as encoding.
* If the stream contains multiple messages, only the first one will be parsed.
*
* @param stream an input stream in UTF-8 encoding with the MT message in its FIN swift format.
* @return a new instance of MT747 or null if stream is null or the message cannot be parsed
* @throws IOException if the stream data cannot be read
* @since 7.7
*/
public static MT747 parse(final InputStream stream) throws IOException {
if (stream == null) {
return null;
}
return new MT747(stream);
}
/**
* Creates a new MT747 by parsing a file with the message content in its swift FIN format.
* If the file content is null or cannot be parsed as a message, the internal message object
* will be initialized (blocks will be created) but empty.
* If the file contains multiple messages, only the first one will be parsed.
*
* @param file a file with the MT message in its FIN swift format.
* @throws IOException if the file content cannot be read
* @since 7.7
*/
public MT747(final File file) throws IOException {
this(Lib.readFile(file));
}
/**
* Creates a new MT747 by parsing a file with the message content in its swift FIN format.
* If the file contains multiple messages, only the first one will be parsed.
*
* @param file a file with the MT message in its FIN swift format.
 * @return a new instance of MT747 or null if the file is null, does not exist, cannot be read, is not a file, or the message cannot be parsed
* @throws IOException if the file content cannot be read
* @since 7.7
*/
public static MT747 parse(final File file) throws IOException {
if (file == null) {
return null;
}
return new MT747(file);
}
/**
* Returns this MT number.
* @return the message type number of this MT
* @since 6.4
*/
@Override
public String getMessageType() {
return "747";
}
/**
* Add all tags from block to the end of the block4.
*
* @param block to append
* @return this object to allow method chaining
* @since 7.6
*/
@Override
public MT747 append(final SwiftTagListBlock block) {
super.append(block);
return this;
}
/**
* Add all tags to the end of the block4.
*
* @param tags to append
* @return this object to allow method chaining
* @since 7.6
*/
@Override
public MT747 append(final Tag... tags) {
super.append(tags);
return this;
}
/**
* Add all the fields to the end of the block4.
*
* @param fields to append
* @return this object to allow method chaining
* @since 7.6
*/
@Override
public MT747 append(final Field... fields) {
super.append(fields);
return this;
}
/**
 * Creates an MT747 message from its JSON representation.
* <p>
* For generic conversion of JSON into the corresponding MT instance
* see {@link AbstractMT#fromJson(String)}
*
* @param json a JSON representation of an MT747 message
* @return a new instance of MT747
* @since 7.10.3
*/
public static MT747 fromJson(final String json) {
return (MT747) AbstractMT.fromJson(json);
}
/**
 * Iterates through block4 fields and returns the first one whose name matches 20,
* or null if none is found.
* The first occurrence of field 20 at MT747 is expected to be the only one.
*
* @return a Field20 object or null if the field is not found
* @see SwiftTagListBlock#getTagByName(String)
* @throws IllegalStateException if SwiftMessage object is not initialized
*/
public Field20 getField20() {
final Tag t = tag("20");
if (t != null) {
return new Field20(t.getValue());
} else {
return null;
}
}
/**
 * Iterates through block4 fields and returns the first one whose name matches 21,
* or null if none is found.
* The first occurrence of field 21 at MT747 is expected to be the only one.
*
* @return a Field21 object or null if the field is not found
* @see SwiftTagListBlock#getTagByName(String)
* @throws IllegalStateException if SwiftMessage object is not initialized
*/
public Field21 getField21() {
final Tag t = tag("21");
if (t != null) {
return new Field21(t.getValue());
} else {
return null;
}
}
/**
 * Iterates through block4 fields and returns the first one whose name matches 30,
* or null if none is found.
* The first occurrence of field 30 at MT747 is expected to be the only one.
*
* @return a Field30 object or null if the field is not found
* @see SwiftTagListBlock#getTagByName(String)
* @throws IllegalStateException if SwiftMessage object is not initialized
*/
public Field30 getField30() {
final Tag t = tag("30");
if (t != null) {
return new Field30(t.getValue());
} else {
return null;
}
}
/**
 * Iterates through block4 fields and returns the first one whose name matches 31E,
* or null if none is found.
* The first occurrence of field 31E at MT747 is expected to be the only one.
*
* @return a Field31E object or null if the field is not found
* @see SwiftTagListBlock#getTagByName(String)
* @throws IllegalStateException if SwiftMessage object is not initialized
*/
public Field31E getField31E() {
final Tag t = tag("31E");
if (t != null) {
return new Field31E(t.getValue());
} else {
return null;
}
}
/**
 * Iterates through block4 fields and returns the first one whose name matches 32B,
* or null if none is found.
* The first occurrence of field 32B at MT747 is expected to be the only one.
*
* @return a Field32B object or null if the field is not found
* @see SwiftTagListBlock#getTagByName(String)
* @throws IllegalStateException if SwiftMessage object is not initialized
*/
public Field32B getField32B() {
final Tag t = tag("32B");
if (t != null) {
return new Field32B(t.getValue());
} else {
return null;
}
}
/**
 * Iterates through block4 fields and returns the first one whose name matches 33B,
* or null if none is found.
* The first occurrence of field 33B at MT747 is expected to be the only one.
*
* @return a Field33B object or null if the field is not found
* @see SwiftTagListBlock#getTagByName(String)
* @throws IllegalStateException if SwiftMessage object is not initialized
*/
public Field33B getField33B() {
final Tag t = tag("33B");
if (t != null) {
return new Field33B(t.getValue());
} else {
return null;
}
}
/**
 * Iterates through block4 fields and returns the first one whose name matches 34B,
* or null if none is found.
* The first occurrence of field 34B at MT747 is expected to be the only one.
*
* @return a Field34B object or null if the field is not found
* @see SwiftTagListBlock#getTagByName(String)
* @throws IllegalStateException if SwiftMessage object is not initialized
*/
public Field34B getField34B() {
final Tag t = tag("34B");
if (t != null) {
return new Field34B(t.getValue());
} else {
return null;
}
}
/**
 * Iterates through block4 fields and returns the first one whose name matches 39A,
* or null if none is found.
* The first occurrence of field 39A at MT747 is expected to be the only one.
*
* @return a Field39A object or null if the field is not found
* @see SwiftTagListBlock#getTagByName(String)
* @throws IllegalStateException if SwiftMessage object is not initialized
*/
public Field39A getField39A() {
final Tag t = tag("39A");
if (t != null) {
return new Field39A(t.getValue());
} else {
return null;
}
}
/**
 * Iterates through block4 fields and returns the first one whose name matches 39C,
* or null if none is found.
* The first occurrence of field 39C at MT747 is expected to be the only one.
*
* @return a Field39C object or null if the field is not found
* @see SwiftTagListBlock#getTagByName(String)
* @throws IllegalStateException if SwiftMessage object is not initialized
*/
public Field39C getField39C() {
final Tag t = tag("39C");
if (t != null) {
return new Field39C(t.getValue());
} else {
return null;
}
}
/**
 * Iterates through block4 fields and returns the first one whose name matches 72Z,
* or null if none is found.
* The first occurrence of field 72Z at MT747 is expected to be the only one.
*
* @return a Field72Z object or null if the field is not found
* @see SwiftTagListBlock#getTagByName(String)
* @throws IllegalStateException if SwiftMessage object is not initialized
*/
public Field72Z getField72Z() {
final Tag t = tag("72Z");
if (t != null) {
return new Field72Z(t.getValue());
} else {
return null;
}
}
/**
 * Iterates through block4 fields and returns the first one whose name matches 77,
* or null if none is found.
* The first occurrence of field 77 at MT747 is expected to be the only one.
*
* @return a Field77 object or null if the field is not found
* @see SwiftTagListBlock#getTagByName(String)
* @throws IllegalStateException if SwiftMessage object is not initialized
*/
public Field77 getField77() {
final Tag t = tag("77");
if (t != null) {
return new Field77(t.getValue());
} else {
return null;
}
}
}
|
|
/*******************************************************************************
* Copyright (c) 2013 blinkbox Entertainment Limited. All rights reserved.
*******************************************************************************/
package com.blinkboxbooks.android.test.api;
import android.app.Activity;
import android.content.Context;
import com.blinkbox.books.test.MyShadowSystemClock;
import com.blinkboxbooks.android.api.BBBApiConstants;
import com.blinkboxbooks.android.api.model.BBBBookmark;
import com.blinkboxbooks.android.api.model.BBBBookmarkList;
import com.blinkboxbooks.android.api.model.BBBBusinessErrorsList;
import com.blinkboxbooks.android.api.net.BBBRequest;
import com.blinkboxbooks.android.api.net.BBBRequestFactory;
import com.blinkboxbooks.android.api.net.BBBRequestManager;
import com.blinkboxbooks.android.api.net.BBBResponse;
import com.blinkboxbooks.android.api.net.responsehandler.BBBBasicResponseHandler;
import com.blinkboxbooks.android.test.AccountHelper;
import com.blinkboxbooks.android.test.TestConstants;
import com.blinkboxbooks.android.util.BBBTextUtils;
import com.google.gson.Gson;
import junit.framework.TestCase;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import java.net.HttpURLConnection;
@RunWith(RobolectricTestRunner.class)
@Config(manifest=Config.NONE, shadows = { MyShadowSystemClock.class } )
public class BookmarkServiceApiTest extends TestCase implements TestConstants, BBBApiConstants {
private static BBBBookmark bookmark;
private static String lastSyncDateTime;
@Before
public void setUp() throws Exception {
super.setUp();
if(TestConstants.VOLLEY_ENABLED) {
Context context = new Activity();
BBBRequestManager.getInstance().initVolleyRequestQueue(context.getApplicationContext());
}
BBBRequestFactory.getInstance().setHostDefault(HOST);
BBBRequestFactory.getInstance().setHostBookmark(HOST_BOOKMARK);
BBBRequestManager.getInstance().setInterface(AccountHelper.getInstance());
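        // the bookmark endpoints require an authenticated user, so fetch an access token first if none is cached yet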
if(AccountHelper.getInstance().getAccessToken() == null) {
AuthenticationApiTest test = new AuthenticationApiTest();
test.setUp();
test.testAuthentication();
}
}
/*
* Tests getting all the bookmarks for the user
*/
@Test
public void testGetBookmarks() throws Exception {
BBBRequest request = BBBRequestFactory.getInstance().createGetBookmarksRequest(null, null, null);
String expectedUrl = AccountHelper.constructUrl(BBBRequestFactory.getInstance().getHostBookmark(), PATH_READING_MY_BOOKMARKS);
assertEquals(expectedUrl, request.getUrl());
BBBResponse response = BBBRequestManager.getInstance().executeRequestSynchronously(request);
bookmarkListHandler.receivedResponse(response);
}
/*
* Tests adding a bookmark
*/
@Test
public void testAddBookmark() throws Exception {
BBBRequest request = BBBRequestFactory.getInstance().createAddBookmarkRequest(ISBN_2, BOOKMARK_TYPE_LAST_READ_POSITION, USERNAME, "cfi_position"+Math.random(), "test bookmark"+Math.random(), null, null, null, 45, "preview");
String expectedUrl = AccountHelper.constructUrl(BBBRequestFactory.getInstance().getHostBookmark(), PATH_READING_MY_BOOKMARKS);
assertEquals(expectedUrl, request.getUrl());
BBBResponse response = BBBRequestManager.getInstance().executeRequestSynchronously(request);
if(response.getResponseCode() != HttpURLConnection.HTTP_CREATED) {
if(response.getResponseCode() == HttpURLConnection.HTTP_BAD_REQUEST && !BBBTextUtils.isEmpty(response.getResponseData())) {
BBBBusinessErrorsList businessErrorsList = new Gson().fromJson(response.getResponseData(), BBBBusinessErrorsList.class);
if(!businessErrorsList.containsError(ERROR_BOOKMARK_ALREAD_EXISTS)) {
fail("Error: "+response.toString());
}
} else {
fail("Error: "+response.toString());
}
}
}
/*
 * Tests getting the bookmarks updated since the last sync date
*/
@Test
public void testGetBookmarksWithSyncDate() throws Exception {
sleep(2000);
testGetBookmarks(); // this will set the lastSyncDateTime
sleep(2000);
BBBRequest request = BBBRequestFactory.getInstance().createGetBookmarksRequest(null, null, lastSyncDateTime);
String expectedUrl = AccountHelper.constructUrl(BBBRequestFactory.getInstance().getHostBookmark(), PATH_READING_MY_BOOKMARKS, PARAM_LAST_SYNC_DATE_TIME, lastSyncDateTime);
assertEquals(expectedUrl, request.getUrl());
BBBResponse response = BBBRequestManager.getInstance().executeRequestSynchronously(request);
BBBBookmarkList bookmarkList = new Gson().fromJson(response.getResponseData(), BBBBookmarkList.class);
if(bookmarkList.bookmarks != null && bookmarkList.bookmarks.length > 0) {
bookmark = bookmarkList.bookmarks[0];
}
}
/*
* Tests getting all the bookmarks of a particular type given an ISBN
*/
@Test
public void testGetBookmarksWithSyncDateAndISBN() throws Exception {
sleep(2000);
testAddBookmark();
sleep(2000);
BBBRequest request = BBBRequestFactory.getInstance().createGetBookmarksRequest(ISBN_2, null, lastSyncDateTime, BOOKMARK_TYPE_LAST_READ_POSITION);
String expectedUrl = AccountHelper.constructUrl(BBBRequestFactory.getInstance().getHostBookmark(), PATH_READING_MY_BOOKMARKS, PARAM_BOOK, ISBN_2, PARAM_LAST_SYNC_DATE_TIME, lastSyncDateTime, PARAM_BOOKMARK_TYPE, BOOKMARK_TYPE_LAST_READ_POSITION);
assertEquals(expectedUrl, request.getUrl());
BBBResponse response = BBBRequestManager.getInstance().executeRequestSynchronously(request);
BBBBookmarkList bookmarkList = new Gson().fromJson(response.getResponseData(), BBBBookmarkList.class);
assertNotNull(bookmarkList.bookmarks);
assertEquals(1, bookmarkList.bookmarks.length);
assertEquals(ISBN_2, bookmarkList.bookmarks[0].book);
}
private void sleep(long time) {
try {
Thread.sleep(time);
} catch(InterruptedException e) {}
}
/*
* Tests getting a single bookmark
*/
@Test
public void testGetBookmark() throws Exception {
if(bookmark == null) {
return;
}
BBBRequest request = BBBRequestFactory.getInstance().createGetBookmarkRequest(bookmark.id);
String expectedUrl = AccountHelper.constructUrl(BBBRequestFactory.getInstance().getHostBookmark(), String.format(PATH_READING_MY_BOOKMARKS_ITEM, bookmark.id));
assertEquals(expectedUrl, request.getUrl());
BBBResponse response = BBBRequestManager.getInstance().executeRequestSynchronously(request);
bookmarkHandler.receivedResponse(response);
}
/*
* Tests updating a bookmark
*/
@Test
public void testUpdateBookmark() throws Exception {
if(bookmark == null) {
return;
}
BBBRequest request = BBBRequestFactory.getInstance().createUpdateBookmarkRequest(bookmark.id, "HIGHLIGHT", USERNAME, "updated_CFI", "updated Name", null, null, null, 65, "updated preview");
String expectedUrl = AccountHelper.constructUrl(BBBRequestFactory.getInstance().getHostBookmark(), String.format(PATH_READING_MY_BOOKMARKS_ITEM, bookmark.id));
assertEquals(expectedUrl, request.getUrl());
BBBResponse response = BBBRequestManager.getInstance().executeRequestSynchronously(request);
if(response.getResponseCode() != HttpURLConnection.HTTP_OK) {
fail("Error: "+response.toString());
}
}
/*
* Tests deleting a bookmark
*/
@Test
public void testDeleteBookmark() throws Exception {
if(bookmark == null) {
return;
}
BBBRequest request = BBBRequestFactory.getInstance().createDeleteBookmarkRequest(bookmark.id, USERNAME);
String expectedUrl = AccountHelper.constructUrl(BBBRequestFactory.getInstance().getHostBookmark(), String.format(PATH_READING_MY_BOOKMARKS_ITEM, bookmark.id), PARAM_DELETED_BY, USERNAME);
assertEquals(expectedUrl, request.getUrl());
BBBResponse response = BBBRequestManager.getInstance().executeRequestSynchronously(request);
if(response.getResponseCode() >= HttpURLConnection.HTTP_MULT_CHOICE) {
fail("Error: "+response.toString());
}
}
/*
* Response handler for receiving a single bookmark
*/
private final BBBBasicResponseHandler<BBBBookmark> bookmarkHandler = new BBBBasicResponseHandler<BBBBookmark>() {
public void receivedData(BBBResponse response, BBBBookmark bookmarkList) {
assertNotNull(bookmarkList); //the parsed bookmark should not be null if parsing was successful
assertTrue(response.getResponseCode() == HttpURLConnection.HTTP_OK);
}
public void receivedError(BBBResponse response) {
fail("Error: "+response.toString());
}
};
/*
 * Response handler for receiving a list of bookmarks
*/
private final BBBBasicResponseHandler<BBBBookmarkList> bookmarkListHandler = new BBBBasicResponseHandler<BBBBookmarkList>() {
public void receivedData(BBBResponse response, BBBBookmarkList bookmarkList) {
assertNotNull(bookmarkList); //the parsed bookmark list should not be null if parsing was successful
assertTrue(response.getResponseCode() == HttpURLConnection.HTTP_OK);
lastSyncDateTime = bookmarkList.lastSyncDateTime;
if(bookmarkList.bookmarks != null && bookmarkList.bookmarks.length > 0) {
bookmark = bookmarkList.bookmarks[0];
}
}
public void receivedError(BBBResponse response) {
fail("Error: "+response.toString());
}
};
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.kstream.internals;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KeyValueTimestamp;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.errors.TopologyException;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Grouped;
import org.apache.kafka.streams.kstream.KGroupedStream;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.SessionWindows;
import org.apache.kafka.streams.kstream.SlidingWindows;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.kstream.Windows;
import org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.SessionStore;
import org.apache.kafka.streams.state.ValueAndTimestamp;
import org.apache.kafka.test.MockAggregator;
import org.apache.kafka.test.MockInitializer;
import org.apache.kafka.test.MockProcessorSupplier;
import org.apache.kafka.test.MockReducer;
import org.apache.kafka.test.StreamsTestUtils;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Map;
import java.util.Properties;
import static java.time.Duration.ofMillis;
import static org.apache.kafka.test.StreamsTestUtils.getMetricByName;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThrows;
public class KGroupedStreamImplTest {
private static final String TOPIC = "topic";
private static final String INVALID_STORE_NAME = "~foo bar~";
private final StreamsBuilder builder = new StreamsBuilder();
private KGroupedStream<String, String> groupedStream;
private final Properties props = StreamsTestUtils.getStreamsConfig(Serdes.String(), Serdes.String());
@Before
public void before() {
final KStream<String, String> stream = builder.stream(TOPIC, Consumed.with(Serdes.String(), Serdes.String()));
groupedStream = stream.groupByKey(Grouped.with(Serdes.String(), Serdes.String()));
}
@Test
public void shouldNotHaveNullAggregatorOnCogroup() {
assertThrows(NullPointerException.class, () -> groupedStream.cogroup(null));
}
@Test
public void shouldNotHaveNullReducerOnReduce() {
assertThrows(NullPointerException.class, () -> groupedStream.reduce(null));
}
@Test
public void shouldNotHaveInvalidStoreNameOnReduce() {
assertThrows(TopologyException.class, () -> groupedStream.reduce(MockReducer.STRING_ADDER, Materialized.as(INVALID_STORE_NAME)));
}
@Test
public void shouldNotHaveNullReducerWithWindowedReduce() {
assertThrows(NullPointerException.class, () -> groupedStream
.windowedBy(TimeWindows.of(ofMillis(10)))
.reduce(null, Materialized.as("store")));
}
@Test
public void shouldNotHaveNullWindowsWithWindowedReduce() {
assertThrows(NullPointerException.class, () -> groupedStream.windowedBy((Windows<?>) null));
}
@Test
public void shouldNotHaveInvalidStoreNameWithWindowedReduce() {
assertThrows(TopologyException.class, () -> groupedStream
.windowedBy(TimeWindows.of(ofMillis(10)))
.reduce(MockReducer.STRING_ADDER, Materialized.as(INVALID_STORE_NAME)));
}
@Test
public void shouldNotHaveNullInitializerOnAggregate() {
assertThrows(NullPointerException.class, () -> groupedStream.aggregate(null, MockAggregator.TOSTRING_ADDER, Materialized.as("store")));
}
@Test
public void shouldNotHaveNullAdderOnAggregate() {
assertThrows(NullPointerException.class, () -> groupedStream.aggregate(MockInitializer.STRING_INIT, null, Materialized.as("store")));
}
@Test
public void shouldNotHaveInvalidStoreNameOnAggregate() {
assertThrows(TopologyException.class, () -> groupedStream.aggregate(
MockInitializer.STRING_INIT,
MockAggregator.TOSTRING_ADDER,
Materialized.as(INVALID_STORE_NAME)));
}
@Test
public void shouldNotHaveNullInitializerOnWindowedAggregate() {
assertThrows(NullPointerException.class, () -> groupedStream
.windowedBy(TimeWindows.of(ofMillis(10)))
.aggregate(null, MockAggregator.TOSTRING_ADDER, Materialized.as("store")));
}
@Test
public void shouldNotHaveNullAdderOnWindowedAggregate() {
assertThrows(NullPointerException.class, () -> groupedStream
.windowedBy(TimeWindows.of(ofMillis(10)))
.aggregate(MockInitializer.STRING_INIT, null, Materialized.as("store")));
}
@Test
public void shouldNotHaveNullWindowsOnWindowedAggregate() {
assertThrows(NullPointerException.class, () -> groupedStream.windowedBy((Windows<?>) null));
}
@Test
public void shouldNotHaveInvalidStoreNameOnWindowedAggregate() {
assertThrows(TopologyException.class, () -> groupedStream
.windowedBy(TimeWindows.of(ofMillis(10)))
.aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.as(INVALID_STORE_NAME)));
}
@Test
public void shouldNotHaveNullReducerWithSlidingWindowedReduce() {
assertThrows(NullPointerException.class, () -> groupedStream
.windowedBy(SlidingWindows.withTimeDifferenceAndGrace(ofMillis(10), ofMillis(100)))
.reduce(null, Materialized.as("store")));
}
@Test
public void shouldNotHaveNullWindowsWithSlidingWindowedReduce() {
assertThrows(NullPointerException.class, () -> groupedStream.windowedBy((SlidingWindows) null));
}
@Test
public void shouldNotHaveInvalidStoreNameWithSlidingWindowedReduce() {
assertThrows(TopologyException.class, () -> groupedStream
.windowedBy(SlidingWindows.withTimeDifferenceAndGrace(ofMillis(10), ofMillis(100)))
.reduce(MockReducer.STRING_ADDER, Materialized.as(INVALID_STORE_NAME)));
}
@Test
public void shouldNotHaveNullInitializerOnSlidingWindowedAggregate() {
assertThrows(NullPointerException.class, () -> groupedStream
.windowedBy(SlidingWindows.withTimeDifferenceAndGrace(ofMillis(10), ofMillis(100)))
.aggregate(null, MockAggregator.TOSTRING_ADDER, Materialized.as("store")));
}
@Test
public void shouldNotHaveNullAdderOnSlidingWindowedAggregate() {
assertThrows(NullPointerException.class, () -> groupedStream
.windowedBy(SlidingWindows.withTimeDifferenceAndGrace(ofMillis(10), ofMillis(100)))
.aggregate(MockInitializer.STRING_INIT, null, Materialized.as("store")));
}
@Test
public void shouldNotHaveInvalidStoreNameOnSlidingWindowedAggregate() {
assertThrows(TopologyException.class, () -> groupedStream
.windowedBy(SlidingWindows.withTimeDifferenceAndGrace(ofMillis(10), ofMillis(100)))
.aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.as(INVALID_STORE_NAME)));
}
@Test
public void shouldCountSlidingWindows() {
final MockProcessorSupplier<Windowed<String>, Long> supplier = new MockProcessorSupplier<>();
groupedStream
.windowedBy(SlidingWindows.withTimeDifferenceAndGrace(ofMillis(500L), ofMillis(2000L)))
.count(Materialized.as("aggregate-by-key-windowed"))
.toStream()
.process(supplier);
doCountSlidingWindows(supplier);
}
@Test
public void shouldCountSlidingWindowsWithInternalStoreName() {
final MockProcessorSupplier<Windowed<String>, Long> supplier = new MockProcessorSupplier<>();
groupedStream
.windowedBy(SlidingWindows.withTimeDifferenceAndGrace(ofMillis(500L), ofMillis(2000L)))
.count()
.toStream()
.process(supplier);
doCountSlidingWindows(supplier);
}
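    // Sliding windows with a 500 ms time difference: each record is counted in the window that ends at its own
    // timestamp and also updates the overlapping windows created for earlier records of the same key, which is
    // why a single input record can account for several of the (window, count, timestamp) entries asserted below.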
private void doCountSlidingWindows(final MockProcessorSupplier<Windowed<String>, Long> supplier) {
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<String, String> inputTopic =
driver.createInputTopic(TOPIC, new StringSerializer(), new StringSerializer());
inputTopic.pipeInput("1", "A", 500L);
inputTopic.pipeInput("1", "A", 999L);
inputTopic.pipeInput("1", "A", 600L);
inputTopic.pipeInput("2", "B", 500L);
inputTopic.pipeInput("2", "B", 600L);
inputTopic.pipeInput("2", "B", 700L);
inputTopic.pipeInput("3", "C", 501L);
inputTopic.pipeInput("1", "A", 1000L);
inputTopic.pipeInput("1", "A", 1000L);
inputTopic.pipeInput("2", "B", 1000L);
inputTopic.pipeInput("2", "B", 1000L);
inputTopic.pipeInput("3", "C", 600L);
}
final Comparator<KeyValueTimestamp<Windowed<String>, Long>> comparator =
Comparator.comparing((KeyValueTimestamp<Windowed<String>, Long> o) -> o.key().key())
.thenComparing((KeyValueTimestamp<Windowed<String>, Long> o) -> o.key().window().start());
final ArrayList<KeyValueTimestamp<Windowed<String>, Long>> actual = supplier.theCapturedProcessor().processed();
actual.sort(comparator);
assertThat(actual, equalTo(Arrays.asList(
// processing A@500
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(0L, 500L)), 1L, 500L),
// processing A@600
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(100L, 600L)), 2L, 600L),
// processing A@999
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(499L, 999L)), 2L, 999L),
// processing A@600
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(499L, 999L)), 3L, 999L),
// processing first A@1000
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(500L, 1000L)), 4L, 1000L),
// processing second A@1000
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(500L, 1000L)), 5L, 1000L),
// processing A@999
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(501L, 1001L)), 1L, 999L),
// processing A@600
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(501L, 1001L)), 2L, 999L),
// processing first A@1000
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(501L, 1001L)), 3L, 1000L),
// processing second A@1000
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(501L, 1001L)), 4L, 1000L),
// processing A@600
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(601L, 1101L)), 1L, 999L),
// processing first A@1000
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(601L, 1101L)), 2L, 1000L),
// processing second A@1000
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(601L, 1101L)), 3L, 1000L),
// processing first A@1000
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(1000L, 1500L)), 1L, 1000L),
// processing second A@1000
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(1000L, 1500L)), 2L, 1000L),
// processing B@500
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(0L, 500L)), 1L, 500L),
// processing B@600
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(100L, 600L)), 2L, 600L),
// processing B@700
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(200L, 700L)), 3L, 700L),
// processing first B@1000
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(500L, 1000L)), 4L, 1000L),
// processing second B@1000
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(500L, 1000L)), 5L, 1000L),
// processing B@600
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(501L, 1001L)), 1L, 600L),
// processing B@700
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(501L, 1001L)), 2L, 700L),
// processing first B@1000
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(501L, 1001L)), 3L, 1000L),
// processing second B@1000
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(501L, 1001L)), 4L, 1000L),
// processing B@700
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(601L, 1101L)), 1L, 700L),
// processing first B@1000
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(601L, 1101)), 2L, 1000L),
// processing second B@1000
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(601L, 1101)), 3L, 1000L),
// processing first B@1000
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(701L, 1201L)), 1L, 1000L),
// processing second B@1000
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(701L, 1201L)), 2L, 1000L),
// processing C@501
new KeyValueTimestamp<>(new Windowed<>("3", new TimeWindow(1L, 501L)), 1L, 501L),
// processing C@600
new KeyValueTimestamp<>(new Windowed<>("3", new TimeWindow(100L, 600L)), 2L, 600L),
// processing C@600
new KeyValueTimestamp<>(new Windowed<>("3", new TimeWindow(502L, 1002L)), 1L, 600L)
)));
}
private void doAggregateSessionWindows(final MockProcessorSupplier<Windowed<String>, Integer> supplier) {
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<String, String> inputTopic =
driver.createInputTopic(TOPIC, new StringSerializer(), new StringSerializer());
inputTopic.pipeInput("1", "1", 10);
inputTopic.pipeInput("2", "2", 15);
inputTopic.pipeInput("1", "1", 30);
inputTopic.pipeInput("1", "1", 70);
inputTopic.pipeInput("1", "1", 100);
inputTopic.pipeInput("1", "1", 90);
}
final Map<Windowed<String>, ValueAndTimestamp<Integer>> result
= supplier.theCapturedProcessor().lastValueAndTimestampPerKey();
assertEquals(
ValueAndTimestamp.make(2, 30L),
result.get(new Windowed<>("1", new SessionWindow(10L, 30L))));
assertEquals(
ValueAndTimestamp.make(1, 15L),
result.get(new Windowed<>("2", new SessionWindow(15L, 15L))));
assertEquals(
ValueAndTimestamp.make(3, 100L),
result.get(new Windowed<>("1", new SessionWindow(70L, 100L))));
}
@Test
public void shouldAggregateSessionWindows() {
final MockProcessorSupplier<Windowed<String>, Integer> supplier = new MockProcessorSupplier<>();
final KTable<Windowed<String>, Integer> table = groupedStream
.windowedBy(SessionWindows.with(ofMillis(30)))
.aggregate(
() -> 0,
(aggKey, value, aggregate) -> aggregate + 1,
(aggKey, aggOne, aggTwo) -> aggOne + aggTwo,
Materialized
                .<String, Integer, SessionStore<Bytes, byte[]>>as("session-store")
                .withValueSerde(Serdes.Integer()));
table.toStream().process(supplier);
doAggregateSessionWindows(supplier);
assertEquals("session-store", table.queryableStoreName());
}
@Test
public void shouldAggregateSessionWindowsWithInternalStoreName() {
final MockProcessorSupplier<Windowed<String>, Integer> supplier = new MockProcessorSupplier<>();
final KTable<Windowed<String>, Integer> table = groupedStream
.windowedBy(SessionWindows.with(ofMillis(30)))
.aggregate(
() -> 0,
(aggKey, value, aggregate) -> aggregate + 1,
(aggKey, aggOne, aggTwo) -> aggOne + aggTwo,
Materialized.with(null, Serdes.Integer()));
table.toStream().process(supplier);
doAggregateSessionWindows(supplier);
}
private void doCountSessionWindows(final MockProcessorSupplier<Windowed<String>, Long> supplier) {
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<String, String> inputTopic =
driver.createInputTopic(TOPIC, new StringSerializer(), new StringSerializer());
inputTopic.pipeInput("1", "1", 10);
inputTopic.pipeInput("2", "2", 15);
inputTopic.pipeInput("1", "1", 30);
inputTopic.pipeInput("1", "1", 70);
inputTopic.pipeInput("1", "1", 100);
inputTopic.pipeInput("1", "1", 90);
}
final Map<Windowed<String>, ValueAndTimestamp<Long>> result =
supplier.theCapturedProcessor().lastValueAndTimestampPerKey();
assertEquals(
ValueAndTimestamp.make(2L, 30L),
result.get(new Windowed<>("1", new SessionWindow(10L, 30L))));
assertEquals(
ValueAndTimestamp.make(1L, 15L),
result.get(new Windowed<>("2", new SessionWindow(15L, 15L))));
assertEquals(
ValueAndTimestamp.make(3L, 100L),
result.get(new Windowed<>("1", new SessionWindow(70L, 100L))));
}
@Test
public void shouldCountSessionWindows() {
final MockProcessorSupplier<Windowed<String>, Long> supplier = new MockProcessorSupplier<>();
final KTable<Windowed<String>, Long> table = groupedStream
.windowedBy(SessionWindows.with(ofMillis(30)))
.count(Materialized.as("session-store"));
table.toStream().process(supplier);
doCountSessionWindows(supplier);
assertEquals("session-store", table.queryableStoreName());
}
@Test
public void shouldCountSessionWindowsWithInternalStoreName() {
final MockProcessorSupplier<Windowed<String>, Long> supplier = new MockProcessorSupplier<>();
final KTable<Windowed<String>, Long> table = groupedStream
.windowedBy(SessionWindows.with(ofMillis(30)))
.count();
table.toStream().process(supplier);
doCountSessionWindows(supplier);
assertNull(table.queryableStoreName());
}
private void doReduceSessionWindows(final MockProcessorSupplier<Windowed<String>, String> supplier) {
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<String, String> inputTopic =
driver.createInputTopic(TOPIC, new StringSerializer(), new StringSerializer());
inputTopic.pipeInput("1", "A", 10);
inputTopic.pipeInput("2", "Z", 15);
inputTopic.pipeInput("1", "B", 30);
inputTopic.pipeInput("1", "A", 70);
inputTopic.pipeInput("1", "B", 100);
inputTopic.pipeInput("1", "C", 90);
}
final Map<Windowed<String>, ValueAndTimestamp<String>> result =
supplier.theCapturedProcessor().lastValueAndTimestampPerKey();
assertEquals(
ValueAndTimestamp.make("A:B", 30L),
result.get(new Windowed<>("1", new SessionWindow(10L, 30L))));
assertEquals(
ValueAndTimestamp.make("Z", 15L),
result.get(new Windowed<>("2", new SessionWindow(15L, 15L))));
assertEquals(
ValueAndTimestamp.make("A:B:C", 100L),
result.get(new Windowed<>("1", new SessionWindow(70L, 100L))));
}
@Test
public void shouldReduceSessionWindows() {
final MockProcessorSupplier<Windowed<String>, String> supplier = new MockProcessorSupplier<>();
final KTable<Windowed<String>, String> table = groupedStream
.windowedBy(SessionWindows.with(ofMillis(30)))
.reduce((value1, value2) -> value1 + ":" + value2, Materialized.as("session-store"));
table.toStream().process(supplier);
doReduceSessionWindows(supplier);
assertEquals("session-store", table.queryableStoreName());
}
@Test
public void shouldReduceSessionWindowsWithInternalStoreName() {
final MockProcessorSupplier<Windowed<String>, String> supplier = new MockProcessorSupplier<>();
final KTable<Windowed<String>, String> table = groupedStream
.windowedBy(SessionWindows.with(ofMillis(30)))
.reduce((value1, value2) -> value1 + ":" + value2);
table.toStream().process(supplier);
doReduceSessionWindows(supplier);
assertNull(table.queryableStoreName());
}
@Test
public void shouldNotAcceptNullReducerWhenReducingSessionWindows() {
assertThrows(NullPointerException.class, () -> groupedStream
.windowedBy(SessionWindows.with(ofMillis(30)))
.reduce(null, Materialized.as("store")));
}
@Test
public void shouldNotAcceptNullSessionWindowsReducingSessionWindows() {
assertThrows(NullPointerException.class, () -> groupedStream.windowedBy((SessionWindows) null));
}
@Test
public void shouldNotAcceptInvalidStoreNameWhenReducingSessionWindows() {
assertThrows(TopologyException.class, () -> groupedStream
.windowedBy(SessionWindows.with(ofMillis(30)))
.reduce(MockReducer.STRING_ADDER, Materialized.as(INVALID_STORE_NAME))
);
}
@Test
public void shouldNotAcceptNullStateStoreSupplierWhenReducingSessionWindows() {
assertThrows(NullPointerException.class, () -> groupedStream
.windowedBy(SessionWindows.with(ofMillis(30)))
.reduce(null, Materialized.<String, String, SessionStore<Bytes, byte[]>>as(null))
);
}
@Test
public void shouldNotAcceptNullInitializerWhenAggregatingSessionWindows() {
assertThrows(NullPointerException.class, () -> groupedStream
.windowedBy(SessionWindows.with(ofMillis(30)))
.aggregate(null, MockAggregator.TOSTRING_ADDER, (aggKey, aggOne, aggTwo) -> null, Materialized.as("storeName"))
);
}
@Test
public void shouldNotAcceptNullAggregatorWhenAggregatingSessionWindows() {
        assertThrows(NullPointerException.class, () -> groupedStream
            .windowedBy(SessionWindows.with(ofMillis(30)))
.aggregate(MockInitializer.STRING_INIT, null, (aggKey, aggOne, aggTwo) -> null, Materialized.as("storeName"))
);
}
@Test
public void shouldNotAcceptNullSessionMergerWhenAggregatingSessionWindows() {
assertThrows(NullPointerException.class, () -> groupedStream
.windowedBy(SessionWindows.with(ofMillis(30)))
.aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, null, Materialized.as("storeName"))
);
}
@Test
public void shouldNotAcceptNullSessionWindowsWhenAggregatingSessionWindows() {
assertThrows(NullPointerException.class, () -> groupedStream.windowedBy((SessionWindows) null));
}
@Test
public void shouldAcceptNullStoreNameWhenAggregatingSessionWindows() {
groupedStream
.windowedBy(SessionWindows.with(ofMillis(10)))
.aggregate(
MockInitializer.STRING_INIT,
MockAggregator.TOSTRING_ADDER,
(aggKey, aggOne, aggTwo) -> null, Materialized.with(Serdes.String(), Serdes.String())
);
}
@Test
public void shouldNotAcceptInvalidStoreNameWhenAggregatingSessionWindows() {
assertThrows(TopologyException.class, () -> groupedStream
.windowedBy(SessionWindows.with(ofMillis(10)))
.aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, (aggKey, aggOne, aggTwo) -> null, Materialized.as(INVALID_STORE_NAME))
);
}
@Test
public void shouldThrowNullPointerOnReduceWhenMaterializedIsNull() {
assertThrows(NullPointerException.class, () -> groupedStream.reduce(MockReducer.STRING_ADDER, null));
}
@Test
public void shouldThrowNullPointerOnAggregateWhenMaterializedIsNull() {
assertThrows(NullPointerException.class, () -> groupedStream.aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, null));
}
@Test
public void shouldThrowNullPointerOnCountWhenMaterializedIsNull() {
assertThrows(NullPointerException.class, () -> groupedStream.count((Materialized<String, Long, KeyValueStore<Bytes, byte[]>>) null));
}
@Test
public void shouldCountAndMaterializeResults() {
groupedStream.count(Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("count").withKeySerde(Serdes.String()));
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
processData(driver);
{
final KeyValueStore<String, Long> count = driver.getKeyValueStore("count");
assertThat(count.get("1"), equalTo(3L));
assertThat(count.get("2"), equalTo(1L));
assertThat(count.get("3"), equalTo(2L));
}
{
final KeyValueStore<String, ValueAndTimestamp<Long>> count = driver.getTimestampedKeyValueStore("count");
assertThat(count.get("1"), equalTo(ValueAndTimestamp.make(3L, 10L)));
assertThat(count.get("2"), equalTo(ValueAndTimestamp.make(1L, 1L)));
assertThat(count.get("3"), equalTo(ValueAndTimestamp.make(2L, 9L)));
}
}
}
@Test
public void shouldLogAndMeasureSkipsInAggregateWithBuiltInMetricsVersion0100To24() {
shouldLogAndMeasureSkipsInAggregate(StreamsConfig.METRICS_0100_TO_24);
}
@Test
public void shouldLogAndMeasureSkipsInAggregateWithBuiltInMetricsVersionLatest() {
shouldLogAndMeasureSkipsInAggregate(StreamsConfig.METRICS_LATEST);
}
private void shouldLogAndMeasureSkipsInAggregate(final String builtInMetricsVersion) {
groupedStream.count(Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("count").withKeySerde(Serdes.String()));
props.setProperty(StreamsConfig.BUILT_IN_METRICS_VERSION_CONFIG, builtInMetricsVersion);
try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamAggregate.class);
final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
processData(driver);
if (StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion)) {
final Map<MetricName, ? extends Metric> metrics = driver.metrics();
assertEquals(
1.0,
getMetricByName(metrics, "skipped-records-total", "stream-metrics").metricValue()
);
assertNotEquals(
0.0,
getMetricByName(metrics, "skipped-records-rate", "stream-metrics").metricValue()
);
}
assertThat(
appender.getMessages(),
hasItem("Skipping record due to null key or value. key=[3] value=[null] topic=[topic] partition=[0] "
+ "offset=[6]")
);
}
}
@Test
public void shouldReduceAndMaterializeResults() {
groupedStream.reduce(
MockReducer.STRING_ADDER,
Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("reduce")
.withKeySerde(Serdes.String())
.withValueSerde(Serdes.String()));
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
processData(driver);
{
final KeyValueStore<String, String> reduced = driver.getKeyValueStore("reduce");
assertThat(reduced.get("1"), equalTo("A+C+D"));
assertThat(reduced.get("2"), equalTo("B"));
assertThat(reduced.get("3"), equalTo("E+F"));
}
{
final KeyValueStore<String, ValueAndTimestamp<String>> reduced = driver.getTimestampedKeyValueStore("reduce");
assertThat(reduced.get("1"), equalTo(ValueAndTimestamp.make("A+C+D", 10L)));
assertThat(reduced.get("2"), equalTo(ValueAndTimestamp.make("B", 1L)));
assertThat(reduced.get("3"), equalTo(ValueAndTimestamp.make("E+F", 9L)));
}
}
}
@Test
public void shouldLogAndMeasureSkipsInReduceWithBuiltInMetricsVersion0100To24() {
shouldLogAndMeasureSkipsInReduce(StreamsConfig.METRICS_0100_TO_24);
}
@Test
public void shouldLogAndMeasureSkipsInReduceWithBuiltInMetricsVersionLatest() {
shouldLogAndMeasureSkipsInReduce(StreamsConfig.METRICS_LATEST);
}
private void shouldLogAndMeasureSkipsInReduce(final String builtInMetricsVersion) {
groupedStream.reduce(
MockReducer.STRING_ADDER,
Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("reduce")
.withKeySerde(Serdes.String())
.withValueSerde(Serdes.String())
);
props.setProperty(StreamsConfig.BUILT_IN_METRICS_VERSION_CONFIG, builtInMetricsVersion);
try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamReduce.class);
final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
processData(driver);
if (StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion)) {
final Map<MetricName, ? extends Metric> metrics = driver.metrics();
assertEquals(
1.0,
getMetricByName(metrics, "skipped-records-total", "stream-metrics").metricValue()
);
assertNotEquals(
0.0,
getMetricByName(metrics, "skipped-records-rate", "stream-metrics").metricValue()
);
}
assertThat(
appender.getMessages(),
hasItem("Skipping record due to null key or value. key=[3] value=[null] topic=[topic] partition=[0] "
+ "offset=[6]")
);
}
}
@Test
public void shouldAggregateAndMaterializeResults() {
groupedStream.aggregate(
MockInitializer.STRING_INIT,
MockAggregator.TOSTRING_ADDER,
Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("aggregate")
.withKeySerde(Serdes.String())
.withValueSerde(Serdes.String()));
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
processData(driver);
{
final KeyValueStore<String, String> aggregate = driver.getKeyValueStore("aggregate");
assertThat(aggregate.get("1"), equalTo("0+A+C+D"));
assertThat(aggregate.get("2"), equalTo("0+B"));
assertThat(aggregate.get("3"), equalTo("0+E+F"));
}
{
final KeyValueStore<String, ValueAndTimestamp<String>> aggregate = driver.getTimestampedKeyValueStore("aggregate");
assertThat(aggregate.get("1"), equalTo(ValueAndTimestamp.make("0+A+C+D", 10L)));
assertThat(aggregate.get("2"), equalTo(ValueAndTimestamp.make("0+B", 1L)));
assertThat(aggregate.get("3"), equalTo(ValueAndTimestamp.make("0+E+F", 9L)));
}
}
}
@Test
public void shouldAggregateWithDefaultSerdes() {
final MockProcessorSupplier<String, String> supplier = new MockProcessorSupplier<>();
groupedStream
.aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER)
.toStream()
.process(supplier);
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
processData(driver);
assertThat(
supplier.theCapturedProcessor().lastValueAndTimestampPerKey().get("1"),
equalTo(ValueAndTimestamp.make("0+A+C+D", 10L)));
assertThat(
supplier.theCapturedProcessor().lastValueAndTimestampPerKey().get("2"),
equalTo(ValueAndTimestamp.make("0+B", 1L)));
assertThat(
supplier.theCapturedProcessor().lastValueAndTimestampPerKey().get("3"),
equalTo(ValueAndTimestamp.make("0+E+F", 9L)));
}
}
private void processData(final TopologyTestDriver driver) {
final TestInputTopic<String, String> inputTopic =
driver.createInputTopic(TOPIC, new StringSerializer(), new StringSerializer());
inputTopic.pipeInput("1", "A", 5L);
inputTopic.pipeInput("2", "B", 1L);
inputTopic.pipeInput("1", "C", 3L);
inputTopic.pipeInput("1", "D", 10L);
inputTopic.pipeInput("3", "E", 8L);
inputTopic.pipeInput("3", "F", 9L);
inputTopic.pipeInput("3", (String) null);
}
private void doCountWindowed(final MockProcessorSupplier<Windowed<String>, Long> supplier) {
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<String, String> inputTopic =
driver.createInputTopic(TOPIC, new StringSerializer(), new StringSerializer());
inputTopic.pipeInput("1", "A", 0L);
inputTopic.pipeInput("1", "A", 499L);
inputTopic.pipeInput("1", "A", 100L);
inputTopic.pipeInput("2", "B", 0L);
inputTopic.pipeInput("2", "B", 100L);
inputTopic.pipeInput("2", "B", 200L);
inputTopic.pipeInput("3", "C", 1L);
inputTopic.pipeInput("1", "A", 500L);
inputTopic.pipeInput("1", "A", 500L);
inputTopic.pipeInput("2", "B", 500L);
inputTopic.pipeInput("2", "B", 500L);
inputTopic.pipeInput("3", "B", 100L);
}
assertThat(supplier.theCapturedProcessor().processed(), equalTo(Arrays.asList(
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(0L, 500L)), 1L, 0L),
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(0L, 500L)), 2L, 499L),
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(0L, 500L)), 3L, 499L),
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(0L, 500L)), 1L, 0L),
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(0L, 500L)), 2L, 100L),
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(0L, 500L)), 3L, 200L),
new KeyValueTimestamp<>(new Windowed<>("3", new TimeWindow(0L, 500L)), 1L, 1L),
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(500L, 1000L)), 1L, 500L),
new KeyValueTimestamp<>(new Windowed<>("1", new TimeWindow(500L, 1000L)), 2L, 500L),
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(500L, 1000L)), 1L, 500L),
new KeyValueTimestamp<>(new Windowed<>("2", new TimeWindow(500L, 1000L)), 2L, 500L),
new KeyValueTimestamp<>(new Windowed<>("3", new TimeWindow(0L, 500L)), 2L, 100L)
)));
}
@Test
public void shouldCountWindowed() {
final MockProcessorSupplier<Windowed<String>, Long> supplier = new MockProcessorSupplier<>();
groupedStream
.windowedBy(TimeWindows.of(ofMillis(500L)))
.count(Materialized.as("aggregate-by-key-windowed"))
.toStream()
.process(supplier);
doCountWindowed(supplier);
}
@Test
public void shouldCountWindowedWithInternalStoreName() {
final MockProcessorSupplier<Windowed<String>, Long> supplier = new MockProcessorSupplier<>();
groupedStream
.windowedBy(TimeWindows.of(ofMillis(500L)))
.count()
.toStream()
.process(supplier);
doCountWindowed(supplier);
}
}
|
|
/*
* Copyright (c) 2021, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.alg.sfm.d2;
import boofcv.abst.sfm.d2.ImageMotion2D;
import boofcv.alg.distort.ImageDistort;
import boofcv.alg.interpolate.InterpolatePixelS;
import boofcv.alg.interpolate.InterpolationType;
import boofcv.alg.misc.ImageMiscOps;
import boofcv.factory.distort.FactoryDistort;
import boofcv.factory.interpolate.FactoryInterpolation;
import boofcv.struct.border.BorderType;
import boofcv.struct.distort.PixelTransform;
import boofcv.struct.image.GrayF32;
import boofcv.struct.image.GrayU8;
import boofcv.testing.BoofStandardJUnit;
import georegression.struct.affine.Affine2D_F64;
import georegression.struct.point.Point2D_F32;
import georegression.struct.shapes.Quadrilateral_F64;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
/**
* @author Peter Abeles
*/
public class TestStitchingFromMotion2D extends BoofStandardJUnit {
GrayF32 image = new GrayF32(100, 150);
Affine2D_F64 translation = new Affine2D_F64(1, 0, 0, 1, 1, -2);
Affine2D_F64 motion0 = new Affine2D_F64(1, 2, 3, 4, 5, 6);
/**
* Given fake internal algorithms see if it performs as expected. tests several functions
*/
@Test void basicTest() {
HelperMotion motion = new HelperMotion();
HelperDistort distort = new HelperDistort();
StitchingTransform trans = FactoryStitchingTransform.createAffine_F64();
StitchingFromMotion2D<GrayF32, Affine2D_F64> alg =
new StitchingFromMotion2D<>(motion, distort, trans, 0.3);
alg.configure(200, 300, null);
assertTrue(alg.process(image));
assertEquals(0, motion.numReset);
assertEquals(1, motion.numProcess);
assertEquals(1, distort.numSetModel);
assertEquals(1, distort.numApply);
assertEquals(200, alg.getStitchedImage().width);
assertEquals(300, alg.getStitchedImage().height);
Affine2D_F64 found = alg.getWorldToCurr();
assertEquals(1, found.tx, 1e-5);
assertEquals(-2, found.ty, 1e-5);
assertTrue(alg.process(image));
assertEquals(0, motion.numReset);
assertEquals(2, motion.numProcess);
assertEquals(2, distort.numSetModel);
assertEquals(2, distort.numApply);
found = alg.getWorldToCurr();
assertEquals(1, found.tx, 1e-5);
assertEquals(-2, found.ty, 1e-5);
// test reset
alg.reset();
assertEquals(1, motion.numReset);
found = alg.getWorldToCurr();
assertEquals(0, found.tx, 1e-5);
assertEquals(0, found.ty, 1e-5);
}
/**
 * Checks to see if the user-specified initial transformation is correctly applied
*/
@Test void checkInitialTransform() {
HelperMotion motion = new HelperMotion();
HelperDistort distort = new HelperDistort();
StitchingTransform trans = FactoryStitchingTransform.createAffine_F64();
StitchingFromMotion2D<GrayF32, Affine2D_F64> alg =
new StitchingFromMotion2D<>(motion, distort, trans, 0.3);
alg.configure(200, 300, motion0);
assertTrue(alg.process(image));
Affine2D_F64 expected = motion0.concat(translation, null);
Affine2D_F64 found = alg.getWorldToCurr();
assertEquals(expected.a11, found.a11, 1e-5);
assertEquals(expected.tx, found.tx, 1e-5);
assertEquals(expected.ty, found.ty, 1e-5);
}
/**
* Provide an extremely different transformation and see if that causes an exception
*/
@Test void checkMaxJump() {
HelperMotion motion = new HelperMotion();
HelperDistort distort = new HelperDistort();
StitchingTransform trans = FactoryStitchingTransform.createAffine_F64();
StitchingFromMotion2D<GrayF32, Affine2D_F64> alg =
new StitchingFromMotion2D<>(motion, distort, trans, 0.3);
alg.configure(200, 300, null);
assertTrue(alg.process(image));
// this is very different from what it had before
motion.found = motion0;
assertFalse(alg.process(image));
}
/**
* Note that this test does not actually check to see if the correct transform is applied
*/
@Test void setOriginToCurrent() {
HelperMotion motion = new HelperMotion();
HelperDistort distort = new HelperDistort();
StitchingTransform trans = FactoryStitchingTransform.createAffine_F64();
StitchingFromMotion2D<GrayF32, Affine2D_F64> alg =
new StitchingFromMotion2D<>(motion, distort, trans, 0.3);
alg.configure(200, 300, null);
assertTrue(alg.process(image));
alg.setOriginToCurrent();
assertEquals(2, distort.numSetModel);
assertEquals(2, distort.numApply);
}
@Test void resizeStitchImage_noTransform() {
HelperMotion motion = new HelperMotion();
HelperDistort distort = new HelperDistort();
StitchingTransform trans = FactoryStitchingTransform.createAffine_F64();
StitchingFromMotion2D<GrayF32, Affine2D_F64> alg =
new StitchingFromMotion2D<>(motion, distort, trans, 0.3);
alg.configure(200, 300, null);
assertTrue(alg.process(image));
ImageMiscOps.fill(alg.getStitchedImage().subimage(2, 3, 30, 40, null), 1);
alg.resizeStitchImage(250, 400, null);
// see if the image is where it should be
checkBlock(2, 3, 30, 40, alg.getStitchedImage());
// check the stitched image size
assertEquals(250, alg.getStitchedImage().width);
assertEquals(400, alg.getStitchedImage().height);
// no transform provided, should be the same
Affine2D_F64 found = alg.getWorldToCurr();
assertEquals(1, found.tx, 1e-5);
assertEquals(-2, found.ty, 1e-5);
}
@Test void resizeStitchImage_Transform() {
HelperMotion motion = new HelperMotion();
InterpolatePixelS interp = FactoryInterpolation.createPixelS(0, 255,
InterpolationType.BILINEAR, BorderType.EXTENDED, GrayF32.class);
ImageDistort distorter = FactoryDistort.distortSB(false, interp, GrayF32.class);
StitchingTransform trans = FactoryStitchingTransform.createAffine_F64();
StitchingFromMotion2D<GrayF32, Affine2D_F64> alg =
new StitchingFromMotion2D<>(motion, distorter, trans, 0.3);
alg.configure(200, 300, null);
assertTrue(alg.process(image));
ImageMiscOps.fill(alg.getStitchedImage().subimage(2, 3, 30, 40, null), 1);
Affine2D_F64 transform = new Affine2D_F64(1, 0, 0, 1, -2, 4);
alg.resizeStitchImage(250, 400, transform);
// see if the image is where it should be
checkBlock(4, 0, 32, 36, alg.getStitchedImage());
// check the stitched image size
assertEquals(250, alg.getStitchedImage().width);
assertEquals(400, alg.getStitchedImage().height);
// check to see if translation was correctly applied
Affine2D_F64 found = alg.getWorldToCurr();
assertEquals(1 - 2, found.tx, 1e-5);
assertEquals(-2 + 4, found.ty, 1e-5);
}
private void checkBlock( int x0, int y0, int x1, int y1, GrayF32 image ) {
for (int y = 0; y < image.height; y++) {
for (int x = 0; x < image.width; x++) {
float v = image.get(x, y);
if (x >= x0 && x < x1 && y >= y0 && y < y1) {
assertEquals(1, v, 1e-5);
} else {
assertEquals(0, v, 1e-5);
}
}
}
}
@Test void getImageCorners() {
HelperMotion motion = new HelperMotion();
HelperDistort distort = new HelperDistort();
StitchingTransform trans = FactoryStitchingTransform.createAffine_F64();
StitchingFromMotion2D<GrayF32, Affine2D_F64> alg =
new StitchingFromMotion2D<>(motion, distort, trans, 0.3);
alg.configure(200, 300, null);
assertTrue(alg.process(image));
int w = 100, h = 150;
Quadrilateral_F64 corners = new Quadrilateral_F64();
alg.getImageCorners(w, h, corners);
assertEquals(-1, corners.a.x, 1e-5);
assertEquals(2, corners.a.y, 1e-5);
assertEquals(-1 + w, corners.b.x, 1e-5);
assertEquals(2, corners.b.y, 1e-5);
assertEquals(-1 + w, corners.c.x, 1e-5);
assertEquals(2 + h, corners.c.y, 1e-5);
assertEquals(-1, corners.d.x, 1e-5);
assertEquals(2 + h, corners.d.y, 1e-5);
}
/**
 * Make sure it doesn't blow up if reset is called before anything is processed
*/
@Test void resetBeforeProcess() {
HelperMotion motion = new HelperMotion();
HelperDistort distort = new HelperDistort();
StitchingTransform trans = FactoryStitchingTransform.createAffine_F64();
StitchingFromMotion2D<GrayF32, Affine2D_F64> alg =
new StitchingFromMotion2D<>(motion, distort, trans, 0.3);
alg.reset();
}
private class HelperMotion implements ImageMotion2D<GrayF32, Affine2D_F64> {
int numProcess = 0;
int numReset = 0;
int numSetToFirst = 0;
Affine2D_F64 found = translation;
@Override
public boolean process( GrayF32 input ) {
numProcess++;
return true;
}
@Override
public void reset() {
numReset++;
}
@Override
public void setToFirst() {
numSetToFirst++;
}
@Override
public long getFrameID() { return 0; }
@Override
public Affine2D_F64 getFirstToCurrent() {
return found;
}
@Override
public Class<Affine2D_F64> getTransformType() {
return Affine2D_F64.class;
}
}
private class HelperDistort implements ImageDistort<GrayF32, GrayF32> {
int numSetModel = 0;
int numApply = 0;
@Override
public void setModel( PixelTransform<Point2D_F32> dstToSrc ) {
numSetModel++;
}
@Override
public void apply( GrayF32 srcImg, GrayF32 dstImg ) {
numApply++;
}
@Override
public void apply( GrayF32 srcImg, GrayF32 dstImg, GrayU8 mask ) {numApply++;}
@Override
public void apply( GrayF32 srcImg, GrayF32 dstImg, int dstX0, int dstY0, int dstX1, int dstY1 ) {
numApply++;
}
@Override
public void setRenderAll( boolean renderAll ) {}
@Override
public boolean getRenderAll() {return false;}
@Override
public PixelTransform<Point2D_F32> getModel() {return null;}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.util.jar;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.harmony.luni.util.Util;
/**
 * The input stream from which the JAR file can be read. It is
 * used like the {@code ZipInputStream}.
*
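 * <p>A minimal usage sketch (illustrative only; {@code in} is an assumed
 * caller-supplied {@code InputStream} positioned at the start of a JAR):
 * <pre>
 * JarInputStream jis = new JarInputStream(in, true);
 * for (JarEntry entry = jis.getNextJarEntry(); entry != null; entry = jis.getNextJarEntry()) {
 *     System.out.println(entry.getName()); // list entry names in order
 * }
 * jis.close();
 * </pre>
 *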
* @see ZipInputStream
*/
public class JarInputStream extends ZipInputStream {
private Manifest manifest;
private boolean eos = false;
private JarEntry mEntry;
private JarEntry jarEntry;
private boolean isMeta;
private JarVerifier verifier;
private OutputStream verStream;
/**
* Constructs a new {@code JarInputStream} from an input stream.
*
* @param stream
* the input stream containing the JAR file.
* @param verify
* if the file should be verified with a {@code JarVerifier}.
* @throws IOException
* If an error occurs reading entries from the input stream.
* @see ZipInputStream#ZipInputStream(InputStream)
*/
public JarInputStream(InputStream stream, boolean verify)
throws IOException {
super(stream);
if (verify) {
verifier = new JarVerifier("JarInputStream");
}
if ((mEntry = getNextJarEntry()) == null) {
return;
}
String name = Util.toASCIIUpperCase(mEntry.getName());
if (name.equals(JarFile.META_DIR)) {
mEntry = null; // modifies behavior of getNextJarEntry()
closeEntry();
mEntry = getNextJarEntry();
name = Util.toASCIIUpperCase(mEntry.getName()); // locale-insensitive, matching the check above
}
if (name.equals(JarFile.MANIFEST_NAME)) {
mEntry = null;
manifest = new Manifest(this, verify);
closeEntry();
if (verify) {
verifier.setManifest(manifest);
if (manifest != null) {
verifier.mainAttributesEnd = manifest
.getMainAttributesEnd();
}
}
} else {
Attributes temp = new Attributes(3);
temp.map.put("hidden", null);
mEntry.setAttributes(temp);
/*
 * If the manifest is not at the start of the stream, we will not
 * have enough information, so no verification is carried out.
 */
verifier = null;
}
}
/**
* Constructs a new {@code JarInputStream} from an input stream.
*
* @param stream
* the input stream containing the JAR file.
* @throws IOException
* If an error occurs reading entries from the input stream.
* @see ZipInputStream#ZipInputStream(InputStream)
*/
public JarInputStream(InputStream stream) throws IOException {
this(stream, true);
}
/**
* Returns the {@code Manifest} object associated with this {@code
* JarInputStream} or {@code null} if no manifest entry exists.
*
* @return the MANIFEST specifying the contents of the JAR file.
*/
public Manifest getManifest() {
return manifest;
}
/**
* Returns the next {@code JarEntry} contained in this stream or {@code
* null} if no more entries are present.
*
* @return the next JAR entry.
* @throws IOException
* if an error occurs while reading the entry.
*/
public JarEntry getNextJarEntry() throws IOException {
return (JarEntry) getNextEntry();
}
/**
 * Reads up to {@code length} bytes of decompressed data and stores them in
* {@code buffer} starting at {@code offset}.
*
* @param buffer
* Buffer to store into
* @param offset
* offset in buffer to store at
* @param length
* number of bytes to store
* @return Number of uncompressed bytes read
* @throws IOException
* if an IOException occurs.
*/
@Override
public int read(byte[] buffer, int offset, int length) throws IOException {
if (mEntry != null) {
return -1;
}
int r = super.read(buffer, offset, length);
if (verStream != null && !eos) {
if (r == -1) {
eos = true;
if (verifier != null) {
if (isMeta) {
verifier.addMetaEntry(jarEntry.getName(),
((ByteArrayOutputStream) verStream)
.toByteArray());
try {
verifier.readCertificates();
} catch (SecurityException e) {
verifier = null;
throw e;
}
} else {
((JarVerifier.VerifierEntry) verStream).verify();
}
}
} else {
verStream.write(buffer, offset, r);
}
}
return r;
}
/**
* Returns the next {@code ZipEntry} contained in this stream or {@code
* null} if no more entries are present.
*
* @return the next extracted ZIP entry.
* @throws IOException
* if an error occurs while reading the entry.
*/
@Override
public ZipEntry getNextEntry() throws IOException {
if (mEntry != null) {
jarEntry = mEntry;
mEntry = null;
jarEntry.setAttributes(null);
} else {
jarEntry = (JarEntry) super.getNextEntry();
if (jarEntry == null) {
return null;
}
if (verifier != null) {
isMeta = Util.toASCIIUpperCase(jarEntry.getName()).startsWith(
JarFile.META_DIR);
if (isMeta) {
verStream = new ByteArrayOutputStream();
} else {
verStream = verifier.initEntry(jarEntry.getName());
}
}
}
eos = false;
return jarEntry;
}
@Override
protected ZipEntry createZipEntry(String name) {
JarEntry entry = new JarEntry(name);
if (manifest != null) {
entry.setAttributes(manifest.getAttributes(name));
}
return entry;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.servicecomb.foundation.vertx.client.tcp;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.servicecomb.foundation.vertx.client.tcp.TcpClientConnection.Status;
import org.apache.servicecomb.foundation.vertx.tcp.TcpOutputStream;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.vertx.core.AsyncResult;
import io.vertx.core.Context;
import io.vertx.core.Handler;
import io.vertx.core.impl.FutureFactoryImpl;
import io.vertx.core.net.NetSocket;
import io.vertx.core.net.impl.NetSocketImpl;
import mockit.Deencapsulation;
import mockit.Expectations;
import mockit.Mock;
import mockit.MockUp;
import mockit.Mocked;
public class TestTcpClientConnection {
@Mocked
Context context;
@Mocked
NetClientWrapper netClientWrapper;
String strEndpoint = "rest://localhost:8080";
TcpClientConnection tcpClientConnection;
Map<Long, TcpRequest> requestMap;
Queue<ByteBuf> writeQueue;
Queue<AbstractTcpClientPackage> packageQueue;
@Before
public void setup() {
tcpClientConnection = new TcpClientConnection(context, netClientWrapper, strEndpoint);
requestMap = Deencapsulation.getField(tcpClientConnection, "requestMap");
packageQueue = Deencapsulation.getField(tcpClientConnection, "packageQueue");
writeQueue = Deencapsulation.getField(tcpClientConnection, "writeQueue");
}
@Test
public void localSupportLogin() {
Assert.assertFalse(tcpClientConnection.isLocalSupportLogin());
tcpClientConnection.setLocalSupportLogin(true);
Assert.assertTrue(tcpClientConnection.isLocalSupportLogin());
}
@Test
public void createLogin() {
Assert.assertNull(tcpClientConnection.createLogin());
}
@Test
public void onLoginResponse_buffer() {
Assert.assertTrue(tcpClientConnection.onLoginResponse(null));
}
@Test
public void send_inWorkingStatus(@Mocked AbstractTcpClientPackage tcpClientPackage,
@Mocked TcpOutputStream tcpOutputStream) {
Deencapsulation.setField(tcpClientConnection, "status", Status.WORKING);
long msgId = 1;
ByteBuf byteBuf = Unpooled.buffer();
new Expectations(tcpClientConnection) {
{
tcpClientPackage.getMsgId();
result = msgId;
tcpClientPackage.createStream();
result = tcpOutputStream;
tcpOutputStream.getByteBuf();
result = byteBuf;
}
};
new MockUp<Context>(context) {
@Mock
void runOnContext(Handler<Void> action) {
}
};
tcpClientConnection.send(tcpClientPackage, ar -> {
});
Assert.assertSame(byteBuf, writeQueue.poll());
Assert.assertNull(writeQueue.poll());
Assert.assertEquals(Status.WORKING, Deencapsulation.getField(tcpClientConnection, "status"));
}
@Test
public void send_inDisconnectedStatus(@Mocked AbstractTcpClientPackage tcpClientPackage,
@Mocked TcpOutputStream tcpOutputStream) {
long msgId = 1;
new Expectations(tcpClientConnection) {
{
tcpClientPackage.getMsgId();
result = msgId;
}
};
new MockUp<Context>(context) {
@Mock
void runOnContext(Handler<Void> action) {
action.handle(null);
}
};
tcpClientConnection.send(tcpClientPackage, ar -> {
});
Assert.assertSame(tcpClientPackage, packageQueue.poll());
Assert.assertNull(packageQueue.poll());
Assert.assertEquals(Status.CONNECTING, Deencapsulation.getField(tcpClientConnection, "status"));
}
@Test
public void send_disconnectedToTryLogin(@Mocked AbstractTcpClientPackage tcpClientPackage,
@Mocked TcpOutputStream tcpOutputStream) {
long msgId = 1;
new Expectations(tcpClientConnection) {
{
tcpClientPackage.getMsgId();
result = msgId;
}
};
new MockUp<Context>(context) {
@Mock
void runOnContext(Handler<Void> action) {
Deencapsulation.setField(tcpClientConnection, "status", Status.TRY_LOGIN);
action.handle(null);
}
};
tcpClientConnection.send(tcpClientPackage, ar -> {
});
Assert.assertSame(tcpClientPackage, packageQueue.poll());
Assert.assertNull(packageQueue.poll());
Assert.assertEquals(Status.TRY_LOGIN, Deencapsulation.getField(tcpClientConnection, "status"));
}
@Test
public void send_disconnectedToWorking(@Mocked AbstractTcpClientPackage tcpClientPackage,
@Mocked TcpOutputStream tcpOutputStream) {
long msgId = 1;
new Expectations(tcpClientConnection) {
{
tcpClientPackage.getMsgId();
result = msgId;
}
};
new MockUp<Context>(context) {
@Mock
void runOnContext(Handler<Void> action) {
Deencapsulation.setField(tcpClientConnection, "status", Status.WORKING);
action.handle(null);
}
};
tcpClientConnection.send(tcpClientPackage, ar -> {
});
Assert.assertNull(writeQueue.poll());
Assert.assertNull(packageQueue.poll());
Assert.assertEquals(Status.WORKING, Deencapsulation.getField(tcpClientConnection, "status"));
}
@Test
public void connect_success(@Mocked NetSocketImpl netSocket) {
FutureFactoryImpl futureFactory = new FutureFactoryImpl();
new MockUp<NetClientWrapper>(netClientWrapper) {
@Mock
void connect(boolean ssl, int port, String host, Handler<AsyncResult<NetSocket>> connectHandler) {
connectHandler.handle(futureFactory.succeededFuture(netSocket));
}
};
tcpClientConnection.connect();
Assert.assertSame(netSocket, tcpClientConnection.getNetSocket());
Assert.assertEquals(Status.WORKING, Deencapsulation.getField(tcpClientConnection, "status"));
}
@Test
public void connect_failed() {
requestMap.put(10L, new TcpRequest(10, ar -> {
}));
FutureFactoryImpl futureFactory = new FutureFactoryImpl();
Error error = new Error();
new MockUp<NetClientWrapper>(netClientWrapper) {
@Mock
void connect(boolean ssl, int port, String host, Handler<AsyncResult<NetSocket>> connectHandler) {
connectHandler.handle(futureFactory.failedFuture(error));
}
};
tcpClientConnection.connect();
Assert.assertEquals(Status.DISCONNECTED, Deencapsulation.getField(tcpClientConnection, "status"));
Assert.assertEquals(0, requestMap.size());
}
@Test
public void onClosed(@Mocked NetSocketImpl netSocket) {
requestMap.put(10L, new TcpRequest(10, ar -> {
}));
tcpClientConnection.initNetSocket(netSocket);
Deencapsulation.invoke(tcpClientConnection, "onClosed", new Class<?>[] {Void.class}, new Object[] {null});
Assert.assertEquals(Status.DISCONNECTED, Deencapsulation.getField(tcpClientConnection, "status"));
Assert.assertEquals(0, requestMap.size());
}
@Test
public void onReply_notExist() {
// should not throw exception
tcpClientConnection.onReply(1, null, null);
}
@Test
public void onReply_exist() {
long msgId = 1L;
AtomicInteger count = new AtomicInteger();
requestMap.put(msgId, new TcpRequest(10, ar -> {
count.incrementAndGet();
}));
tcpClientConnection.onReply(msgId, null, null);
Assert.assertEquals(1, count.get());
}
}
|
|
/*
* Copyright © 2014-2016 NetApp, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* DO NOT EDIT THIS CODE BY HAND! It has been generated with jsvcgen.
*/
package com.solidfire.element.api;
import com.solidfire.gson.Gson;
import com.solidfire.core.client.Attributes;
import com.solidfire.gson.annotations.SerializedName;
import com.solidfire.core.annotation.Since;
import com.solidfire.core.javautil.Optional;
import java.io.Serializable;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Objects;
/**
* StartBulkVolumeWriteRequest
* StartBulkVolumeWrite enables you to initialize a bulk volume write session on a specified volume. Only two bulk volume processes can run simultaneously on a volume. When you initialize the write session, data is written to a SolidFire storage volume from an external backup source. The external data is accessed by a web server running on an SF-series node. Communications and server
* interaction information for external data access is passed by a script running on the storage system.
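 *
 * A minimal builder sketch (illustrative only; the volume ID, format and script
 * name below are assumed example values, not defaults):
 *
 *   StartBulkVolumeWriteRequest request = StartBulkVolumeWriteRequest.builder()
 *       .volumeID(1L)
 *       .format("native")
 *       .optionalScript("bv_internal.py")
 *       .build();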
**/
public class StartBulkVolumeWriteRequest implements Serializable {
public static final long serialVersionUID = 4230264928827969924L;
@SerializedName("volumeID") private Long volumeID;
@SerializedName("format") private String format;
@SerializedName("script") private Optional<String> script;
@SerializedName("scriptParameters") private Optional<Attributes> scriptParameters;
@SerializedName("attributes") private Optional<Attributes> attributes;
// empty constructor
@Since("7.0")
public StartBulkVolumeWriteRequest() {}
// parameterized constructor
@Since("7.0")
public StartBulkVolumeWriteRequest(
Long volumeID,
String format,
Optional<String> script,
Optional<Attributes> scriptParameters,
Optional<Attributes> attributes
)
{
this.volumeID = volumeID;
this.format = format;
this.script = (script == null) ? Optional.<String>empty() : script;
this.scriptParameters = (scriptParameters == null) ? Optional.<Attributes>empty() : scriptParameters;
this.attributes = (attributes == null) ? Optional.<Attributes>empty() : attributes;
}
/**
* The ID of the volume to be written to.
**/
public Long getVolumeID() { return this.volumeID; }
public void setVolumeID(Long volumeID) {
this.volumeID = volumeID;
}
/**
* The format of the volume data. It can be either of the following formats:
* uncompressed: Every byte of the volume is returned without any compression.
* native: Opaque data is returned that is smaller and more efficiently stored and written on a subsequent bulk
* volume write.
**/
public String getFormat() { return this.format; }
public void setFormat(String format) {
this.format = format;
}
/**
* The executable name of a script. If unspecified,
* the key and URL are necessary to access SF-series
* nodes. The script runs on the primary node and the key
 * and URL are returned to the script, so the local web server
* can be contacted.
**/
public Optional<String> getScript() { return this.script; }
public void setScript(Optional<String> script) {
this.script = (script == null) ? Optional.<String>empty() : script;
}
/**
* JSON parameters to pass to the script.
**/
public Optional<Attributes> getScriptParameters() { return this.scriptParameters; }
public void setScriptParameters(Optional<Attributes> scriptParameters) {
this.scriptParameters = (scriptParameters == null) ? Optional.<Attributes>empty() : scriptParameters;
}
/**
* JSON attributes for the bulk volume job.
**/
public Optional<Attributes> getAttributes() { return this.attributes; }
public void setAttributes(Optional<Attributes> attributes) {
this.attributes = (attributes == null) ? Optional.<Attributes>empty() : attributes;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
StartBulkVolumeWriteRequest that = (StartBulkVolumeWriteRequest) o;
return
Objects.equals(volumeID, that.volumeID) &&
Objects.equals(format, that.format) &&
Objects.equals(script, that.script) &&
Objects.equals(scriptParameters, that.scriptParameters) &&
Objects.equals(attributes, that.attributes);
}
@Override
public int hashCode() {
return Objects.hash( volumeID,format,script,scriptParameters,attributes );
}
public java.util.Map<String, Object> toMap() {
java.util.Map<String, Object> map = new HashMap<>();
map.put("volumeID", volumeID);
map.put("format", format);
map.put("script", script);
map.put("scriptParameters", scriptParameters);
map.put("attributes", attributes);
return map;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
Gson gson = new Gson();
sb.append( "{ " );
sb.append(" volumeID : ").append(gson.toJson(volumeID)).append(",");
sb.append(" format : ").append(gson.toJson(format)).append(",");
if(null != script && script.isPresent()){
sb.append(" script : ").append(gson.toJson(script)).append(",");
}
else{
sb.append(" script : ").append("null").append(",");
}
if(null != scriptParameters && scriptParameters.isPresent()){
sb.append(" scriptParameters : ").append(gson.toJson(scriptParameters)).append(",");
}
else{
sb.append(" scriptParameters : ").append("null").append(",");
}
if(null != attributes && attributes.isPresent()){
sb.append(" attributes : ").append(gson.toJson(attributes)).append(",");
}
else{
sb.append(" attributes : ").append("null").append(",");
}
sb.append( " }" );
if(sb.lastIndexOf(", }") != -1)
sb.deleteCharAt(sb.lastIndexOf(", }"));
return sb.toString();
}
public static Builder builder() {
return new Builder();
}
public final Builder asBuilder() {
return new Builder().buildFrom(this);
}
public static class Builder {
private Long volumeID;
private String format;
private Optional<String> script;
private Optional<Attributes> scriptParameters;
private Optional<Attributes> attributes;
private Builder() { }
public StartBulkVolumeWriteRequest build() {
return new StartBulkVolumeWriteRequest (
this.volumeID,
this.format,
this.script,
this.scriptParameters,
this.attributes);
}
private StartBulkVolumeWriteRequest.Builder buildFrom(final StartBulkVolumeWriteRequest req) {
this.volumeID = req.volumeID;
this.format = req.format;
this.script = req.script;
this.scriptParameters = req.scriptParameters;
this.attributes = req.attributes;
return this;
}
public StartBulkVolumeWriteRequest.Builder volumeID(final Long volumeID) {
this.volumeID = volumeID;
return this;
}
public StartBulkVolumeWriteRequest.Builder format(final String format) {
this.format = format;
return this;
}
public StartBulkVolumeWriteRequest.Builder optionalScript(final String script) {
this.script = (script == null) ? Optional.<String>empty() : Optional.of(script);
return this;
}
public StartBulkVolumeWriteRequest.Builder optionalScriptParameters(final Attributes scriptParameters) {
this.scriptParameters = (scriptParameters == null) ? Optional.<Attributes>empty() : Optional.of(scriptParameters);
return this;
}
public StartBulkVolumeWriteRequest.Builder optionalAttributes(final Attributes attributes) {
this.attributes = (attributes == null) ? Optional.<Attributes>empty() : Optional.of(attributes);
return this;
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.test.catalog;
import org.apache.calcite.plan.RelOptPredicateList;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.metadata.BuiltInMetadata;
import org.apache.calcite.rel.metadata.MetadataDef;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.schema.TableMacro;
import org.apache.calcite.schema.TranslatableTable;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.util.SqlOperatorTables;
import com.google.common.collect.ImmutableList;
import org.checkerframework.checker.nullness.qual.NonNull;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/** Adds some extra tables to the mock catalog. These increase the time and
* complexity of initializing the catalog (because they contain views whose
* SQL needs to be parsed) and so are not used for all tests. */
public class MockCatalogReaderExtended extends MockCatalogReaderSimple {
/**
* Creates a MockCatalogReader.
*
* <p>Caller must then call {@link #init} to populate with data;
* constructor is protected to encourage you to call {@link #create}.
*
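   * <p>A typical call (a sketch mirroring {@link #create}):
   * {@code MockCatalogReaderExtended.create(typeFactory, true)}, where the second
   * argument controls identifier case sensitivity.
   *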
* @param typeFactory Type factory
* @param caseSensitive case sensitivity
*/
protected MockCatalogReaderExtended(RelDataTypeFactory typeFactory,
boolean caseSensitive) {
super(typeFactory, caseSensitive);
}
/** Creates and initializes a MockCatalogReaderExtended. */
public static @NonNull MockCatalogReaderExtended create(
RelDataTypeFactory typeFactory, boolean caseSensitive) {
return new MockCatalogReaderExtended(typeFactory, caseSensitive).init();
}
@Override public MockCatalogReaderExtended init() {
super.init();
MockSchema salesSchema = new MockSchema("SALES");
// Same as "EMP_20" except it uses ModifiableViewTable which populates
// constrained columns with default values on INSERT and has a single constraint on DEPTNO.
List<String> empModifiableViewNames = ImmutableList.of(
salesSchema.getCatalogName(), salesSchema.getName(), "EMP_MODIFIABLEVIEW");
TableMacro empModifiableViewMacro = MockModifiableViewRelOptTable.viewMacro(rootSchema,
"select EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, SLACKER from EMPDEFAULTS"
+ " where DEPTNO = 20", empModifiableViewNames.subList(0, 2),
ImmutableList.of(empModifiableViewNames.get(2)), true);
TranslatableTable empModifiableView = empModifiableViewMacro.apply(ImmutableList.of());
MockModifiableViewRelOptTable mockEmpViewTable = MockModifiableViewRelOptTable.create(
(MockModifiableViewRelOptTable.MockModifiableViewTable) empModifiableView, this,
empModifiableViewNames.get(0), empModifiableViewNames.get(1),
empModifiableViewNames.get(2), false, 20, null);
registerTable(mockEmpViewTable);
// Same as "EMP_MODIFIABLEVIEW" except that all columns are in the view, columns are reordered,
// and there is an `extra` extended column.
List<String> empModifiableViewNames2 = ImmutableList.of(
salesSchema.getCatalogName(), salesSchema.getName(), "EMP_MODIFIABLEVIEW2");
TableMacro empModifiableViewMacro2 = MockModifiableViewRelOptTable.viewMacro(rootSchema,
"select ENAME, EMPNO, JOB, DEPTNO, SLACKER, SAL, EXTRA, HIREDATE, MGR, COMM"
+ " from EMPDEFAULTS extend (EXTRA boolean)"
+ " where DEPTNO = 20", empModifiableViewNames2.subList(0, 2),
ImmutableList.of(empModifiableViewNames.get(2)), true);
TranslatableTable empModifiableView2 = empModifiableViewMacro2.apply(ImmutableList.of());
MockModifiableViewRelOptTable mockEmpViewTable2 = MockModifiableViewRelOptTable.create(
(MockModifiableViewRelOptTable.MockModifiableViewTable) empModifiableView2, this,
empModifiableViewNames2.get(0), empModifiableViewNames2.get(1),
empModifiableViewNames2.get(2), false, 20, null);
registerTable(mockEmpViewTable2);
// Same as "EMP_MODIFIABLEVIEW" except that comm is not in the view.
List<String> empModifiableViewNames3 = ImmutableList.of(
salesSchema.getCatalogName(), salesSchema.getName(), "EMP_MODIFIABLEVIEW3");
TableMacro empModifiableViewMacro3 = MockModifiableViewRelOptTable.viewMacro(rootSchema,
"select EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, SLACKER from EMPDEFAULTS"
+ " where DEPTNO = 20", empModifiableViewNames3.subList(0, 2),
ImmutableList.of(empModifiableViewNames3.get(2)), true);
TranslatableTable empModifiableView3 = empModifiableViewMacro3.apply(ImmutableList.of());
MockModifiableViewRelOptTable mockEmpViewTable3 = MockModifiableViewRelOptTable.create(
(MockModifiableViewRelOptTable.MockModifiableViewTable) empModifiableView3, this,
empModifiableViewNames3.get(0), empModifiableViewNames3.get(1),
empModifiableViewNames3.get(2), false, 20, null);
registerTable(mockEmpViewTable3);
MockSchema structTypeSchema = new MockSchema("STRUCT");
registerSchema(structTypeSchema);
final Fixture f = new Fixture(typeFactory);
final List<CompoundNameColumn> columnsExtended = Arrays.asList(
new CompoundNameColumn("", "K0", f.varchar20TypeNull),
new CompoundNameColumn("", "C1", f.varchar20TypeNull),
new CompoundNameColumn("F0", "C0", f.intType),
new CompoundNameColumn("F1", "C1", f.intTypeNull));
final List<CompoundNameColumn> extendedColumns =
new ArrayList<>(columnsExtended);
extendedColumns.add(new CompoundNameColumn("F2", "C2", f.varchar20Type));
final CompoundNameColumnResolver structExtendedTableResolver =
new CompoundNameColumnResolver(extendedColumns, "F0");
final MockTable structExtendedTypeTable =
MockTable.create(this, structTypeSchema, "T_EXTEND", false, 100,
structExtendedTableResolver);
for (CompoundNameColumn column : columnsExtended) {
structExtendedTypeTable.addColumn(column.getName(), column.type);
}
registerTable(structExtendedTypeTable);
// Defines a table with
// schema(A int, B bigint, C varchar(10), D as a + 1 stored, E as b * 3 virtual).
MockSchema virtualColumnsSchema = new MockSchema("VIRTUALCOLUMNS");
registerSchema(virtualColumnsSchema);
final MockTable virtualColumnsTable1 =
MockTable.create(this, virtualColumnsSchema, "VC_T1", false, 100,
null, new VirtualColumnsExpressionFactory(), true);
virtualColumnsTable1.addColumn("A", f.intTypeNull);
virtualColumnsTable1.addColumn("B", f.bigintType);
virtualColumnsTable1.addColumn("C", f.varchar10Type);
virtualColumnsTable1.addColumn("D", f.intTypeNull);
// Column E has the same type as column A because it's a virtual column
// with expression that references column A.
virtualColumnsTable1.addColumn("E", f.intTypeNull);
// Same schema with VC_T1 but with different table name.
final MockTable virtualColumnsTable2 =
MockTable.create(this, virtualColumnsSchema, "VC_T2", false, 100,
null, new VirtualColumnsExpressionFactory(), false);
virtualColumnsTable2.addColumn("A", f.intTypeNull);
virtualColumnsTable2.addColumn("B", f.bigintType);
virtualColumnsTable2.addColumn("C", f.varchar10Type);
virtualColumnsTable2.addColumn("D", f.intTypeNull);
virtualColumnsTable2.addColumn("E", f.bigintType);
registerTable(virtualColumnsTable1);
registerTable(virtualColumnsTable2);
// Register table with complex data type rows.
MockSchema complexTypeColumnsSchema = new MockSchema("COMPLEXTYPES");
registerSchema(complexTypeColumnsSchema);
final MockTable complexTypeColumnsTable =
MockTable.create(this, complexTypeColumnsSchema, "CTC_T1",
false, 100);
complexTypeColumnsTable.addColumn("A", f.recordType1);
complexTypeColumnsTable.addColumn("B", f.recordType2);
complexTypeColumnsTable.addColumn("C", f.recordType3);
complexTypeColumnsTable.addColumn("D", f.recordType4);
complexTypeColumnsTable.addColumn("E", f.recordType5);
complexTypeColumnsTable.addColumn("intArrayType", f.intArrayType);
complexTypeColumnsTable.addColumn("varchar5ArrayType", f.varchar5ArrayType);
complexTypeColumnsTable.addColumn("intArrayArrayType", f.intArrayArrayType);
complexTypeColumnsTable.addColumn("varchar5ArrayArrayType", f.varchar5ArrayArrayType);
complexTypeColumnsTable.addColumn("intMultisetType", f.intMultisetType);
complexTypeColumnsTable.addColumn("varchar5MultisetType", f.varchar5MultisetType);
complexTypeColumnsTable.addColumn("intMultisetArrayType", f.intMultisetArrayType);
complexTypeColumnsTable.addColumn("varchar5MultisetArrayType",
f.varchar5MultisetArrayType);
complexTypeColumnsTable.addColumn("intArrayMultisetType", f.intArrayMultisetType);
complexTypeColumnsTable.addColumn("rowArrayMultisetType", f.rowArrayMultisetType);
registerTable(complexTypeColumnsTable);
MockSchema nullableRowsSchema = new MockSchema("NULLABLEROWS");
registerSchema(nullableRowsSchema);
final MockTable nullableRowsTable =
MockTable.create(this, nullableRowsSchema, "NR_T1", false, 100);
RelDataType bigIntNotNull = typeFactory.createSqlType(SqlTypeName.BIGINT);
RelDataType nullableRecordType =
typeFactory.builder()
.nullableRecord(true)
.add("NOT_NULL_FIELD", bigIntNotNull)
.add("NULLABLE_FIELD", bigIntNotNull).nullable(true)
.build();
nullableRowsTable.addColumn("ROW_COLUMN", nullableRecordType, false);
nullableRowsTable.addColumn(
"ROW_COLUMN_ARRAY",
typeFactory.createArrayType(nullableRecordType, -1),
true);
registerTable(nullableRowsTable);
MockSchema geoSchema = new MockSchema("GEO");
registerSchema(geoSchema);
final MockTable restaurantTable =
MockTable.create(this, geoSchema, "RESTAURANTS", false, 100);
restaurantTable.addColumn("NAME", f.varchar20Type, true);
restaurantTable.addColumn("LATITUDE", f.intType);
restaurantTable.addColumn("LONGITUDE", f.intType);
restaurantTable.addColumn("CUISINE", f.varchar10Type);
restaurantTable.addColumn("HILBERT", f.bigintType);
restaurantTable.addMonotonic("HILBERT");
restaurantTable.addWrap(
new BuiltInMetadata.AllPredicates.Handler() {
@Override public RelOptPredicateList getAllPredicates(RelNode r,
RelMetadataQuery mq) {
// Return the predicate:
// r.hilbert = hilbert(r.longitude, r.latitude)
//
// (Yes, x = longitude, y = latitude. Same as ST_MakePoint.)
final RexBuilder rexBuilder = r.getCluster().getRexBuilder();
final RexInputRef refLatitude = rexBuilder.makeInputRef(r, 1);
final RexInputRef refLongitude = rexBuilder.makeInputRef(r, 2);
final RexInputRef refHilbert = rexBuilder.makeInputRef(r, 4);
return RelOptPredicateList.of(rexBuilder,
ImmutableList.of(
rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
refHilbert,
rexBuilder.makeCall(hilbertOp(),
refLongitude, refLatitude))));
}
SqlOperator hilbertOp() {
for (SqlOperator op
: SqlOperatorTables.spatialInstance().getOperatorList()) {
if (op.getKind() == SqlKind.HILBERT
&& op.getOperandCountRange().isValidCount(2)) {
return op;
}
}
throw new AssertionError();
}
@Override public MetadataDef<BuiltInMetadata.AllPredicates> getDef() {
return BuiltInMetadata.AllPredicates.DEF;
}
});
registerTable(restaurantTable);
return this;
}
}
|
|
/**
*/
package IFML.Extensions.provider;
import IFML.Extensions.util.ExtensionsAdapterFactory;
import java.util.ArrayList;
import java.util.Collection;
import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.Notifier;
import org.eclipse.emf.edit.provider.ChangeNotifier;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.ComposedAdapterFactory;
import org.eclipse.emf.edit.provider.IChangeNotifier;
import org.eclipse.emf.edit.provider.IDisposable;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.INotifyChangedListener;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
/**
* This is the factory that is used to provide the interfaces needed to support Viewers.
* The adapters generated by this factory convert EMF adapter notifications into calls to {@link #fireNotifyChanged fireNotifyChanged}.
* The adapters also support Eclipse property sheets.
* Note that most of the adapters are shared among multiple instances.
* <!-- begin-user-doc -->
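 * <p>A minimal composition sketch (hand-written, not generated; assumes only the
 * standard EMF.Edit API):
 * <pre>
 *   ComposedAdapterFactory composed = new ComposedAdapterFactory();
 *   composed.addAdapterFactory(new ExtensionsItemProviderAdapterFactory());
 *   // adapters requested through "composed" are now served by the providers below
 * </pre>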
* <!-- end-user-doc -->
* @generated
*/
public class ExtensionsItemProviderAdapterFactory extends ExtensionsAdapterFactory implements ComposeableAdapterFactory, IChangeNotifier, IDisposable {
/**
* This keeps track of the root adapter factory that delegates to this adapter factory.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected ComposedAdapterFactory parentAdapterFactory;
/**
* This is used to implement {@link org.eclipse.emf.edit.provider.IChangeNotifier}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected IChangeNotifier changeNotifier = new ChangeNotifier();
/**
* This keeps track of all the supported types checked by {@link #isFactoryForType isFactoryForType}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected Collection<Object> supportedTypes = new ArrayList<Object>();
/**
* This constructs an instance.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public ExtensionsItemProviderAdapterFactory() {
supportedTypes.add(IEditingDomainItemProvider.class);
supportedTypes.add(IStructuredItemContentProvider.class);
supportedTypes.add(ITreeItemContentProvider.class);
supportedTypes.add(IItemLabelProvider.class);
supportedTypes.add(IItemPropertySource.class);
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.OnSubmitEvent} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected OnSubmitEventItemProvider onSubmitEventItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.OnSubmitEvent}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createOnSubmitEventAdapter() {
if (onSubmitEventItemProvider == null) {
onSubmitEventItemProvider = new OnSubmitEventItemProvider(this);
}
return onSubmitEventItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.ValidationRule} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected ValidationRuleItemProvider validationRuleItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.ValidationRule}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createValidationRuleAdapter() {
if (validationRuleItemProvider == null) {
validationRuleItemProvider = new ValidationRuleItemProvider(this);
}
return validationRuleItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.List} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected ListItemProvider listItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.List}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createListAdapter() {
if (listItemProvider == null) {
listItemProvider = new ListItemProvider(this);
}
return listItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.Position} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected PositionItemProvider positionItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.Position}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createPositionAdapter() {
if (positionItemProvider == null) {
positionItemProvider = new PositionItemProvider(this);
}
return positionItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.UserRole} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected UserRoleItemProvider userRoleItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.UserRole}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createUserRoleAdapter() {
if (userRoleItemProvider == null) {
userRoleItemProvider = new UserRoleItemProvider(this);
}
return userRoleItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.IFMLSlot} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected IFMLSlotItemProvider ifmlSlotItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.IFMLSlot}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createIFMLSlotAdapter() {
if (ifmlSlotItemProvider == null) {
ifmlSlotItemProvider = new IFMLSlotItemProvider(this);
}
return ifmlSlotItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.OnSelectEvent} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected OnSelectEventItemProvider onSelectEventItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.OnSelectEvent}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createOnSelectEventAdapter() {
if (onSelectEventItemProvider == null) {
onSelectEventItemProvider = new OnSelectEventItemProvider(this);
}
return onSelectEventItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.Form} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected FormItemProvider formItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.Form}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createFormAdapter() {
if (formItemProvider == null) {
formItemProvider = new FormItemProvider(this);
}
return formItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.Device} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected DeviceItemProvider deviceItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.Device}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createDeviceAdapter() {
if (deviceItemProvider == null) {
deviceItemProvider = new DeviceItemProvider(this);
}
return deviceItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.SelectionField} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected SelectionFieldItemProvider selectionFieldItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.SelectionField}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createSelectionFieldAdapter() {
if (selectionFieldItemProvider == null) {
selectionFieldItemProvider = new SelectionFieldItemProvider(this);
}
return selectionFieldItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.SimpleField} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected SimpleFieldItemProvider simpleFieldItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.SimpleField}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createSimpleFieldAdapter() {
if (simpleFieldItemProvider == null) {
simpleFieldItemProvider = new SimpleFieldItemProvider(this);
}
return simpleFieldItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.Details} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected DetailsItemProvider detailsItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.Details}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createDetailsAdapter() {
if (detailsItemProvider == null) {
detailsItemProvider = new DetailsItemProvider(this);
}
return detailsItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.IFMLWindow} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected IFMLWindowItemProvider ifmlWindowItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.IFMLWindow}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createIFMLWindowAdapter() {
if (ifmlWindowItemProvider == null) {
ifmlWindowItemProvider = new IFMLWindowItemProvider(this);
}
return ifmlWindowItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.OnLoadEvent} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected OnLoadEventItemProvider onLoadEventItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.OnLoadEvent}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createOnLoadEventAdapter() {
if (onLoadEventItemProvider == null) {
onLoadEventItemProvider = new OnLoadEventItemProvider(this);
}
return onLoadEventItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.IFMLMenu} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected IFMLMenuItemProvider ifmlMenuItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.IFMLMenu}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createIFMLMenuAdapter() {
if (ifmlMenuItemProvider == null) {
ifmlMenuItemProvider = new IFMLMenuItemProvider(this);
}
return ifmlMenuItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.JumpEvent} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected JumpEventItemProvider jumpEventItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.JumpEvent}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createJumpEventAdapter() {
if (jumpEventItemProvider == null) {
jumpEventItemProvider = new JumpEventItemProvider(this);
}
return jumpEventItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.LandingEvent} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected LandingEventItemProvider landingEventItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.LandingEvent}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createLandingEventAdapter() {
if (landingEventItemProvider == null) {
landingEventItemProvider = new LandingEventItemProvider(this);
}
return landingEventItemProvider;
}
/**
* This keeps track of the one adapter used for all {@link IFML.Extensions.SetContextEvent} instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected SetContextEventItemProvider setContextEventItemProvider;
/**
* This creates an adapter for a {@link IFML.Extensions.SetContextEvent}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter createSetContextEventAdapter() {
if (setContextEventItemProvider == null) {
setContextEventItemProvider = new SetContextEventItemProvider(this);
}
return setContextEventItemProvider;
}
/**
* This returns the root adapter factory that contains this factory.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public ComposeableAdapterFactory getRootAdapterFactory() {
return parentAdapterFactory == null ? this : parentAdapterFactory.getRootAdapterFactory();
}
/**
* This sets the composed adapter factory that contains this factory.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setParentAdapterFactory(ComposedAdapterFactory parentAdapterFactory) {
this.parentAdapterFactory = parentAdapterFactory;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean isFactoryForType(Object type) {
return supportedTypes.contains(type) || super.isFactoryForType(type);
}
/**
* This implementation substitutes the factory itself as the key for the adapter.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Adapter adapt(Notifier notifier, Object type) {
return super.adapt(notifier, this);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object adapt(Object object, Object type) {
if (isFactoryForType(type)) {
Object adapter = super.adapt(object, type);
if (!(type instanceof Class<?>) || (((Class<?>)type).isInstance(adapter))) {
return adapter;
}
}
return null;
}
/**
* This adds a listener.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void addListener(INotifyChangedListener notifyChangedListener) {
changeNotifier.addListener(notifyChangedListener);
}
/**
* This removes a listener.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void removeListener(INotifyChangedListener notifyChangedListener) {
changeNotifier.removeListener(notifyChangedListener);
}
/**
* This delegates to {@link #changeNotifier} and to {@link #parentAdapterFactory}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void fireNotifyChanged(Notification notification) {
changeNotifier.fireNotifyChanged(notification);
if (parentAdapterFactory != null) {
parentAdapterFactory.fireNotifyChanged(notification);
}
}
/**
* This disposes all of the item providers created by this factory.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void dispose() {
if (onSubmitEventItemProvider != null) onSubmitEventItemProvider.dispose();
if (validationRuleItemProvider != null) validationRuleItemProvider.dispose();
if (listItemProvider != null) listItemProvider.dispose();
if (positionItemProvider != null) positionItemProvider.dispose();
if (userRoleItemProvider != null) userRoleItemProvider.dispose();
if (ifmlSlotItemProvider != null) ifmlSlotItemProvider.dispose();
if (onSelectEventItemProvider != null) onSelectEventItemProvider.dispose();
if (formItemProvider != null) formItemProvider.dispose();
if (deviceItemProvider != null) deviceItemProvider.dispose();
if (selectionFieldItemProvider != null) selectionFieldItemProvider.dispose();
if (simpleFieldItemProvider != null) simpleFieldItemProvider.dispose();
if (detailsItemProvider != null) detailsItemProvider.dispose();
if (ifmlWindowItemProvider != null) ifmlWindowItemProvider.dispose();
if (onLoadEventItemProvider != null) onLoadEventItemProvider.dispose();
if (ifmlMenuItemProvider != null) ifmlMenuItemProvider.dispose();
if (jumpEventItemProvider != null) jumpEventItemProvider.dispose();
if (landingEventItemProvider != null) landingEventItemProvider.dispose();
if (setContextEventItemProvider != null) setContextEventItemProvider.dispose();
}
}
|
|
/*
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Tomcat", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
* [Additional notices, if required by prior licensing conditions]
*
*/
package org.apache.tomcat.util;
import java.io.IOException;
import java.io.OutputStream;
import java.util.*;
import java.text.*;
/**
* This class can be used to efficiently parse and write an RFC 1123
* formatted date in an HTTP message header. Also supports reading the
* RFC 1036 format and ANSI C's asctime() format, as suggested by HTTP/1.0
* and mandated by HTTP/1.1.
*
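 * <p>A minimal usage sketch (the date string is the RFC 1123 sample used by this
 * class; any of the three supported formats would parse):
 * <pre>
 *   HttpDate date = new HttpDate();
 *   date.parse("Sun, 06 Nov 1994 08:49:37 GMT");
 *   long millis = date.getTime();     // milliseconds since the epoch
 *   String header = date.toString();  // always re-formatted as RFC 1123, GMT
 * </pre>
 *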
* @author dac@eng.sun.com
* @author Jason Hunter [jch@eng.sun.com]
* @author James Todd [gonzo@eng.sun.com]
*/
public class HttpDate extends Ascii {
// private StringManager sm =
// StringManager.getManager(Constants.Package);
// ONLY FOR COMPAT -- KILL ASAP -- just make sure that dependent
// classes know what's up. ref. MimeHeaderField
private static final String DATESTR = "Sun, 06 Nov 1994 08:49:37 GMT";
public static final int DATELEN = DATESTR.length();
// END COMPAT -- DON'T FORGET TO KILL
// we force our locale here as all http dates are in english
private final static Locale loc = Locale.US;
// all http dates are expressed as time at GMT
private final static TimeZone zone = TimeZone.getTimeZone("GMT");
// format for RFC 1123 date string -- "Sun, 06 Nov 1994 08:49:37 GMT"
private final static String rfc1123Pattern =
"EEE, dd MMM yyyy HH:mm:ss z";
// format for RFC 1036 date string -- "Sunday, 06-Nov-94 08:49:37 GMT"
private final static String rfc1036Pattern =
"EEEEEEEEE, dd-MMM-yy HH:mm:ss z";
// format for C asctime() date string -- "Sun Nov 6 08:49:37 1994"
private final static String asctimePattern =
"EEE MMM d HH:mm:ss yyyy";
private final static SimpleDateFormat rfc1123Format =
new SimpleDateFormat(rfc1123Pattern, loc);
private final static SimpleDateFormat rfc1036Format =
new SimpleDateFormat(rfc1036Pattern, loc);
private final static SimpleDateFormat asctimeFormat =
new SimpleDateFormat(asctimePattern, loc);
static {
rfc1123Format.setTimeZone(zone);
rfc1036Format.setTimeZone(zone);
asctimeFormat.setTimeZone(zone);
}
// protected so that oldcookieexpiry in cookieutils can use
// yes, this is sloppy as crap and could stand to be done better.
protected Calendar calendar = new GregorianCalendar(zone, loc);
public HttpDate() {
calendar.setTime(new Date(System.currentTimeMillis()));
}
public HttpDate(long ms) {
calendar.setTime(new Date(ms));
}
public void setTime() {
calendar.setTime(new Date(System.currentTimeMillis()));
}
public void setTime(long ms) {
calendar.setTime(new Date(ms));
}
public void parse(String dateString) {
try {
Date date = rfc1123Format.parse(dateString);
calendar.setTime(date);
return;
} catch (ParseException e) {
}
try {
Date date = rfc1036Format.parse(dateString);
calendar.setTime(date);
return;
} catch (ParseException e) {
}
try {
Date date = asctimeFormat.parse(dateString);
calendar.setTime(date);
return;
} catch (ParseException pe) {
//String msg = sm.getString("httpDate.pe", dateString);
String msg = "Could not parse data: " + dateString;
throw new IllegalArgumentException(msg);
}
}
public void parse(byte[] b, int off, int len) {
// ok -- so this is pretty stoopid, but the old version of this
// source took this arg set, so we will too for now (backwards compat)
String dateString = new String(b, off, len);
parse(dateString);
}
public void write(OutputStream out) throws IOException {
String dateString = rfc1123Format.format(calendar.getTime());
byte[] b = dateString.getBytes();
out.write(b);
}
public String toString() {
return rfc1123Format.format(calendar.getTime());
}
public long getTime() {
return calendar.getTime().getTime();
}
public static long getCurrentTime() {
return System.currentTimeMillis();
}
//
// // KILL, THIS IS ONLY HERE FOR TEMP COMPAT as MimeHeaderField uses it.
// public int getBytes(byte[] buf, int off, int len) {
// if (len < DATELEN) {
// String msg = sm.getString("httpDate.iae", new Integer(len));
//
// throw new IllegalArgumentException(msg);
// }
//
// String dateString = rfc1123Format.format(calendar.getTime());
// byte[] b = dateString.getBytes();
// System.arraycopy(b, 0, buf, off, DATELEN);
// return DATELEN;
// }
}
|