gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package com.eaw1805.battles.field.processors.movement;

import com.eaw1805.battles.field.movement.BaseFieldBattlePathCalculator;
import com.eaw1805.battles.field.orders.OrderUtils;
import com.eaw1805.battles.field.processors.MovementProcessor;
import com.eaw1805.battles.field.processors.commander.CommanderType;
import com.eaw1805.battles.field.utils.ArmyUtils;
import com.eaw1805.battles.field.utils.FieldBattleCollectionUtils;
import com.eaw1805.battles.field.utils.MapUtils;
import com.eaw1805.data.model.Nation;
import com.eaw1805.data.model.army.Battalion;
import com.eaw1805.data.model.army.Brigade;
import com.eaw1805.data.model.battles.field.FieldBattleMap;
import com.eaw1805.data.model.battles.field.FieldBattlePosition;
import com.eaw1805.data.model.battles.field.FieldBattleSector;
import com.eaw1805.data.model.battles.field.Order;
import com.eaw1805.data.model.battles.field.enumerations.ArmEnum;
import com.eaw1805.data.model.battles.field.enumerations.FormationEnum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Base class for processing movement for a particular type of order.
 * Each subclass processes movement of a single order type by implementing
 * {@link #afterCheckpointsOrderSpecificMovement} (and optionally overriding
 * {@link #beforeCheckpointsOrderMovement} and {@link #move}).
 *
 * @author fragkakis
 */
public abstract class BaseOrderMovementProcessor {

    /** The parent processor that owns battle-wide state (map, sides, morale checker, etc.). */
    protected MovementProcessor movementProcessor;

    /** Filter used by subclasses to select brigades for additional orders. */
    protected AdditionalOrderBrigadeFilter additionalOrderBrigadeFilter;

    /**
     * This is the sector on which a brigade will stop advancing in case it
     * fails a morale check when advancing next to an enemy.
     * NOTE(review): this field is a side channel — it is written inside
     * {@link #proceedTowardsSector} (set to the previous sector of the path as
     * it is walked) and consumed later in {@link #move}. It is reset to null at
     * the start of every {@link #move} call.
     */
    protected FieldBattleSector retreatPointOnMoraleCheckFail;

    private final static Logger LOGGER = LoggerFactory.getLogger(BaseOrderMovementProcessor.class);

    /**
     * Morale-check modifiers applied when a brigade of the outer-key arm moves
     * adjacent to an enemy of the inner-key arm (percentage points).
     */
    private static final Map<ArmEnum, Map<ArmEnum, Integer>> ATTACK_MORALE_MODIFIERS;

    static {
        ATTACK_MORALE_MODIFIERS = new HashMap<ArmEnum, Map<ArmEnum, Integer>>();
        // Infantry: penalized when closing on cavalry (-20) or artillery (-10).
        ATTACK_MORALE_MODIFIERS.put(ArmEnum.INFANTRY, new HashMap<ArmEnum, Integer>());
        ATTACK_MORALE_MODIFIERS.get(ArmEnum.INFANTRY).put(ArmEnum.INFANTRY, 0);
        ATTACK_MORALE_MODIFIERS.get(ArmEnum.INFANTRY).put(ArmEnum.CAVALRY, -20);
        ATTACK_MORALE_MODIFIERS.get(ArmEnum.INFANTRY).put(ArmEnum.ARTILLERY, -10);
        // Cavalry: bonus against infantry and other cavalry (+10).
        ATTACK_MORALE_MODIFIERS.put(ArmEnum.CAVALRY, new HashMap<ArmEnum, Integer>());
        ATTACK_MORALE_MODIFIERS.get(ArmEnum.CAVALRY).put(ArmEnum.INFANTRY, 10);
        ATTACK_MORALE_MODIFIERS.get(ArmEnum.CAVALRY).put(ArmEnum.CAVALRY, 10);
        ATTACK_MORALE_MODIFIERS.get(ArmEnum.CAVALRY).put(ArmEnum.ARTILLERY, 0);
        // Artillery: no modifier (it never moves adjacent to enemies anyway, see move()).
        ATTACK_MORALE_MODIFIERS.put(ArmEnum.ARTILLERY, new HashMap<ArmEnum, Integer>());
        ATTACK_MORALE_MODIFIERS.get(ArmEnum.ARTILLERY).put(ArmEnum.INFANTRY, 0);
        ATTACK_MORALE_MODIFIERS.get(ArmEnum.ARTILLERY).put(ArmEnum.CAVALRY, 0);
        ATTACK_MORALE_MODIFIERS.get(ArmEnum.ARTILLERY).put(ArmEnum.ARTILLERY, 0);
    }

    /**
     * Constructor.
     *
     * @param movementProcessor the parent movement processor providing battle-wide context
     */
    public BaseOrderMovementProcessor(MovementProcessor movementProcessor) {
        this.movementProcessor = movementProcessor;
        additionalOrderBrigadeFilter = new AdditionalOrderBrigadeFilter();
    }

    /**
     * The top-level method of the movement processor. Moves a brigade across
     * the battle field, depending on the order it has been given. By default,
     * it moves the brigade along its checkpoints, and then performs the
     * order-specific movement. Override if appropriate.
     *
     * @param brigade             the brigade to be moved.
     * @param visibleEnemySectors the sectors of visible enemies
     * @param visibleEnemies      the visible enemies
     * @param pathCalculator      the path calculator
     * @param order               the order of the brigade
     */
    public void move(Brigade brigade, Set<FieldBattleSector> visibleEnemySectors,
                     Set<Brigade> visibleEnemies, BaseFieldBattlePathCalculator pathCalculator, Order order) {

        // Reset the morale-failure fallback sector for this halfround.
        retreatPointOnMoraleCheckFail = null;

        FieldBattleMap fbMap = movementProcessor.getParent().getFbMap();
        int remainingMps = findMps(brigade);

        // Phase 1: order-specific movement that precedes checkpoint movement (no-op by default).
        remainingMps = beforeCheckpointsOrderMovement(brigade, visibleEnemySectors, visibleEnemies,
                pathCalculator, order, remainingMps);

        // Phase 2: a formation change, if needed, consumes the whole halfround (sets MPs to -1).
        remainingMps = changeFormationIfAppropriate(brigade, order, remainingMps);

        // Phase 3: march along the order's checkpoints while movement points remain.
        if (remainingMps > 0 && !OrderUtils.lastCheckpointReached(order)) {
            remainingMps = proceedTowardsLastCheckpoint(fbMap, brigade, pathCalculator, order, remainingMps);
        }

        // Phase 4: once past the last checkpoint, run the subclass's order-specific movement.
        if (remainingMps > 0 && OrderUtils.lastCheckpointReached(order)) {
            afterCheckpointsOrderSpecificMovement(brigade, visibleEnemySectors, visibleEnemies,
                    pathCalculator, order, remainingMps);
        }

        // the order may have resulted in the brigade having left the field battle (i.e. retreat).
        // In that case, no movement can take place
        if (brigade.getFieldBattlePosition().exists()) {

            FieldBattleSector positionAfterMove = MapUtils.getSectorFromPosition(fbMap, brigade.getFieldBattlePosition());

            // When moving adjacently to an enemy, perform a morale check. If
            // the check fails, stay 1 tile away. If the check passes, move
            // normally next to the enemy. This does not apply for artilleries,
            // as they will never attempt to move next to an enemy.
            if (brigade.getArmTypeEnum() != ArmEnum.ARTILLERY) {

                int enemySide = findSide(brigade.getNation()) == 0 ? 1 : 0;
                Set<FieldBattleSector> neighbours = MapUtils.getHorizontalAndVerticalNeighbours(positionAfterMove);
                Set<Brigade> neighbouringEnemies = movementProcessor.getParent().findBrigadesOfSide(neighbours, enemySide);

                if (!neighbouringEnemies.isEmpty()) {
                    // A single random adjacent enemy determines the morale modifier.
                    Brigade randomNeighbourEnemy = FieldBattleCollectionUtils.getRandom(neighbouringEnemies);
                    int moraleModifier = ATTACK_MORALE_MODIFIERS.get(brigade.getArmTypeEnum()).get(randomNeighbourEnemy.getArmTypeEnum());

                    // attacking brigades that are influenced by "Fearless attacker" commanders receive
                    // a +5% bonus in morale checks
                    if (order.getOrderTypeEnum().isAttackOrder()
                            && movementProcessor.getParent().getCommanderProcessor().influencedByCommanderOfType(brigade, CommanderType.FEARLESS_ATTACKER)) {
                        LOGGER.trace("{} is influenced by a Fearless attacker commander and is performing an attack order, +5% morale check bonus", brigade);
                        moraleModifier += 5;
                    }

                    LOGGER.debug("{} is {} and is attacking {} which is {}, {}% morale check bonus",
                            new Object[]{brigade, brigade.getArmTypeEnum(), randomNeighbourEnemy, randomNeighbourEnemy.getArmTypeEnum(), moraleModifier});

                    boolean moraleCheckResult = movementProcessor.getParent().getMoraleChecker().checkMorale(brigade, moraleModifier);

                    // On failure, fall back to the sector recorded by proceedTowardsSector
                    // (the tile just before the enemy-adjacent one).
                    if (!moraleCheckResult && retreatPointOnMoraleCheckFail != null) {
                        positionAfterMove = retreatPointOnMoraleCheckFail;
                        brigade.setFieldBattlePosition(new FieldBattlePosition(retreatPointOnMoraleCheckFail.getX(), retreatPointOnMoraleCheckFail.getY()));
                    }
                }

            } else {
                // NOTE(review): intentionally empty — artillery never moves adjacent to an
                // enemy (see proceedTowardsSector), so no morale check is performed here.
            }

            movementProcessor.moveBrigade(brigade, positionAfterMove, pathCalculator);
        }
    }

    /**
     * Computes the brigade's movement points as the average of its battalions'
     * type speeds.
     * NOTE(review): divides by battalion count — would throw ArithmeticException
     * for a brigade with no battalions; presumably brigades always have at
     * least one battalion — confirm against callers.
     *
     * @param brigade the brigade
     * @return the (integer-averaged) movement points
     */
    protected int findMps(Brigade brigade) {
        int totalMps = 0;
        for (Battalion battalion : brigade.getBattalions()) {
            totalMps += battalion.getType().getSps();
        }
        int mps = totalMps / brigade.getBattalions().size();
        LOGGER.debug("Movement points: {}", mps);
        return mps;
    }

    /**
     * If a formation change is required, this takes up all the halfround.
     *
     * @param brigade      the brigade
     * @param order        the order
     * @param remainingMps the remaining movement points
     * @return the new remaining movement points (-1 when the formation changed,
     *         unchanged otherwise)
     */
    private int changeFormationIfAppropriate(Brigade brigade, Order order, int remainingMps) {

        /**
         * Check for first half round and set formation
         */
        if (brigade.getFormation() == null) {
            // First halfround: adopting the order's formation is free.
            brigade.setFormationEnum(order.getFormationEnum());

        } else {
            FormationEnum orderFormation = order.getFormationEnum();
            FormationEnum brigadeCurrentFormation = brigade.getFormationEnum();

            if (brigadeCurrentFormation != orderFormation) {

                /**
                 * In case this is an Infantry in Square, don't change the formation
                 * if there is a non-routing Cavalry enemy in radius 3.
                 */
                if (brigade.getArmTypeEnum() == ArmEnum.INFANTRY && brigadeCurrentFormation == FormationEnum.SQUARE) {
                    int ourSide = movementProcessor.getParent().findSide(brigade);
                    int enemySide = ourSide == 0 ? 1 : 0;
                    FieldBattleSector currentLocation = movementProcessor.getParent().getSector(brigade);
                    Set<FieldBattleSector> sectorsInRadius3 = MapUtils.findSectorsInRadius(currentLocation, 3);
                    Set<Brigade> enemiesInRadius3 = movementProcessor.getParent().findBrigadesOfSide(sectorsInRadius3, enemySide);

                    if (ArmyUtils.containsUnbrokenBrigadeOfArm(enemiesInRadius3, ArmEnum.CAVALRY)) {
                        // Stay in square: leaving it near unbroken cavalry would be suicidal.
                        return remainingMps;
                    }
                }

                // Changing the formation takes up the whole halfround.
                LOGGER.debug("{}: Changed formation from {} to {}, no further movement possible",
                        new Object[]{brigade, brigadeCurrentFormation, orderFormation});
                brigade.setFormationEnum(orderFormation);
                remainingMps = -1;
            }
        }
        return remainingMps;
    }

    /**
     * Abstract method that contains the order-specific movement rules that must
     * take place BEFORE any checkpoint-related movement has taken place.
     * Default implementation does not perform any movement. Must be overridden
     * if necessary.
     *
     * @param brigade             the brigade to be moved.
     * @param visibleEnemySectors the sectors of visible enemies
     * @param visibleEnemies      the visible enemies
     * @param pathCalculator      the path calculator
     * @param order               the order of the brigade
     * @param remainingMps        the remaining movement points
     * @return the remaining movement points
     */
    protected int beforeCheckpointsOrderMovement(Brigade brigade, Set<FieldBattleSector> visibleEnemySectors,
                                                 Set<Brigade> visibleEnemies, BaseFieldBattlePathCalculator pathCalculator,
                                                 Order order, int remainingMps) {
        // do nothing
        return remainingMps;
    }

    /**
     * Abstract method that contains the order-specific movement rules that must
     * take place AFTER the last checkpoint has been reached. Must be implemented.
     *
     * @param brigade             the brigade to be moved.
     * @param visibleEnemySectors the sectors of visible enemies
     * @param visibleEnemies      the visible enemies
     * @param pathCalculator      the path calculator
     * @param order               the order of the brigade
     * @param remainingMps        the remaining movement points
     */
    protected abstract void afterCheckpointsOrderSpecificMovement(Brigade brigade,
                                                                  Set<FieldBattleSector> visibleEnemySectors, Set<Brigade> visibleEnemies,
                                                                  BaseFieldBattlePathCalculator pathCalculator, Order order, int remainingMps);

    /**
     * Moves the brigade through its order's remaining checkpoints, in sequence,
     * while movement points last. Marks each checkpoint as reached when the
     * brigade lands exactly on it.
     *
     * @param fbMap          the field battle map
     * @param brigade        the brigade to move
     * @param pathCalculator the path calculator
     * @param order          the order whose checkpoints are followed
     * @param remainingMps   the remaining movement points
     * @return the remaining movement points after the march
     */
    protected int proceedTowardsLastCheckpoint(FieldBattleMap fbMap, Brigade brigade,
                                               BaseFieldBattlePathCalculator pathCalculator, Order order, int remainingMps) {

        FieldBattleSector positionAfterMove = MapUtils.getSectorFromPosition(fbMap, brigade.getFieldBattlePosition());

        while (remainingMps > 0) {
            FieldBattlePosition nextCheckpoint = OrderUtils.nextCheckpoint(order);
            FieldBattleSector nextCheckpointSector = MapUtils.getSectorFromPosition(fbMap, nextCheckpoint);

            // we consider backwards movement allowed when still moving towards last checkpoint
            remainingMps = proceedTowardsSector(brigade, nextCheckpointSector, remainingMps, pathCalculator, true);

            positionAfterMove = MapUtils.getSectorFromPosition(fbMap, brigade.getFieldBattlePosition());

            if (positionAfterMove == nextCheckpointSector) {
                // checkpoint reached, mark it as such and return;
                markCheckpointAsReached(order, positionAfterMove);
                if (OrderUtils.lastCheckpointReached(order)) {
                    break;
                }
            } else {
                // even if there are remaining MPs, they apparently were not enough to get us
                // to the next checkpoint, so stop.
                break;
            }
        }
        return remainingMps;
    }

    /**
     * Marks as reached the (first unreached, existing) checkpoint of the order
     * that matches the given sector's coordinates.
     *
     * @param order             the order whose checkpoint is to be marked
     * @param positionAfterMove the sector the brigade landed on
     * @throws IllegalArgumentException if the sector matches none of the order's checkpoints
     */
    private void markCheckpointAsReached(Order order, FieldBattleSector positionAfterMove) {

        FieldBattlePosition checkpoint1 = order.getCheckpoint1();
        FieldBattlePosition checkpoint2 = order.getCheckpoint2();
        FieldBattlePosition checkpoint3 = order.getCheckpoint3();

        if (checkpoint1.exists() && checkpoint1.getX() == positionAfterMove.getX()
                && checkpoint1.getY() == positionAfterMove.getY() && !order.isReachedCheckpoint1()) {
            order.setReachedCheckpoint1(true);
        } else if (checkpoint2.exists() && checkpoint2.getX() == positionAfterMove.getX()
                && checkpoint2.getY() == positionAfterMove.getY() && !order.isReachedCheckpoint2()) {
            order.setReachedCheckpoint2(true);
        } else if (checkpoint3.exists() && checkpoint3.getX() == positionAfterMove.getX()
                && checkpoint3.getY() == positionAfterMove.getY() && !order.isReachedCheckpoint3()) {
            order.setReachedCheckpoint3(true);
        } else {
            throw new IllegalArgumentException("Sector " + positionAfterMove
                    + " does not correspond to any of the checkpoints " + checkpoint1 + ", " + checkpoint2 + ", " + checkpoint3);
        }
    }

    /**
     * This method handles the movement towards a sector.
     * Walks the cheapest path as far as the remaining movement points allow,
     * updating the brigade's position, capturing strategic points on the way
     * and recording {@link #retreatPointOnMoraleCheckFail}. If no path exists,
     * it recursively tries the destination's neighbours, closest first.
     *
     * @param brigade          the brigade to move
     * @param sector           the destination sector
     * @param remainingMps     the remaining movement points
     * @param pathCalculator   the path calculator
     * @param backwardsAllowed whether backwards movement is permitted
     * @return the remaining movement points (-1 when movement ends for this halfround)
     */
    protected int proceedTowardsSector(Brigade brigade, FieldBattleSector sector, int remainingMps,
                                       BaseFieldBattlePathCalculator pathCalculator, boolean backwardsAllowed) {

        FieldBattleSector currentLocation = MapUtils.getSectorFromPosition(sector.getMap(), brigade.getFieldBattlePosition());

        if (currentLocation == sector) {
            // brigade is already there, return its Movement Points as they are
            return remainingMps;
        }

        int ourSide = movementProcessor.getParent().findSide(brigade);
        int enemySide = ourSide == 0 ? 1 : 0;

        List<FieldBattleSector> path = pathCalculator.findPath(currentLocation, sector,
                brigade.getArmTypeEnum(), brigade.getFormationEnum(), backwardsAllowed);

        /**
         * If there is no path to the destination
         */
        if (path == null || path.size() == 0) {

            Set<FieldBattleSector> neighbours = MapUtils.getNeighbours(sector);

            if (neighbours.contains(currentLocation)) {
                // we are at a neighbouring sector, stay there

            } else {
                // move to the closest neighbouring sector
                List<FieldBattleSector> neighboursClosestFirst = MapUtils.orderByDistance(currentLocation, neighbours);
                for (FieldBattleSector neighbourClosestFirst : neighboursClosestFirst) {
                    List<FieldBattleSector> neighbourPath = pathCalculator.findPath(currentLocation, neighbourClosestFirst,
                            brigade.getArmTypeEnum(), brigade.getFormationEnum(), backwardsAllowed);
                    if (neighbourPath != null && !neighbourPath.isEmpty()) {
                        // Recurse towards the first reachable neighbour.
                        return proceedTowardsSector(brigade, neighbourClosestFirst, remainingMps, pathCalculator, backwardsAllowed);
                    }
                }
            }
            remainingMps = -1;

        } else {

            FieldBattleSector destinationWithinMps = null;

            // path.get(0) is the current location; walk the path from index 1.
            for (int i = 1; i < path.size(); i++) {

                // Remember the previous tile as the fallback on a failed morale check.
                retreatPointOnMoraleCheckFail = path.get(i - 1);
                FieldBattleSector pathSector = path.get(i);

                // artillery brigades will keep a distance of 2 tiles from any enemy, and won't move any longer
                // this is the only case where destinationWithinMps may end up null
                if (brigade.getArmTypeEnum() == ArmEnum.ARTILLERY) {
                    boolean nextToEnemy = sectorIsNextToEnemies(pathSector, enemySide);
                    if (nextToEnemy) {
                        remainingMps = -1;
                        break;
                    }
                }

                int cost = pathCalculator.findCost(currentLocation, pathSector,
                        brigade.getArmTypeEnum(), brigade.getFormationEnum(), backwardsAllowed);

                // for the 1st sector in the path, don't care about the cost
                if (i > 1 && cost > remainingMps) {
                    break;
                } else {
                    destinationWithinMps = pathSector;
                }
            }

            // in case of artillery closing in on enemies, the destination may be null
            if (destinationWithinMps != null) {
                remainingMps = remainingMps - pathCalculator.findCost(currentLocation, destinationWithinMps,
                        brigade.getArmTypeEnum(), brigade.getFormationEnum(), backwardsAllowed);

                // for any strategic points in the path, mark them as owned
                for (FieldBattleSector pathSector : path.subList(0, path.indexOf(destinationWithinMps) + 1)) {
                    if (pathSector.isStrategicPoint()) {
                        pathSector.setCurrentHolder(brigade.getNation());
                    }
                }

                brigade.getFieldBattlePosition().setX(destinationWithinMps.getX());
                brigade.getFieldBattlePosition().setY(destinationWithinMps.getY());
            }
        }

        return remainingMps;
    }

    /**
     * Checks whether any enemy brigade of the given side occupies a sector
     * adjacent to the given sector.
     *
     * @param sector    the sector to examine
     * @param enemySide the enemy side (0 or 1)
     * @return true if at least one adjacent enemy brigade exists
     */
    private boolean sectorIsNextToEnemies(FieldBattleSector sector, int enemySide) {
        Set<FieldBattleSector> neighbourSectors = MapUtils.getNeighbours(sector);
        Set<Brigade> neighbourEnemies = movementProcessor.getParent().findBrigadesOfSide(neighbourSectors, enemySide);
        if (neighbourEnemies.isEmpty()) {
            return false;
        } else {
            return true;
        }
    }

    /**
     * Finds the side of a nation.
     *
     * @param nation the nation
     * @return the side number (0 or 1)
     */
    protected int findSide(Nation nation) {
        return movementProcessor.getParent().getBattleField().getSide(0).contains(nation) ? 0 : 1;
    }
}
/**
 * Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.kaazing.gateway.client.impl.wsn;

import org.kaazing.gateway.client.impl.Expectations;
import org.kaazing.gateway.client.impl.WebSocketChannel;
import org.kaazing.gateway.client.impl.WebSocketHandler;
import org.kaazing.gateway.client.impl.WebSocketHandlerListener;
import org.kaazing.gateway.client.impl.util.WSURI;
import org.kaazing.gateway.client.impl.ws.WebSocketCompositeChannel;
import org.kaazing.gateway.client.impl.wsn.WebSocketNativeBalancingHandler;
import org.kaazing.gateway.client.impl.wsn.WebSocketNativeChannel;
import org.kaazing.gateway.client.util.WrappedByteBuffer;
import org.kaazing.net.ws.WebSocket;
import org.kaazing.net.ws.WebSocketFactory;
import org.kaazing.net.ws.impl.WebSocketImpl;

import java.io.UnsupportedEncodingException;
import java.net.URISyntaxException;

import org.jmock.Mockery;
import org.jmock.api.Invocation;
import org.jmock.lib.action.CustomAction;
import org.junit.Test;

/**
 * Unit tests for WebSocketNativeBalancingHandler. Each test wires the handler
 * between a mocked next handler and a mocked listener (jMock), drives one
 * handler entry point and verifies the expected listener/next-handler calls.
 * Balancer control messages are prefixed with the '\uf0ff' character.
 */
public class WebSocketNativeBalancingHandlerTest {

    /*
     * pass through for non-kaazing gateway
     * should treat balancing message as regular message
     */
    @Test
    public void testProcessConnect() throws URISyntaxException {
        Mockery context = new Mockery();
        final WebSocketHandler nextHandler = context.mock(WebSocketHandler.class);
        final WebSocketHandlerListener listener = context.mock(WebSocketHandlerListener.class);

        // Non-kaazing protocol: balancer framing must NOT be interpreted.
        final String[] requestedProtocols = new String[] { "foo" };
        // A balancer "no redirect" frame; delivered as a plain binary message here.
        final String buf = '\uf0ff' + "N";

        context.checking(new Expectations() {
            {
                oneOf(nextHandler).setListener(with(aNonNull(WebSocketHandlerListener.class)));
                will(saveParameter("listener", 0));
                oneOf(nextHandler).processConnect(with(aNonNull(WebSocketChannel.class)), with(aNonNull(WSURI.class)), with(equal(requestedProtocols)));
                will(new CustomAction("will fire connectionOpen") {
                    @Override
                    public Object invoke(Invocation invocation) {
                        WebSocketHandlerListener listener = (WebSocketHandlerListener) lookup("listener");
                        WebSocketChannel channel = (WebSocketChannel) invocation.getParameter(0);
                        listener.connectionOpened(channel, "");
                        return null;
                    }
                });
                oneOf(listener).connectionOpened(with(aNonNull(WebSocketChannel.class)), with(""));
                will(new CustomAction("will balancer message") {
                    @Override
                    public Object invoke(Invocation invocation) throws UnsupportedEncodingException {
                        WebSocketHandlerListener listener = (WebSocketHandlerListener) lookup("listener");
                        WebSocketChannel channel = (WebSocketChannel) invocation.getParameter(0);
                        WrappedByteBuffer message = new WrappedByteBuffer(buf.getBytes("UTF-8"));
                        listener.binaryMessageReceived(channel, message);
                        return null;
                    }
                });
                // The balancer-looking bytes must surface as a regular binary message.
                oneOf(listener).binaryMessageReceived(with(aNonNull(WebSocketChannel.class)), with(aNonNull(WrappedByteBuffer.class)));
            }
        });

        WSURI uri = new WSURI("ws://locationhost:8001/echo");
        WebSocketChannel channel = new WebSocketNativeChannel(uri);

        WebSocketNativeBalancingHandler handler = new WebSocketNativeBalancingHandler();
        handler.setNextHandler(nextHandler);
        handler.setListener(listener);

        handler.processConnect(channel, uri, requestedProtocols);
        context.assertIsSatisfied();
    }

    /*
     * wait receive balancer message for kaazing gateway
     */
    @Test
    public void testWaitBalancerMessage() throws URISyntaxException {
        Mockery context = new Mockery();
        final WebSocketHandler nextHandler = context.mock(WebSocketHandler.class);
        final WebSocketHandlerListener listener = context.mock(WebSocketHandlerListener.class);

        // Kaazing handshake protocol: open must be deferred until the balancer message arrives,
        // hence no connectionOpened expectation on the listener here.
        final String[] requestedProtocols = new String[] { "x-kaazing-handshake" };

        context.checking(new Expectations() {
            {
                oneOf(nextHandler).setListener(with(aNonNull(WebSocketHandlerListener.class)));
                will(saveParameter("listener", 0));
                oneOf(nextHandler).processConnect(with(aNonNull(WebSocketChannel.class)), with(aNonNull(WSURI.class)), with(equal(requestedProtocols)));
                will(new CustomAction("will fire connectionOpen") {
                    @Override
                    public Object invoke(Invocation invocation) {
                        WebSocketHandlerListener listener = (WebSocketHandlerListener) lookup("listener");
                        WebSocketChannel channel = (WebSocketChannel) invocation.getParameter(0);
                        listener.connectionOpened(channel, requestedProtocols[0]);
                        return null;
                    }
                });
            }
        });

        WSURI uri = new WSURI("ws://locationhost:8001/echo");
        WebSocketChannel channel = new WebSocketNativeChannel(uri);

        WebSocketNativeBalancingHandler handler = new WebSocketNativeBalancingHandler();
        handler.setNextHandler(nextHandler);
        handler.setListener(listener);

        handler.processConnect(channel, uri, requestedProtocols);
        context.assertIsSatisfied();
    }

    /*
     * pass through
     */
    @Test
    public void testProcessAuthorize() throws URISyntaxException {
        Mockery context = new Mockery();
        final WebSocketHandler nextHandler = context.mock(WebSocketHandler.class);
        final WebSocketHandlerListener listener = context.mock(WebSocketHandlerListener.class);

        context.checking(new Expectations() {
            {
                oneOf(nextHandler).setListener(with(aNonNull(WebSocketHandlerListener.class)));
                will(saveParameter("listener", 0));
                oneOf(nextHandler).processAuthorize(with(aNonNull(WebSocketChannel.class)), with("Application Basic relam"));
                will(new CustomAction("will fire 401 challenge") {
                    @Override
                    public Object invoke(Invocation invocation) {
                        WebSocketHandlerListener listener = (WebSocketHandlerListener) lookup("listener");
                        WebSocketChannel channel = (WebSocketChannel) invocation.getParameter(0);
                        listener.authenticationRequested(channel, "location", (String) invocation.getParameter(1));
                        return null;
                    }
                });
                oneOf(listener).authenticationRequested(with(aNonNull(WebSocketChannel.class)), with("location"), with("Application Basic relam"));
            }
        });

        WSURI uri = new WSURI("ws://locationhost:8001/echo");
        WebSocketChannel channel = new WebSocketChannel(uri);

        WebSocketNativeBalancingHandler handler = new WebSocketNativeBalancingHandler();
        handler.setNextHandler(nextHandler);
        handler.setListener(listener);

        handler.processAuthorize(channel, "Application Basic relam");
        context.assertIsSatisfied();
    }

    /*
     * pass through
     */
    @Test
    public void testProcessBinaryMessage() throws URISyntaxException {
        Mockery context = new Mockery();
        final WebSocketHandler nextHandler = context.mock(WebSocketHandler.class);
        final WebSocketHandlerListener listener = context.mock(WebSocketHandlerListener.class);

        final WrappedByteBuffer message = new WrappedByteBuffer("test message".getBytes());

        context.checking(new Expectations() {
            {
                oneOf(nextHandler).setListener(with(aNonNull(WebSocketHandlerListener.class)));
                will(saveParameter("listener", 0));
                oneOf(nextHandler).processBinaryMessage(with(aNonNull(WebSocketChannel.class)), with(message));
                will(new CustomAction("will fire text message received") {
                    @Override
                    public Object invoke(Invocation invocation) {
                        WebSocketHandlerListener listener = (WebSocketHandlerListener) lookup("listener");
                        WebSocketChannel channel = (WebSocketChannel) invocation.getParameter(0);
                        listener.binaryMessageReceived(channel, message);
                        return null;
                    }
                });
                oneOf(listener).binaryMessageReceived(with(aNonNull(WebSocketChannel.class)), with(message));
            }
        });

        WSURI uri = new WSURI("ws://locationhost:8001/echo");
        WebSocketChannel channel = getNativeChannel(uri);

        WebSocketNativeBalancingHandler handler = new WebSocketNativeBalancingHandler();
        handler.setNextHandler(nextHandler);
        handler.setListener(listener);

        handler.processBinaryMessage(channel, message);
        context.assertIsSatisfied();
    }

    /*
     * pass through
     */
    @Test
    public void testProcessTextMessage() throws URISyntaxException {
        Mockery context = new Mockery();
        final WebSocketHandler nextHandler = context.mock(WebSocketHandler.class);
        final WebSocketHandlerListener listener = context.mock(WebSocketHandlerListener.class);

        final WrappedByteBuffer message = new WrappedByteBuffer("test message".getBytes());

        context.checking(new Expectations() {
            {
                oneOf(nextHandler).setListener(with(aNonNull(WebSocketHandlerListener.class)));
                will(saveParameter("listener", 0));
                oneOf(nextHandler).processTextMessage(with(aNonNull(WebSocketChannel.class)), with("test message"));
                will(new CustomAction("will fire message received") {
                    @Override
                    public Object invoke(Invocation invocation) {
                        WebSocketHandlerListener listener = (WebSocketHandlerListener) lookup("listener");
                        WebSocketChannel channel = (WebSocketChannel) invocation.getParameter(0);
                        listener.binaryMessageReceived(channel, message);
                        return null;
                    }
                });
                oneOf(listener).binaryMessageReceived(with(aNonNull(WebSocketChannel.class)), with(aNonNull(WrappedByteBuffer.class)));
            }
        });

        WSURI uri = new WSURI("ws://locationhost:8001/echo");
        WebSocketChannel channel = getNativeChannel(uri);

        WebSocketNativeBalancingHandler handler = new WebSocketNativeBalancingHandler();
        handler.setNextHandler(nextHandler);
        handler.setListener(listener);

        handler.processTextMessage(channel, "test message");
        context.assertIsSatisfied();
    }

    /*
     * check message leading byte,
     * if it starts with '\uf0ff'N, fire open event
     * make sure it only fires open event twice
     */
    @Test
    public void testProcessBalancerMessage() throws URISyntaxException, UnsupportedEncodingException {
        Mockery context = new Mockery();
        final WebSocketHandler nextHandler = context.mock(WebSocketHandler.class);
        final WebSocketHandlerListener listener = context.mock(WebSocketHandlerListener.class);

        // Balancer "no redirect" control frame.
        final String balancerNoMessage = '\uf0ff' + "N";

        context.checking(new Expectations() {
            {
                oneOf(nextHandler).setListener(with(aNonNull(WebSocketHandlerListener.class)));
                will(saveParameter("listener", 0));
                oneOf(nextHandler).processTextMessage(with(aNonNull(WebSocketChannel.class)), with("test message"));
                will(new CustomAction("will balancer message") {
                    @Override
                    public Object invoke(Invocation invocation) throws UnsupportedEncodingException {
                        WebSocketHandlerListener listener = (WebSocketHandlerListener) lookup("listener");
                        WebSocketChannel channel = (WebSocketChannel) invocation.getParameter(0);
                        listener.textMessageReceived(channel, balancerNoMessage);
                        return null;
                    }
                });
                // 1st balancer frame opens the handshake protocol...
                oneOf(listener).connectionOpened(with(aNonNull(WebSocketChannel.class)), with("x-kaazing-handshake"));
                will(new CustomAction("will fire balancer message again") {
                    @Override
                    public Object invoke(Invocation invocation) throws UnsupportedEncodingException {
                        WebSocketHandlerListener listener = (WebSocketHandlerListener) lookup("listener");
                        WebSocketChannel channel = (WebSocketChannel) invocation.getParameter(0);
                        listener.textMessageReceived(channel, balancerNoMessage);
                        return null;
                    }
                });
                // ...2nd balancer frame opens with the empty protocol...
                oneOf(listener).connectionOpened(with(aNonNull(WebSocketChannel.class)), with(""));
                will(new CustomAction("will fire 3nd balancer message") {
                    @Override
                    public Object invoke(Invocation invocation) throws UnsupportedEncodingException {
                        WebSocketHandlerListener listener = (WebSocketHandlerListener) lookup("listener");
                        WebSocketChannel channel = (WebSocketChannel) invocation.getParameter(0);
                        listener.textMessageReceived(channel, balancerNoMessage);
                        return null;
                    }
                });
                // ...3rd balancer frame must be delivered as a plain text message (no 3rd open).
                oneOf(listener).textMessageReceived(with(aNonNull(WebSocketChannel.class)), with(aNonNull(String.class)));
                //with(new ByteBufferMatcher(buf, "match balancer message")));
            }
        });

        WSURI uri = new WSURI("ws://locationhost:8001/echo");
        WebSocketChannel channel = getNativeChannel(uri);

        WebSocketNativeBalancingHandler handler = new WebSocketNativeBalancingHandler();
        handler.setNextHandler(nextHandler);
        handler.setListener(listener);

        handler.processTextMessage(channel, "test message");
        context.assertIsSatisfied();
    }

    /*
     * check message leading byte,
     * if it starts with '\uf0ff'R, redirect
     */
    @Test
    public void testProcessBalancerRedirectMessage() throws URISyntaxException, UnsupportedEncodingException {
        Mockery context = new Mockery();
        final WebSocketHandler nextHandler = context.mock(WebSocketHandler.class);
        final WebSocketHandlerListener listener = context.mock(WebSocketHandlerListener.class);

        // Balancer redirect control frame carrying the redirect target URI.
        final String balancerRedirectMessage = '\uf0ff' + "Rws://localhost/echo";

        context.checking(new Expectations() {
            {
                oneOf(nextHandler).setListener(with(aNonNull(WebSocketHandlerListener.class)));
                will(saveParameter("listener", 0));
                oneOf(nextHandler).processTextMessage(with(aNonNull(WebSocketChannel.class)), with("test message"));
                will(new CustomAction("will fire redirect message") {
                    @Override
                    public Object invoke(Invocation invocation) {
                        WebSocketHandlerListener listener = (WebSocketHandlerListener) lookup("listener");
                        WebSocketChannel channel = (WebSocketChannel) invocation.getParameter(0);
                        listener.textMessageReceived(channel, balancerRedirectMessage);
                        return null;
                    }
                });
                // Redirect closes the current connection rather than surfacing a message.
                oneOf(nextHandler).processClose(with(aNonNull(WebSocketChannel.class)), with(0), with(aNull(String.class)));
            }
        });

        WSURI uri = new WSURI("ws://locationhost:8001/echo");
        WebSocketChannel channel = getNativeChannel(uri);

        WebSocketNativeBalancingHandler handler = new WebSocketNativeBalancingHandler();
        handler.setNextHandler(nextHandler);
        handler.setListener(listener);

        handler.processTextMessage(channel, "test message");
        context.assertIsSatisfied();
    }

    /**
     * Builds a WebSocketNativeChannel wired to a composite channel parent, as the
     * balancing handler expects for the pass-through tests.
     *
     * @param uri the target WebSocket URI
     * @return a native channel whose parent composite channel selects it
     * @throws URISyntaxException if the URI is invalid
     */
    private WebSocketNativeChannel getNativeChannel(WSURI uri) throws URISyntaxException {
        WebSocketFactory wsFactory = WebSocketFactory.createWebSocketFactory();
        WebSocket ws = wsFactory.createWebSocket(uri.getURI());
        WebSocketCompositeChannel cc = ((WebSocketImpl) ws).getCompositeChannel();
        WebSocketNativeChannel channel = new WebSocketNativeChannel(uri);
        cc.selectedChannel = channel;
        channel.setParent(cc);
        return channel;
    }
}
/*
 * PanoramaGL library
 * Version 0.2 beta
 * Copyright (c) 2010 Javier Baez <javbaezga@gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.panoramagl;

import java.util.List;

import com.panoramagl.enumerations.PLCameraAnimationType;
import com.panoramagl.hotspots.PLIHotspot;
import com.panoramagl.ios.UITouch;
import com.panoramagl.ios.structs.CGPoint;
import com.panoramagl.ios.structs.UIAcceleration;
import com.panoramagl.loaders.PLILoader;
import com.panoramagl.structs.PLPosition;
import com.panoramagl.transitions.PLITransition;

import android.hardware.SensorEvent;
import android.view.MotionEvent;

/**
 * Adapter-style listener for panorama view events. Every callback is a no-op
 * and every {@code onShould*} predicate permits the action by default, so
 * subclasses only override the events they care about.
 */
public abstract class PLViewListener {

    // ------------------------------------------------------------------
    // Touch events
    // ------------------------------------------------------------------

    /** Invoked when touches begin on the view. Default: no-op. */
    public void onTouchesBegan(PLIView view, List<UITouch> touches, MotionEvent event) {
    }

    /** Invoked while touches move across the view. Default: no-op. */
    public void onTouchesMoved(PLIView view, List<UITouch> touches, MotionEvent event) {
    }

    /** Invoked when touches end. Default: no-op. */
    public void onTouchesEnded(PLIView view, List<UITouch> touches, MotionEvent event) {
    }

    /** @return true to allow touch handling to begin (default). */
    public boolean onShouldBeginTouching(PLIView view, List<UITouch> touches, MotionEvent event) {
        return true;
    }

    /** Invoked after touch handling has begun. Default: no-op. */
    public void onDidBeginTouching(PLIView view, List<UITouch> touches, MotionEvent event) {
    }

    /** @return true to allow the touch-move to be processed (default). */
    public boolean onShouldMoveTouching(PLIView view, List<UITouch> touches, MotionEvent event) {
        return true;
    }

    /** Invoked after a touch-move has been processed. Default: no-op. */
    public void onDidMoveTouching(PLIView view, List<UITouch> touches, MotionEvent event) {
    }

    /** @return true to allow touch handling to end (default). */
    public boolean onShouldEndTouching(PLIView view, List<UITouch> touches, MotionEvent event) {
        return true;
    }

    /** Invoked after touch handling has ended. Default: no-op. */
    public void onDidEndTouching(PLIView view, List<UITouch> touches, MotionEvent event) {
    }

    // ------------------------------------------------------------------
    // Accelerometer events
    // ------------------------------------------------------------------

    /** @return true to allow the acceleration to be applied (default). */
    public boolean onShouldAccelerate(PLIView view, UIAcceleration acceleration, SensorEvent event) {
        return true;
    }

    /** Invoked after an acceleration has been applied. Default: no-op. */
    public void onDidAccelerate(PLIView view, UIAcceleration acceleration, SensorEvent event) {
    }

    // ------------------------------------------------------------------
    // Inertia events
    // ------------------------------------------------------------------

    /** @return true to allow an inertia animation to start (default). */
    public boolean onShouldBeginInertia(PLIView view, CGPoint startPoint, CGPoint endPoint) {
        return true;
    }

    /** Invoked when an inertia animation starts. Default: no-op. */
    public void onDidBeginInertia(PLIView view, CGPoint startPoint, CGPoint endPoint) {
    }

    /** @return true to let the inertia animation keep running (default). */
    public boolean onShouldRunInertia(PLIView view, CGPoint startPoint, CGPoint endPoint) {
        return true;
    }

    /** Invoked on each inertia animation step. Default: no-op. */
    public void onDidRunInertia(PLIView view, CGPoint startPoint, CGPoint endPoint) {
    }

    /** Invoked when an inertia animation finishes. Default: no-op. */
    public void onDidEndInertia(PLIView view, CGPoint startPoint, CGPoint endPoint) {
    }

    // ------------------------------------------------------------------
    // Scrolling events
    // ------------------------------------------------------------------

    /** @return true to allow scrolling to begin (default). Note: name kept as-is for compatibility. */
    public boolean onShouldBeingScrolling(PLIView view, CGPoint startPoint, CGPoint endPoint) {
        return true;
    }

    /** Invoked when scrolling begins. Default: no-op. */
    public void onDidBeginScrolling(PLIView view, CGPoint startPoint, CGPoint endPoint) {
    }

    /** Invoked when scrolling ends. Default: no-op. */
    public void onDidEndScrolling(PLIView view, CGPoint startPoint, CGPoint endPoint) {
    }

    // ------------------------------------------------------------------
    // Zooming events
    // ------------------------------------------------------------------

    /** @return true to allow zooming to begin (default). */
    public boolean onShouldBeginZooming(PLIView view) {
        return true;
    }

    /** Invoked when zooming begins. Default: no-op. */
    public void onDidBeginZooming(PLIView view, CGPoint startPoint, CGPoint endPoint) {
    }

    /** @return true to let the zoom step be applied (default). */
    public boolean onShouldRunZooming(PLIView view, float distance, boolean isZoomIn, boolean isZoomOut) {
        return true;
    }

    /** Invoked after a zoom step has been applied. Default: no-op. */
    public void onDidRunZooming(PLIView view, float distance, boolean isZoomIn, boolean isZoomOut) {
    }

    /** Invoked when zooming ends. Default: no-op. */
    public void onDidEndZooming(PLIView view) {
    }

    // ------------------------------------------------------------------
    // Reset events
    // ------------------------------------------------------------------

    /** @return true to allow the view to reset (default). */
    public boolean onShouldReset(PLIView view) {
        return true;
    }

    /** Invoked after the view has reset. Default: no-op. */
    public void onDidReset(PLIView view) {
    }

    // ------------------------------------------------------------------
    // Camera events
    // ------------------------------------------------------------------

    /** Invoked when a camera animation begins. Default: no-op. */
    public void onDidBeginCameraAnimation(PLIView view, Object sender, PLICamera camera, PLCameraAnimationType type) {
    }

    /** Invoked when a camera animation ends. Default: no-op. */
    public void onDidEndCameraAnimation(PLIView view, Object sender, PLICamera camera, PLCameraAnimationType type) {
    }

    /** Invoked after the camera has been reset. Default: no-op. */
    public void onDidResetCamera(PLIView view, Object sender, PLICamera camera) {
    }

    /** Invoked after the camera has looked at a pitch/yaw target. Default: no-op. */
    public void onDidLookAtCamera(PLIView view, Object sender, PLICamera camera, float pitch, float yaw, boolean animated) {
    }

    /** Invoked after the camera has rotated. Default: no-op. */
    public void onDidRotateCamera(PLIView view, Object sender, PLICamera camera, float pitch, float yaw, float roll) {
    }

    /** Invoked after the camera's field of view changed. Default: no-op. */
    public void onDidFovCamera(PLIView view, Object sender, PLICamera camera, float fov, boolean animated) {
    }

    // ------------------------------------------------------------------
    // Scene element events
    // ------------------------------------------------------------------

    /** Invoked when the pointer hovers over a scene element. Default: no-op. */
    public void onDidOverElement(PLIView view, PLISceneElement element, CGPoint screenPoint, PLPosition scene3DPoint) {
    }

    /** Invoked when a scene element is clicked. Default: no-op. */
    public void onDidClickElement(PLIView view, PLISceneElement element, CGPoint screenPoint, PLPosition scene3DPoint) {
    }

    /** Invoked when the pointer leaves a scene element. Default: no-op. */
    public void onDidOutElement(PLIView view, PLISceneElement element, CGPoint screenPoint, PLPosition scene3DPoint) {
    }

    // ------------------------------------------------------------------
    // Hotspot events
    // ------------------------------------------------------------------

    /** Invoked when the pointer hovers over a hotspot. Default: no-op. */
    public void onDidOverHotspot(PLIView view, PLIHotspot hotspot, CGPoint screenPoint, PLPosition scene3DPoint) {
    }

    /** Invoked when a hotspot is clicked. Default: no-op. */
    public void onDidClickHotspot(PLIView view, PLIHotspot hotspot, CGPoint screenPoint, PLPosition scene3DPoint) {
    }

    /** Invoked when the pointer leaves a hotspot. Default: no-op. */
    public void onDidOutHotspot(PLIView view, PLIHotspot hotspot, CGPoint screenPoint, PLPosition scene3DPoint) {
    }

    // ------------------------------------------------------------------
    // Transition events
    // ------------------------------------------------------------------

    /** Invoked when a transition begins. Default: no-op. */
    public void onDidBeginTransition(PLIView view, PLITransition transition) {
    }

    /** Invoked as a transition progresses. Default: no-op. */
    public void onDidProcessTransition(PLIView view, PLITransition transition, int progressPercentage) {
    }

    /** Invoked when a transition is stopped before completion. Default: no-op. */
    public void onDidStopTransition(PLIView view, PLITransition transition, int progressPercentage) {
    }

    /** Invoked when a transition completes. Default: no-op. */
    public void onDidEndTransition(PLIView view, PLITransition transition) {
    }

    // ------------------------------------------------------------------
    // Loader events
    // ------------------------------------------------------------------

    /** Invoked when a loader starts. Default: no-op. */
    public void onDidBeginLoader(PLIView view, PLILoader loader) {
    }

    /** Invoked when a loader finishes successfully. Default: no-op. */
    public void onDidCompleteLoader(PLIView view, PLILoader loader) {
    }

    /** Invoked when a loader is stopped. Default: no-op. */
    public void onDidStopLoader(PLIView view, PLILoader loader) {
    }

    /** Invoked when a loader fails with an error. Default: no-op. */
    public void onDidErrorLoader(PLIView view, PLILoader loader, String error) {
    }
}
package app.bennsandoval.com.woodmin.activities; import android.app.AlertDialog; import android.app.ProgressDialog; import android.content.ContentUris; import android.content.ContentValues; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.preference.PreferenceManager; import android.support.design.widget.FloatingActionButton; import android.support.v4.app.LoaderManager; import android.support.v4.content.CursorLoader; import android.support.v4.content.Loader; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.CardView; import android.text.TextUtils; import android.util.Log; import android.view.View; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import android.widget.Toast; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.squareup.picasso.Picasso; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Locale; import app.bennsandoval.com.woodmin.R; import app.bennsandoval.com.woodmin.Woodmin; import app.bennsandoval.com.woodmin.data.WoodminContract; import app.bennsandoval.com.woodmin.interfaces.Woocommerce; import app.bennsandoval.com.woodmin.models.v3.orders.Item; import app.bennsandoval.com.woodmin.models.v3.orders.MetaItem; import app.bennsandoval.com.woodmin.models.v3.orders.Note; import app.bennsandoval.com.woodmin.models.v3.orders.Notes; import app.bennsandoval.com.woodmin.models.v3.orders.Order; import app.bennsandoval.com.woodmin.models.v3.orders.OrderResponse; import app.bennsandoval.com.woodmin.models.v3.orders.OrderUpdate; import app.bennsandoval.com.woodmin.models.v3.orders.OrderUpdateValues; import app.bennsandoval.com.woodmin.models.v3.products.Product; import 
app.bennsandoval.com.woodmin.models.v3.products.Variation;

import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;

/**
 * Activity showing the full detail of a single WooCommerce order: header/status,
 * customer contact shortcuts (call / e-mail), billing and shipping addresses,
 * line items with product thumbnails, order notes fetched from the remote API,
 * and a FAB that marks the order as "completed" on the server.
 *
 * The order itself is read from the local content provider via a CursorLoader
 * (stored as a JSON blob) and deserialized with Gson.
 */
public class OrderDetail extends AppCompatActivity implements LoaderManager.LoaderCallbacks<Cursor> {

    private final String LOG_TAG = OrderDetail.class.getSimpleName();

    // Id of the order being displayed. NOTE(review): "mOderId" looks like a typo
    // for "mOrderId" — kept as-is; renaming is out of scope for a doc pass.
    private int mOderId = -1;
    // Order deserialized from the provider's JSON column; null until the loader finishes.
    private Order mOrderSelected;
    private Gson mGson = new GsonBuilder().create();

    // Views bound in onCreate().
    private LinearLayout mHeader;
    private TextView mOrder;
    private TextView mEmail;
    private TextView mPhone;
    private LinearLayout mLyPhone;
    private LinearLayout mLyEmail;
    private TextView mPrice;
    private TextView mStatus;
    private TextView mCustomer;
    private TextView mItems;
    private TextView mDate;
    private TextView mPayment;
    private TextView mAmount;
    private TextView mTaxes;
    private TextView mTotal;
    private TextView mBilling;
    private TextView mShipping;
    private ProgressDialog mProgress;

    // Loader id and projection for the single-order query (JSON column only).
    private static final int ORDER_LOADER = 101;
    private static final String[] ORDER_PROJECTION = {
            WoodminContract.OrdersEntry.COLUMN_JSON,
    };
    private int COLUMN_ORDER_COLUMN_JSON = 0;

    // Projection used when resolving the products referenced by the order items.
    private static final String[] PRODUCT_PROJECTION = {
            WoodminContract.ProductEntry.COLUMN_ID,
            WoodminContract.ProductEntry.COLUMN_JSON,
    };
    private int COLUMN_PRODUCT_COLUMN_JSON = 1;

    /**
     * Resolves the order id (intent extra first, then the persisted
     * "product_details" preference as a fallback), binds the views and starts
     * the cursor loader that reads the order from the content provider.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getApplicationContext());
        if(mOderId < 0 ) {
            mOderId = getIntent().getIntExtra("order", -1);
            if(mOderId < 0 ) {
                // No extra supplied: fall back to the last id saved in preferences.
                mOderId = prefs.getInt("product_details", -1);
            } else {
                // Remember the id so the screen can be restored without the extra.
                SharedPreferences.Editor editor = prefs.edit();
                editor.putInt("product_details", mOderId);
                editor.apply();
            }
        }
        mProgress = new ProgressDialog(OrderDetail.this);
        mProgress.setTitle(getString(R.string.app_name));
        setContentView(R.layout.activity_order);
        mHeader = (LinearLayout) findViewById(R.id.header);
        mOrder = (TextView) findViewById(R.id.order);
        mPrice = (TextView) findViewById(R.id.price);
        mEmail = (TextView) findViewById(R.id.email);
        mPhone = (TextView) findViewById(R.id.phone);
        mLyPhone = (LinearLayout) findViewById(R.id.call_button);
        mLyEmail = (LinearLayout) findViewById(R.id.email_button);
        mStatus = (TextView) findViewById(R.id.status);
        mCustomer = (TextView) findViewById(R.id.customer);
        mItems = (TextView) findViewById(R.id.items);
        mDate = (TextView) findViewById(R.id.date);
        mPayment = (TextView) findViewById(R.id.payment);
        mAmount = (TextView) findViewById(R.id.amount);
        mTaxes = (TextView) findViewById(R.id.taxes);
        mTotal = (TextView) findViewById(R.id.total);
        mBilling = (TextView) findViewById(R.id.billing);
        mShipping = (TextView) findViewById(R.id.shipping);
        getSupportLoaderManager().initLoader(ORDER_LOADER, null, this);
        FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
        if(fab != null) {
            fab.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    // NOTE(review): mOrderSelected may still be null here if the
                    // loader has not delivered yet when the FAB is tapped — would
                    // NPE in getOrderNumber(). TODO confirm and guard.
                    AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(OrderDetail.this)
                            .setTitle(getString(R.string.order, mOrderSelected.getOrderNumber()))
                            .setMessage(getString(R.string.order_update_confirmation))
                            .setCancelable(false)
                            .setPositiveButton(getString(R.string.yes), new DialogInterface.OnClickListener() {
                                public void onClick(DialogInterface dialog, int id) {
                                    finalizeOrder();
                                }
                            })
                            .setNegativeButton(getString(R.string.no), new DialogInterface.OnClickListener() {
                                public void onClick(DialogInterface dialog, int id) {
                                }
                            });
                    alertDialogBuilder.create().show();
                }
            });
        }
    }

    /**
     * Builds the CursorLoader that selects the single order row whose id equals
     * {@code mOderId}. Returns null for unknown loader ids or when no valid
     * order id is available.
     */
    @Override
    public Loader<Cursor> onCreateLoader(int id, Bundle bundle) {
        Log.d(LOG_TAG, "onCreateLoader");
        String sortOrder = WoodminContract.OrdersEntry._ID + " ASC";
        CursorLoader cursorLoader = null;
        Uri ordersUri = WoodminContract.OrdersEntry.CONTENT_URI;
        switch (id) {
            case ORDER_LOADER:
                if(mOderId > 0){
                    String query = WoodminContract.OrdersEntry.COLUMN_ID + " == ?" ;
                    String[] parameters = new String[]{ String.valueOf(mOderId) };
                    cursorLoader = new CursorLoader(
                            getApplicationContext(),
                            ordersUri,
                            ORDER_PROJECTION,
                            query,
                            parameters,
                            sortOrder);
                }
                break;
            default:
                break;
        }
        return cursorLoader;
    }

    /**
     * Deserializes the order JSON from the cursor (last matching row wins) and
     * renders it via {@link #fillView()}. fillView() is only called when the
     * cursor has at least one row.
     */
    @Override
    public void onLoadFinished(Loader<Cursor> cursorLoader, Cursor cursor) {
        switch (cursorLoader.getId()) {
            case ORDER_LOADER:
                mOrderSelected = null;
                if (cursor.moveToFirst()) {
                    do {
                        String json = cursor.getString(COLUMN_ORDER_COLUMN_JSON);
                        if(json!=null){
                            mOrderSelected = mGson.fromJson(json, Order.class);
                        }
                    } while (cursor.moveToNext());
                    fillView();
                }
                break;
            default:
                break;
        }
    }

    /** Drops the cached order when the loader's data becomes invalid. */
    @Override
    public void onLoaderReset(Loader<Cursor> cursorLoader) {
        Log.d(LOG_TAG, "onLoaderReset");
        switch (cursorLoader.getId()) {
            case ORDER_LOADER: {
                mOrderSelected = null;
                break;
            }
            default:
                break;
        }
    }

    /**
     * Renders the loaded order into the bound views: status coloring, contact
     * shortcuts, totals, addresses, the per-item card (with product lookups
     * against the local product table) and finally triggers the async notes
     * fetch. Finishes the activity when no order could be loaded.
     */
    private void fillView(){
        if(mOrderSelected != null) {
            // Color-code header/status: green-ish primary for completed,
            // red for cancelled/refunded, orange for anything in between.
            if(mOrderSelected.getStatus().toUpperCase().equals("COMPLETED")){
                mHeader.setBackgroundColor(getResources().getColor(R.color.colorPrimary));
                mStatus.setTextColor(getResources().getColor(R.color.colorPrimary));
            } else if(mOrderSelected.getStatus().toUpperCase().equals("CANCELLED") || mOrderSelected.getStatus().toUpperCase().equals("REFUNDED")){
                mHeader.setBackgroundColor(getResources().getColor(R.color.red));
                mStatus.setTextColor(getResources().getColor(R.color.red));
            } else {
                mHeader.setBackgroundColor(getResources().getColor(R.color.orange));
                mStatus.setTextColor(getResources().getColor(R.color.orange));
            }
            mOrder.setText(getString(R.string.order, mOrderSelected.getOrderNumber()));
            mPrice.setText(getString(R.string.price, mOrderSelected.getTotal()));
            mStatus.setText(mOrderSelected.getStatus().toUpperCase());
            // E-mail shortcut only when a non-trivial billing e-mail is present.
            if(mOrderSelected.getCustomer() != null && mOrderSelected.getCustomer().getBillingAddress() != null &&
                    mOrderSelected.getCustomer().getBillingAddress().getEmail() != null &&
                    mOrderSelected.getCustomer().getBillingAddress().getEmail().length() > 1){
                mEmail.setText(mOrderSelected.getCustomer().getBillingAddress().getEmail());
                mLyEmail.setVisibility(View.VISIBLE);
                mLyEmail.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        Intent emailIntent = new Intent(Intent.ACTION_SENDTO, Uri.fromParts("mailto",mOrderSelected.getCustomer().getBillingAddress().getEmail(), null));
                        emailIntent.putExtra(Intent.EXTRA_SUBJECT, "");
                        startActivity(Intent.createChooser(emailIntent, "Woodmin"));
                    }
                });
            } else {
                mLyEmail.setVisibility(View.GONE);
            }
            // Dial shortcut only when a non-trivial billing phone is present.
            if(mOrderSelected.getCustomer() != null && mOrderSelected.getCustomer().getBillingAddress() != null &&
                    mOrderSelected.getCustomer().getBillingAddress().getPhone() != null &&
                    mOrderSelected.getCustomer().getBillingAddress().getPhone().length() > 1){
                mPhone.setText(mOrderSelected.getCustomer().getBillingAddress().getPhone());
                mLyPhone.setVisibility(View.VISIBLE);
                mLyPhone.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        Intent callIntent = new Intent(Intent.ACTION_DIAL);
                        callIntent.setData(Uri.parse("tel:" + mOrderSelected.getCustomer().getBillingAddress().getPhone()));
                        startActivity(callIntent);
                    }
                });
            } else {
                mLyPhone.setVisibility(View.GONE);
            }
            if(mOrderSelected.getBillingAddress().getFirstName() != null && mOrderSelected.getBillingAddress().getFirstName().length() > 0){
                mCustomer.setText(mOrderSelected.getBillingAddress().getFirstName() + " " + mOrderSelected.getBillingAddress().getLastName());
            } else {
                mCustomer.setText(getString(R.string.guest));
            }
            // Total item count is the sum of line-item quantities.
            int itemsCount = 0;
            for (Item item:mOrderSelected.getItems()) {
                itemsCount += item.getQuantity();
            }
            mItems.setText(getString(R.string.items, itemsCount));
            SimpleDateFormat format = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss", Locale.getDefault());
            mDate.setText(format.format(mOrderSelected.getCreatedAt()));
            if(mOrderSelected.getPaymentDetails() != null){
                mPayment.setText(mOrderSelected.getPaymentDetails().getMethodTitle());
            }
            mAmount.setText("$"+mOrderSelected.getSubtotal());
            mTaxes.setText("$"+mOrderSelected.getTotalTax());
            mTotal.setText("$"+mOrderSelected.getTotal());
            if(mOrderSelected.getCustomer() != null && mOrderSelected.getBillingAddress() != null){
                String address = mOrderSelected.getBillingAddress().getAddressOne() + " " +
                        mOrderSelected.getBillingAddress().getAddressTwo() + " " +
                        mOrderSelected.getBillingAddress().getPostcode() + " " +
                        mOrderSelected.getBillingAddress().getState() + " " +
                        mOrderSelected.getBillingAddress().getCity() + " " +
                        mOrderSelected.getBillingAddress().getCountry();
                mBilling.setText(address);
            }
            if(mOrderSelected.getCustomer() != null && mOrderSelected.getShippingAddress() != null){
                // NOTE(review): "addres" is a typo for "address"; local only, kept as-is.
                String addres = mOrderSelected.getShippingAddress().getAddressOne() + " " +
                        mOrderSelected.getShippingAddress().getAddressTwo() + " " +
                        mOrderSelected.getShippingAddress().getPostcode() + " " +
                        mOrderSelected.getShippingAddress().getCountry() + " " +
                        mOrderSelected.getShippingAddress().getState() + " " +
                        mOrderSelected.getShippingAddress().getCity();
                mShipping.setText(addres);
            }
            CardView cart = (CardView)findViewById(R.id.shopping_card);
            cart.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    Intent orderIntent = new Intent(getApplicationContext(), OrderLinesShip.class);
                    orderIntent.putExtra("order", mOderId);
                    startActivityForResult(orderIntent, 100);
                }
            });
            // Clear any previously rendered item rows (view at index 0 is kept).
            while(cart.getChildCount() > 1) {
                cart.removeViewAt(1);
            }
            LinearLayout cardDetails = (LinearLayout)findViewById(R.id.shopping_card_details);
            cardDetails.removeAllViews();
            // Resolve all referenced products in a single IN (?,...,?) query.
            List<String> ids = new ArrayList<>();
            List<String> parameters = new ArrayList<>();
            for(Item item:mOrderSelected.getItems()) {
                ids.add(String.valueOf(item.getProductId()));
                parameters.add("?");
            }
            String query = WoodminContract.ProductEntry.COLUMN_ID + " IN (" + TextUtils.join(", ", parameters) + ")";
            Cursor cursor = getContentResolver().query(WoodminContract.ProductEntry.CONTENT_URI, PRODUCT_PROJECTION, query, ids.toArray(new String[ids.size()]), null);
            List<Product> products = new ArrayList<>();
            if(cursor != null) {
                if (cursor.moveToFirst()) {
                    do {
                        String json = cursor.getString(COLUMN_PRODUCT_COLUMN_JSON);
                        if(json!=null) {
                            Product product = mGson.fromJson(json, Product.class);
                            products.add(product);
                        }
                    } while (cursor.moveToNext());
                }
                cursor.close();
            }
            // One row per order line: quantity, description (+meta), price, SKU, thumbnail.
            for(Item item:mOrderSelected.getItems()) {
                View child = getLayoutInflater().inflate(R.layout.activity_order_item, null);
                ImageView imageView = (ImageView) child.findViewById(R.id.image);
                TextView quantity = (TextView) child.findViewById(R.id.quantity);
                TextView description = (TextView) child.findViewById(R.id.description);
                TextView price = (TextView) child.findViewById(R.id.price);
                TextView sku = (TextView) child.findViewById(R.id.sku);
                quantity.setText(String.valueOf(item.getQuantity()));
                if(item.getMeta().size()>0){
                    String descriptionWithMeta = item.getName();
                    for(MetaItem itemMeta:item.getMeta()){
                        descriptionWithMeta += "\n" + itemMeta.getLabel() + " " + itemMeta.getValue();
                    }
                    description.setText(descriptionWithMeta);
                } else {
                    description.setText(item.getName());
                }
                price.setText(getString(R.string.price, item.getTotal()));
                sku.setText(item.getSku());
                // Match the item against a product either directly or via one of
                // the product's variations.
                Product productForItem = null;
                for(Product product: products) {
                    if(product.getId() == item.getProductId()) {
                        productForItem = product;
                        break;
                    }
                    for(Variation variation:product.getVariations()) {
                        if(variation.getId() == item.getProductId()) {
                            productForItem = product;
                            break;
                        }
                    }
                }
                if(productForItem == null) {
                    Log.v(LOG_TAG, "Missing product");
                } else {
                    Picasso.with(getApplicationContext())
                            .load(productForItem.getFeaturedSrc())
                            .resize(50, 50)
                            .centerCrop()
                            .placeholder(android.R.color.transparent)
                            .error(R.drawable.ic_action_cancel)
                            .into(imageView);
                }
                cardDetails.addView(child);
            }
            getNotes();
        } else {
            finish();
        }
    }

    /**
     * Fetches the order notes from the WooCommerce REST API and appends one row
     * per note to the notes container. Rows are only added when the container
     * still holds exactly its single static child, which prevents duplicates on
     * repeated loads.
     */
    private void getNotes() {
        Woocommerce woocommerceApi = ((Woodmin) getApplication()).getWoocommerceApiHandler();
        HashMap<String, String> options = new HashMap<>();
        Call<Notes> call = woocommerceApi.getOrdersNotes(options, String.valueOf(mOrderSelected.getId()));
        call.enqueue(new Callback<Notes>() {
            @Override
            public void onResponse(Call<Notes> call, Response<Notes> response) {
                int statusCode = response.code();
                if (statusCode == 200) {
                    SimpleDateFormat format = new SimpleDateFormat("MM/dd/yyyy", Locale.getDefault());
                    final LinearLayout notesView = (LinearLayout) findViewById(R.id.notes);
                    if(notesView.getChildCount() == 1) {
                        for (Note note : response.body().getNotes()) {
                            final View child = getLayoutInflater().inflate(R.layout.activity_note_item, null);
                            TextView privateNote = (TextView) child.findViewById(R.id.private_note);
                            TextView noteText = (TextView) child.findViewById(R.id.note_text);
                            TextView noteDate = (TextView) child.findViewById(R.id.note_date);
                            if (note.isCustomerNote()) {
                                privateNote.setText(getString(R.string.public_note));
                            } else {
                                privateNote.setText(getString(R.string.private_note));
                            }
                            noteText.setText(note.getNote());
                            noteDate.setText(format.format(note.getCreatedAt()));
                            runOnUiThread(new Runnable() {
                                @Override
                                public void run() {
                                    notesView.addView(child);
                                }
                            });
                        }
                    }
                }
            }
            @Override
            public void onFailure(Call<Notes> call, Throwable throwable) {
                Log.e(LOG_TAG, "onFailure ");
            }
        });
    }

    /**
     * Marks the order as "completed" on the server, then mirrors the returned
     * order into the local content provider (full column set plus the raw JSON
     * blob) and notifies observers. Shows a progress dialog while the request
     * is in flight and a success/error toast afterwards.
     */
    private void finalizeOrder() {
        mProgress.setMessage(getString(R.string.finalize_order));
        mProgress.show();
        OrderUpdate orderUpdate = new OrderUpdate();
        OrderUpdateValues orderUpdateValues = new OrderUpdateValues();
        orderUpdateValues.setStatus("completed");
        orderUpdate.setOrder(orderUpdateValues);
        Woocommerce woocommerceApi = ((Woodmin) getApplication()).getWoocommerceApiHandler();
        Call<OrderResponse> call = woocommerceApi.updateOrder(mOrderSelected.getOrderNumber(), orderUpdate);
        call.enqueue(new Callback<OrderResponse>() {
            @Override
            public void onResponse(final Call<OrderResponse> call, final Response<OrderResponse> response) {
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        mProgress.dismiss();
                    }
                });
                int statusCode = response.code();
                if (statusCode == 200) {
                    Order order = response.body().getOrder();
                    // Flatten the returned order into provider columns.
                    ContentValues orderValues = new ContentValues();
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_ID, order.getId());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_ORDER_NUMBER, order.getOrderNumber());
                    if(order.getCreatedAt() != null) {
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CREATED_AT, WoodminContract.getDbDateString(order.getCreatedAt()));
                    }
                    if(order.getUpdatedAt() != null) {
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_UPDATED_AT, WoodminContract.getDbDateString(order.getUpdatedAt()));
                    }
                    if(order.getCompletedAt() != null) {
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_COMPLETED_AT, WoodminContract.getDbDateString(order.getCompletedAt()));
                    }
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_STATUS, order.getStatus());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_CURRENCY, order.getCurrency());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_TOTAL, order.getTotal());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_SUBTOTAL, order.getSubtotal());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_TOTAL_LINE_ITEMS_QUANTITY, order.getTotalLineItemsQuantity());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_TOTAL_TAX, order.getTotalTax());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_TOTAL_SHIPPING, order.getTotalShipping());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_CART_TAX, order.getCartTax());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_SHIPPING_TAX, order.getShippingTax());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_TOTAL_DISCOUNT, order.getTotalDiscount());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_CART_DISCOUNT, order.getCartDiscount());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_ORDER_DISCOUNT, order.getOrderDiscount());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_SHIPPING_METHODS, order.getShippingMethods());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_NOTE, order.getNote());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_VIEW_ORDER_URL, order.getViewOrderUrl());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_PAYMENT_DETAILS_METHOD_ID, order.getPaymentDetails().getMethodId());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_PAYMENT_DETAILS_METHOD_TITLE, order.getPaymentDetails().getMethodTitle());
                    // Boolean stored as "1"/"0" text.
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_PAYMENT_DETAILS_PAID, order.getPaymentDetails().isPaid() ? "1" : "0");
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_BILLING_FIRST_NAME, order.getBillingAddress().getFirstName());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_BILLING_LAST_NAME , order.getBillingAddress().getLastName());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_BILLING_COMPANY, order.getBillingAddress().getCompany());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_BILLING_ADDRESS_1, order.getBillingAddress().getAddressOne());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_BILLING_ADDRESS_2, order.getBillingAddress().getAddressTwo());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_BILLING_CITY, order.getBillingAddress().getCity());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_BILLING_STATE, order.getBillingAddress().getState());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_BILLING_POSTCODE, order.getBillingAddress().getPostcode());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_BILLING_COUNTRY, order.getBillingAddress().getCountry());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_BILLING_EMAIL, order.getBillingAddress().getEmail());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_BILLING_PHONE, order.getBillingAddress().getPhone());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_SHIPPING_FIRST_NAME, order.getShippingAddress().getFirstName());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_SHIPPING_LAST_NAME, order.getShippingAddress().getLastName());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_SHIPPING_COMPANY, order.getShippingAddress().getCompany());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_SHIPPING_ADDRESS_1, order.getShippingAddress().getAddressOne());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_SHIPPING_ADDRESS_2, order.getShippingAddress().getAddressTwo());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_SHIPPING_CITY, order.getShippingAddress().getCity());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_SHIPPING_STATE, order.getShippingAddress().getState());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_SHIPPING_POSTCODE, order.getShippingAddress().getPostcode());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_SHIPPING_COUNTRY, order.getShippingAddress().getCountry());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_ID, order.getCustomerId());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_EMAIL, order.getCustomer().getEmail());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_FIRST_NAME, order.getCustomer().getFirstName());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_LAST_NAME, order.getCustomer().getLastName());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_USERNAME, order.getCustomer().getUsername());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_LAST_ORDER_ID, order.getCustomer().getLastOrderId());
                    if(order.getCustomer().getLastOrderDate() != null) {
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_LAST_ORDER_DATE, WoodminContract.getDbDateString(order.getCustomer().getLastOrderDate()));
                    }
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_ORDERS_COUNT, order.getCustomer().getOrdersCount());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_TOTAL_SPEND, order.getCustomer().getTotalSpent());
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_AVATAR_URL, order.getCustomer().getAvatarUrl());
                    if(order.getCustomer().getBillingAddress()!= null){
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_BILLING_FIRST_NAME, order.getCustomer().getBillingAddress().getFirstName());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_BILLING_LAST_NAME, order.getCustomer().getBillingAddress().getLastName());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_BILLING_COMPANY, order.getCustomer().getBillingAddress().getCompany());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_BILLING_ADDRESS_1, order.getCustomer().getBillingAddress().getAddressOne());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_BILLING_ADDRESS_2, order.getCustomer().getBillingAddress().getAddressTwo());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_BILLING_CITY, order.getCustomer().getBillingAddress().getCity());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_BILLING_STATE, order.getCustomer().getBillingAddress().getState());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_BILLING_POSTCODE, order.getCustomer().getBillingAddress().getPostcode());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_BILLING_COUNTRY, order.getCustomer().getBillingAddress().getCountry());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_BILLING_EMAIL, order.getCustomer().getBillingAddress().getEmail());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_BILLING_PHONE, order.getCustomer().getBillingAddress().getPhone());
                    }
                    if(order.getCustomer().getShippingAddress() != null){
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_SHIPPING_FIRST_NAME, order.getCustomer().getShippingAddress().getFirstName());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_SHIPPING_LAST_NAME , order.getCustomer().getShippingAddress().getLastName());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_SHIPPING_COMPANY, order.getCustomer().getShippingAddress().getCompany());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_SHIPPING_ADDRESS_1, order.getCustomer().getShippingAddress().getAddressOne());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_SHIPPING_ADDRESS_2, order.getCustomer().getShippingAddress().getAddressTwo());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_SHIPPING_CITY, order.getCustomer().getShippingAddress().getCity());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_SHIPPING_STATE, order.getCustomer().getShippingAddress().getState());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_SHIPPING_POSTCODE, order.getCustomer().getShippingAddress().getPostcode());
                        orderValues.put(WoodminContract.OrdersEntry.COLUMN_CUSTOMER_SHIPPING_COUNTRY, order.getCustomer().getShippingAddress().getCountry());
                    }
                    // The raw JSON blob is what the detail screen itself reads back.
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_JSON, mGson.toJson(order));
                    orderValues.put(WoodminContract.OrdersEntry.COLUMN_ENABLE, 1);
                    // NOTE(review): insert() for an existing order — presumably the
                    // provider upserts on conflict; verify against WoodminProvider.
                    Uri insertedOrderUri = getContentResolver().insert(WoodminContract.OrdersEntry.CONTENT_URI, orderValues);
                    long orderId = ContentUris.parseId(insertedOrderUri);
                    Log.d(LOG_TAG, "Orders successful updated ID: " + orderId);
                    getContentResolver().notifyChange(WoodminContract.OrdersEntry.CONTENT_URI, null, false);
                    runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            Toast.makeText(getApplicationContext(), getString(R.string.success_update), Toast.LENGTH_LONG).show();
                        }
                    });
                } else {
                    Log.e(LOG_TAG, "onFailure ");
                    runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            Toast.makeText(getApplicationContext(), getString(R.string.error_update), Toast.LENGTH_LONG).show();
                        }
                    });
                }
            }
            @Override
            public void onFailure(Call<OrderResponse> call, Throwable t) {
                Log.e(LOG_TAG, "onFailure ");
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        Toast.makeText(getApplicationContext(), getString(R.string.error_update), Toast.LENGTH_LONG).show();
                    }
                });
            }
        });
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.service; import org.apache.ignite.*; import org.apache.ignite.cache.*; import org.apache.ignite.cluster.*; import org.apache.ignite.configuration.*; import org.apache.ignite.internal.processors.affinity.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.lang.*; import org.apache.ignite.services.*; import org.apache.ignite.resources.*; import org.apache.ignite.spi.discovery.tcp.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*; import org.apache.ignite.testframework.*; import org.apache.ignite.testframework.junits.common.*; import java.io.*; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.*; /** * Tests for {@link GridAffinityProcessor}. */ @GridCommonTest(group = "Service Processor") public abstract class GridServiceProcessorAbstractSelfTest extends GridCommonAbstractTest { /** Cache name. */ public static final String CACHE_NAME = "testServiceCache"; /** IP finder. */ private static final TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true); /** Random generator. 
*/
    private static final Random RAND = new Random();

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration c = super.getConfiguration(gridName);

        // Shared static IP finder so all test grids discover each other locally.
        TcpDiscoverySpi discoSpi = new TcpDiscoverySpi();

        discoSpi.setIpFinder(ipFinder);

        c.setDiscoverySpi(discoSpi);

        // Statically-deployed services, if the subclass provides any.
        ServiceConfiguration[] svcs = services();

        if (svcs != null)
            c.setServiceConfiguration(svcs);

        // Partitioned cache with a backup on every node; used by affinity deployments.
        CacheConfiguration cc = new CacheConfiguration();

        cc.setName(CACHE_NAME);
        cc.setCacheMode(CacheMode.PARTITIONED);
        cc.setBackups(nodeCount());

        c.setCacheConfiguration(cc);

        return c;
    }

    /**
     * Gets number of nodes.
     *
     * @return Number of nodes.
     */
    protected abstract int nodeCount();

    /**
     * Gets services configurations.
     *
     * @return Services configuration, or {@code null} for none (default).
     */
    protected ServiceConfiguration[] services() {
        return null;
    }

    /** {@inheritDoc} */
    @SuppressWarnings("ConstantConditions")
    @Override protected void beforeTestsStarted() throws Exception {
        assert nodeCount() >= 1;

        for (int i = 0; i < nodeCount(); i++)
            startGrid(i);
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        // Clear DummyService's per-name start/cancel counters and latches between tests.
        DummyService.reset();
    }

    /**
     * Starts {@code cnt} extra nodes above the base topology.
     *
     * @throws Exception If failed.
     */
    protected void startExtraNodes(int cnt) throws Exception {
        for (int i = 0; i < cnt; i++)
            startGrid(nodeCount() + i);
    }

    /**
     * Stops the extra nodes started by {@link #startExtraNodes(int)}.
     *
     * @throws Exception If failed.
     */
    protected void stopExtraNodes(int cnt) throws Exception {
        for (int i = 0; i < cnt; i++)
            stopGrid(nodeCount() + i);
    }

    /**
     * @return Random grid.
     */
    protected Ignite randomGrid() {
        return grid(RAND.nextInt(nodeCount()));
    }

    /**
     * @throws Exception If failed.
*/ public void testSameConfiguration() throws Exception { String name = "dupService"; IgniteServices svcs1 = randomGrid().services().withAsync(); IgniteServices svcs2 = randomGrid().services().withAsync(); svcs1.deployClusterSingleton(name, new DummyService()); IgniteFuture<?> fut1 = svcs1.future(); svcs2.deployClusterSingleton(name, new DummyService()); IgniteFuture<?> fut2 = svcs2.future(); info("Deployed service: " + name); fut1.get(); info("Finished waiting for service future1: " + name); // This must succeed without exception because configuration is the same. fut2.get(); info("Finished waiting for service future2: " + name); } /** * @throws Exception If failed. */ public void testDifferentConfiguration() throws Exception { String name = "dupService"; IgniteServices svcs1 = randomGrid().services().withAsync(); IgniteServices svcs2 = randomGrid().services().withAsync(); svcs1.deployClusterSingleton(name, new DummyService()); IgniteFuture<?> fut1 = svcs1.future(); svcs2.deployNodeSingleton(name, new DummyService()); IgniteFuture<?> fut2 = svcs2.future(); info("Deployed service: " + name); fut1.get(); info("Finished waiting for service future: " + name); try { fut2.get(); fail("Failed to receive mismatching configuration exception."); } catch (IgniteException e) { info("Received mismatching configuration exception: " + e.getMessage()); } } /** * @throws Exception If failed. */ public void testGetServiceByName() throws Exception { String name = "serviceByName"; Ignite g = randomGrid(); g.services().deployNodeSingleton(name, new DummyService()); DummyService svc = g.services().service(name); assertNotNull(svc); Collection<DummyService> svcs = g.services().services(name); assertEquals(1, svcs.size()); } /** * @throws Exception If failed. 
*/
    public void testGetServicesByName() throws Exception {
        final String name = "servicesByName";

        Ignite g = randomGrid();

        g.services().deployMultiple(name, new DummyService(), nodeCount() * 2, 3);

        // Deployment spreads asynchronously; poll until the cluster-wide total shows up.
        GridTestUtils.retryAssert(log, 50, 200, new CA() {
            @Override public void apply() {
                int cnt = 0;

                for (int i = 0; i < nodeCount(); i++) {
                    Collection<DummyService> svcs = grid(i).services().services(name);

                    if (svcs != null)
                        cnt += svcs.size();
                }

                assertEquals(nodeCount() * 2, cnt);
            }
        });
    }

    /**
     * Node singleton: exactly one instance must start on every node, none cancelled.
     *
     * @throws Exception If failed.
     */
    public void testDeployOnEachNode() throws Exception {
        Ignite g = randomGrid();

        String name = "serviceOnEachNode";

        // One execution callback per node is expected.
        CountDownLatch latch = new CountDownLatch(nodeCount());

        DummyService.exeLatch(name, latch);

        IgniteServices svcs = g.services().withAsync();

        svcs.deployNodeSingleton(name, new DummyService());

        IgniteFuture<?> fut = svcs.future();

        info("Deployed service: " + name);

        fut.get();

        info("Finished waiting for service future: " + name);

        latch.await();

        assertEquals(name, nodeCount(), DummyService.started(name));
        assertEquals(name, 0, DummyService.cancelled(name));

        checkCount(name, g.services().serviceDescriptors(), nodeCount());
    }

    /**
     * Cluster singleton: exactly one instance must start in the whole cluster.
     *
     * @throws Exception If failed.
     */
    public void testDeploySingleton() throws Exception {
        Ignite g = randomGrid();

        String name = "serviceSingleton";

        CountDownLatch latch = new CountDownLatch(1);

        DummyService.exeLatch(name, latch);

        IgniteServices svcs = g.services().withAsync();

        svcs.deployClusterSingleton(name, new DummyService());

        IgniteFuture<?> fut = svcs.future();

        info("Deployed service: " + name);

        fut.get();

        info("Finished waiting for service future: " + name);

        latch.await();

        assertEquals(name, 1, DummyService.started(name));
        assertEquals(name, 0, DummyService.cancelled(name));

        checkCount(name, g.services().serviceDescriptors(), 1);
    }

    /**
     * Key-affinity singleton: a single instance deployed on the node owning the key.
     *
     * @throws Exception If failed.
     */
    public void testAffinityDeploy() throws Exception {
        Ignite g = randomGrid();

        final Integer affKey = 1;

        // Store a cache key.
        g.jcache(CACHE_NAME).put(affKey, affKey.toString());

        String name = "serviceAffinity";

        IgniteServices svcs = g.services().withAsync();

        svcs.deployKeyAffinitySingleton(name, new AffinityService(affKey), CACHE_NAME, affKey);

        IgniteFuture<?> fut = svcs.future();

        info("Deployed service: " + name);

        fut.get();

        info("Finished waiting for service future: " + name);

        checkCount(name, g.services().serviceDescriptors(), 1);
    }

    /**
     * Multiple deployment where the total is an even multiple of the node count.
     *
     * @throws Exception If failed.
     */
    public void testDeployMultiple1() throws Exception {
        Ignite g = randomGrid();

        String name = "serviceMultiple1";

        CountDownLatch latch = new CountDownLatch(nodeCount() * 2);

        DummyService.exeLatch(name, latch);

        IgniteServices svcs = g.services().withAsync();

        // Total of nodeCount()*2 instances, at most 3 per node.
        svcs.deployMultiple(name, new DummyService(), nodeCount() * 2, 3);

        IgniteFuture<?> fut = svcs.future();

        info("Deployed service: " + name);

        fut.get();

        info("Finished waiting for service future: " + name);

        latch.await();

        assertEquals(name, nodeCount() * 2, DummyService.started(name));
        assertEquals(name, 0, DummyService.cancelled(name));

        checkCount(name, g.services().serviceDescriptors(), nodeCount() * 2);
    }

    /**
     * Multiple deployment where the total is deliberately NOT a multiple of the
     * node count, so instances distribute unevenly.
     *
     * @throws Exception If failed.
     */
    public void testDeployMultiple2() throws Exception {
        Ignite g = randomGrid();

        String name = "serviceMultiple2";

        int cnt = nodeCount() * 2 + 1;

        CountDownLatch latch = new CountDownLatch(cnt);

        DummyService.exeLatch(name, latch);

        IgniteServices svcs = g.services().withAsync();

        svcs.deployMultiple(name, new DummyService(), cnt, 3);

        IgniteFuture<?> fut = svcs.future();

        info("Deployed service: " + name);

        fut.get();

        info("Finished waiting for service future: " + name);

        latch.await();

        assertEquals(name, cnt, DummyService.started(name));
        assertEquals(name, 0, DummyService.cancelled(name));

        checkCount(name, g.services().serviceDescriptors(), cnt);
    }

    /**
     * @throws Exception If failed.
*/ public void testCancelSingleton() throws Exception { Ignite g = randomGrid(); String name = "serviceCancel"; CountDownLatch latch = new CountDownLatch(1); DummyService.exeLatch(name, latch); g.services().deployClusterSingleton(name, new DummyService()); info("Deployed service: " + name); latch.await(); assertEquals(name, 1, DummyService.started(name)); assertEquals(name, 0, DummyService.cancelled(name)); latch = new CountDownLatch(1); DummyService.cancelLatch(name, latch); g.services().cancel(name); info("Cancelled service: " + name); latch.await(); assertEquals(name, 1, DummyService.started(name)); assertEquals(name, 1, DummyService.cancelled(name)); } /** * @throws Exception If failed. */ public void testCancelEachNode() throws Exception { Ignite g = randomGrid(); String name = "serviceCancelEachNode"; CountDownLatch latch = new CountDownLatch(nodeCount()); DummyService.exeLatch(name, latch); g.services().deployNodeSingleton(name, new DummyService()); info("Deployed service: " + name); latch.await(); assertEquals(name, nodeCount(), DummyService.started(name)); assertEquals(name, 0, DummyService.cancelled(name)); latch = new CountDownLatch(nodeCount()); DummyService.cancelLatch(name, latch); g.services().cancel(name); info("Cancelled service: " + name); latch.await(); assertEquals(name, nodeCount(), DummyService.started(name)); assertEquals(name, nodeCount(), DummyService.cancelled(name)); } /** * @param svcName Service name. * @param descs Descriptors. * @param cnt Expected count. */ protected void checkCount(String svcName, Iterable<ServiceDescriptor> descs, int cnt) { assertEquals(cnt, actualCount(svcName, descs)); } /** * @param svcName Service name. * @param descs Descriptors. * @return Services count. 
*/ protected int actualCount(String svcName, Iterable<ServiceDescriptor> descs) { int sum = 0; for (ServiceDescriptor d : descs) { if (d.name().equals(svcName)) { for (Integer i : d.topologySnapshot().values()) sum += i; } } return sum; } /** * Counter service. */ protected interface CounterService { /** * @return Number of increments happened on the same service instance. */ int localIncrements(); /** * @return Incremented value. */ int increment(); /** * @return Current value. */ int get(); } /** * Affinity service. */ protected static class AffinityService implements Service { /** */ private static final long serialVersionUID = 0L; /** Affinity key. */ private final Object affKey; /** Grid. */ @IgniteInstanceResource private Ignite g; /** * @param affKey Affinity key. */ public AffinityService(Object affKey) { this.affKey = affKey; } /** {@inheritDoc} */ @Override public void cancel(ServiceContext ctx) { // No-op. } /** {@inheritDoc} */ @Override public void init(ServiceContext ctx) throws Exception { X.println("Initializing affinity service for key: " + affKey); ClusterNode n = g.affinity(CACHE_NAME).mapKeyToNode(affKey); assertNotNull(n); assertTrue(n.isLocal()); } /** {@inheritDoc} */ @Override public void execute(ServiceContext ctx) { X.println("Executing affinity service for key: " + affKey); } } /** * Counter service implementation. */ protected static class CounterServiceImpl implements CounterService, Service { /** Auto-injected grid instance. */ @IgniteInstanceResource private Ignite ignite; /** */ private IgniteCache<String, Value> cache; /** Cache key. */ private String key; /** Invocation count. 
*/ private AtomicInteger locIncrements = new AtomicInteger(); /** {@inheritDoc} */ @Override public int localIncrements() { return locIncrements.get(); } /** {@inheritDoc} */ @Override public int increment() { locIncrements.incrementAndGet(); try { while (true) { Value val = cache.get(key); if (val == null) { Value old = cache.getAndPutIfAbsent(key, val = new Value(0)); if (old != null) val = old; } Value newVal = new Value(val.get() + 1); if (cache.replace(key, val, newVal)) return newVal.get(); } } catch (Exception e) { throw new IgniteException(e); } } /** {@inheritDoc} */ @Override public int get() { try { Value val = cache.get(key); return val == null ? 0 : val.get(); } catch (Exception e) { throw new IgniteException(e); } } /** {@inheritDoc} */ @Override public void cancel(ServiceContext ctx) { X.println("Stopping counter service: " + ctx.name()); } /** {@inheritDoc} */ @Override public void init(ServiceContext ctx) throws Exception { X.println("Initializing counter service: " + ctx.name()); key = ctx.name(); cache = ignite.jcache(CACHE_NAME); } /** {@inheritDoc} */ @Override public void execute(ServiceContext ctx) throws Exception { X.println("Executing counter service: " + ctx.name()); } /** * */ private static class Value implements Serializable { /** Value. */ private final int v; /** * @param v Value. */ private Value(int v) { this.v = v; } /** * @return Value. */ int get() { return v; } /** {@inheritDoc} */ @Override public boolean equals(Object o) { return this == o || o instanceof Value && v == ((Value)o).v; } /** {@inheritDoc} */ @Override public int hashCode() { return v; } } } }
/* * Copyright 2015-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.d; import com.facebook.buck.cxx.CxxBuckConfig; import com.facebook.buck.cxx.CxxLink; import com.facebook.buck.cxx.CxxLinkableEnhancer; import com.facebook.buck.cxx.platform.CxxPlatform; import com.facebook.buck.cxx.platform.Linker; import com.facebook.buck.cxx.platform.NativeLinkable; import com.facebook.buck.cxx.platform.NativeLinkableInput; import com.facebook.buck.graph.AbstractBreadthFirstTraversal; import com.facebook.buck.io.MorePaths; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.BuildTargets; import com.facebook.buck.model.Flavor; import com.facebook.buck.model.InternalFlavor; import com.facebook.buck.parser.NoSuchBuildTargetException; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.DefaultSourcePathResolver; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathResolver; import com.facebook.buck.rules.SourcePathRuleFinder; import com.facebook.buck.rules.SymlinkTree; import com.facebook.buck.rules.Tool; import com.facebook.buck.rules.args.SourcePathArg; import com.facebook.buck.rules.args.StringArg; import com.facebook.buck.rules.coercer.SourceList; import com.facebook.buck.util.MoreMaps; import 
com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.util.Map;
import java.util.Optional;
import java.util.TreeMap;

/** Utility functions for use in D Descriptions. */
abstract class DDescriptionUtils {

  /** Flavor appended to the target that builds the symlink tree of D sources. */
  public static final Flavor SOURCE_LINK_TREE = InternalFlavor.of("source-link-tree");

  /**
   * Creates a BuildTarget, based on an existing build target, but flavored with a CxxPlatform and
   * an additional flavor created by combining a prefix and an output file name.
   *
   * @param existingTarget the existing target
   * @param flavorPrefix prefix to be used for added flavor
   * @param fileName filename to be used for added flavor
   * @param cxxPlatform the C++ platform to compile for
   * @return the new BuildTarget
   */
  public static BuildTarget createBuildTargetForFile(
      BuildTarget existingTarget, String flavorPrefix, String fileName, CxxPlatform cxxPlatform) {
    return existingTarget.withAppendedFlavors(
        cxxPlatform.getFlavor(),
        InternalFlavor.of(flavorPrefix + Flavor.replaceInvalidCharacters(fileName)));
  }

  /**
   * Creates a new BuildTarget, based on an existing target, for a file to be compiled.
   *
   * @param existingTarget the existing target
   * @param src the source file to be compiled
   * @param cxxPlatform the C++ platform to compile the file for
   * @return a BuildTarget to compile a D source file to an object file
   */
  public static BuildTarget createDCompileBuildTarget(
      BuildTarget existingTarget, String src, CxxPlatform cxxPlatform) {
    return createBuildTargetForFile(
        existingTarget, "compile-", DCompileStep.getObjectNameForSourceName(src), cxxPlatform);
  }

  /**
   * Creates a {@link NativeLinkable} using sources compiled by the D compiler.
   *
   * @param buildTarget the target the linkable is built for
   * @param projectFilesystem the filesystem of the project
   * @param params build parameters for the build target
   * @param buildRuleResolver resolver for build rules
   * @param cxxPlatform the C++ platform to compile for
   * @param dBuckConfig the Buck configuration for D
   * @param cxxBuckConfig the Buck configuration for C++
   * @param compilerFlags flags to pass to the compiler
   * @param sources source files to compile
   * @param linkerFlags flags to pass to the linker
   * @param includes D includes made visible to the compiled sources
   * @return the new build rule
   */
  public static CxxLink createNativeLinkable(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams params,
      BuildRuleResolver buildRuleResolver,
      CxxPlatform cxxPlatform,
      DBuckConfig dBuckConfig,
      CxxBuckConfig cxxBuckConfig,
      ImmutableList<String> compilerFlags,
      SourceList sources,
      ImmutableList<String> linkerFlags,
      DIncludes includes)
      throws NoSuchBuildTargetException {

    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(buildRuleResolver);
    SourcePathResolver sourcePathResolver = DefaultSourcePathResolver.from(ruleFinder);

    // Compile every D source to an object file first; the resulting paths
    // become linker inputs below.
    ImmutableList<SourcePath> sourcePaths =
        sourcePathsForCompiledSources(
            buildTarget,
            projectFilesystem,
            params,
            buildRuleResolver,
            sourcePathResolver,
            ruleFinder,
            cxxPlatform,
            dBuckConfig,
            compilerFlags,
            sources,
            includes);

    // Return a rule to link the .o for the binary together with its
    // dependencies.
    return CxxLinkableEnhancer.createCxxLinkableBuildRule(
        cxxBuckConfig,
        cxxPlatform,
        projectFilesystem,
        buildRuleResolver,
        sourcePathResolver,
        ruleFinder,
        buildTarget,
        Linker.LinkType.EXECUTABLE,
        Optional.empty(),
        BuildTargets.getGenPath(projectFilesystem, buildTarget, "%s/" + buildTarget.getShortName()),
        Linker.LinkableDepType.STATIC,
        /* thinLto */ false,
        FluentIterable.from(params.getBuildDeps()).filter(NativeLinkable.class),
        /* cxxRuntimeType */ Optional.empty(),
        /* bundleLoader */ Optional.empty(),
        ImmutableSet.of(),
        ImmutableSet.of(),
        NativeLinkableInput.builder()
            .addAllArgs(StringArg.from(dBuckConfig.getLinkerFlags()))
            .addAllArgs(StringArg.from(linkerFlags))
            .addAllArgs(SourcePathArg.from(sourcePaths))
            .build(),
        Optional.empty());
  }

  /** @return the symlink-tree flavored variant of {@code baseTarget}. */
  public static BuildTarget getSymlinkTreeTarget(BuildTarget baseTarget) {
    return baseTarget.withAppendedFlavors(SOURCE_LINK_TREE);
  }

  /**
   * Creates the symlink tree that mirrors the named D sources under the base
   * target's gen path, so imports resolve against stable paths.
   *
   * @param target the symlink-tree flavored target (must carry SOURCE_LINK_TREE)
   * @param baseBuildTarget the unflavored target the sources belong to
   * @param projectFilesystem the filesystem of the project
   * @param pathResolver resolver for source paths
   * @param sources the D sources to mirror
   * @return the symlink tree rule
   */
  public static SymlinkTree createSourceSymlinkTree(
      BuildTarget target,
      BuildTarget baseBuildTarget,
      ProjectFilesystem projectFilesystem,
      SourcePathResolver pathResolver,
      SourceList sources) {
    Preconditions.checkState(target.getFlavors().contains(SOURCE_LINK_TREE));
    return new SymlinkTree(
        target,
        projectFilesystem,
        BuildTargets.getGenPath(projectFilesystem, baseBuildTarget, "%s"),
        MoreMaps.transformKeys(
            sources.toNameMap(baseBuildTarget, pathResolver, "srcs"),
            MorePaths.toPathFn(projectFilesystem.getRootPath().getFileSystem())));
  }

  /**
   * Collects every {@link DLibrary} reachable from {@code inputs} via a
   * breadth-first walk of the build-dep graph; traversal stops at non-DLibrary
   * rules.
   */
  private static ImmutableMap<BuildTarget, DLibrary> getTransitiveDLibraryRules(
      Iterable<? extends BuildRule> inputs) {
    final ImmutableMap.Builder<BuildTarget, DLibrary> libraries = ImmutableMap.builder();
    new AbstractBreadthFirstTraversal<BuildRule>(inputs) {
      @Override
      public Iterable<BuildRule> visit(BuildRule rule) {
        if (rule instanceof DLibrary) {
          libraries.put(rule.getBuildTarget(), (DLibrary) rule);
          return rule.getBuildDeps();
        }
        // Non-DLibrary rules terminate this branch of the traversal.
        return ImmutableSet.of();
      }
    }.start();
    return libraries.build();
  }

  /**
   * Ensures that a DCompileBuildRule exists for the given target, creating a DCompileBuildRule if
   * necessary.
   *
   * @param compileTarget the target the rule should be for
   * @param baseBuildTarget the target the compilation was derived from
   * @param projectFilesystem the filesystem of the project
   * @param baseParams build parameters for the rule
   * @param buildRuleResolver BuildRuleResolver the rule should be in
   * @param ruleFinder finds the build rules backing source paths
   * @param dBuckConfig the Buck configuration for D
   * @param compilerFlags flags to pass to the compiler
   * @param name the logical name of the compilation unit
   * @param src the source file to be compiled
   * @param includes D includes visible to the compilation
   * @return the build rule
   */
  public static DCompileBuildRule requireBuildRule(
      BuildTarget compileTarget,
      BuildTarget baseBuildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams baseParams,
      BuildRuleResolver buildRuleResolver,
      SourcePathRuleFinder ruleFinder,
      DBuckConfig dBuckConfig,
      ImmutableList<String> compilerFlags,
      String name,
      SourcePath src,
      DIncludes includes)
      throws NoSuchBuildTargetException {
    return (DCompileBuildRule)
        buildRuleResolver.computeIfAbsentThrowing(
            compileTarget,
            ignored -> {
              Tool compiler = dBuckConfig.getDCompiler();

              // Includes of this target plus those of every transitive DLibrary dep.
              Map<BuildTarget, DIncludes> transitiveIncludes = new TreeMap<>();
              transitiveIncludes.put(baseBuildTarget, includes);
              for (Map.Entry<BuildTarget, DLibrary> library :
                  getTransitiveDLibraryRules(baseParams.getBuildDeps()).entrySet()) {
                transitiveIncludes.put(library.getKey(), library.getValue().getIncludes());
              }

              // Deps: the compiler tool itself, the source's producing rules,
              // and the rules backing every include tree.
              ImmutableSortedSet.Builder<BuildRule> depsBuilder = ImmutableSortedSet.naturalOrder();
              depsBuilder.addAll(compiler.getDeps(ruleFinder));
              depsBuilder.addAll(ruleFinder.filterBuildRuleInputs(src));
              for (DIncludes dIncludes : transitiveIncludes.values()) {
                depsBuilder.addAll(dIncludes.getDeps(ruleFinder));
              }
              ImmutableSortedSet<BuildRule> deps = depsBuilder.build();

              return new DCompileBuildRule(
                  compileTarget,
                  projectFilesystem,
                  baseParams.withDeclaredDeps(deps).withoutExtraDeps(),
                  compiler,
                  ImmutableList.<String>builder()
                      .addAll(dBuckConfig.getBaseCompilerFlags())
                      .addAll(compilerFlags)
                      .build(),
                  name,
                  ImmutableSortedSet.of(src),
                  ImmutableList.copyOf(transitiveIncludes.values()));
            });
  }

  /**
   * Generates BuildTargets and BuildRules to compile D sources to object files, and returns a list
   * of SourcePaths referring to the generated object files.
   *
   * @param baseBuildTarget the target the compilations are derived from
   * @param projectFilesystem the filesystem of the project
   * @param baseParams build parameters for the compilation
   * @param buildRuleResolver resolver for build rules
   * @param sourcePathResolver resolver for source paths
   * @param ruleFinder finds the build rules backing source paths
   * @param cxxPlatform the C++ platform to compile for
   * @param dBuckConfig the Buck configuration for D
   * @param compilerFlags flags to pass to the compiler
   * @param sources source files to compile
   * @param includes D includes visible to the compilations
   * @return SourcePaths of the generated object files
   */
  public static ImmutableList<SourcePath> sourcePathsForCompiledSources(
      BuildTarget baseBuildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams baseParams,
      BuildRuleResolver buildRuleResolver,
      SourcePathResolver sourcePathResolver,
      SourcePathRuleFinder ruleFinder,
      CxxPlatform cxxPlatform,
      DBuckConfig dBuckConfig,
      ImmutableList<String> compilerFlags,
      SourceList sources,
      DIncludes includes)
      throws NoSuchBuildTargetException {
    ImmutableList.Builder<SourcePath> sourcePaths = ImmutableList.builder();
    // One compile rule (and one object file) per named source.
    for (Map.Entry<String, SourcePath> source :
        sources.toNameMap(baseBuildTarget, sourcePathResolver, "srcs").entrySet()) {
      BuildTarget compileTarget =
          createDCompileBuildTarget(baseBuildTarget, source.getKey(), cxxPlatform);
      BuildRule rule =
          requireBuildRule(
              compileTarget,
              baseBuildTarget,
              projectFilesystem,
              baseParams,
              buildRuleResolver,
              ruleFinder,
              dBuckConfig,
              compilerFlags,
              source.getKey(),
              source.getValue(),
              includes);
      sourcePaths.add(rule.getSourcePathToOutput());
    }
    return sourcePaths.build();
  }
}
package com.ibm.nmon.gui.interval;

import java.beans.PropertyChangeEvent;

import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;

import javax.swing.SwingConstants;

import java.awt.BorderLayout;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;

import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSpinner;
import javax.swing.SpinnerDateModel;
import javax.swing.JSpinner.DateEditor;

import javax.swing.event.ChangeListener;
import javax.swing.event.ChangeEvent;

import com.ibm.nmon.gui.Styles;
import com.ibm.nmon.gui.main.NMONVisualizerGui;
import com.ibm.nmon.interval.Interval;

/**
 * Panel for entering intervals using absolute date times. Two date spinners
 * (start / end) drive an "interval" property change; an invalid range
 * (end <= start) is flagged by switching labels and text to the error style
 * and firing a null interval.
 */
final class AbsoluteTimeIntervalPanel extends BaseIntervalPanel {
    private static final long serialVersionUID = 3451148920350034946L;

    // date spinners for the interval's absolute start and end times
    private final JSpinner start;
    private final JSpinner end;

    // kept as fields so intervalUpdater can toggle their error styling
    private final JLabel startLabel;
    private final JLabel endLabel;

    AbsoluteTimeIntervalPanel(NMONVisualizerGui gui) {
        super(gui);

        setLayout(new BorderLayout());

        // 'add' button and 'addInterval' action are inherited from BaseIntervalPanel
        add.addActionListener(addInterval);

        // start and end text boxes with labels, followed by a Add button
        startLabel = new JLabel("Start:");
        startLabel.setHorizontalAlignment(SwingConstants.TRAILING);
        startLabel.setFont(Styles.LABEL);

        // spinner steps in minutes; editor renders with a year-qualified format
        start = new JSpinner(new SpinnerDateModel(new Date(getDefaultStartTime()), null, null, Calendar.MINUTE));
        start.setEditor(new DateEditor(start, Styles.DATE_FORMAT_STRING_WITH_YEAR));
        start.addChangeListener(intervalUpdater);

        endLabel = new JLabel("End:");
        endLabel.setHorizontalAlignment(SwingConstants.TRAILING);
        endLabel.setFont(Styles.LABEL);

        end = new JSpinner(new SpinnerDateModel(new Date(getDefaultEndTime()), null, null, Calendar.MINUTE));
        end.setEditor(new DateEditor(end, Styles.DATE_FORMAT_STRING_WITH_YEAR));
        end.addChangeListener(intervalUpdater);

        JPanel namePanel = new JPanel();
        namePanel.add(nameLabel);
        namePanel.add(name);

        JPanel startPanel = new JPanel();
        startPanel.add(startLabel);
        startPanel.add(start);

        JPanel endPanel = new JPanel();
        endPanel.add(endLabel);
        endPanel.add(end);

        JPanel buttonsPanel = new JPanel();
        buttonsPanel.add(add);
        buttonsPanel.add(endToStart);
        buttonsPanel.add(reset);

        // two-column grid: labels in column 0, spinners in column 1
        JPanel dataPanel = new JPanel();
        dataPanel.setLayout(new GridBagLayout());

        GridBagConstraints labelConstraints = new GridBagConstraints();
        GridBagConstraints fieldConstraints = new GridBagConstraints();

        labelConstraints.gridx = 0;
        fieldConstraints.gridx = 1;

        labelConstraints.gridy = 0;
        fieldConstraints.gridy = 0;

        labelConstraints.insets = new Insets(0, 0, 0, 5);
        fieldConstraints.insets = new Insets(5, 0, 0, 5);

        labelConstraints.fill = GridBagConstraints.HORIZONTAL;
        fieldConstraints.fill = GridBagConstraints.HORIZONTAL;

        dataPanel.add(startLabel, labelConstraints);
        dataPanel.add(start, fieldConstraints);

        // advance both constraint objects to the next row before reuse
        ++labelConstraints.gridy;
        ++fieldConstraints.gridy;

        dataPanel.add(endLabel, labelConstraints);
        dataPanel.add(end, fieldConstraints);

        add(namePanel, BorderLayout.PAGE_START);
        add(dataPanel, BorderLayout.CENTER);
        add(buttonsPanel, BorderLayout.PAGE_END);
    }

    /** @return the start spinner's value as epoch milliseconds */
    @Override
    long getStartTime() {
        return ((Date) start.getValue()).getTime();
    }

    /** @return the end spinner's value as epoch milliseconds */
    @Override
    long getEndTime() {
        return ((Date) end.getValue()).getTime();
    }

    // sets both spinners; parameters shadow the fields, hence 'this.' and the
    // fully qualified Date
    void setTimes(long start, long end) {
        this.start.setValue(new java.util.Date(start));
        this.end.setValue(new java.util.Date(end));
    }

    /** @return the time zone currently used by the start spinner's date format */
    @Override
    TimeZone getTimeZone() {
        DateEditor de = (DateEditor) start.getEditor();
        return de.getFormat().getTimeZone();
    }

    /** Copies the end time into the start spinner and focuses the end field. */
    @Override
    protected void setStartToEnd() {
        start.setValue(end.getValue());
        ((JSpinner.DefaultEditor) end.getEditor()).getTextField().requestFocus();
    }

    public void setEnabled(boolean enabled) {
        super.setEnabled(enabled);

        if (enabled) {
            ((JSpinner.DefaultEditor) start.getEditor()).getTextField().requestFocus();
        }
    }

    /**
     * Reacts to the application-wide "timeZone" property by retargeting both
     * spinners' date formats to the new zone.
     */
    @Override
    public void propertyChange(PropertyChangeEvent evt) {
        if ("timeZone".equals(evt.getPropertyName())) {
            TimeZone timeZone = (TimeZone) evt.getNewValue();

            DateEditor de = (DateEditor) start.getEditor();
            de.getFormat().setTimeZone(timeZone);

            // hack to get the spinner to fire a state change and update the displayed value
            ((SpinnerDateModel) start.getModel()).setCalendarField(Calendar.MINUTE);
            ((SpinnerDateModel) start.getModel()).setCalendarField(Calendar.SECOND);

            de = (DateEditor) end.getEditor();
            de.getFormat().setTimeZone(timeZone);

            ((SpinnerDateModel) end.getModel()).setCalendarField(Calendar.MINUTE);
            ((SpinnerDateModel) end.getModel()).setCalendarField(Calendar.SECOND);
        }
    }

    // update the interval when the start or end time changes
    private final ChangeListener intervalUpdater = new ChangeListener() {
        @Override
        public void stateChanged(ChangeEvent e) {
            long startTime = getStartTime();
            long endTime = getEndTime();

            if (endTime > startTime) {
                // valid range: restore normal styling and publish the interval
                startLabel.setFont(Styles.LABEL);
                endLabel.setFont(Styles.LABEL);

                startLabel.setForeground(Styles.DEFAULT_COLOR);
                endLabel.setForeground(Styles.DEFAULT_COLOR);

                ((JSpinner.DefaultEditor) start.getEditor()).getTextField().setForeground(Styles.DEFAULT_COLOR);
                ((JSpinner.DefaultEditor) end.getEditor()).getTextField().setForeground(Styles.DEFAULT_COLOR);

                Interval i = new Interval(startTime, endTime);

                firePropertyChange("interval", null, i);
            }
            else {
                // invalid range: flag with error styling and publish a null interval
                startLabel.setFont(Styles.LABEL_ERROR);
                endLabel.setFont(Styles.LABEL_ERROR);

                startLabel.setForeground(Styles.ERROR_COLOR);
                endLabel.setForeground(Styles.ERROR_COLOR);

                ((JSpinner.DefaultEditor) start.getEditor()).getTextField().setForeground(Styles.ERROR_COLOR);
                ((JSpinner.DefaultEditor) end.getEditor()).getTextField().setForeground(Styles.ERROR_COLOR);

                firePropertyChange("interval", null, null);
            }
        }
    };
}
/* * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.api.control.model; import com.google.api.AuthRequirement; import com.google.api.AuthenticationRule; import com.google.api.HttpRule; import com.google.api.MetricRule; import com.google.api.Service; import com.google.api.SystemParameter; import com.google.api.SystemParameterRule; import com.google.api.UsageRule; import com.google.api.control.util.StringUtils; import com.google.auto.value.AutoValue; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.flogger.FluentLogger; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; import javax.annotation.CheckForNull; import javax.annotation.Nullable; /** * MethodRegistry provides registry of the API methods defined by a Service. 
*/ public class MethodRegistry { private static final FluentLogger log = FluentLogger.forEnclosingClass(); private static final String OPTIONS_VERB = "OPTIONS"; private final Service theService; private final Map<String, AuthInfo> authInfos; private final Map<String, QuotaInfo> quotaInfos; private final Map<String, List<Info>> infosByHttpMethod; private final Map<String, Info> extractedMethods; /** * @param s contains the methods to be registered */ public MethodRegistry(Service s) { Preconditions.checkNotNull(s, "The service must be specified"); Preconditions.checkArgument(!Strings.isNullOrEmpty(s.getName()), "The service name must be specified"); theService = s; infosByHttpMethod = Maps.newHashMap(); extractedMethods = Maps.newHashMap(); authInfos = extractAuth(s); quotaInfos = extractQuota(s); extractMethods(); } /** * Finds the {@code Info} instance that matches {@code httpMethod} and {@code url}. * * @param httpMethod the method of a HTTP request * @param url the url of a HTTP request * @return an {@code Info} corresponding to the url and method, or null if none is found */ public @Nullable Info lookup(String httpMethod, String url) { httpMethod = httpMethod.toLowerCase(); if (url.startsWith("/")) { url = url.substring(1); } url = StringUtils.stripTrailingSlash(url); List<Info> infos = infosByHttpMethod.get(httpMethod); if (infos == null) { log.atFine().log( "no information about any urls for HTTP method %s when checking %s", httpMethod, url); return null; } for (Info info : infos) { log.atFine().log("trying %s with template %s", url, info.getTemplate()); if (info.getTemplate().matches(url)) { log.atFine().log("%s matched %s", url, info.getTemplate()); return info; } else { log.atFine().log("%s did not matched %s", url, info.getTemplate()); } } return null; } private void extractMethods() { if (!theService.hasHttp()) { return; } List<HttpRule> rules = theService.getHttp().getRulesList(); Set<String> allUrls = Sets.newHashSet(); Set<String> urlsWithOptions = 
Sets.newHashSet(); for (HttpRule r : rules) { String url = StringUtils.stripTrailingSlash(urlFrom(r)); String httpMethod = httpMethodFrom(r); if (Strings.isNullOrEmpty(url) || Strings.isNullOrEmpty(httpMethod) || Strings.isNullOrEmpty(r.getSelector())) { log.atWarning().log("invalid HTTP binding detected"); continue; } Info theMethod = getOrCreateInfo(r.getSelector()); if (!Strings.isNullOrEmpty(r.getBody())) { theMethod.setBodyFieldPath(r.getBody()); } if (!register(httpMethod, url, theMethod)) { continue; } allUrls.add(url); if (httpMethod.equals(OPTIONS_VERB)) { urlsWithOptions.add(url); } } allUrls.removeAll(urlsWithOptions); addCorsOptionSelectors(allUrls); updateUsage(); updateSystemParameters(); } private boolean register(String httpMethod, String url, Info theMethod) { try { PathTemplate t = PathTemplate.create(url); theMethod.setTemplate(t); List<Info> infos = infosByHttpMethod.get(httpMethod); if (infos == null) { infos = Lists.newArrayList(); infosByHttpMethod.put(httpMethod.toLowerCase(), infos); } infos.add(theMethod); log.atFine().log("registered template %s under method %s", t, httpMethod); return true; } catch (ValidationException e) { log.atWarning().log("invalid HTTP template %s provided", url); return false; } } private void addCorsOptionSelectors(Set<String> allUrls) { String baseSelector = String.format("%s.%s", theService.getName(), OPTIONS_VERB); String optionsSelector = baseSelector; // ensure that no existing options selector is used int index = 0; Info info = extractedMethods.get(optionsSelector); while (info != null) { index++; optionsSelector = String.format("%s.%d", baseSelector, index); info = extractedMethods.get(optionsSelector); } Info newInfo = getOrCreateInfo(optionsSelector); newInfo.setAllowUnregisteredCalls(true); for (String u : allUrls) { register(OPTIONS_VERB, u, newInfo); } } private void updateSystemParameters() { if (!theService.hasSystemParameters()) { return; } for (SystemParameterRule r : 
theService.getSystemParameters().getRulesList()) {
      Info info = extractedMethods.get(r.getSelector());
      if (info == null) {
        log.atWarning().log("bad system parameter: no HTTP rule for %s", r.getSelector());
      } else {
        for (SystemParameter parameter : r.getParametersList()) {
          if (Strings.isNullOrEmpty(parameter.getName())) {
            log.atWarning().log("bad system parameter: no parameter name for %s", r.getSelector());
            continue;
          }
          if (!Strings.isNullOrEmpty(parameter.getHttpHeader())) {
            info.addHeaderParam(parameter.getName(), parameter.getHttpHeader());
          }
          if (!Strings.isNullOrEmpty(parameter.getUrlQueryParameter())) {
            // FIX: this previously called addHeaderParam(), so a system parameter sent as a
            // URL query parameter was mis-registered as an HTTP header and urlQueryParams
            // was never populated here. Route it to the URL-query-parameter map instead.
            info.addUrlQueryParam(parameter.getName(), parameter.getUrlQueryParameter());
          }
        }
      }
    }
  }

  /** Applies the service's usage rules, flagging methods that allow unregistered calls. */
  private void updateUsage() {
    if (!theService.hasUsage()) {
      return;
    }
    for (UsageRule r : theService.getUsage().getRulesList()) {
      Info info = extractedMethods.get(r.getSelector());
      if (info == null) {
        log.atWarning().log("bad usage selector: no HTTP rule for %s", r.getSelector());
      } else {
        info.setAllowUnregisteredCalls(r.getAllowUnregisteredCalls());
      }
    }
  }

  /**
   * Returns the {@link Info} registered for {@code selector}, creating and caching a new one
   * (seeded with any known auth/quota info for that selector) if absent.
   */
  private Info getOrCreateInfo(String selector) {
    Info i = extractedMethods.get(selector);
    if (i != null) {
      return i;
    }
    i = new Info(selector, this.authInfos.get(selector), this.quotaInfos.get(selector));
    extractedMethods.put(selector, i);
    return i;
  }

  /**
   * Extracts the lower-case HTTP verb from an {@link HttpRule}; for custom patterns the custom
   * kind is used. Returns {@code null} when the rule has no pattern set.
   */
  private static String httpMethodFrom(HttpRule r) {
    switch (r.getPatternCase()) {
      case CUSTOM:
        return r.getCustom().getKind().toLowerCase();
      case DELETE:
      case GET:
      case PATCH:
      case POST:
      case PUT:
        return r.getPatternCase().toString().toLowerCase();
      default:
        return null;
    }
  }

  /**
   * Extracts the URL template of an {@link HttpRule} for whichever pattern is set, or
   * {@code null} when no pattern is set.
   */
  private static String urlFrom(HttpRule r) {
    switch (r.getPatternCase()) {
      case CUSTOM:
        return r.getCustom().getKind();
      case DELETE:
        return r.getDelete();
      case GET:
        return r.getGet();
      case PATCH:
        return r.getPatch();
      case POST:
        return r.getPost();
      case PUT:
        return r.getPut();
      default:
        return null;
    }
  }

  /**
   * Builds an immutable selector-to-{@link AuthInfo} map from the service's authentication
   * rules. Audiences are given as a comma-separated string per requirement and are split here.
   */
  private static Map<String, AuthInfo> extractAuth(Service service) {
    if (!service.hasAuthentication()) {
      return ImmutableMap.<String, AuthInfo>of();
    }
    ImmutableMap.Builder<String, AuthInfo> authInfoBuilder = ImmutableMap.builder();
    for (AuthenticationRule authRule : service.getAuthentication().getRulesList()) {
      ImmutableMap.Builder<String, Set<String>> providerToAudienceBuilder = ImmutableMap.builder();
      for (AuthRequirement requirement : authRule.getRequirementsList()) {
        providerToAudienceBuilder.put(
            requirement.getProviderId(),
            ImmutableSet.copyOf(requirement.getAudiences().split(",")));
      }
      AuthInfo authInfo = new AuthInfo(providerToAudienceBuilder.build());
      authInfoBuilder.put(authRule.getSelector(), authInfo);
    }
    return authInfoBuilder.build();
  }

  /** Builds an immutable selector-to-{@link QuotaInfo} map from the service's metric rules. */
  private static Map<String, QuotaInfo> extractQuota(Service service) {
    if (!service.hasQuota()) {
      return ImmutableMap.<String, QuotaInfo>of();
    }
    ImmutableMap.Builder<String, QuotaInfo> quotaInfoBuilder = ImmutableMap.builder();
    for (MetricRule metricRule : service.getQuota().getMetricRulesList()) {
      quotaInfoBuilder.put(
          metricRule.getSelector(), QuotaInfo.create(metricRule.getMetricCostsMap()));
    }
    return quotaInfoBuilder.build();
  }

  /**
   * Consolidates information about methods defined in a Service
   */
  public static class Info {
    private static final String API_KEY_NAME = "api_key";
    private final Optional<AuthInfo> authInfo;
    private final QuotaInfo quotaInfo;
    private boolean allowUnregisteredCalls;
    private String selector;
    private String backendAddress;
    private String bodyFieldPath;
    // Parameter name -> list of URL query parameter / header names it may arrive under.
    private Map<String, List<String>> urlQueryParams;
    private Map<String, List<String>> headerParams;
    private PathTemplate template;

    public Info(String selector, @CheckForNull AuthInfo authInfo, @CheckForNull QuotaInfo quotaInfo) {
      this.selector = selector;
      this.authInfo = Optional.<AuthInfo>fromNullable(authInfo);
      this.quotaInfo = quotaInfo != null ? quotaInfo : QuotaInfo.DEFAULT;
      this.urlQueryParams = Maps.newHashMap();
      this.headerParams = Maps.newHashMap();
    }

    public PathTemplate getTemplate() {
      return template;
    }

    public void setTemplate(PathTemplate template) {
      this.template = template;
    }

    /** Registers {@code param} as a URL query parameter name for {@code name}. */
    public void addUrlQueryParam(String name, String param) {
      List<String> l = urlQueryParams.get(name);
      if (l == null) {
        l = Lists.newArrayList();
        urlQueryParams.put(name, l);
      }
      l.add(param);
    }

    /** Returns an immutable view of the URL query parameter names for {@code name}. */
    public List<String> urlQueryParam(String name) {
      List<String> l = urlQueryParams.get(name);
      if (l == null) {
        return Collections.emptyList();
      }
      return ImmutableList.copyOf(l);
    }

    public List<String> apiKeyUrlQueryParam() {
      return urlQueryParam(API_KEY_NAME);
    }

    /** Registers {@code param} as an HTTP header name for {@code name}. */
    public void addHeaderParam(String name, String param) {
      List<String> l = headerParams.get(name);
      if (l == null) {
        l = Lists.newArrayList();
        headerParams.put(name, l);
      }
      l.add(param);
    }

    /** Returns an immutable view of the HTTP header names for {@code name}. */
    public List<String> headerParam(String name) {
      List<String> l = headerParams.get(name);
      if (l == null) {
        return Collections.emptyList();
      }
      return ImmutableList.copyOf(l);
    }

    public List<String> apiKeyHeaderParam() {
      return headerParam(API_KEY_NAME);
    }

    public Optional<AuthInfo> getAuthInfo() {
      return this.authInfo;
    }

    public QuotaInfo getQuotaInfo() {
      return quotaInfo;
    }

    public boolean shouldAllowUnregisteredCalls() {
      return allowUnregisteredCalls;
    }

    // Parameter renamed from the misleading "allowRegisteredCalls" to match what it sets.
    public void setAllowUnregisteredCalls(boolean allowUnregisteredCalls) {
      this.allowUnregisteredCalls = allowUnregisteredCalls;
    }

    public String getSelector() {
      return selector;
    }

    public void setSelector(String selector) {
      this.selector = selector;
    }

    public String getBackendAddress() {
      return backendAddress;
    }

    public void setBackendAddress(String backendAddress) {
      this.backendAddress = backendAddress;
    }

    public String getBodyFieldPath() {
      return bodyFieldPath;
    }

    public void setBodyFieldPath(String bodyFieldPath) {
      this.bodyFieldPath = bodyFieldPath;
    }
  }

  /**
   * Consolidates authentication information about methods defined in a Service
   */
  public static final class AuthInfo {
    private final Map<String, Set<String>> providerIdsToAudiences;

    public AuthInfo(Map<String, Set<String>> providerIdsToAudiences) {
      Preconditions.checkNotNull(providerIdsToAudiences);
      this.providerIdsToAudiences = providerIdsToAudiences;
    }

    /** Returns true when the given provider id has at least one configured requirement. */
    public boolean isProviderIdAllowed(String providerid) {
      Preconditions.checkNotNull(providerid);
      return this.providerIdsToAudiences.containsKey(providerid);
    }

    /** Returns the audiences configured for {@code providerId}, or an empty set if unknown. */
    public Set<String> getAudiencesForProvider(String providerId) {
      Preconditions.checkNotNull(providerId);
      if (this.providerIdsToAudiences.containsKey(providerId)) {
        return this.providerIdsToAudiences.get(providerId);
      }
      return ImmutableSet.<String>of();
    }
  }

  /** Consolidates quota (metric cost) information about methods defined in a Service. */
  @AutoValue
  public abstract static class QuotaInfo {
    /** Default instance with no metric costs. */
    public static final QuotaInfo DEFAULT = QuotaInfo.create(ImmutableMap.<String, Long>of());

    public abstract Map<String, Long> getMetricCosts();

    public static QuotaInfo create(Map<String, Long> metricCosts) {
      return new AutoValue_MethodRegistry_QuotaInfo(metricCosts);
    }
  }
}
/*
 * Copyright 2010-2017 Boxfuse GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flywaydb.core.internal.dbsupport.db2;

import org.flywaydb.core.internal.dbsupport.Delimiter;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

import static org.junit.Assert.*;

/**
 * Small unit tests for {@code DB2SqlStatementBuilder}: BEGIN/END detection (with optional
 * labels and custom delimiters) and full-statement termination for procedures and triggers.
 */
public class DB2SqlStatementBuilderSmallTest {
    /** BEGIN must be recognised bare, with a label (any spacing around ':'), and as BEGIN ATOMIC. */
    @Test
    public void isBegin() throws Exception {
        assertTrue(DB2SqlStatementBuilder.isBegin("BEGIN"));
        assertTrue(DB2SqlStatementBuilder.isBegin("LABEL:BEGIN"));
        assertTrue(DB2SqlStatementBuilder.isBegin("LABEL: BEGIN"));
        assertTrue(DB2SqlStatementBuilder.isBegin("LABEL :BEGIN"));
        assertTrue(DB2SqlStatementBuilder.isBegin("LABEL : BEGIN"));
        assertTrue(DB2SqlStatementBuilder.isBegin("REFERENCING NEW AS NEW FOR EACH ROW BEGIN ATOMIC"));
    }

    /**
     * END must be recognised with/without a trailing delimiter or matching label, but not
     * END FOR / END IF (block-specific ENDs) nor identifiers merely containing "END".
     */
    @Test
    public void isEnd() throws Exception {
        assertTrue(DB2SqlStatementBuilder.isEnd("END", null, new Delimiter(";", false), 1));
        assertTrue(DB2SqlStatementBuilder.isEnd("END;", null, new Delimiter(";", false), 1));
        assertTrue(DB2SqlStatementBuilder.isEnd("END", "LABEL", new Delimiter(";", false), 1));
        assertTrue(DB2SqlStatementBuilder.isEnd("END@", "LABEL", new Delimiter("@", false), 1));
        assertTrue(DB2SqlStatementBuilder.isEnd("END;", "LABEL", new Delimiter("@", false), 2));
        assertTrue(DB2SqlStatementBuilder.isEnd("END LABEL", "LABEL", new Delimiter(";", false), 1));
        assertFalse(DB2SqlStatementBuilder.isEnd("END FOR", null, new Delimiter(";", false), 1));
        assertFalse(DB2SqlStatementBuilder.isEnd("END FOR", "LABEL", new Delimiter(";", false), 1));
        assertFalse(DB2SqlStatementBuilder.isEnd("END IF", null, new Delimiter(";", false), 1));
        assertFalse(DB2SqlStatementBuilder.isEnd("END IF", "LABEL", new Delimiter(";", false), 1));
        assertFalse(DB2SqlStatementBuilder.isEnd("SELECT XXX INTO YYY_END", "LABEL", new Delimiter(";", false), 1));
        assertFalse(DB2SqlStatementBuilder.isEnd("IF (COALESCE(XXX.END_YYY,'') <> '') THEN", "LABEL", new Delimiter(";", false), 1));
    }

    /** Label extraction tolerates any spacing around the ':' and returns null for a bare BEGIN. */
    @Test
    public void extractLabel() throws Exception {
        assertNull(DB2SqlStatementBuilder.extractLabel("BEGIN"));
        assertEquals("LABEL", DB2SqlStatementBuilder.extractLabel("LABEL:BEGIN"));
        assertEquals("LABEL", DB2SqlStatementBuilder.extractLabel("LABEL: BEGIN"));
        assertEquals("LABEL", DB2SqlStatementBuilder.extractLabel("LABEL :BEGIN"));
        assertEquals("LABEL", DB2SqlStatementBuilder.extractLabel("LABEL : BEGIN"));
    }

    /** Procedure with a line comment containing the word END must still terminate correctly. */
    @Test
    public void isTerminated() throws IOException {
        assertTerminated("CREATE OR REPLACE PROCEDURE IP_ROLLUP(\n" +
                " IN iODLID INTEGER,\n" +
                " IN iNDLID INTEGER,\n" +
                " IN iOID INTEGER,\n" +
                " IN iTYPE VARCHAR(5),\n" +
                " IN iACTION VARCHAR(6)\n" +
                " )\n" +
                " LANGUAGE SQL\n" +
                "SPECIFIC SP_IP_ROLLUP\n" +
                "MAIN : BEGIN\n" +
                " -- COMMENT WITH END\n" +
                "END\n" +
                "@", "@");
    }

    /** Procedure terminated by "END MAIN" (labelled END). */
    @Test
    public void isTerminatedLabel() throws IOException {
        assertTerminated("CREATE OR REPLACE PROCEDURE CUST_INSERT_IP_GL(\n" +
                " IN iORDER_INTERLINER_ID INTEGER,\n" +
                " IN iACCT_CODE VARCHAR(50),\n" +
                " IN iACCT_TYPE CHAR(3),\n" +
                " IN iAMOUNT DOUBLE,\n" +
                " IN iSOURCE_TYPE VARCHAR(20),\n" +
                " OUT oLEAVE_MAIN VARCHAR(5)\n" +
                " )\n" +
                " LANGUAGE SQL\n" +
                "SPECIFIC SP_CUST_INSERT_IP_GL\n" +
                "MAIN : BEGIN\n" +
                " SET oLEAVE_MAIN = 'False';\n" +
                "END MAIN\n" +
                "@", "@");
    }

    /** Nested (unlabelled) BEGIN/END blocks, default ';' delimiter. */
    @Test
    public void isTerminatedNested() throws IOException {
        assertTerminated("CREATE PROCEDURE dummy_proc ()\n" +
                "LANGUAGE SQL\n" +
                "BEGIN\n" +
                " declare var1 TIMESTAMP;\n" +
                " \n" +
                " SELECT CURRENT_TIMESTAMP INTO var1 FROM SYSIBM.DUAL;\n" +
                " \n" +
                " BEGIN\n" +
                " declare var2 DATE;\n" +
                " SELECT CURRENT_DATE INTO var2 FROM SYSIBM.DUAL;\n" +
                " END;\n" +
                "END;", ";");
    }

    /** Nested BEGIN/END blocks with a custom '@' delimiter. */
    @Test
    public void isTerminatedNestedDelimiter() throws IOException {
        assertTerminated("CREATE OR REPLACE PROCEDURE NESTED () LANGUAGE SQL\n" +
                "MAIN: BEGIN\n" +
                " BEGIN\n" +
                " -- do something\n" +
                " END;\n" +
                "END MAIN\n" +
                "@", "@");
    }

    /** Multiple labelled, nested blocks must pair each END with its own label. */
    @Test
    public void isTerminatedNestedLabel() throws IOException {
        assertTerminated("CREATE OR REPLACE PROCEDURE TEST2\n" +
                "BEGIN\n" +
                " MAIN: BEGIN\n" +
                " MAIN1: BEGIN\n" +
                " END MAIN1;\n" +
                " END MAIN;\n" +
                " \n" +
                " TEST: BEGIN\n" +
                " TEST1: BEGIN\n" +
                " END TEST1;\n" +
                " END TEST;\n" +
                "END\n" +
                "@", "@");
    }

    /** END FOR / END IF inside a procedure must not be mistaken for the terminating END. */
    @Test
    public void isTerminatedForIf() throws IOException {
        assertTerminated("CREATE OR REPLACE PROCEDURE FORIF(\n" +
                " IN my_arg INTEGER\n" +
                " )\n" +
                " LANGUAGE SQL\n" +
                "SPECIFIC xxx\n" +
                "MAIN:BEGIN\n" +
                " FOR C1 AS\n" +
                " SELECT aaa\n" +
                " FROM ttt\n" +
                " WHERE ccc = 'vvv'DO\n" +
                " -- Do something\n" +
                " IF my_condition THEN\n" +
                " -- Do something\n" +
                " ELSE\n" +
                " -- Do something\n" +
                " END IF;\n" +
                " END FOR;\n" +
                "END@", "@");
    }

    /** Trigger with BEGIN ATOMIC on its own line. */
    @Test
    public void isTerminatedTrigger1() throws IOException {
        assertTerminated("CREATE OR REPLACE TRIGGER I_TRIGGER_I\n" +
                "BEFORE INSERT ON I_TRIGGER\n" +
                "REFERENCING NEW AS NEW FOR EACH ROW\n" +
                "BEGIN ATOMIC\n" +
                "IF NEW.ID IS NULL\n" +
                "THEN SET NEW.ID = NEXTVAL FOR SEQ_I_TRIGGER;\n" +
                "END IF;\n" +
                "END\n" +
                "/", "/");
    }

    /** Trigger with BEGIN ATOMIC at the end of the REFERENCING line. */
    @Test
    public void isTerminatedTrigger2() throws IOException {
        assertTerminated("CREATE OR REPLACE TRIGGER I_TRIGGER_I\n" +
                "BEFORE INSERT ON I_TRIGGER\n" +
                "REFERENCING NEW AS NEW FOR EACH ROW BEGIN ATOMIC\n" +
                "IF NEW.ID IS NULL\n" +
                "THEN SET NEW.ID = NEXTVAL FOR SEQ_I_TRIGGER;\n" +
                "END IF;\n" +
                "END\n" +
                "/", "/");
    }

    /** A CASE ... END expression must not open a BEGIN/END block. */
    @Test
    public void isTerminatedCase() throws IOException {
        assertTerminated("SELECT EMPNO, LASTNAME,\n" +
                " CASE SUBSTR(WORKDEPT,1,1)\n" +
                " WHEN 'A' THEN 'Administration'\n" +
                " WHEN 'B' THEN 'Human Resources'\n" +
                " WHEN 'C' THEN 'Design'\n" +
                " WHEN 'D' THEN 'Operations'\n" +
                " END\n" +
                " FROM EMPLOYEE;", ";");
    }

    /**
     * Feeds the script to the builder line by line, asserting the statement is not considered
     * terminated before any line is added and is terminated once all lines are in.
     */
    private void assertTerminated(String sqlScriptSource, String delimiter) throws IOException {
        DB2SqlStatementBuilder builder = new DB2SqlStatementBuilder();
        builder.setDelimiter(new Delimiter(delimiter, false));
        BufferedReader bufferedReader = new BufferedReader(new StringReader(sqlScriptSource));
        int num = 0;
        String line;
        while ((line = bufferedReader.readLine()) != null) {
            num++;
            assertFalse("Line " + num + " should not terminate: " + line, builder.isTerminated());
            builder.addLine(line);
        }
        assertTrue(builder.isTerminated());
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.connect.transforms;

import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.header.ConnectHeaders;
import org.apache.kafka.connect.header.Header;
import org.apache.kafka.connect.header.Headers;
import org.apache.kafka.connect.source.SourceRecord;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.ValueSource;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.apache.kafka.connect.data.Schema.STRING_SCHEMA;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

/**
 * Tests for the {@code HeaderFrom} SMT (both Key and Value variants): copying/moving record
 * fields into headers, for schemaless and schema-backed records.
 */
public class HeaderFromTest {

    /**
     * Builds a source record (and its expected counterpart) from parallel lists of field
     * names/schemas/values plus a set of headers. Can emit either a schemaless (Map) record
     * or a schema-backed (Struct) record.
     */
    static class RecordBuilder {
        private final List<String> fields = new ArrayList<>(2);
        private final List<Schema> fieldSchemas = new ArrayList<>(2);
        private final List<Object> fieldValues = new ArrayList<>(2);
        private final ConnectHeaders headers = new ConnectHeaders();

        public RecordBuilder() {
        }

        public RecordBuilder withField(String name, Schema schema, Object value) {
            fields.add(name);
            fieldSchemas.add(schema);
            fieldValues.add(value);
            return this;
        }

        public RecordBuilder addHeader(String name, Schema schema, Object value) {
            headers.add(name, new SchemaAndValue(schema, value));
            return this;
        }

        /** Builds a schemaless record: the fields become a plain Map, schemas are ignored. */
        public SourceRecord schemaless(boolean keyTransform) {
            Map<String, Object> map = new HashMap<>();
            for (int i = 0; i < this.fields.size(); i++) {
                String fieldName = this.fields.get(i);
                map.put(fieldName, this.fieldValues.get(i));
            }
            return sourceRecord(keyTransform, null, map);
        }

        /** Builds a STRUCT schema from the accumulated field names and schemas. */
        private Schema schema() {
            SchemaBuilder schemaBuilder = new SchemaBuilder(Schema.Type.STRUCT);
            for (int i = 0; i < this.fields.size(); i++) {
                String fieldName = this.fields.get(i);
                schemaBuilder.field(fieldName, this.fieldSchemas.get(i));
            }
            return schemaBuilder.build();
        }

        /** Populates a Struct for the given schema from the accumulated field values. */
        private Struct struct(Schema schema) {
            Struct struct = new Struct(schema);
            for (int i = 0; i < this.fields.size(); i++) {
                String fieldName = this.fields.get(i);
                struct.put(fieldName, this.fieldValues.get(i));
            }
            return struct;
        }

        public SourceRecord withSchema(boolean keyTransform) {
            Schema schema = schema();
            Struct struct = struct(schema);
            return sourceRecord(keyTransform, schema, struct);
        }

        /**
         * Assembles the SourceRecord, placing the built payload in the key or value position
         * depending on {@code keyTransform}, and the opposite position gets a dummy constant.
         */
        private SourceRecord sourceRecord(boolean keyTransform, Schema keyOrValueSchema, Object keyOrValue) {
            Map<String, ?> sourcePartition = singletonMap("foo", "bar");
            Map<String, ?> sourceOffset = singletonMap("baz", "quxx");
            String topic = "topic";
            Integer partition = 0;
            Long timestamp = 0L;

            ConnectHeaders headers = this.headers;
            if (keyOrValueSchema == null) {
                // When doing a schemaless transformation we don't expect the header to have a schema
                headers = new ConnectHeaders();
                for (Header header : this.headers) {
                    headers.add(header.key(), new SchemaAndValue(null, header.value()));
                }
            }
            return new SourceRecord(sourcePartition, sourceOffset, topic, partition,
                    keyTransform ? keyOrValueSchema : null,
                    keyTransform ? keyOrValue : "key",
                    !keyTransform ? keyOrValueSchema : null,
                    !keyTransform ? keyOrValue : "value",
                    timestamp, headers);
        }

        @Override
        public String toString() {
            return "RecordBuilder(" +
                    "fields=" + fields +
                    ", fieldSchemas=" + fieldSchemas +
                    ", fieldValues=" + fieldValues +
                    ", headers=" + headers +
                    ')';
        }
    }

    /**
     * Parameter source: each case is (description, keyTransform, original record, fields to
     * take, header names to insert, COPY/MOVE operation, expected record). Every case is
     * produced once for the Key transform and once for the Value transform.
     */
    public static List<Arguments> data() {

        List<Arguments> result = new ArrayList<>();

        for (Boolean testKeyTransform : asList(true, false)) {
            result.add(
                Arguments.of(
                    "basic copy",
                    testKeyTransform,
                    new RecordBuilder()
                        .withField("field1", STRING_SCHEMA, "field1-value")
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("header1", STRING_SCHEMA, "existing-value"),
                    singletonList("field1"), singletonList("inserted1"), HeaderFrom.Operation.COPY,
                    new RecordBuilder()
                        .withField("field1", STRING_SCHEMA, "field1-value")
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("header1", STRING_SCHEMA, "existing-value")
                        .addHeader("inserted1", STRING_SCHEMA, "field1-value")
                ));
            result.add(
                Arguments.of(
                    "basic move",
                    testKeyTransform,
                    new RecordBuilder()
                        .withField("field1", STRING_SCHEMA, "field1-value")
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("header1", STRING_SCHEMA, "existing-value"),
                    singletonList("field1"), singletonList("inserted1"), HeaderFrom.Operation.MOVE,
                    new RecordBuilder()
                        // field1 got moved
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("header1", STRING_SCHEMA, "existing-value")
                        .addHeader("inserted1", STRING_SCHEMA, "field1-value")
                ));
            result.add(
                Arguments.of(
                    "copy with preexisting header",
                    testKeyTransform,
                    new RecordBuilder()
                        .withField("field1", STRING_SCHEMA, "field1-value")
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("inserted1", STRING_SCHEMA, "existing-value"),
                    singletonList("field1"), singletonList("inserted1"), HeaderFrom.Operation.COPY,
                    new RecordBuilder()
                        .withField("field1", STRING_SCHEMA, "field1-value")
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("inserted1", STRING_SCHEMA, "existing-value")
                        .addHeader("inserted1", STRING_SCHEMA, "field1-value")
                ));
            result.add(
                Arguments.of(
                    "move with preexisting header",
                    testKeyTransform,
                    new RecordBuilder()
                        .withField("field1", STRING_SCHEMA, "field1-value")
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("inserted1", STRING_SCHEMA, "existing-value"),
                    singletonList("field1"), singletonList("inserted1"), HeaderFrom.Operation.MOVE,
                    new RecordBuilder()
                        // field1 got moved
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("inserted1", STRING_SCHEMA, "existing-value")
                        .addHeader("inserted1", STRING_SCHEMA, "field1-value")
                ));
            Schema schema = new SchemaBuilder(Schema.Type.STRUCT).field("foo", STRING_SCHEMA).build();
            Struct struct = new Struct(schema).put("foo", "foo-value");
            result.add(
                Arguments.of(
                    "copy with struct value",
                    testKeyTransform,
                    new RecordBuilder()
                        .withField("field1", schema, struct)
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("header1", STRING_SCHEMA, "existing-value"),
                    singletonList("field1"), singletonList("inserted1"), HeaderFrom.Operation.COPY,
                    new RecordBuilder()
                        .withField("field1", schema, struct)
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("header1", STRING_SCHEMA, "existing-value")
                        .addHeader("inserted1", schema, struct)
                ));
            result.add(
                Arguments.of(
                    "move with struct value",
                    testKeyTransform,
                    new RecordBuilder()
                        .withField("field1", schema, struct)
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("header1", STRING_SCHEMA, "existing-value"),
                    singletonList("field1"), singletonList("inserted1"), HeaderFrom.Operation.MOVE,
                    new RecordBuilder()
                        // field1 got moved
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("header1", STRING_SCHEMA, "existing-value")
                        .addHeader("inserted1", schema, struct)
                ));
            result.add(
                Arguments.of(
                    "two headers from same field",
                    testKeyTransform,
                    new RecordBuilder()
                        .withField("field1", STRING_SCHEMA, "field1-value")
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("header1", STRING_SCHEMA, "existing-value"),
                    // two headers from the same field
                    asList("field1", "field1"), asList("inserted1", "inserted2"), HeaderFrom.Operation.MOVE,
                    new RecordBuilder()
                        // field1 got moved
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("header1", STRING_SCHEMA, "existing-value")
                        .addHeader("inserted1", STRING_SCHEMA, "field1-value")
                        .addHeader("inserted2", STRING_SCHEMA, "field1-value")
                ));
            result.add(
                Arguments.of(
                    "two fields to same header",
                    testKeyTransform,
                    new RecordBuilder()
                        .withField("field1", STRING_SCHEMA, "field1-value")
                        .withField("field2", STRING_SCHEMA, "field2-value")
                        .addHeader("header1", STRING_SCHEMA, "existing-value"),
                    // two headers from the same field
                    asList("field1", "field2"), asList("inserted1", "inserted1"), HeaderFrom.Operation.MOVE,
                    new RecordBuilder()
                        // field1 and field2 got moved
                        .addHeader("header1", STRING_SCHEMA, "existing-value")
                        .addHeader("inserted1", STRING_SCHEMA, "field1-value")
                        .addHeader("inserted1", STRING_SCHEMA, "field2-value")
                ));
        }
        return result;
    }

    /** Builds the SMT configuration map for the given header names, field names and operation. */
    private Map<String, Object> config(List<String> headers, List<String> transformFields, HeaderFrom.Operation operation) {
        Map<String, Object> result = new HashMap<>();
        result.put(HeaderFrom.HEADERS_FIELD, headers);
        result.put(HeaderFrom.FIELDS_FIELD, transformFields);
        result.put(HeaderFrom.OPERATION_FIELD, operation.toString());
        return result;
    }

    /** Runs each data() case against a schemaless (Map-valued) record. */
    @ParameterizedTest
    @MethodSource("data")
    public void schemaless(String description,
                           boolean keyTransform,
                           RecordBuilder originalBuilder,
                           List<String> transformFields, List<String> headers1, HeaderFrom.Operation operation,
                           RecordBuilder expectedBuilder) {
        HeaderFrom<SourceRecord> xform = keyTransform ? new HeaderFrom.Key<>() : new HeaderFrom.Value<>();

        xform.configure(config(headers1, transformFields, operation));
        // NOTE(review): this local `headers` is never used by the assertions below.
        ConnectHeaders headers = new ConnectHeaders();
        headers.addString("existing", "existing-value");

        SourceRecord originalRecord = originalBuilder.schemaless(keyTransform);
        SourceRecord expectedRecord = expectedBuilder.schemaless(keyTransform);
        SourceRecord xformed = xform.apply(originalRecord);
        assertSameRecord(expectedRecord, xformed);
    }

    /** Runs each data() case against a schema-backed (Struct-valued) record. */
    @ParameterizedTest
    @MethodSource("data")
    public void withSchema(String description,
                           boolean keyTransform,
                           RecordBuilder originalBuilder,
                           List<String> transformFields, List<String> headers1, HeaderFrom.Operation operation,
                           RecordBuilder expectedBuilder) {
        HeaderFrom<SourceRecord> xform = keyTransform ? new HeaderFrom.Key<>() : new HeaderFrom.Value<>();
        xform.configure(config(headers1, transformFields, operation));
        ConnectHeaders headers = new ConnectHeaders();
        headers.addString("existing", "existing-value");
        // NOTE(review): `expect` is built but never asserted against; the comparison is done
        // via expectedBuilder/assertSameRecord below.
        Headers expect = headers.duplicate();
        for (int i = 0; i < headers1.size(); i++) {
            expect.add(headers1.get(i), originalBuilder.fieldValues.get(i), originalBuilder.fieldSchemas.get(i));
        }

        SourceRecord originalRecord = originalBuilder.withSchema(keyTransform);
        SourceRecord expectedRecord = expectedBuilder.withSchema(keyTransform);
        SourceRecord xformed = xform.apply(originalRecord);
        assertSameRecord(expectedRecord, xformed);
    }

    /** More fields than headers in the config must be rejected. */
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void invalidConfigExtraHeaderConfig(boolean keyTransform) {
        Map<String, Object> config = config(singletonList("foo"), asList("foo", "bar"), HeaderFrom.Operation.COPY);
        HeaderFrom<?> xform = keyTransform ? new HeaderFrom.Key<>() : new HeaderFrom.Value<>();
        assertThrows(ConfigException.class, () -> xform.configure(config));
    }

    /** More headers than fields in the config must be rejected. */
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void invalidConfigExtraFieldConfig(boolean keyTransform) {
        Map<String, Object> config = config(asList("foo", "bar"), singletonList("foo"), HeaderFrom.Operation.COPY);
        HeaderFrom<?> xform = keyTransform ? new HeaderFrom.Key<>() : new HeaderFrom.Value<>();
        assertThrows(ConfigException.class, () -> xform.configure(config));
    }

    /** Empty headers and fields lists must be rejected. */
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void invalidConfigEmptyHeadersAndFieldsConfig(boolean keyTransform) {
        Map<String, Object> config = config(emptyList(), emptyList(), HeaderFrom.Operation.COPY);
        HeaderFrom<?> xform = keyTransform ? new HeaderFrom.Key<>() : new HeaderFrom.Value<>();
        assertThrows(ConfigException.class, () -> xform.configure(config));
    }

    /** Field-by-field comparison of two SourceRecords (SourceRecord.equals is not used). */
    private static void assertSameRecord(SourceRecord expected, SourceRecord xformed) {
        assertEquals(expected.sourcePartition(), xformed.sourcePartition());
        assertEquals(expected.sourceOffset(), xformed.sourceOffset());
        assertEquals(expected.topic(), xformed.topic());
        assertEquals(expected.kafkaPartition(), xformed.kafkaPartition());
        assertEquals(expected.keySchema(), xformed.keySchema());
        assertEquals(expected.key(), xformed.key());
        assertEquals(expected.valueSchema(), xformed.valueSchema());
        assertEquals(expected.value(), xformed.value());
        assertEquals(expected.timestamp(), xformed.timestamp());
        assertEquals(expected.headers(), xformed.headers());
    }
}
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2011 The ZAP Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.extension.pscan;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import net.htmlparser.jericho.MasonTagTypes;
import net.htmlparser.jericho.MicrosoftConditionalCommentTagTypes;
import net.htmlparser.jericho.PHPTagTypes;
import net.htmlparser.jericho.Source;
import org.apache.log4j.Logger;
import org.parosproxy.paros.control.Control;
import org.parosproxy.paros.control.Control.Mode;
import org.parosproxy.paros.core.proxy.ProxyListener;
import org.parosproxy.paros.core.scanner.Alert;
import org.parosproxy.paros.db.DatabaseException;
import org.parosproxy.paros.db.TableHistory;
import org.parosproxy.paros.extension.SessionChangedListener;
import org.parosproxy.paros.extension.history.ExtensionHistory;
import org.parosproxy.paros.extension.history.ProxyListenerLog;
import org.parosproxy.paros.model.HistoryReference;
import org.parosproxy.paros.model.Model;
import org.parosproxy.paros.model.Session;
import org.parosproxy.paros.network.HttpHeader;
import org.parosproxy.paros.network.HttpMalformedHeaderException;
import org.parosproxy.paros.network.HttpMessage;
import org.parosproxy.paros.view.View;
import org.zaproxy.zap.extension.alert.ExtensionAlert;
import org.zaproxy.zap.utils.Stats;

/**
 * Daemon thread that polls the History table for new records and runs every enabled passive
 * scanner against them. New records are signalled via {@link #onHttpResponseReceive(HttpMessage)}
 * interrupting the polling sleep.
 */
public class PassiveScanThread extends Thread implements ProxyListener, SessionChangedListener {

    private static final Logger logger = Logger.getLogger(PassiveScanThread.class);

    // Could be after the last one that saves the HttpMessage, as this ProxyListener doesn't change
    // the HttpMessage.
    public static final int PROXY_LISTENER_ORDER = ProxyListenerLog.PROXY_LISTENER_ORDER + 1;

    // History types that have explicitly opted in to passive scanning, shared across instances.
    private static Set<Integer> optedInHistoryTypes = new HashSet<Integer>();

    @SuppressWarnings("unused")
    private OptionsPassiveScan options = null;

    private PassiveScannerList scannerList = null;
    private int currentId = 1;          // id of the history record currently being processed
    private int lastId = -1;            // highest history id seen so far
    private int mainSleep = 5000;       // idle poll interval, ms
    private int postSleep = 200;        // grace period after an interrupt, ms
    private volatile boolean shutDown = false;

    private final ExtensionHistory extHist;
    private final ExtensionAlert extAlert;
    private final PassiveScanParam pscanOptions;

    private TableHistory historyTable = null;
    private HistoryReference href = null;
    private Session session;

    private String currentRuleName = "";
    private String currentUrl = "";
    private long currentRuleStartTime = 0;
    // Plugin id -> number of alerts raised, used to enforce the per-rule alert cap.
    private Map<Integer, Integer> alertCounts = new HashMap<Integer, Integer>();

    /**
     * Constructs a {@code PassiveScanThread} with the given data.
     *
     * @param passiveScannerList the passive scanners, must not be {@code null}.
     * @param extHist the extension to obtain the (cached) history references, might be {@code
     *     null}.
     * @param extensionAlert the extension used to raise the alerts, must not be {@code null}.
     * @deprecated (2.6.0) Use {@link #PassiveScanThread(PassiveScannerList, ExtensionHistory,
     *     ExtensionAlert, PassiveScanParam)} instead. It will be removed in a future release.
     */
    @Deprecated
    public PassiveScanThread(
            PassiveScannerList passiveScannerList,
            ExtensionHistory extHist,
            ExtensionAlert extensionAlert) {
        this(passiveScannerList, extHist, extensionAlert, new PassiveScanParam());
    }

    /**
     * Constructs a {@code PassiveScanThread} with the given data.
     *
     * @param passiveScannerList the passive scanners, must not be {@code null}.
     * @param extHist the extension to obtain the (cached) history references, might be {@code
     *     null}.
     * @param extensionAlert the extension used to raise the alerts, must not be {@code null}.
     * @param pscanOptions the passive scanner options, must not be {@code null}.
     * @since 2.6.0
     */
    public PassiveScanThread(
            PassiveScannerList passiveScannerList,
            ExtensionHistory extHist,
            ExtensionAlert extensionAlert,
            PassiveScanParam pscanOptions) {
        super("ZAP-PassiveScanner");
        this.setDaemon(true);

        if (extensionAlert == null) {
            throw new IllegalArgumentException("Parameter extensionAlert must not be null.");
        }

        this.scannerList = passiveScannerList;

        MicrosoftConditionalCommentTagTypes.register();
        PHPTagTypes.register();
        PHPTagTypes.PHP_SHORT
                .deregister(); // remove PHP short tags otherwise they override processing
        // instructions
        MasonTagTypes.register();

        extAlert = extensionAlert;
        this.extHist = extHist;
        this.pscanOptions = pscanOptions;
    }

    /**
     * Main loop: advance through history ids, fetch each record, and run every enabled and
     * applicable scanner against it; sleeps while no new records exist. Exits when
     * {@link #shutdown()} sets {@code shutDown}.
     */
    @Override
    public void run() {
        historyTable = Model.getSingleton().getDb().getTableHistory();
        session = Model.getSingleton().getSession();
        // Get the last id - in case we've just opened an existing session
        currentId = this.getLastHistoryId();
        lastId = currentId;

        while (!shutDown) {
            try {
                if (href != null || lastId > currentId) {
                    currentId++;
                } else {
                    // Either just started or there are no new records
                    try {
                        Thread.sleep(mainSleep);
                        if (shutDown) {
                            return;
                        }
                        lastId = this.getLastHistoryId();
                    } catch (InterruptedException e) {
                        // New URL, but give it a chance to be processed first
                        try {
                            Thread.sleep(postSleep);
                        } catch (InterruptedException e2) {
                            // Ignore
                        }
                    }
                }
                try {
                    href = getHistoryReference(currentId);
                    // historyRecord = historyTable.read(currentId);
                } catch (Exception e) {
                    if (shutDown) {
                        return;
                    }
                    logger.error("Failed to read record " + currentId + " from History table", e);
                }

                if (href != null
                        && (!pscanOptions.isScanOnlyInScope() || session.isInScope(href))) {
                    try {
                        // Parse the record
                        HttpMessage msg = href.getHttpMessage();
                        Source src = new Source(msg.getResponseBody().toString());
                        currentUrl = msg.getRequestHeader().getURI().toString();
                        PassiveScanData passiveScanData = new PassiveScanData(msg);

                        for (PassiveScanner scanner : scannerList.list()) {
                            try {
                                if (shutDown) {
                                    return;
                                }
                                int hrefHistoryType = href.getHistoryType();
                                if (scanner.isEnabled()
                                        && (scanner.appliesToHistoryType(hrefHistoryType)
                                                || optedInHistoryTypes.contains(hrefHistoryType))) {
                                    boolean cleanScanner = false;
                                    if (scanner instanceof PluginPassiveScanner) {
                                        ((PluginPassiveScanner) scanner)
                                                .init(this, msg, passiveScanData);
                                        cleanScanner = true;
                                    } else {
                                        scanner.setParent(this);
                                    }
                                    currentRuleName = scanner.getName();
                                    currentRuleStartTime = System.currentTimeMillis();
                                    scanner.scanHttpRequestSend(msg, href.getHistoryId());
                                    if (msg.isResponseFromTargetHost()) {
                                        scanner.scanHttpResponseReceive(
                                                msg, href.getHistoryId(), src);
                                    }
                                    if (cleanScanner) {
                                        ((PluginPassiveScanner) scanner).clean();
                                    }
                                    long timeTaken =
                                            System.currentTimeMillis() - currentRuleStartTime;
                                    Stats.incCounter("stats.pscan." + currentRuleName, timeTaken);
                                    if (timeTaken > 5000) {
                                        // Took over 5 seconds, thats not ideal
                                        String responseInfo = "";
                                        if (msg.isResponseFromTargetHost()) {
                                            responseInfo =
                                                    msg.getResponseHeader()
                                                                    .getHeader(
                                                                            HttpHeader.CONTENT_TYPE)
                                                            + " "
                                                            + msg.getResponseBody().length();
                                        }
                                        logger.warn(
                                                "Passive Scan rule "
                                                        + currentRuleName
                                                        + " took "
                                                        + (timeTaken / 1000)
                                                        + " seconds to scan "
                                                        + currentUrl
                                                        + " "
                                                        + responseInfo);
                                    }
                                }
                            } catch (Throwable e) {
                                if (shutDown) {
                                    return;
                                }
                                logger.error(
                                        "Scanner "
                                                + scanner.getName()
                                                + " failed on record "
                                                + currentId
                                                + " from History table: "
                                                + href.getMethod()
                                                + " "
                                                + href.getURI(),
                                        e);
                            }
                            // Unset in case this is the last one that gets run for a while
                            currentRuleName = "";
                            currentRuleStartTime = 0;
                        }
                    } catch (Exception e) {
                        if (HistoryReference.getTemporaryTypes().contains(href.getHistoryType())) {
                            if (logger.isDebugEnabled()) {
                                logger.debug(
                                        "Temporary record " + currentId + " no longer available:",
                                        e);
                            }
                        } else {
                            logger.error(
                                    "Parser failed on record " + currentId + " from History table",
                                    e);
                        }
                    }
                    currentUrl = "";
                }
            } catch (Exception e) {
                if (shutDown) {
                    return;
                }
                logger.error("Failed on record " + currentId + " from History table", e);
            }
            if (View.isInitialised()) {
                Control.getSingleton()
                        .getExtensionLoader()
                        .getExtension(ExtensionPassiveScan.class)
                        .getScanStatus()
                        .setScanCount(getRecordsToScan());
            }
        }
    }

    /**
     * Resolves a history id to a reference, preferring the (cached) ExtensionHistory lookup;
     * falls back to a direct database read, returning null on failure.
     */
    private HistoryReference getHistoryReference(final int historyReferenceId) {
        if (extHist != null) {
            return extHist.getHistoryReference(historyReferenceId);
        }

        try {
            return new HistoryReference(historyReferenceId);
        } catch (HttpMalformedHeaderException | DatabaseException e) {
            return null;
        }
    }

    private int getLastHistoryId() {
        return historyTable.lastIndex();
    }

    /** Returns the number of history records still waiting to be passively scanned. */
    protected int getRecordsToScan() {
        if (historyTable == null) {
            return 0;
        }
        return this.getLastHistoryId() - getLastScannedId();
    }

    private int getLastScannedId() {
        if (currentId > lastId) {
            return currentId - 1;
        }
        return currentId;
    }

    /**
     * Raises an alert for the record currently being scanned, disabling the originating scanner
     * once it exceeds the configured per-rule alert limit.
     */
    public void raiseAlert(int id, Alert alert) {
        if (shutDown) {
            return;
        }

        if (currentId != id) {
            logger.error("Alert id != currentId! " + id + " " + currentId);
        }
        alert.setSource(Alert.Source.PASSIVE);
        // Raise the alert
        extAlert.alertFound(alert, href);

        if (this.pscanOptions.getMaxAlertsPerRule() > 0) {
            // Theres a limit on how many each rule can raise
            Integer count = alertCounts.get(alert.getPluginId());
            if (count == null) {
                count = Integer.valueOf(0);
            }
            alertCounts.put(alert.getPluginId(), count + 1);
            if (count > this.pscanOptions.getMaxAlertsPerRule()) {
                // Disable the plugin
                PassiveScanner scanner = this.scannerList.getScanner(alert.getPluginId());
                if (scanner != null) {
                    logger.info(
                            "Disabling passive scanner "
                                    + scanner.getName()
                                    + " as it has raised more than "
                                    + this.pscanOptions.getMaxAlertsPerRule()
                                    + " alerts.");
                    scanner.setEnabled(false);
                }
            }
        }
    }

    /** Adds a tag to the current history reference (no-op if it is already tagged). */
    public void addTag(int id, String tag) {
        if (shutDown) {
            return;
        }

        try {
            if (!href.getTags().contains(tag)) {
                href.addTag(tag);
            }
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    @Override
    public int getArrangeableListenerOrder() {
        return PROXY_LISTENER_ORDER;
    }

    @Override
    public boolean onHttpRequestSend(HttpMessage msg) {
        // Ignore
        return true;
    }

    @Override
    public boolean onHttpResponseReceive(HttpMessage msg) {
        // Wakey wakey
        this.interrupt();
        return true;
    }

    @Override
    public void sessionChanged(Session session) {
        // Reset the currentId
        historyTable = Model.getSingleton().getDb().getTableHistory();
        href = null;
        // Get the last id - in case we've just opened an existing session
        currentId = historyTable.lastIndex();
        lastId = currentId;
    }

    @Override
    public void sessionScopeChanged(Session session) {}

    public void shutdown() {
        this.shutDown = true;
    }

    @Override
    public void sessionAboutToChange(Session session) {}

    @Override
    public void sessionModeChanged(Mode mode) {
        // Ignore
    }

    /**
     * Add the History Type ({@code int}) to the set of applicable history types.
     *
     * @param type the type to be added to the set of applicable history types
     * @since 2.8.0
     */
    public static void addApplicableHistoryType(int type) {
        optedInHistoryTypes.add(type);
    }

    /**
     * Remove the History Type ({@code int}) from the set of applicable history types.
     *
     * @param type the type to be removed from the set of applicable history types
     * @since 2.8.0
     */
    public static void removeApplicableHistoryType(int type) {
        optedInHistoryTypes.remove(type);
    }

    /**
     * Returns the set of History Types which have "opted-in" to be applicable for passive scanning.
     *
     * @return a set of {@code Integer} representing all of the History Types which have "opted-in"
     *     for passive scanning.
     * @since 2.8.0
     */
    public static Set<Integer> getOptedInHistoryTypes() {
        return Collections.unmodifiableSet(optedInHistoryTypes);
    }

    /**
     * Returns the full set (both default and "opted-in") which are to be applicable for passive
     * scanning.
     *
     * @return a set of {@code Integer} representing all of the History Types which are applicable
     *     for passive scanning.
     * @since 2.8.0
     */
    public static Set<Integer> getApplicableHistoryTypes() {
        Set<Integer> allApplicableTypes = new HashSet<Integer>();
        allApplicableTypes.addAll(PluginPassiveScanner.getDefaultHistoryTypes());
        if (!optedInHistoryTypes.isEmpty()) {
            allApplicableTypes.addAll(optedInHistoryTypes);
        }
        return allApplicableTypes;
    }

    public String getCurrentRuleName() {
        return currentRuleName;
    }

    public String getCurrentUrl() {
        return currentUrl;
    }

    public long getCurrentRuleStartTime() {
        return currentRuleStartTime;
    }
}
/*************************GO-LICENSE-START*********************************
 * Copyright 2015 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.server;

import com.thoughtworks.go.util.ArrayUtil;
import com.thoughtworks.go.util.FileUtil;
import com.thoughtworks.go.util.ReflectionUtil;
import com.thoughtworks.go.util.SystemEnvironment;
import org.eclipse.jetty.jmx.MBeanContainer;
import org.eclipse.jetty.server.*;
import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.webapp.JettyWebXmlConfiguration;
import org.eclipse.jetty.webapp.WebAppContext;
import org.eclipse.jetty.webapp.WebInfConfiguration;
import org.eclipse.jetty.webapp.WebXmlConfiguration;
import org.hamcrest.CoreMatchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.mockito.ArgumentCaptor;

import javax.net.ssl.SSLSocketFactory;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;

import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNot.not;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@code Jetty9Server}: verifies connector/handler wiring against a
 * mocked Jetty {@link Server} and a mocked {@link SystemEnvironment}.
 */
public class Jetty9ServerTest {

    private Jetty9Server jetty9Server;
    private Server server;
    private SystemEnvironment systemEnvironment;
    @Rule
    public TemporaryFolder temporaryFolder = new TemporaryFolder();
    private SSLSocketFactory sslSocketFactory;
    private File configDir;

    /** Stubs the environment and builds the server under test against a mock Jetty Server. */
    @Before
    public void setUp() throws Exception {
        server = mock(Server.class);
        systemEnvironment = mock(SystemEnvironment.class);
        when(systemEnvironment.getServerPort()).thenReturn(1234);
        when(systemEnvironment.keystore()).thenReturn(temporaryFolder.newFolder());
        when(systemEnvironment.truststore()).thenReturn(temporaryFolder.newFolder());
        when(systemEnvironment.getWebappContextPath()).thenReturn("context");
        when(systemEnvironment.getCruiseWar()).thenReturn("cruise.war");
        when(systemEnvironment.getParentLoaderPriority()).thenReturn(true);
        when(systemEnvironment.useCompressedJs()).thenReturn(true);
        when(systemEnvironment.get(SystemEnvironment.RESPONSE_BUFFER_SIZE)).thenReturn(1000);
        when(systemEnvironment.get(SystemEnvironment.IDLE_TIMEOUT)).thenReturn(2000);
        when(systemEnvironment.configDir()).thenReturn(configDir = temporaryFolder.newFile());
        when(systemEnvironment.getJettyConfigFile()).thenReturn(new File("foo"));

        sslSocketFactory = mock(SSLSocketFactory.class);
        when(sslSocketFactory.getSupportedCipherSuites()).thenReturn(new String[]{});
        jetty9Server = new Jetty9Server(systemEnvironment, "pwd", sslSocketFactory, server);
        // Point the static jetty.xml lookup at the test "config" resource directory.
        ReflectionUtil.setStaticField(Jetty9Server.class, "JETTY_XML_LOCATION_IN_JAR", "config");
    }

    @Test
    public void shouldAddMBeanContainerAsEventListener() throws Exception {
        ArgumentCaptor<MBeanContainer> captor = ArgumentCaptor.forClass(MBeanContainer.class);
        jetty9Server.configure();

        verify(server).addEventListener(captor.capture());
        MBeanContainer mBeanContainer = captor.getValue();
        assertThat(mBeanContainer.getMBeanServer(), is(not(nullValue())));
    }

    @Test
    public void shouldAddHttpSocketConnector() throws Exception {
        ArgumentCaptor<Connector> captor = ArgumentCaptor.forClass(Connector.class);
        jetty9Server.configure();

        // Two connectors are registered: plain HTTP first, SSL second.
        verify(server, times(2)).addConnector(captor.capture());

        List<Connector> connectors = captor.getAllValues();
        Connector plainConnector = connectors.get(0);

        assertThat(plainConnector instanceof ServerConnector, is(true));
        ServerConnector connector = (ServerConnector) plainConnector;
        assertThat(connector.getServer(), is(server));
        assertThat(connector.getConnectionFactories().size(), is(1));
        ConnectionFactory connectionFactory = connector.getConnectionFactories().iterator().next();
        assertThat(connectionFactory instanceof HttpConnectionFactory, is(true));
    }

    @Test
    public void shouldAddSSLSocketConnector() throws Exception {
        ArgumentCaptor<Connector> captor = ArgumentCaptor.forClass(Connector.class);
        jetty9Server.configure();

        verify(server, times(2)).addConnector(captor.capture());
        List<Connector> connectors = captor.getAllValues();
        Connector sslConnector = connectors.get(1);

        assertThat(sslConnector instanceof ServerConnector, is(true));
        ServerConnector connector = (ServerConnector) sslConnector;
        assertThat(connector.getServer(), is(server));
        assertThat(connector.getConnectionFactories().size(), is(2));

        // SSL factory must come first so it wraps the HTTP factory.
        Iterator<ConnectionFactory> iterator = connector.getConnectionFactories().iterator();
        ConnectionFactory first = iterator.next();
        ConnectionFactory second = iterator.next();
        assertThat(first instanceof SslConnectionFactory, is(true));
        SslConnectionFactory sslConnectionFactory = (SslConnectionFactory) first;
        assertThat(sslConnectionFactory.getProtocol(), is("SSL-HTTP/1.1"));
        assertThat(second instanceof HttpConnectionFactory, is(true));
    }

    @Test
    public void shouldAddWelcomeRequestHandler() throws Exception {
        ArgumentCaptor<HandlerCollection> captor = ArgumentCaptor.forClass(HandlerCollection.class);
        jetty9Server.configure();

        verify(server, times(1)).setHandler(captor.capture());
        HandlerCollection handlerCollection = captor.getValue();
        assertThat(handlerCollection.getHandlers().length, is(3));
        Handler handler = handlerCollection.getHandlers()[0];
        assertThat(handler instanceof Jetty9Server.GoServerWelcomeFileHandler, is(true));

        Jetty9Server.GoServerWelcomeFileHandler welcomeFileHandler = (Jetty9Server.GoServerWelcomeFileHandler) handler;
        assertThat(welcomeFileHandler.getContextPath(), is("/"));
    }

    @Test
    public void shouldAddResourceHandlerForAssets() throws Exception {
        ArgumentCaptor<HandlerCollection> captor = ArgumentCaptor.forClass(HandlerCollection.class);
        jetty9Server.configure();

        verify(server, times(1)).setHandler(captor.capture());
        HandlerCollection handlerCollection = captor.getValue();
        assertThat(handlerCollection.getHandlers().length, is(3));
        Handler handler = handlerCollection.getHandlers()[1];
        assertThat(handler instanceof AssetsContextHandler, is(true));

        AssetsContextHandler assetsContextHandler = (AssetsContextHandler) handler;
        assertThat(assetsContextHandler.getContextPath(), is("context/assets"));
    }

    @Test
    public void shouldAddWebAppContextHandler() throws Exception {
        ArgumentCaptor<HandlerCollection> captor = ArgumentCaptor.forClass(HandlerCollection.class);
        jetty9Server.configure();

        verify(server, times(1)).setHandler(captor.capture());
        HandlerCollection handlerCollection = captor.getValue();
        assertThat(handlerCollection.getHandlers().length, is(3));
        Handler handler = handlerCollection.getHandlers()[2];
        assertThat(handler instanceof WebAppContext, is(true));
        WebAppContext webAppContext = (WebAppContext) handler;

        List<String> configClasses = ArrayUtil.asList(webAppContext.getConfigurationClasses());
        assertThat(configClasses.contains(WebInfConfiguration.class.getCanonicalName()), is(true));
        assertThat(configClasses.contains(WebXmlConfiguration.class.getCanonicalName()), is(true));
        assertThat(configClasses.contains(JettyWebXmlConfiguration.class.getCanonicalName()), is(true));
        assertThat(webAppContext.getContextPath(), is("context"));
        assertThat(webAppContext.getWar(), is("cruise.war"));
        assertThat(webAppContext.isParentLoaderPriority(), is(true));
        assertThat(webAppContext.getDefaultsDescriptor(), is("jar:file:cruise.war!/WEB-INF/webdefault.xml"));
    }

    @Test
    public void shouldSetStopAtShutdown() throws Exception {
        jetty9Server.configure();
        verify(server).setStopAtShutdown(true);
    }

    @Test
    public void shouldAddExtraJarsIntoClassPath() throws Exception {
        jetty9Server.configure();
        jetty9Server.addExtraJarsToClasspath("test-addons/some-addon-dir/addon-1.JAR,test-addons/some-addon-dir/addon-2.jar");
        assertThat(getWebAppContext(jetty9Server).getExtraClasspath(), is("test-addons/some-addon-dir/addon-1.JAR,test-addons/some-addon-dir/addon-2.jar," + configDir));
    }

    @Test
    public void shouldSetInitParams() throws Exception {
        jetty9Server.configure();
        jetty9Server.setInitParameter("name", "value");
        assertThat(getWebAppContext(jetty9Server).getInitParameter("name"), CoreMatchers.is("value"));
    }

    @Test
    public void shouldReplaceJettyXmlIfItDoesNotContainCorrespondingJettyVersionNumber() throws IOException {
        File jettyXml = temporaryFolder.newFile("jetty.xml");
        when(systemEnvironment.getJettyConfigFile()).thenReturn(jettyXml);

        // A jetty 6 marker should trigger replacement with the bundled jetty 9 config.
        String originalContent = "jetty-v6.2.3\nsome other local changes";
        FileUtil.writeContentToFile(originalContent, jettyXml);
        jetty9Server.replaceJettyXmlIfItBelongsToADifferentVersion(systemEnvironment.getJettyConfigFile());
        assertThat(FileUtil.readContentFromFile(systemEnvironment.getJettyConfigFile()), is(FileUtil.readContentFromFile(new File(getClass().getResource("config/jetty.xml").getPath()))));
    }

    @Test
    public void shouldNotReplaceJettyXmlIfItAlreadyContainsCorrespondingVersionNumber() throws IOException {
        File jettyXml = temporaryFolder.newFile("jetty.xml");
        when(systemEnvironment.getJettyConfigFile()).thenReturn(jettyXml);

        // A matching jetty 9 marker means local customisations are preserved.
        String originalContent = "jetty-v9.2.3\nsome other local changes";
        FileUtil.writeContentToFile(originalContent, jettyXml);
        jetty9Server.replaceJettyXmlIfItBelongsToADifferentVersion(systemEnvironment.getJettyConfigFile());
        assertThat(FileUtil.readContentFromFile(systemEnvironment.getJettyConfigFile()), is(originalContent));
    }

    /** Reads the private webAppContext field off the server under test via reflection. */
    private WebAppContext getWebAppContext(Jetty9Server server) {
        return (WebAppContext) ReflectionUtil.getField(server, "webAppContext");
    }
}
/*
 * Copyright 2016-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.rage;

import static com.facebook.buck.rage.AbstractRageConfig.RageProtocolVersion;

import com.facebook.buck.cli.BuckConfig;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.log.LogConfigPaths;
import com.facebook.buck.log.Logger;
import com.facebook.buck.model.BuildId;
import com.facebook.buck.util.OptionalCompat;
import com.facebook.buck.util.Optionals;
import com.facebook.buck.util.environment.BuildEnvironmentDescription;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.google.common.base.Function;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;

import org.immutables.value.Value;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;

/**
 * Base class for gathering logs and other interesting information from buck.
 */
public abstract class AbstractReport {

  private static final Logger LOG = Logger.get(AbstractReport.class);

  private final ProjectFilesystem filesystem;
  private final DefectReporter defectReporter;
  private final BuildEnvironmentDescription buildEnvironmentDescription;
  private final PrintStream output;
  private final RageConfig rageConfig;
  private final ExtraInfoCollector extraInfoCollector;

  public AbstractReport(
      ProjectFilesystem filesystem,
      DefectReporter defectReporter,
      BuildEnvironmentDescription buildEnvironmentDescription,
      PrintStream output,
      RageConfig rageBuckConfig,
      ExtraInfoCollector extraInfoCollector) {
    this.filesystem = filesystem;
    this.defectReporter = defectReporter;
    this.buildEnvironmentDescription = buildEnvironmentDescription;
    this.output = output;
    this.rageConfig = rageBuckConfig;
    this.extraInfoCollector = extraInfoCollector;
  }

  /** Asks the concrete report which builds should be included; empty set aborts. */
  protected abstract ImmutableSet<BuildLogEntry> promptForBuildSelection() throws IOException;

  /** Source-control state (branch, dirty files, ...) to attach, when available. */
  protected abstract Optional<SourceControlInfo> getSourceControlInfo()
      throws IOException, InterruptedException;

  /** Free-form user description of the issue, when the report mode collects one. */
  protected abstract Optional<UserReport> getUserReport() throws IOException;

  /**
   * Gathers every piece of the defect report (selected builds, user description,
   * source-control info, extra info, local configs, traces) and submits it.
   *
   * @return the submit result, or empty when no builds were selected.
   */
  public final Optional<DefectSubmitResult> collectAndSubmitResult()
      throws IOException, InterruptedException {

    ImmutableSet<BuildLogEntry> selectedBuilds = promptForBuildSelection();
    if (selectedBuilds.isEmpty()) {
      return Optional.empty();
    }
    Optional<UserReport> userReport = getUserReport();
    Optional<SourceControlInfo> sourceControlInfo = getSourceControlInfo();

    ImmutableSet<Path> extraInfoPaths = ImmutableSet.of();
    Optional<String> extraInfo = Optional.empty();
    try {
      Optional<ExtraInfoResult> extraInfoResultOptional = extraInfoCollector.run();
      if (extraInfoResultOptional.isPresent()) {
        extraInfoPaths = extraInfoResultOptional.get().getExtraFiles();
        extraInfo = Optional.of(extraInfoResultOptional.get().getOutput());
      }
    } catch (DefaultExtraInfoCollector.ExtraInfoExecutionException e) {
      // Extra info is best-effort: report the failure but continue building the report.
      output.printf("There was a problem gathering additional information: %s. " +
          "The results will not be attached to the report.", e.getMessage());
    }

    UserLocalConfiguration userLocalConfiguration =
        UserLocalConfiguration.of(isNoBuckCheckPresent(), getLocalConfigs());

    ImmutableSet<Path> includedPaths = FluentIterable.from(selectedBuilds)
        .transformAndConcat(
            new Function<BuildLogEntry, Iterable<Path>>() {
              @Override
              public Iterable<Path> apply(BuildLogEntry input) {
                ImmutableSet.Builder<Path> result = ImmutableSet.builder();
                Optionals.addIfPresent(input.getRuleKeyLoggerLogFile(), result);
                Optionals.addIfPresent(input.getMachineReadableLogFile(), result);
                result.add(input.getRelativePath());
                return result.build();
              }
            })
        .append(extraInfoPaths)
        .append(userLocalConfiguration.getLocalConfigsContents().keySet())
        .append(getTracePathsOfBuilds(selectedBuilds))
        .toSet();

    DefectReport defectReport = DefectReport.builder()
        .setUserReport(userReport)
        .setHighlightedBuildIds(
            FluentIterable.from(selectedBuilds)
                .transformAndConcat(
                    new Function<BuildLogEntry, Iterable<BuildId>>() {
                      @Override
                      public Iterable<BuildId> apply(BuildLogEntry input) {
                        return OptionalCompat.asSet(input.getBuildId());
                      }
                    }))
        .setBuildEnvironmentDescription(buildEnvironmentDescription)
        .setSourceControlInfo(sourceControlInfo)
        .setIncludedPaths(includedPaths)
        .setExtraInfo(extraInfo)
        .setUserLocalConfiguration(userLocalConfiguration)
        .build();

    output.println("Writing report, please wait..");
    return Optional.of(defectReporter.submitReport(defectReport));
  }

  /**
   * Prints a human-readable summary of the submit result to the output stream.
   *
   * @param defectSubmitResult the result to present; empty means nothing was collected.
   * @param showJson whether to also print the raw server response.
   */
  public void presentDefectSubmitResult(
      Optional<DefectSubmitResult> defectSubmitResult,
      boolean showJson) {
    if (!defectSubmitResult.isPresent()) {
      output.println("No logs of interesting commands were found. Check if buck-out/log contains " +
          "commands except buck launch & buck rage.");
      return;
    }
    DefectSubmitResult result = defectSubmitResult.get();
    // If request has an empty isRequestSuccessful, it means we did not try to upload it somewhere.
    if (!result.getIsRequestSuccessful().isPresent()) {
      if (result.getReportSubmitLocation().isPresent()) {
        output.printf("Report saved at %s\n", result.getReportSubmitLocation().get());
      } else {
        output.printf(
            "=> Failed to save report locally. Reason: %s\n",
            result.getReportSubmitErrorMessage().orElse("Unknown"));
      }
      return;
    }

    if (result.getIsRequestSuccessful().get()) {
      if (result.getRequestProtocol().equals(RageProtocolVersion.SIMPLE)) {
        output.printf("%s", result.getReportSubmitMessage().get());
      } else {
        String message = "=> Upload was successful.\n";
        if (result.getReportSubmitLocation().isPresent()) {
          message += "=> Report was uploaded to " + result.getReportSubmitLocation().get() + "\n";
        }
        if (result.getReportSubmitMessage().isPresent() && showJson) {
          message += "=> Full Response was: " + result.getReportSubmitMessage().get() + "\n";
        }
        output.print(message);
      }
    } else {
      output.printf(
          "=> Failed to upload report because of error: %s.\n=> Report was saved locally at %s\n",
          result.getReportSubmitErrorMessage().get(),
          result.getReportSubmitLocation());
    }
  }

  @Value.Immutable
  @BuckStyleImmutable
  interface AbstractUserReport {
    String getUserIssueDescription();
  }

  /**
   * Reads the contents of the well-known user-local config files under the project root.
   * Missing files are skipped silently (debug log); unreadable files are logged as warnings.
   */
  private ImmutableMap<Path, String> getLocalConfigs() {
    Path rootPath = filesystem.getRootPath();
    ImmutableSet<Path> knownUserLocalConfigs = ImmutableSet.of(
        Paths.get(BuckConfig.BUCK_CONFIG_OVERRIDE_FILE_NAME),
        LogConfigPaths.LOCAL_PATH,
        Paths.get(".watchman.local"),
        Paths.get(".buckjavaargs.local"),
        Paths.get(".bucklogging.local.properties"));

    ImmutableMap.Builder<Path, String> localConfigs = ImmutableMap.builder();
    for (Path localConfig : knownUserLocalConfigs) {
      try {
        localConfigs.put(
            localConfig,
            new String(Files.readAllBytes(rootPath.resolve(localConfig)), StandardCharsets.UTF_8));
      } catch (FileNotFoundException | NoSuchFileException e) {
        // Fix: java.nio.file.Files signals a missing file with NoSuchFileException, not
        // FileNotFoundException, so the original catch never matched and missing files
        // fell through to the IOException warning below. Both are caught to stay safe
        // across filesystem providers.
        LOG.debug("%s was not found.", localConfig);
      } catch (IOException e) {
        LOG.warn("Failed to read contents of %s.", rootPath.resolve(localConfig).toString());
      }
    }

    return localConfigs.build();
  }

  /**
   * It returns a list of trace files that corresponds to builds while respecting the maximum
   * size of the final zip file.
   * @param entries the highlighted builds
   * @return a set of paths that points to the corresponding traces.
   */
  private ImmutableSet<Path> getTracePathsOfBuilds(ImmutableSet<BuildLogEntry> entries) {
    ImmutableSet.Builder<Path> tracePaths = new ImmutableSet.Builder<>();
    long reportSizeBytes = 0;
    for (BuildLogEntry entry : entries) {
      reportSizeBytes += entry.getSize();
      if (entry.getTraceFile().isPresent()) {
        try {
          Path traceFile = filesystem.getPathForRelativeExistingPath(entry.getTraceFile().get());
          long traceFileSizeBytes = Files.size(traceFile);
          if (rageConfig.getReportMaxSizeBytes().isPresent()) {
            // Only include the trace if it still fits under the configured cap.
            if (reportSizeBytes + traceFileSizeBytes < rageConfig.getReportMaxSizeBytes().get()) {
              tracePaths.add(entry.getTraceFile().get());
              reportSizeBytes += traceFileSizeBytes;
            }
          } else {
            tracePaths.add(entry.getTraceFile().get());
            reportSizeBytes += traceFileSizeBytes;
          }
        } catch (IOException e) {
          LOG.info("Trace path %s wasn't valid, skipping it.", entry.getTraceFile().get());
        }
      }
    }
    return tracePaths.build();
  }

  /** True when the user has opted out of buck version checks via a .nobuckcheck file. */
  private boolean isNoBuckCheckPresent() {
    return Files.exists(filesystem.getRootPath().resolve(".nobuckcheck"));
  }
}
package com.ltapps.textscanner;

import android.app.ProgressDialog;
import android.content.ClipData;
import android.content.ClipboardManager;
import android.content.Context;
import android.content.Intent;
import android.content.res.AssetManager;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.support.v4.content.ContextCompat;
import android.support.v4.view.ViewCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.text.Editable;
import android.text.Spannable;
import android.text.SpannableString;
import android.text.TextWatcher;
import android.text.method.ScrollingMovementMethod;
import android.text.style.BackgroundColorSpan;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;

import com.google.android.gms.ads.AdRequest;
import com.google.android.gms.ads.AdView;
import com.google.android.gms.ads.MobileAds;
import com.googlecode.tesseract.android.TessBaseAPI;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

/**
 * Screen that runs Tesseract OCR over the image prepared by {@code Binarization},
 * shows the extracted text, and offers search-highlighting and copy-to-clipboard.
 *
 * Two background tasks run in sequence (AsyncTask's default executor is serial):
 * first the trained-data assets are copied to external storage, then OCR runs.
 */
public class Recognizer extends AppCompatActivity implements Toolbar.OnMenuItemClickListener {

    private Toolbar toolbar;
    private EditText search;
    private TextView textView;
    private String textScanned;
    ProgressDialog progressCopy, progressOcr;
    TessBaseAPI baseApi;
    AsyncTask<Void, Void, Void> copy = new copyTask();
    AsyncTask<Void, Void, Void> ocr = new ocrTask();
    private AdView mAdView;

    // Root directory on external storage holding the tessdata/ language files.
    private static final String DATA_PATH =
            Environment.getExternalStorageDirectory().getAbsolutePath() + "/com.ltapps.textscanner/";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.recognizer);

        // AdMob App ID
        MobileAds.initialize(this, BuildConfig.AdMobAppId);
        mAdView = findViewById(R.id.adView);
        AdRequest adRequest = new AdRequest.Builder().build();
        mAdView.loadAd(adRequest);

        toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        toolbar.setOnMenuItemClickListener(this);
        ViewCompat.setElevation(toolbar, 10);
        ViewCompat.setElevation((LinearLayout) findViewById(R.id.extension), 10);

        textView = (TextView) findViewById(R.id.textExtracted);
        textView.setMovementMethod(new ScrollingMovementMethod());
        search = (EditText) findViewById(R.id.search_text);

        // Setting progress dialog for copy job.
        progressCopy = new ProgressDialog(Recognizer.this);
        progressCopy.setProgressStyle(ProgressDialog.STYLE_SPINNER);
        progressCopy.setIndeterminate(true);
        progressCopy.setCancelable(false);
        progressCopy.setTitle("Dictionaries");
        progressCopy.setMessage("Copying dictionary files");

        // Setting progress dialog for ocr job.
        progressOcr = new ProgressDialog(this);
        progressOcr.setProgressStyle(ProgressDialog.STYLE_SPINNER);
        progressOcr.setIndeterminate(true);
        progressOcr.setCancelable(false);
        progressOcr.setTitle("OCR");
        progressOcr.setMessage("Extracting text, please wait");

        textScanned = "";

        // Live search: highlight every (case-insensitive) occurrence of the query.
        search.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {}

            @Override
            public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
            }

            @Override
            public void afterTextChanged(Editable editable) {
                String ett = search.getText().toString().replaceAll("\n", " ");
                String tvt = textView.getText().toString().replaceAll("\n", " ");
                // Reset any previous highlight spans before re-searching.
                textView.setText(textView.getText().toString());
                if (!ett.isEmpty()) {
                    int ofe = tvt.toLowerCase().indexOf(ett.toLowerCase(), 0);
                    Spannable WordtoSpan = new SpannableString(textView.getText());
                    for (int ofs = 0; ofs < tvt.length() && ofe != -1; ofs = ofe + 1) {
                        ofe = tvt.toLowerCase().indexOf(ett.toLowerCase(), ofs);
                        if (ofe == -1)
                            break;
                        else {
                            WordtoSpan.setSpan(
                                    new BackgroundColorSpan(
                                            ContextCompat.getColor(Recognizer.this, R.color.colorAccent)),
                                    ofe,
                                    ofe + ett.length(),
                                    Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
                            textView.setText(WordtoSpan, TextView.BufferType.SPANNABLE);
                        }
                    }
                }
            }
        });

        // Serial executor guarantees assets are copied before OCR starts.
        copy.execute();
        ocr.execute();
    }

    /**
     * Runs Tesseract on the pre-processed image held by {@code Binarization} and
     * stores the result in {@link #textScanned}. Releases native resources when done.
     */
    private void recognizeText() {
        String language = "";
        if (Binarization.language == 0) language = "eng";
        else language = "spa";
        baseApi = new TessBaseAPI();
        baseApi.init(DATA_PATH, language, TessBaseAPI.OEM_TESSERACT_ONLY);
        baseApi.setImage(Binarization.umbralization);
        textScanned = baseApi.getUTF8Text();
        // Fix: release Tesseract's native memory; the API was previously never ended.
        baseApi.end();
    }

    /**
     * Copies the bundled trained-data files from assets to external storage,
     * skipping files that already exist.
     */
    private void copyAssets() {
        AssetManager assetManager = getAssets();
        String[] files = null;
        try {
            files = assetManager.list("trainneddata");
        } catch (IOException e) {
            Log.e("tag", "Failed to get asset file list.", e);
        }
        if (files == null) {
            // Fix: list() failed (or the directory is unknown); the original code
            // fell through and threw a NullPointerException in the loop below.
            return;
        }
        for (String filename : files) {
            Log.i("files", filename);
            String dirout = DATA_PATH + "tessdata/";
            File outFile = new File(dirout, filename);
            if (!outFile.exists()) {
                // Fix: close both streams even when the copy fails part-way
                // (the original leaked them on IOException).
                InputStream in = null;
                OutputStream out = null;
                try {
                    in = assetManager.open("trainneddata/" + filename);
                    (new File(dirout)).mkdirs();
                    out = new FileOutputStream(outFile);
                    copyFile(in, out);
                    out.flush();
                } catch (IOException e) {
                    Log.e("tag", "Error creating files", e);
                } finally {
                    closeQuietly(in);
                    closeQuietly(out);
                }
            }
        }
    }

    /** Closes a stream, ignoring null and close-time errors (best-effort cleanup). */
    private static void closeQuietly(java.io.Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (IOException ignored) {
                // Nothing useful to do on a failed close.
            }
        }
    }

    /** Streams {@code in} to {@code out} in 1 KiB chunks; caller owns both streams. */
    private void copyFile(InputStream in, OutputStream out) throws IOException {
        byte[] buffer = new byte[1024];
        int read;
        while ((read = in.read(buffer)) != -1) {
            out.write(buffer, 0, read);
        }
    }

    /** Background task: copy trained-data assets while showing a spinner. */
    private class copyTask extends AsyncTask<Void, Void, Void> {
        @Override
        protected void onPreExecute() {
            super.onPreExecute();
            progressCopy.show();
        }

        @Override
        protected void onPostExecute(Void aVoid) {
            super.onPostExecute(aVoid);
            progressCopy.cancel();
            // Hand the spinner over to the OCR task, which is queued right behind us.
            progressOcr.show();
        }

        @Override
        protected Void doInBackground(Void... params) {
            Log.i("CopyTask", "copying..");
            copyAssets();
            return null;
        }
    }

    /** Background task: run OCR and publish the extracted text to the TextView. */
    private class ocrTask extends AsyncTask<Void, Void, Void> {
        @Override
        protected void onPreExecute() {
            super.onPreExecute();
        }

        @Override
        protected void onPostExecute(Void aVoid) {
            super.onPostExecute(aVoid);
            progressOcr.cancel();
            textView.setText(textScanned);
        }

        @Override
        protected Void doInBackground(Void... params) {
            Log.i("OCRTask", "extracting..");
            recognizeText();
            return null;
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_result, menu);
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onMenuItemClick(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.copy_text:
                ClipboardManager clipboard =
                        (ClipboardManager) getSystemService(Context.CLIPBOARD_SERVICE);
                ClipData clip = ClipData.newPlainText("TextScanner", textView.getText());
                clipboard.setPrimaryClip(clip);
                Toast.makeText(Recognizer.this, "Text has been copied to clipboard",
                        Toast.LENGTH_LONG).show();
                break;
            case R.id.new_scan:
                // Relaunch the app's entry activity for a fresh scan.
                Intent i = getBaseContext().getPackageManager()
                        .getLaunchIntentForPackage(getBaseContext().getPackageName());
                i.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                startActivity(i);
                break;
        }
        return false;
    }
}
/**
 * Copyright (c) 2011-2015, Qulice.com
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met: 1) Redistributions of source code must retain the above
 * copyright notice, this list of conditions and the following
 * disclaimer. 2) Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following
 * disclaimer in the documentation and/or other materials provided
 * with the distribution. 3) Neither the name of the Qulice.com nor
 * the names of its contributors may be used to endorse or promote
 * products derived from this software without specific prior written
 * permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
 * NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.qulice.checkstyle;

import com.puppycrawl.tools.checkstyle.api.Check;
import com.puppycrawl.tools.checkstyle.api.DetailAST;
import com.puppycrawl.tools.checkstyle.api.TokenTypes;

/**
 * Checks that constant, declared as private field of class is used more than
 * once.
 *
 * @author Dmitry Bashkin (dmitry.bashkin@qulice.com)
 * @version $Id$
 */
public final class ConstantUsageCheck extends Check {

    @Override
    public int[] getDefaultTokens() {
        // Only variable definitions are visited; field-ness is checked per token.
        return new int[]{
            TokenTypes.VARIABLE_DEF,
        };
    }

    /**
     * {@inheritDoc}
     * @checkstyle NestedIfDepth (35 lines)
     */
    @Override
    public void visitToken(final DetailAST ast) {
        // serialVersionUID is exempt: it is read reflectively by serialization.
        if (this.isField(ast) && this.isFinal(ast)) {
            final DetailAST namenode = ast.findFirstToken(TokenTypes.IDENT);
            if (!"serialVersionUID".equals(this.getText(namenode))) {
                this.checkField(ast, namenode);
            }
        }
    }

    /**
     * Check that constant, declared as private field of class
     * is used more than ones.
     * @param ast Node which contains VARIABLE_DEF
     * @param namenode Node which contains variable name
     */
    private void checkField(final DetailAST ast, final DetailAST namenode) {
        final String name = namenode.getText();
        final int line = namenode.getLineNo();
        // Scan the siblings that FOLLOW the constant's declaration for usages.
        DetailAST variable = ast.getNextSibling();
        int counter = 0;
        while (null != variable) {
            switch (variable.getType()) {
                case TokenTypes.VARIABLE_DEF:
                    counter += this.parseVarDef(variable, name);
                    break;
                case TokenTypes.CLASS_DEF:
                    counter += this.parseDef(
                        variable, name, TokenTypes.OBJBLOCK
                    );
                    break;
                default:
                    // Methods/constructors: search their statement list (SLIST).
                    counter += this.parseDef(variable, name, TokenTypes.SLIST);
                    break;
            }
            variable = variable.getNextSibling();
        }
        if (counter == 0 && this.isPrivate(ast)) {
            this.log(
                line,
                String.format("Private constant \"%s\" is not used", name)
            );
        }
    }

    /**
     * Parses the variable definition and increments the counter
     * if name is found.
     * @param variable DetailAST of variable definition
     * @param name Name of constant we search for
     * @return Zero if not found, 1 otherwise
     */
    private int parseVarDef(final DetailAST variable, final String name) {
        int counter = 0;
        final DetailAST assign =
            variable.findFirstToken(TokenTypes.ASSIGN);
        if (assign != null) {
            // The initializer is either an expression or an array initializer.
            DetailAST expression =
                assign.findFirstToken(TokenTypes.EXPR);
            if (expression == null) {
                expression = assign.findFirstToken(
                    TokenTypes.ARRAY_INIT
                );
            }
            // NOTE(review): textual substring match — a longer identifier that
            // contains 'name' also counts as a usage. TODO confirm this is accepted.
            final String text = this.getText(expression);
            if (text.contains(name)) {
                ++counter;
            }
        }
        return counter;
    }

    /**
     * Returns text representation of the specified node, including it's
     * children.
     * @param node Node, containing text.
     * @return Text representation of the node.
     */
    private String getText(final DetailAST node) {
        String ret;
        if (0 == node.getChildCount()) {
            ret = node.getText();
        } else {
            final StringBuilder result = new StringBuilder();
            DetailAST child = node.getFirstChild();
            while (null != child) {
                final String text = this.getText(child);
                result.append(text);
                // Re-insert the '.' separator between children of a DOT node.
                if (".".equals(node.getText())
                    && child.getNextSibling() != null) {
                    result.append(node.getText());
                }
                child = child.getNextSibling();
            }
            ret = result.toString();
        }
        return ret;
    }

    /**
     * Returns <code>true</code> if specified node has parent node of type
     * <code>OBJBLOCK</code>.
     * @param node Node to check.
     * @return True if parent node is <code>OBJBLOCK</code>, else
     *  returns <code>false</code>.
     */
    private boolean isField(final DetailAST node) {
        final DetailAST parent = node.getParent();
        return TokenTypes.OBJBLOCK == parent.getType();
    }

    /**
     * Returns true if specified node has modifiers of type <code>FINAL</code>.
     * @param node Node to check.
     * @return True if specified node contains modifiers of type
     *  <code>FINAL</code>, else returns <code>false</code>.
     */
    private boolean isFinal(final DetailAST node) {
        final DetailAST modifiers = node.findFirstToken(TokenTypes.MODIFIERS);
        return modifiers.branchContains(TokenTypes.FINAL);
    }

    /**
     * Returns true if specified node has modifiers of type
     * <code>PRIVATE</code>.
     * @param node Node to check.
     * @return True if specified node contains modifiers of type
     *  <code>PRIVATE</code>, else returns <code>false</code>.
     */
    private boolean isPrivate(final DetailAST node) {
        final DetailAST modifiers = node.findFirstToken(TokenTypes.MODIFIERS);
        return modifiers.branchContains(TokenTypes.LITERAL_PRIVATE);
    }

    /**
     * Parses the body of the definition (either method or inner class) and
     * increments counter each time when it founds constant name.
     * @param definition Tree node, containing definition.
     * @param name Constant name to search.
     * @param type Type of definition start.
     * @return Number of found constant usages.
     */
    private int parseDef(final DetailAST definition, final String name,
        final int type) {
        int counter = 0;
        final DetailAST modifiers =
            definition.findFirstToken(TokenTypes.MODIFIERS);
        if (modifiers != null) {
            counter += this.parseAnnotation(modifiers, name);
        }
        final DetailAST opening = definition.findFirstToken(type);
        if (null != opening) {
            final DetailAST closing =
                opening.findFirstToken(TokenTypes.RCURLY);
            // NOTE(review): getLineNo() is 1-based while getLines() is 0-indexed,
            // so [start, end) effectively scans the raw text strictly between the
            // opening and closing brace lines — presumably intentional; confirm.
            final int start = opening.getLineNo();
            final int end = closing.getLineNo() - 1;
            final String[] lines = this.getLines();
            for (int pos = start; pos < end; pos += 1) {
                if (lines[pos].contains(name)) {
                    counter += 1;
                }
            }
        }
        return counter;
    }

    /**
     * Parses the annotation value pair and increments the counter
     * if name is found.
     * @param modifiers DetailAST of variable definition
     * @param name Name of constant we search for
     * @return Zero if not found, 1 otherwise
     */
    private int parseAnnotation(final DetailAST modifiers, final String name) {
        int counter = 0;
        final DetailAST variable =
            modifiers.findFirstToken(TokenTypes.ANNOTATION);
        if (variable != null) {
            final String txt = this.getText(variable);
            if (txt.contains(name)) {
                ++counter;
            }
        }
        return counter;
    }
}
/**
 *
 * Copyright 2014 Vyacheslav Blinov
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jivesoftware.smackx.amp.packet;

import java.util.Collections;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

import org.jivesoftware.smack.packet.ExtensionElement;
import org.jivesoftware.smackx.amp.AMPDeliverCondition;
import org.jivesoftware.smackx.amp.AMPExpireAtCondition;
import org.jivesoftware.smackx.amp.AMPMatchResourceCondition;

/**
 * XEP-0079 Advanced Message Processing stanza extension. Carries a set of
 * {@link Rule} elements plus optional "status", "from", "to" and "per-hop"
 * attributes.
 */
public class AMPExtension implements ExtensionElement {

    public static final String NAMESPACE = "http://jabber.org/protocol/amp";
    public static final String ELEMENT = "amp";

    // Declared against the List interface; CopyOnWriteArrayList keeps
    // iteration in toXML() safe against concurrent addRule() calls.
    private final List<Rule> rules = new CopyOnWriteArrayList<Rule>();
    private boolean perHop = false;

    private final String from;
    private final String to;
    private final Status status;

    /**
     * Create a new AMPExtension instance with defined from, to and status attributes. Used to create incoming packets.
     * @param from jid that triggered this amp callback.
     * @param to receiver of this amp receipt.
     * @param status status of this amp receipt.
     */
    public AMPExtension(String from, String to, Status status) {
        this.from = from;
        this.to = to;
        this.status = status;
    }

    /**
     * Create a new amp request extension to be used with outgoing message.
     */
    public AMPExtension() {
        this.from = null;
        this.to = null;
        this.status = null;
    }

    /**
     * @return jid that triggered this amp callback.
     */
    public String getFrom() {
        return from;
    }

    /**
     * @return receiver of this amp receipt.
     */
    public String getTo() {
        return to;
    }

    /**
     * Status of this amp notification
     * @return Status for this amp
     */
    public Status getStatus() {
        return status;
    }

    /**
     * Returns a unmodifiable List of the rules in the packet.
     *
     * @return a unmodifiable List of the rules in the packet.
     */
    public List<Rule> getRules() {
        return Collections.unmodifiableList(rules);
    }

    /**
     * Adds a rule to the amp element. Amp can have any number of rules.
     *
     * @param rule the rule to add.
     */
    public void addRule(Rule rule) {
        rules.add(rule);
    }

    /**
     * Returns a count of the rules in the AMP packet.
     *
     * @return the number of rules in the AMP packet.
     */
    public int getRulesCount() {
        return rules.size();
    }

    /**
     * Sets this amp ruleset to be "per-hop".
     *
     * @param enabled true if "per-hop" should be enabled
     */
    public synchronized void setPerHop(boolean enabled) {
        perHop = enabled;
    }

    /**
     * Returns true is this ruleset is "per-hop".
     *
     * @return true is this ruleset is "per-hop".
     */
    public synchronized boolean isPerHop() {
        return perHop;
    }

    /**
     * Returns the XML element name of the extension sub-packet root element.
     * Always returns "amp"
     *
     * @return the XML element name of the stanza(/packet) extension.
     */
    @Override
    public String getElementName() {
        return ELEMENT;
    }

    /**
     * Returns the XML namespace of the extension sub-packet root element.
     * According the specification the namespace is always "http://jabber.org/protocol/xhtml-im"
     *
     * @return the XML namespace of the stanza(/packet) extension.
     */
    @Override
    public String getNamespace() {
        return NAMESPACE;
    }

    /**
     * Returns the XML representation of a XHTML extension according the specification.
     **/
    @Override
    public String toXML() {
        StringBuilder buf = new StringBuilder();
        buf.append("<").append(getElementName()).append(" xmlns=\"").append(getNamespace()).append("\"");

        // Optional attributes are only emitted when present.
        if (status != null) {
            buf.append(" status=\"").append(status.toString()).append("\"");
        }
        if (to != null) {
            buf.append(" to=\"").append(to).append("\"");
        }
        if (from != null) {
            buf.append(" from=\"").append(from).append("\"");
        }
        // Read through the synchronized accessor so the per-hop flag is seen
        // consistently with setPerHop(), which is also synchronized.
        if (isPerHop()) {
            buf.append(" per-hop=\"true\"");
        }
        buf.append(">");

        // Loop through all the rules and append them to the string buffer
        for (Rule rule : getRules()) {
            buf.append(rule.toXML());
        }

        buf.append("</").append(getElementName()).append(">");
        return buf.toString();
    }

    /**
     * XEP-0079 Rule element. Defines AMP Rule parameters. Can be added to AMPExtension.
     */
    public static class Rule {
        public static final String ELEMENT = "rule";

        private final Action action;
        private final Condition condition;

        public Action getAction() {
            return action;
        }

        public Condition getCondition() {
            return condition;
        }

        /**
         * Create a new amp rule with specified action and condition. Value will be taken from condition argument
         * @param action action for this rule
         * @param condition condition for this rule
         */
        public Rule(Action action, Condition condition) {
            if (action == null)
                throw new NullPointerException("Can't create Rule with null action");
            if (condition == null)
                throw new NullPointerException("Can't create Rule with null condition");

            this.action = action;
            this.condition = condition;
        }

        private String toXML() {
            return "<" + ELEMENT + " " + Action.ATTRIBUTE_NAME + "=\"" + action.toString() + "\" " +
                    Condition.ATTRIBUTE_NAME + "=\"" + condition.getName() + "\" " +
                    "value=\"" + condition.getValue() + "\"/>";
        }
    }

    /**
     * Interface for defining XEP-0079 Conditions and their values
     * @see AMPDeliverCondition
     * @see AMPExpireAtCondition
     * @see AMPMatchResourceCondition
     **/
    public static interface Condition {
        String getName();
        String getValue();

        static final String ATTRIBUTE_NAME = "condition";
    }

    /**
     * amp action attribute
     * See http://xmpp.org/extensions/xep-0079.html#actions-def
     **/
    public static enum Action {
        /**
         * The "alert" action triggers a reply &lt;message/&gt; stanza to the sending entity.
         * This &lt;message/&gt; stanza MUST contain the element &lt;amp status='alert'/&gt;,
         * which itself contains the &lt;rule/&gt; that triggered this action. In all other respects,
         * this action behaves as "drop".
         */
        alert,
        /**
         * The "drop" action silently discards the message from any further delivery attempts
         * and ensures that it is not placed into offline storage.
         * The drop MUST NOT result in other responses.
         */
        drop,
        /**
         * The "error" action triggers a reply &lt;message/&gt; stanza of type "error" to the sending entity.
         * The &lt;message/&gt; stanza's &lt;error/&gt; child MUST contain a
         * &lt;failed-rules xmlns='http://jabber.org/protocol/amp#errors'/&gt; error condition,
         * which itself contains the rules that triggered this action.
         */
        error,
        /**
         * The "notify" action triggers a reply &lt;message/&gt; stanza to the sending entity.
         * This &lt;message/&gt; stanza MUST contain the element &lt;amp status='notify'/&gt;, which itself
         * contains the &lt;rule/&gt; that triggered this action. Unlike the other actions,
         * this action does not override the default behavior for a server.
         * Instead, the server then executes its default behavior after sending the notify.
         */
        notify;

        public static final String ATTRIBUTE_NAME = "action";
    }

    /**
     * amp notification status as defined by XEP-0079
     */
    public static enum Status {
        alert,
        error,
        notify
    }
}
/*
 * Copyright 2010 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.ide.common.server.factconstraints.predefined;

import java.util.Collection;

import org.drools.builder.ResourceType;
import org.drools.ide.common.client.factconstraints.ConstraintConfiguration;
import org.drools.ide.common.client.factconstraints.ValidationResult;
import org.drools.ide.common.client.factconstraints.config.SimpleConstraintConfigurationImpl;
import org.drools.ide.common.server.factconstraints.Constraint;
import org.drools.ide.common.server.factconstraints.predefined.RangeConstraint;
import org.drools.io.ResourceFactory;
import org.drools.verifier.Verifier;
import org.drools.verifier.VerifierConfiguration;
import org.drools.verifier.VerifierConfigurationImpl;
import org.drools.verifier.VerifierError;
import org.drools.verifier.builder.VerifierBuilder;
import org.drools.verifier.builder.VerifierBuilderFactory;
import org.drools.verifier.components.PatternComponent;
import org.drools.verifier.components.RuleComponent;
import org.drools.verifier.data.VerifierReport;
import org.drools.verifier.report.components.Severity;
import org.drools.verifier.report.components.VerifierMessageBase;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Tests for {@link RangeConstraint}: direct validation of single values and
 * verifier-based analysis of DRL rules whose literal field values fall
 * inside/outside a configured [min, max] range.
 */
public class RangeConstraintTest {

    // Constraint under test (a fresh RangeConstraint per test).
    private Constraint cons;
    // Configuration targeting Person.age; min/max are set per test method.
    private ConstraintConfiguration conf;

    @Before
    public void setup() {
        cons = new RangeConstraint();
        conf = new SimpleConstraintConfigurationImpl();
        conf.setFactType("Person");
        conf.setFieldName("age");
    }

    //@Test
    /**
     * Values strictly inside (-0.5, 100) must validate, regardless of whether
     * they arrive as primitives, boxed numbers or numeric strings.
     */
    @Test
    public void testValidConstraint() {
        conf.setArgumentValue(RangeConstraint.RANGE_CONSTRAINT_MIN, "-0.5");
        conf.setArgumentValue(RangeConstraint.RANGE_CONSTRAINT_MAX, "100");

        ValidationResult result = cons.validate(12, conf);
        assertTrue(result.isSuccess());

        result = cons.validate(new Integer("12"), conf);
        assertTrue(result.isSuccess());

        result = cons.validate("12", conf);
        assertTrue(result.isSuccess());

        result = cons.validate(0.6, conf);
        assertTrue(result.isSuccess());

        result = cons.validate(new Float("-0.3"), conf);
        assertTrue(result.isSuccess());

        result = cons.validate("90.76", conf);
        assertTrue(result.isSuccess());
    }

    //@Test
    /**
     * Non-numeric, null and boundary/out-of-range values must fail.
     * NOTE(review): -0.5 and 100 failing here implies the range bounds are
     * exclusive — confirm against RangeConstraint's contract.
     */
    @Test
    public void testInvalidConstraint() {
        conf.setArgumentValue(RangeConstraint.RANGE_CONSTRAINT_MIN, "-0.5");
        conf.setArgumentValue(RangeConstraint.RANGE_CONSTRAINT_MAX, "100");

        ValidationResult result = cons.validate(new Object(), conf);
        assertFalse(result.isSuccess());
        System.out.println("Message: " + result.getMessage());

        result = cons.validate(null, conf);
        assertFalse(result.isSuccess());
        System.out.println("Message: " + result.getMessage());

        result = cons.validate("", conf);
        assertFalse(result.isSuccess());
        System.out.println("Message: " + result.getMessage());

        result = cons.validate("ABC", conf);
        assertFalse(result.isSuccess());
        System.out.println("Message: " + result.getMessage());

        result = cons.validate(new Long("-100"), conf);
        assertFalse(result.isSuccess());
        System.out.println("Message: " + result.getMessage());

        result = cons.validate(-0.5, conf);
        assertFalse(result.isSuccess());
        System.out.println("Message: " + result.getMessage());

        result = cons.validate(100, conf);
        assertFalse(result.isSuccess());
        System.out.println("Message: " + result.getMessage());
    }

    //@Test
    /**
     * Builds twelve DRL rules against two range constraints (age in [0,120],
     * salary in [0,1000.6]) and asserts that the verifier reports exactly one
     * ERROR per out-of-range literal. The local "fail" counter is incremented
     * next to each rule expected to violate a constraint — keep the counter
     * and the rule text in sync when editing.
     */
    @Test
    public void testUsingVerifier() {
        //age constraint
        conf.setArgumentValue(RangeConstraint.RANGE_CONSTRAINT_MIN, "0");
        conf.setArgumentValue(RangeConstraint.RANGE_CONSTRAINT_MAX, "120");

        System.out.println("Validation Rule:\n" + cons.getVerifierRule(conf) + "\n\n");

        //salary constraint
        ConstraintConfiguration salaryCons = new SimpleConstraintConfigurationImpl();
        salaryCons.setFactType("Person");
        salaryCons.setFieldName("salary");
        salaryCons.setArgumentValue(RangeConstraint.RANGE_CONSTRAINT_MIN, "0");
        salaryCons.setArgumentValue(RangeConstraint.RANGE_CONSTRAINT_MAX, "1000.6");

        System.out.println("Validation Rule:\n" + cons.getVerifierRule(salaryCons) + "\n\n");

        String ruleToVerify = "";
        int fail = 0;

        //OK
        ruleToVerify += "package org.drools.factconstraint.test\n\n";
        ruleToVerify += "import org.drools.factconstraint.model.*\n";
        ruleToVerify += "rule \"rule1\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        Person(age == 10)\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n\n";

        //FAIL - 1
        ruleToVerify += "rule \"rule2\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        Person(age == -5)\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n\n";
        fail++;

        //OK
        ruleToVerify += "rule \"rule3\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        Person(age == 100)\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n";

        //OK
        ruleToVerify += "rule \"rule4\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        Person(salary == 100)\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n";

        //OK
        ruleToVerify += "rule \"rule5\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        Person(salary == 89.67)\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n";

        //FAIL - 2
        ruleToVerify += "rule \"rule6\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        Person(salary == 1000.7)\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n";
        fail++;

        //FAIL - 3
        ruleToVerify += "rule \"rule7\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        Person(salary == 1024)\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n";
        fail++;

        //OK
        ruleToVerify += "rule \"rule8\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        Person(age == 45, salary == 1000)\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n";

        //FAIL: age - 4
        ruleToVerify += "rule \"rule9\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        Person(age == 40, salary == 1011)\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n";
        fail++;

        //FAIL salary - 5
        ruleToVerify += "rule \"rule10\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        Person(age == 43, salary == 1007)\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n";
        fail++;

        //FAIL both (creates 2 warnings) - 6,7
        ruleToVerify += "rule \"rule11\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        Person(age == 403, salary == 1008)\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n";
        fail += 2;

        //FAIL both (creates 2 warnings) - 8,9
        ruleToVerify += "rule \"rule12\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        Person(age == 404, salary == -0.679)\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n";
        fail += 2;

        VerifierBuilder vBuilder = VerifierBuilderFactory.newVerifierBuilder();

        VerifierConfiguration vconf = new VerifierConfigurationImpl();

        // Register both generated constraint rules as verifying resources.
        vconf.getVerifyingResources().put(ResourceFactory.newByteArrayResource(cons.getVerifierRule(this.conf).getBytes()), ResourceType.DRL);
        vconf.getVerifyingResources().put(ResourceFactory.newByteArrayResource(cons.getVerifierRule(salaryCons).getBytes()), ResourceType.DRL);

        Verifier verifier = vBuilder.newVerifier(vconf);

        verifier.addResourcesToVerify(ResourceFactory.newByteArrayResource(ruleToVerify.getBytes()),
                ResourceType.DRL);

        if (verifier.hasErrors()) {
            for (VerifierError error : verifier.getErrors()) {
                System.out.println(error.getMessage());
            }
            throw new RuntimeException("Error building verifier");
        }

        assertFalse(verifier.hasErrors());

        boolean noProblems = verifier.fireAnalysis();
        assertTrue(noProblems);

        VerifierReport result = verifier.getResult();

        Collection<VerifierMessageBase> warnings = result.getBySeverity(Severity.ERROR);

        System.out.println(warnings);

        // One ERROR per expected out-of-range literal counted above.
        assertEquals(fail, warnings.size());

        verifier.dispose();
    }

    /**
     * Same verification, but with range violations nested inside
     * collect/exists constructs to ensure the constraint rules also reach
     * nested patterns.
     */
    @Test
    public void testNestedPatternsUsingVerifier() {
        System.out.println("\n\n\n\ntestNestedPatternsUsingVerifier\n");
        //age constraint
        conf.setArgumentValue(RangeConstraint.RANGE_CONSTRAINT_MIN, "0");
        conf.setArgumentValue(RangeConstraint.RANGE_CONSTRAINT_MAX, "120");

        System.out.println("Validation Rule:\n" + cons.getVerifierRule(conf) + "\n\n");

        String ruleToVerify = "";
        int fail = 0;

        //OK
        ruleToVerify += "package org.drools.factconstraint.test\n\n";
        ruleToVerify += "import org.drools.factconstraint.model.*\n";
//        ruleToVerify += "rule \"rule1\"\n";
//        ruleToVerify += "    when\n";
//        ruleToVerify += "        java.util.List() from collect(Person(age == 10))\n";
//        ruleToVerify += "    then\n";
//        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
//        ruleToVerify += "end\n\n";

        //FAIL - 1
        ruleToVerify += "rule \"rule2\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        java.util.List() from collect(Person(age == 10))\n";
        ruleToVerify += "        java.util.List() from collect(Person(age == 130))\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n\n";
        fail++;

        //FAIL - 2
        ruleToVerify += "rule \"rule3\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        Person(age == 10)\n";
        ruleToVerify += "        Person(age == 20)\n";
        ruleToVerify += "        exists (Person (age == 130))\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n\n";
        fail++;

        ruleToVerify += "rule \"rule4\"\n";
        ruleToVerify += "    when\n";
        ruleToVerify += "        Person(age == 10)\n";
        ruleToVerify += "        exists (Person (age == 30) or Person (age == 130))\n";
        ruleToVerify += "    then\n";
        ruleToVerify += "        System.out.println(\"Rule fired\");\n";
        ruleToVerify += "end\n\n";
        fail++;

        VerifierBuilder vBuilder = VerifierBuilderFactory.newVerifierBuilder();

        VerifierConfiguration vconf = new VerifierConfigurationImpl();

        vconf.getVerifyingResources().put(ResourceFactory.newByteArrayResource(cons.getVerifierRule(this.conf).getBytes()), ResourceType.DRL);

        Verifier verifier = vBuilder.newVerifier(vconf);

        verifier.addResourcesToVerify(ResourceFactory.newByteArrayResource(ruleToVerify.getBytes()),
                ResourceType.DRL);

        if (verifier.hasErrors()) {
            for (VerifierError error : verifier.getErrors()) {
                System.out.println(error.getMessage());
            }
            throw new RuntimeException("Error building verifier");
        }

        assertFalse(verifier.hasErrors());

        boolean noProblems = verifier.fireAnalysis();
        assertTrue(noProblems);

        VerifierReport result = verifier.getResult();

        Collection<VerifierMessageBase> errors = result.getBySeverity(Severity.ERROR);

        System.out.println(errors);

        assertEquals(fail, errors.size());

//        System.out.println("\nOrders:");
//        for (VerifierMessageBase message : errors) {
//            if (message.getFaulty() instanceof PatternComponent) {
//                int rootPatternOrderNumber = this.getRootPatternOrderNumber((PatternComponent) message.getFaulty());
//                System.out.println(((PatternComponent) message.getFaulty()).getPath()+". Order= "+rootPatternOrderNumber);
//            }
//        }

        verifier.dispose();
    }

//    private int getRootPatternOrderNumber(RuleComponent pattern){
//        if (pattern.getParentPatternComponent() == null){
//            return (pattern instanceof PatternComponent)?((PatternComponent)pattern).getPatternOrderNumber():pattern.getOrderNumber();
//        }else{
//            return getRootPatternOrderNumber(pattern.getParentPatternComponent());
//        }
//    }
}
package org.hisp.dhis.message;

/*
 * Copyright (c) 2004-2016, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import static org.apache.commons.lang3.StringUtils.defaultIfEmpty;

import java.util.HashMap;
import java.util.Set;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.mail.DefaultAuthenticator;
import org.apache.commons.mail.EmailException;
import org.apache.commons.mail.HtmlEmail;
import org.hisp.dhis.commons.util.DebugUtils;
import org.hisp.dhis.setting.SettingKey;
import org.hisp.dhis.setting.SystemSettingManager;
import org.hisp.dhis.system.velocity.VelocityManager;
import org.hisp.dhis.user.User;
import org.hisp.dhis.user.UserSettingKey;
import org.hisp.dhis.user.UserSettingService;
import org.springframework.scheduling.annotation.Async;

import com.google.common.base.Strings;

/**
 * {@link MessageSender} implementation that delivers messages via SMTP email
 * (Apache Commons Email), rendering both a plain-text and a Velocity-templated
 * HTML body. SMTP connection details come from system settings.
 *
 * @author Lars Helge Overland
 */
public class EmailMessageSender
    implements MessageSender
{
    private static final Log log = LogFactory.getLog( EmailMessageSender.class );

    // Fallback sender address when the EMAIL_SENDER setting is empty.
    private static final String FROM_ADDRESS = "noreply@dhis2.org";

    private static final String DEFAULT_APPLICATION_TITLE = "DHIS 2";

    private static final String DEFAULT_FROM_NAME = DEFAULT_APPLICATION_TITLE + " Message [No reply]";

    private static final String DEFAULT_SUBJECT_PREFIX = "[" + DEFAULT_APPLICATION_TITLE + "] ";

    private static final String LB = System.getProperty( "line.separator" );

    // Name of the Velocity template used for the HTML body.
    private static final String MESSAGE_EMAIL_TEMPLATE = "message_email";

    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private SystemSettingManager systemSettingManager;

    public void setSystemSettingManager( SystemSettingManager systemSettingManager )
    {
        this.systemSettingManager = systemSettingManager;
    }

    private UserSettingService userSettingService;

    public void setUserSettingService( UserSettingService userSettingService )
    {
        this.userSettingService = userSettingService;
    }

    // -------------------------------------------------------------------------
    // MessageSender implementation
    // -------------------------------------------------------------------------

    /**
     * Note this methods is invoked asynchronously.
     *
     * Sends the message to the email address of each recipient in users (as
     * BCC) that has email notification enabled, unless forceSend is true, in
     * which case the user preference is bypassed. Silently returns null when
     * no SMTP host is configured; email failures are logged, not rethrown.
     *
     * NOTE(review): the (int)/(boolean) casts below will throw a NPE-style
     * unboxing failure if the corresponding system setting resolves to null —
     * presumably SettingKey supplies defaults; confirm upstream.
     */
    @Async
    @Override
    public String sendMessage( String subject, String text, String footer, User sender, Set<User> users, boolean forceSend )
    {
        String hostName = (String) systemSettingManager.getSystemSetting( SettingKey.EMAIL_HOST_NAME );
        int port = (int) systemSettingManager.getSystemSetting( SettingKey.EMAIL_PORT );
        String username = (String) systemSettingManager.getSystemSetting( SettingKey.EMAIL_USERNAME );
        String password = (String) systemSettingManager.getSystemSetting( SettingKey.EMAIL_PASSWORD );
        boolean tls = (boolean) systemSettingManager.getSystemSetting( SettingKey.EMAIL_TLS );
        String from = (String) systemSettingManager.getSystemSetting( SettingKey.EMAIL_SENDER );

        // No SMTP host configured means email is disabled for this instance.
        if ( hostName == null )
        {
            return null;
        }

        String plainContent = renderPlainContent( text, sender );
        String htmlContent = renderHtmlContent( text, footer, sender );

        try
        {
            HtmlEmail email = getHtmlEmail( hostName, port, username, password, tls, from );
            email.setSubject( customizeTitle( DEFAULT_SUBJECT_PREFIX ) + subject );
            email.setTextMsg( plainContent );
            email.setHtmlMsg( htmlContent );

            boolean hasRecipients = false;

            for ( User user : users )
            {
                // Respect the user's notification preference unless forced.
                boolean doSend = forceSend
                    || (Boolean) userSettingService.getUserSetting( UserSettingKey.MESSAGE_EMAIL_NOTIFICATION, user );

                if ( doSend && user.getEmail() != null && !user.getEmail().trim().isEmpty() )
                {
                    // Recipients are added as BCC so they cannot see each other.
                    email.addBcc( user.getEmail() );

                    log.info( "Sending email to user: " + user.getUsername() + " with email address: "
                        + user.getEmail() + " to host: " + hostName + ":" + port );

                    hasRecipients = true;
                }
            }

            if ( hasRecipients )
            {
                email.send();

                log.info( "Email sent using host: " + hostName + ":" + port + " with TLS: " + tls );
            }
        }
        catch ( EmailException ex )
        {
            log.warn( "Could not send email: " + ex.getMessage() + ", " + DebugUtils.getStackTrace( ex ) );
        }
        catch ( RuntimeException ex )
        {
            log.warn( "Error while sending email: " + ex.getMessage() + ", " + DebugUtils.getStackTrace( ex ) );
        }

        // NOTE(review): always returns null; presumably the MessageSender
        // contract ignores the return value for async sends — confirm.
        return null;
    }

    // -------------------------------------------------------------------------
    // Supportive methods
    // -------------------------------------------------------------------------

    /**
     * Builds a configured HtmlEmail for the given SMTP connection parameters.
     * Falls back to FROM_ADDRESS when sender is empty; authentication is only
     * set when both username and password are present.
     */
    private HtmlEmail getHtmlEmail( String hostName, int port, String username, String password, boolean tls,
        String sender )
        throws EmailException
    {
        HtmlEmail email = new HtmlEmail();
        email.setHostName( hostName );
        email.setFrom( defaultIfEmpty( sender, FROM_ADDRESS ), customizeTitle( DEFAULT_FROM_NAME ) );
        email.setSmtpPort( port );
        email.setStartTLSEnabled( tls );

        if ( username != null && password != null )
        {
            email.setAuthenticator( new DefaultAuthenticator( username, password ) );
        }

        return email;
    }

    /**
     * Renders the plain-text body: the message text followed by the sender's
     * name, org unit, email and phone, each on its own line when available.
     * Returns the text unchanged when sender is null.
     */
    private String renderPlainContent( String text, User sender )
    {
        return sender == null ? text
            : (text + LB + LB + sender.getName() + LB
                + (sender.getOrganisationUnitsName() != null ? (sender.getOrganisationUnitsName() + LB)
                    : StringUtils.EMPTY)
                + (sender.getEmail() != null ? (sender.getEmail() + LB) : StringUtils.EMPTY)
                + (sender.getPhoneNumber() != null ? (sender.getPhoneNumber() + LB) : StringUtils.EMPTY));
    }

    /**
     * Renders the HTML body through the Velocity "message_email" template,
     * populating text (with newlines converted to &lt;br&gt;), footer and any
     * available sender details.
     */
    private String renderHtmlContent( String text, String footer, User sender )
    {
        HashMap<String, Object> content = new HashMap<>();

        if ( !Strings.isNullOrEmpty( text ) )
        {
            content.put( "text", text.replaceAll( "\\r?\\n", "<br>" ) );
        }

        if ( !Strings.isNullOrEmpty( footer ) )
        {
            content.put( "footer", footer );
        }

        if ( sender != null )
        {
            content.put( "senderName", sender.getName() );

            if ( sender.getOrganisationUnitsName() != null )
            {
                content.put( "organisationUnitsName", sender.getOrganisationUnitsName() );
            }

            if ( sender.getEmail() != null )
            {
                content.put( "email", sender.getEmail() );
            }

            if ( sender.getPhoneNumber() != null )
            {
                content.put( "phoneNumber", sender.getPhoneNumber() );
            }
        }

        return new VelocityManager().render( content, MESSAGE_EMAIL_TEMPLATE );
    }

    /**
     * Replaces the default "DHIS 2" application title in the given string with
     * the instance's configured APPLICATION_TITLE, when one is set.
     */
    private String customizeTitle( String title )
    {
        String appTitle = (String) systemSettingManager.getSystemSetting( SettingKey.APPLICATION_TITLE );

        if ( appTitle != null && !appTitle.isEmpty() )
        {
            title = title.replace( DEFAULT_APPLICATION_TITLE, appTitle );
        }

        return title;
    }
}
/* * Copyright 2011 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import com.google.common.collect.ImmutableSet; import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback; import com.google.javascript.jscomp.NodeTraversal.AbstractShallowStatementCallback; import com.google.javascript.jscomp.NodeTraversal.Callback; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.Node; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; /** * Finds all references to global symbols and rewrites them to be property * accesses to a special object with the same name as the global symbol. * * Given the name of the global object is NS * <pre> var a = 1; function b() { return a }</pre> * becomes * <pre> NS.a = 1; NS.b = function b() { return NS.a }</pre> * * This allows splitting code into modules that depend on each other's * global symbols, without using polluting JavaScript's global scope with those * symbols. You typically define just a single global symbol, wrap each module * in a function wrapper, and pass the global symbol around, eg, * <pre> var uniqueNs = uniqueNs || {}; </pre> * <pre> (function (NS) { ...your module code here... })(uniqueNs); </pre> * * * <p>This compile step requires moveFunctionDeclarations to be turned on * to guarantee semantics. * * <p>For lots of examples, see the unit test. 
 *
 */
final class RescopeGlobalSymbols implements CompilerPass {

  // Appended to variables names that conflict with globalSymbolNamespace.
  private static final String DISAMBIGUATION_SUFFIX = "$";
  private static final String WINDOW = "window";
  // Names that must never be rewritten to window.<name> by
  // MakeExternsReferenceWindowExplicitly.
  private static final Set<String> SPECIAL_EXTERNS =
      ImmutableSet.of(WINDOW, "eval", "arguments", "undefined",
          // The javascript built-in objects (listed in Ecma 262 section 4.2)
          "Object", "Function", "Array", "String", "Boolean", "Number", "Math",
          "Date", "RegExp", "JSON", "Error", "EvalError", "ReferenceError",
          "SyntaxError", "TypeError", "URIError");

  private final AbstractCompiler compiler;
  private final String globalSymbolNamespace;
  private final boolean addExtern;
  private final boolean assumeCrossModuleNames;
  // Global names referenced from more than one module (or exported); only
  // these need to be rewritten to namespace properties.
  private final Set<String> crossModuleNames = new HashSet<>();
  // Global names whose assigned value may be a function that references
  // |this| (populated by FindNamesReferencingThis).
  private final Set<String> maybeReferencesThis = new HashSet<>();

  /**
   * Constructor for the RescopeGlobalSymbols compiler pass.
   *
   * @param compiler The JSCompiler, for reporting code changes.
   * @param globalSymbolNamespace Name of namespace into which all global
   *     symbols are transferred.
   * @param assumeCrossModuleNames If true, all global symbols will be assumed
   *     cross module boundaries and thus require renaming.
   */
  RescopeGlobalSymbols(
      AbstractCompiler compiler,
      String globalSymbolNamespace,
      boolean assumeCrossModuleNames) {
    this(compiler, globalSymbolNamespace, true, assumeCrossModuleNames);
  }

  /**
   * Constructor for the RescopeGlobalSymbols compiler pass for use in testing.
   *
   * @param compiler The JSCompiler, for reporting code changes.
   * @param globalSymbolNamespace Name of namespace into which all global
   *     symbols are transferred.
   * @param addExtern If true, the compiler will consider the
   *     globalSymbolNamespace an extern name.
   * @param assumeCrossModuleNames If true, all global symbols will be assumed
   *     cross module boundaries and thus require renaming.
   * VisibleForTesting
   */
  RescopeGlobalSymbols(
      AbstractCompiler compiler,
      String globalSymbolNamespace,
      boolean addExtern,
      boolean assumeCrossModuleNames) {
    this.compiler = compiler;
    this.globalSymbolNamespace = globalSymbolNamespace;
    this.addExtern = addExtern;
    this.assumeCrossModuleNames = assumeCrossModuleNames;
  }

  // A name crosses module boundaries if we were told to assume so, if the
  // FindCrossModuleNamesCallback traversal found it, or if the coding
  // convention marks it as exported.
  private boolean isCrossModuleName(String name) {
    return assumeCrossModuleNames || crossModuleNames.contains(name)
        || compiler.getCodingConvention().isExported(name, false);
  }

  // Declares the namespace symbol in the synthesized externs so later passes
  // do not treat it as an undeclared global.
  private void addExternForGlobalSymbolNamespace() {
    Node varNode = IR.var(IR.name(globalSymbolNamespace));
    CompilerInput input = compiler.getSynthesizedExternsInput();
    input.getAstRoot(compiler).addChildrenToBack(varNode);
    compiler.reportCodeChange();
  }

  @Override
  public void process(Node externs, Node root) {
    // Make the name of the globalSymbolNamespace an extern.
    if (addExtern) {
      addExternForGlobalSymbolNamespace();
    }
    // Rewrite all references to global symbols to properties of a
    // single symbol by:
    // (If necessary the 4 traversals could be combined. They are left
    // separate for readability reasons.)
    // 1. turning global named function statements into var assignments.
    NodeTraversal.traverseEs6(
        compiler,
        root,
        new RewriteGlobalFunctionStatementsToVarAssignmentsCallback());
    // 2. find global names that are used in more than one module. Those that
    // are have to be rewritten.
    List<Callback> nonMutatingPasses = new ArrayList<>();
    nonMutatingPasses.add(new FindCrossModuleNamesCallback());
    // and find names that may be referring to functions that reference this.
    nonMutatingPasses.add(new FindNamesReferencingThis());
    CombinedCompilerPass.traverse(compiler, root, nonMutatingPasses);
    // 3. rewriting all references to be property accesses of the single symbol.
    RewriteScopeCallback rewriteScope = new RewriteScopeCallback();
    NodeTraversal.traverseEs6(compiler, root, rewriteScope);
    // 4. removing the var from statements in global scope if the declared names
    // have been rewritten in the previous pass.
    NodeTraversal.traverseEs6(compiler, root, new RemoveGlobalVarCallback());
    // Must run after RemoveGlobalVarCallback (see declareModuleGlobals).
    rewriteScope.declareModuleGlobals();

    // Extra pass which makes all extern global symbols reference window
    // explicitly.
    NodeTraversal.traverseEs6(
        compiler,
        root,
        new MakeExternsReferenceWindowExplicitly());
  }

  /**
   * Rewrites function statements to var statements + assignment.
   *
   * <pre>function test(){}</pre>
   * becomes
   * <pre>var test = function (){}</pre>
   *
   * After this traversal, the special case of global function statements
   * can be ignored.
   */
  private class RewriteGlobalFunctionStatementsToVarAssignmentsCallback
      extends AbstractShallowStatementCallback {
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      if (NodeUtil.isFunctionDeclaration(n)) {
        String name = NodeUtil.getName(n);
        // Make the function anonymous; its name moves onto the var.
        n.getFirstChild().setString("");
        Node prev = parent.getChildBefore(n);
        n.detachFromParent();
        Node var = NodeUtil.newVarNode(name, n);
        // Re-insert the var at the position the function statement occupied.
        if (prev == null) {
          parent.addChildToFront(var);
        } else {
          parent.addChildAfter(var, prev);
        }
        compiler.reportCodeChange();
      }
    }
  }

  /**
   * Find all global names that are used in more than one module. The following
   * compiler transformations can ignore the globals that are not.
   */
  private class FindCrossModuleNamesCallback extends AbstractPostOrderCallback {
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      if (n.isName()) {
        String name = n.getString();
        if ("".equals(name) || crossModuleNames.contains(name)) {
          return;
        }
        Scope s = t.getScope();
        Var v = s.getVar(name);
        if (v == null || !v.isGlobal()) {
          return;
        }
        CompilerInput input = v.getInput();
        if (input == null) {
          // We know nothing. Assume name is used across modules.
          crossModuleNames.add(name);
          return;
        }
        // Compare the module where the variable is declared to the current
        // module. If they are different, the variable is used across modules.
        JSModule module = input.getModule();
        if (module != t.getModule()) {
          crossModuleNames.add(name);
        }
      }
    }
  }

  /**
   * Builds the maybeReferencesThis set of names that may reference a function
   * that references this. If the function a name references does not reference
   * this it can be called as a method call where the this value is not the
   * same as in a normal function call.
   */
  private class FindNamesReferencingThis extends AbstractPostOrderCallback {
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      if (n.isName()) {
        String name = n.getString();
        if (name.isEmpty()) {
          return;
        }
        // Determine the value being bound to this name, if any:
        // assignment RHS, var initializer, or the named function itself.
        Node value = null;
        if (parent.isAssign() && n == parent.getFirstChild()) {
          value = parent.getLastChild();
        } else if (parent.isVar()) {
          value = n.getFirstChild();
        } else if (parent.isFunction()) {
          value = parent;
        }
        if (value == null) {
          return;
        }
        // We already added this symbol. Done after checks above because those
        // are comparatively cheap.
        if (maybeReferencesThis.contains(name)) {
          return;
        }
        Scope s = t.getScope();
        Var v = s.getVar(name);
        if (v == null || !v.isGlobal()) {
          return;
        }
        // If anything but a function is assigned we assume that possibly
        // a function referencing this is being assigned. Otherwise we
        // check whether the function that is being assigned references this.
        if (!value.isFunction() || NodeUtil.referencesThis(value)) {
          maybeReferencesThis.add(name);
        }
      }
    }
  }

  /**
   * Visits each NAME token and checks whether it refers to a global variable.
   * If yes, rewrites the name to be a property access on the
   * "globalSymbolNamespace".
   *
   * <pre>var a = 1, b = 2, c = 3;</pre>
   * becomes
   * <pre>var NS.a = 1, NS.b = 2, NS.c = 3</pre>
   * (The var token is removed in a later traversal.)
   *
   * <pre>a + b</pre>
   * becomes
   * <pre>NS.a + NS.b</pre>
   *
   * <pre>a()</pre>
   * becomes
   * <pre>(0,NS.a)()</pre>
   * Notice the special syntax here to preserve the *this* semantics in the
   * function call.
   */
  private class RewriteScopeCallback extends AbstractPostOrderCallback {
    // Declarations that must be re-added at the top of their input after
    // RemoveGlobalVarCallback strips the var statements.
    List<ModuleGlobal> preDeclarations = new ArrayList<>();

    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      if (!n.isName()) {
        return;
      }
      String name = n.getString();
      // Ignore anonymous functions
      if (parent.isFunction() && name.isEmpty()) {
        return;
      }
      Var var = t.getScope().getVar(name);
      if (var == null) {
        return;
      }
      // Don't touch externs.
      if (var.isExtern()) {
        return;
      }
      // When the globalSymbolNamespace is used as a local variable name
      // add suffix to avoid shadowing the namespace. Also add a suffix
      // if a name starts with the name of the globalSymbolNamespace and
      // the suffix.
      if (!var.isExtern() && !var.isGlobal()
          && (name.equals(globalSymbolNamespace)
              || name.startsWith(globalSymbolNamespace + DISAMBIGUATION_SUFFIX))) {
        n.setString(name + DISAMBIGUATION_SUFFIX);
        compiler.reportCodeChange();
      }
      // We only care about global vars.
      if (!var.isGlobal()) {
        return;
      }
      Node nameNode = var.getNameNode();
      // The exception variable (e in try{}catch(e){}) should not be rewritten.
      if (nameNode != null && nameNode.getParent() != null
          && nameNode.getParent().isCatch()) {
        return;
      }
      replaceSymbol(n, name, t.getInput());
    }

    private void replaceSymbol(Node node, String name, CompilerInput input) {
      Node parent = node.getParent();
      boolean isCrossModule = isCrossModuleName(name);
      if (!isCrossModule) {
        // When a non cross module name appears outside a var declaration we
        // never have to do anything.
        if (!parent.isVar()) {
          return;
        }
        // If it is a var declaration, but no cross module names are declared
        // we also don't have to do anything.
        boolean hasCrossModuleChildren = false;
        for (Node c : parent.children()) {
          // Var child is no longer a name means it was transformed already
          // which means there was a cross module name.
          if (!c.isName() || isCrossModuleName(c.getString())) {
            hasCrossModuleChildren = true;
            break;
          }
        }
        if (!hasCrossModuleChildren) {
          return;
        }
      }
      Node replacement = isCrossModule
          ? IR.getprop(
              IR.name(globalSymbolNamespace).srcref(node),
              IR.string(name).srcref(node))
          : IR.name(name).srcref(node);
      replacement.srcref(node);
      if (node.hasChildren()) {
        // var declaration list: var a = 1, b = 2;
        Node assign = IR.assign(
            replacement,
            node.removeFirstChild());
        parent.replaceChild(node, assign);
      } else if (isCrossModule) {
        parent.replaceChild(node, replacement);
        if (parent.isCall() && !maybeReferencesThis.contains(name)) {
          // Do not write calls like this: (0, _a)() but rather as _.a(). The
          // this inside the function will be wrong, but it doesn't matter
          // because the this is never read.
          parent.putBooleanProp(Node.FREE_CALL, false);
        }
      }
      // If we changed a non cross module name that was in a var declaration
      // we need to preserve that var declaration. Because it is global
      // anyway, we just put it at the beginning of the current input.
      // Example:
      // var crossModule = i++, notCrossModule = i++
      // becomes
      // var notCrossModule;_.crossModule = i++, notCrossModule = i++
      if (!isCrossModule && parent.isVar()) {
        preDeclarations.add(new ModuleGlobal(
            input.getAstRoot(compiler),
            IR.name(name).srcref(node)));
      }
      compiler.reportCodeChange();
    }

    /**
     * Adds back declarations for variables that do not cross module boundaries.
     * Must be called after RemoveGlobalVarCallback.
     */
    void declareModuleGlobals() {
      for (ModuleGlobal global : preDeclarations) {
        // Merge into an existing leading var statement when possible.
        if (global.root.getFirstChild() != null
            && global.root.getFirstChild().isVar()) {
          global.root.getFirstChild().addChildToBack(global.name);
        } else {
          global.root.addChildToFront(
              IR.var(global.name).srcref(global.name));
        }
        compiler.reportCodeChange();
      }
    }

    /**
     * Variable that doesn't cross module boundaries.
     */
    private class ModuleGlobal {
      final Node root;
      final Node name;

      ModuleGlobal(Node root, Node name) {
        this.root = root;
        this.name = name;
      }
    }
  }

  /**
   * Removes every occurrence of var that declares a global variable.
   *
   * <pre>var NS.a = 1, NS.b = 2;</pre>
   * becomes
   * <pre>NS.a = 1; NS.b = 2;</pre>
   *
   * <pre>for (var a = 0, b = 0;;)</pre>
   * becomes
   * <pre>for (NS.a = 0, NS.b = 0;;)</pre>
   *
   * Declarations without assignments are optimized away:
   * <pre>var a = 1, b;</pre>
   * becomes
   * <pre>NS.a = 1</pre>
   */
  private class RemoveGlobalVarCallback extends AbstractShallowStatementCallback {
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      if (!n.isVar()) {
        return;
      }
      List<Node> commas = new ArrayList<>();
      List<Node> interestingChildren = new ArrayList<>();
      // Filter out declarations without assignments.
      // As opposed to regular var nodes, there are always assignments
      // because the previous traversal in RewriteScopeCallback creates
      // them.
      boolean allName = true;
      for (Node c : n.children()) {
        if (!c.isName()) {
          allName = false;
        }
        if (c.isAssign() || parent.isFor()) {
          interestingChildren.add(c);
        }
      }
      // If every child of a var declares a name, it must stay in place.
      // This is the case if none of the declared variables cross module
      // boundaries.
      if (allName) {
        return;
      }
      for (Node c : interestingChildren) {
        // In a for-loop initializer the assignments must remain a single
        // comma expression; elsewhere each becomes its own statement.
        if (parent.isFor() && parent.getFirstChild() == n) {
          commas.add(c.cloneTree());
        } else {
          // Var statement outside of for-loop.
          Node expr = IR.exprResult(c.cloneTree()).srcref(c);
          parent.addChildBefore(expr, n);
        }
      }
      if (!commas.isEmpty()) {
        Node comma = joinOnComma(commas, n);
        parent.addChildBefore(comma, n);
      }
      // Remove the var node.
      parent.removeChild(n);
      compiler.reportCodeChange();
    }

    // Left-folds the expressions into a single comma expression, copying
    // source info from |source| where missing.
    private Node joinOnComma(List<Node> commas, Node source) {
      Node comma = commas.get(0);
      for (int i = 1; i < commas.size(); i++) {
        Node nextComma = IR.comma(comma, commas.get(i));
        nextComma.useSourceInfoIfMissingFrom(source);
        comma = nextComma;
      }
      return comma;
    }
  }

  /**
   * Rewrites extern names to be explicit children of window instead of only
   * implicitly referencing it.
   * This enables injecting window into a scope and make all global symbol
   * depend on the injected object.
   */
  private class MakeExternsReferenceWindowExplicitly extends AbstractPostOrderCallback {
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      if (!n.isName()) {
        return;
      }
      String name = n.getString();
      // The namespace itself and the ES built-ins stay as bare names.
      if (globalSymbolNamespace.equals(name)
          || SPECIAL_EXTERNS.contains(name)) {
        return;
      }
      Var var = t.getScope().getVar(name);
      // A name with no Var binding, or one bound in externs, is an extern
      // reference: rewrite it to window.<name>.
      if (name.length() > 0 && (var == null || var.isExtern())) {
        parent.replaceChild(n, IR.getprop(IR.name(WINDOW), IR.string(name))
            .srcrefTree(n));
        compiler.reportCodeChange();
      }
    }
  }
}
/* * $Id$ * This file is a part of the Arakhne Foundation Classes, http://www.arakhne.org/afc * * Copyright (c) 2000-2012 Stephane GALLAND. * Copyright (c) 2005-10, Multiagent Team, Laboratoire Systemes et Transports, * Universite de Technologie de Belfort-Montbeliard. * Copyright (c) 2013-2020 The original authors, and other authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.arakhne.afc.io.shape; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.TreeMap; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.arakhne.afc.attrs.attr.Attribute; import org.arakhne.afc.attrs.attr.AttributeImpl; import org.arakhne.afc.attrs.attr.AttributeType; import org.arakhne.afc.attrs.attr.AttributeValue; import org.arakhne.afc.attrs.collection.AbstractAttributeProvider; import org.arakhne.afc.attrs.collection.AttributeCollection; import org.arakhne.afc.attrs.collection.AttributeProvider; import org.arakhne.afc.io.dbase.DBaseFileReader; import org.arakhne.afc.io.dbase.DBaseFileWriter; import org.arakhne.afc.io.shape.ESRIBounds; import org.arakhne.afc.io.shape.ESRIFileUtil; import org.arakhne.afc.io.shape.ESRIPoint; import 
org.arakhne.afc.io.shape.ElementExporter;
import org.arakhne.afc.io.shape.ElementFactory;
import org.arakhne.afc.io.shape.ShapeElementType;
import org.arakhne.afc.io.shape.ShapeFileIndexReader;
import org.arakhne.afc.io.shape.ShapeFileIndexRecord;
import org.arakhne.afc.io.shape.ShapeFileIndexWriter;
import org.arakhne.afc.io.shape.ShapeFileReader;
import org.arakhne.afc.io.shape.ShapeFileWriter;
import org.arakhne.afc.io.shape.ShapeMultiPatchType;
import org.arakhne.afc.math.geometry.d3.d.Point3d;
import org.arakhne.afc.vmutil.FileSystem;

/** Testing writing of SHP, SHX, and DBF at the same time.
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 * @since 14.0
 */
@SuppressWarnings("all")
public class GlobalWriteTest extends AbstractIoShapeTest {

	/** Name of the boolean DBF attribute. */
	public static String ATTR1 = "BOOL1"; //$NON-NLS-1$
	/** Name of the floating-point DBF attribute. */
	public static String ATTR2 = "DOUBLE2"; //$NON-NLS-1$
	/** Name of the string DBF attribute. */
	public static String ATTR3 = "STRING3"; //$NON-NLS-1$
	/** Name of the integer DBF attribute (holds the element index). */
	public static String ATTR5 = "INDEX5"; //$NON-NLS-1$

	private Point3d[] data;
	private List<TestingAttributeProvider> attributes;
	private Point3dExporter exporter;

	@BeforeEach
	public void setUp() throws Exception {
		// Ten random points, each paired with an attribute provider whose
		// INDEX5 attribute is the point's index.
		this.data = new Point3d[10];
		for(int i=0; i<this.data.length; ++i) {
			this.data[i] = randomPoint3D();
		}
		this.attributes = new ArrayList<>();
		for(int i=0; i<this.data.length; ++i) {
			this.attributes.add(new TestingAttributeProvider(i));
		}
		this.exporter = new Point3dExporter();
	}

	@AfterEach
	public void tearDown() throws Exception {
		this.exporter = null;
		this.attributes.clear();
		this.attributes = null;
		this.data = null;
	}

	/** Re-reads the three generated files and checks that they contain
	 * exactly the points and attributes written by the test.
	 */
	private void assertContent(File shpFile, File shxFile, File dbfFile) throws Exception {
		// Read Shp
		ShapeFileReader<Point3d> shpReader = new ShapeFileReader<>(shpFile, new Point3dImporter());
		Point3d p;
		int i=0;
		while ((p = shpReader.read())!=null) {
			assertEpsilonEquals(this.data[i], p);
			++i;
		}
		shpReader.close();
		// Read Dbf
		DBaseFileReader dbfReader = new DBaseFileReader(dbfFile);
		dbfReader.readDBFHeader();
		dbfReader.readDBFFields();
		AttributeProvider attrContainer;
		i = 0;
		while ((attrContainer = dbfReader.readNextAttributeProvider())!=null) {
			assertTrue(attrContainer.getAttributeAsBool(ATTR1));
			assertEpsilonEquals(3.14, attrContainer.getAttributeAsDouble(ATTR2));
			assertEquals("hello world", attrContainer.getAttributeAsString(ATTR3)); //$NON-NLS-1$
			assertEquals(i, attrContainer.getAttributeAsInt(ATTR5));
			++i;
		}
		dbfReader.close();
		// Read Shx
		try (ShapeFileIndexReader shxReader = new ShapeFileIndexReader(shxFile)) {
			ShapeFileIndexRecord record;
			shxReader.readHeader();
			int offsetInContent = 0;
			while ((record = shxReader.read())!=null) {
				assertEquals(36, record.getRecordContentLength());
				assertEquals(offsetInContent, record.getOffsetInContent());
				assertEquals(offsetInContent+100, record.getOffsetInFile());
				offsetInContent += 44;
			}
		}
	}

	/** Writes the SHP file, then generates the SHX index from it, then
	 * writes the DBF file with a separate writer, and verifies the result.
	 * FIX: this class uses JUnit 5 ({@code @BeforeEach}/{@code @AfterEach}),
	 * where tests are discovered only via the {@code @Test} annotation;
	 * without it this method was silently never run.
	 * @throws Exception
	 */
	@org.junit.jupiter.api.Test
	public void testShpShxDbfCreationByHand() throws Exception {
		File shpFile = File.createTempFile(GlobalWriteTest.class.getSimpleName(), ".shp"); //$NON-NLS-1$
		File shxFile = FileSystem.replaceExtension(shpFile, ".shx"); //$NON-NLS-1$
		File dbfFile = FileSystem.replaceExtension(shpFile, ".dbf"); //$NON-NLS-1$
		try {
			// Shp writing
			ShapeFileWriter<Point3d> writer = new ShapeFileWriter<>(
					shpFile, ShapeElementType.POINT_Z, this.exporter);
			writer.write(Arrays.asList(this.data));
			writer.close();
			// Shx writing
			ESRIFileUtil.generateShapeFileIndexFromShapeFile(shpFile);
			// Dbf writing
			DBaseFileWriter dbfWriter = new DBaseFileWriter(dbfFile);
			dbfWriter.writeHeader(this.attributes);
			dbfWriter.write(this.attributes);
			dbfWriter.close();
			//
			assertContent(shpFile, shxFile, dbfFile);
		} finally {
			shpFile.delete();
			shxFile.delete();
			dbfFile.delete();
		}
	}

	/** Writes the three files in one pass by embedding the DBF and SHX
	 * writers into the SHP writer, and verifies the result.
	 * FIX: annotated with {@code @Test} so JUnit 5 actually runs it
	 * (see {@link #testShpShxDbfCreationByHand()}).
	 * @throws Exception
	 */
	@org.junit.jupiter.api.Test
	public void testShpShxDbfEmbeddedCreation() throws Exception {
		File shpFile = File.createTempFile(GlobalWriteTest.class.getSimpleName(), ".shp"); //$NON-NLS-1$
		File shxFile = FileSystem.replaceExtension(shpFile, ".shx"); //$NON-NLS-1$
		File dbfFile = FileSystem.replaceExtension(shpFile, ".dbf"); //$NON-NLS-1$
		try {
			// Create Dbf writer
			DBaseFileWriter dbfWriter = new DBaseFileWriter(dbfFile);
			// Create Shx writer
			ShapeFileIndexWriter shxWriter = new ShapeFileIndexWriter(
					shxFile, ShapeElementType.POINT_Z,
					this.exporter.getFileBounds());
			// Shp writing
			ShapeFileWriter<Point3d> writer = new ShapeFileWriter<>(
					shpFile, ShapeElementType.POINT_Z, this.exporter, dbfWriter, shxWriter);
			writer.write(Arrays.asList(this.data));
			writer.close();
			//
			assertContent(shpFile, shxFile, dbfFile);
		} finally {
			shpFile.delete();
			shxFile.delete();
			dbfFile.delete();
		}
	}

	/** Factory creating {@code Point3d} instances from SHP records.
	 * @author $Author: sgalland$
	 * @version $FullVersion$
	 * @mavengroupid $GroupId$
	 * @mavenartifactid $ArtifactId$
	 */
	private class Point3dImporter implements ElementFactory<Point3d> {

		/** */
		public Point3dImporter() {
			//
		}

		/**
		 * {@inheritDoc}
		 */
		@Override
		public Point3d createPoint(AttributeCollection provider, int shapeIndex, ESRIPoint point) {
			return new Point3d(point.getX(), point.getY(), point.getZ());
		}

	} // class Point3dImporter

	/** Exporter mapping the test's points and attribute providers to
	 * the SHP/DBF writing API.
	 * @author $Author: sgalland$
	 * @version $FullVersion$
	 * @mavengroupid $GroupId$
	 * @mavenartifactid $ArtifactId$
	 */
	private class Point3dExporter implements ElementExporter<Point3d> {

		/** */
		public Point3dExporter() {
			//
		}

		@Override
		public AttributeProvider getAttributeProvider(Point3d element) throws IOException {
			// Attribute providers are paired with points by index in setUp().
			for(int i=0; i<GlobalWriteTest.this.data.length; ++i) {
				if (GlobalWriteTest.this.data[i].equals(element)) {
					return GlobalWriteTest.this.attributes.get(i);
				}
			}
			return null;
		}

		@Override
		public AttributeProvider[] getAttributeProviders(
				Collection<? extends Point3d> elements) throws IOException {
			AttributeProvider[] attrs = new AttributeProvider[elements.size()];
			Iterator<? extends Point3d> iterator = elements.iterator();
			int i=0;
			while (iterator.hasNext()) {
				attrs[i] = getAttributeProvider(iterator.next());
				++i;
			}
			return attrs;
		}

		@Override
		public ESRIBounds getFileBounds() {
			// Axis-aligned bounding box of all test points; M range unused.
			double minx, miny, minz, maxx, maxy, maxz;
			minx = miny = minz = Double.POSITIVE_INFINITY;
			maxx = maxy = maxz = Double.NEGATIVE_INFINITY;
			for(Point3d p : GlobalWriteTest.this.data) {
				if (p.getX()<minx) minx = p.getX();
				if (p.getY()<miny) miny = p.getY();
				if (p.getZ()<minz) minz = p.getZ();
				if (p.getX()>maxx) maxx = p.getX();
				if (p.getY()>maxy) maxy = p.getY();
				if (p.getZ()>maxz) maxz = p.getZ();
			}
			return new ESRIBounds(minx, maxx, miny, maxy, minz, maxz, Double.NaN, Double.NaN);
		}

		@Override
		public int getGroupCountFor(Point3d element) throws IOException {
			return 1;
		}

		@Override
		public ShapeMultiPatchType getGroupTypeFor(Point3d element, int groupIndex)
				throws IOException {
			// Points are not multipatch shapes; this must never be called.
			throw new UnsupportedOperationException();
		}

		@Override
		public ESRIPoint getPointAt(Point3d element, int groupIndex,
				int pointIndex, boolean expectM, boolean expectZ)
				throws IOException {
			if (expectZ) return new ESRIPoint(element.getX(), element.getY(), element.getZ());
			return new ESRIPoint(element.getX(), element.getY());
		}

		@Override
		public int getPointCountFor(Point3d element, int groupIndex) throws IOException {
			return 1;
		}

	} // class Point3dExporter

	/** Fixed attribute provider exposing one boolean, one double, one
	 * string, and one integer attribute (the element index).
	 * @author $Author: sgalland$
	 * @version $FullVersion$
	 * @mavengroupid $GroupId$
	 * @mavenartifactid $ArtifactId$
	 */
	private static class TestingAttributeProvider extends AbstractAttributeProvider {

		private static final long serialVersionUID = -4650185028340292171L;

		private final Attribute attr1 = new AttributeImpl(ATTR1, true);
		private final Attribute attr2 = new AttributeImpl(ATTR2, 3.14);
		private final Attribute attr3 = new AttributeImpl(ATTR3, "hello world"); //$NON-NLS-1$
		private final Attribute attr5;

		/**
		 * @param index value of the INDEX5 attribute.
		 */
		public TestingAttributeProvider(int index) {
			this.attr5 = new AttributeImpl(ATTR5, index);
		}

		/**
		 * {@inheritDoc}
		 */
		@Override
		public void freeMemory() {
			//
		}

		/**
		 * {@inheritDoc}
		 */
		@Override
		public Collection<String> getAllAttributeNames() {
			return Arrays.asList(ATTR1, ATTR2, ATTR3, ATTR5);
		}

		/**
		 * {@inheritDoc}
		 */
		@Override
		public Collection<Attribute> getAllAttributes() {
			return Arrays.asList(this.attr1, this.attr2, this.attr3, this.attr5);
		}

		/**
		 * {@inheritDoc}
		 */
		@Override
		public Map<AttributeType, Collection<Attribute>> getAllAttributesByType() {
			Map<AttributeType, Collection<Attribute>> theMap = new TreeMap<>();
			theMap.put(AttributeType.BOOLEAN, Collections.singleton(this.attr1));
			theMap.put(AttributeType.REAL, Collections.singleton(this.attr2));
			theMap.put(AttributeType.STRING, Collections.singleton(this.attr3));
			theMap.put(AttributeType.INTEGER, Collections.singleton(this.attr5));
			return theMap;
		}

		/**
		 * {@inheritDoc}
		 */
		@Override
		public AttributeValue getAttribute(String name) {
			if (ATTR1.equals(name)) return this.attr1;
			if (ATTR2.equals(name)) return this.attr2;
			if (ATTR3.equals(name)) return this.attr3;
			if (ATTR5.equals(name)) return this.attr5;
			return null;
		}

		/**
		 * {@inheritDoc}
		 */
		@Override
		public AttributeValue getAttribute(String name, AttributeValue defaultValue) {
			if (ATTR1.equals(name)) return this.attr1;
			if (ATTR2.equals(name)) return this.attr2;
			if (ATTR3.equals(name)) return this.attr3;
			if (ATTR5.equals(name)) return this.attr5;
			return defaultValue;
		}

		/**
		 * {@inheritDoc}
		 */
		@Override
		public int getAttributeCount() {
			// FIX: this provider holds exactly 4 attributes (attr1, attr2,
			// attr3, attr5); the previous value of 5 disagreed with
			// getAllAttributes()/getAllAttributeNames().
			return 4;
		}

		/**
		 * {@inheritDoc}
		 */
		@Override
		public Attribute getAttributeObject(String name) {
			if (ATTR1.equals(name)) return this.attr1;
			if (ATTR2.equals(name)) return this.attr2;
			if (ATTR3.equals(name)) return this.attr3;
			if (ATTR5.equals(name)) return this.attr5;
			return null;
		}

		/**
		 * {@inheritDoc}
		 */
		@Override
		public boolean hasAttribute(String name) {
			if (ATTR1.equals(name)) return true;
			if (ATTR2.equals(name)) return true;
			if (ATTR3.equals(name)) return true;
			if (ATTR5.equals(name)) return true;
			return false;
		}

		@Override
		public void toMap(Map<String, Object> mapToFill) {
			// Not needed by these tests.
			throw new UnsupportedOperationException();
		}

	} // class TestingAttributeProvider

}
/* * Copyright 2017 StreamSets Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.streamsets.pipeline.lib.jdbc.parser.sql; import com.google.common.annotations.VisibleForTesting; import org.antlr.v4.runtime.tree.ParseTree; import plsql.plsqlBaseListener; import plsql.plsqlParser; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Listener for use with {@linkplain org.antlr.v4.runtime.tree.ParseTreeWalker}. 
*/ public class SQLListener extends plsqlBaseListener { private final HashMap<String, String> columns = new HashMap<>(); private static final String NULL_STRING = "NULL"; private boolean insideStatement = false; private boolean caseSensitive = false; private boolean allowNulls = false; private Set<String> columnsExpected; private String table; private String schema; private final Pattern tableSchemaPattern = Pattern.compile("\"(.*)\"\\.\"(.*)\""); private List<plsqlParser.Column_nameContext> columnNames; @Override public void enterUpdate_set_clause(plsqlParser.Update_set_clauseContext ctx) { for(plsqlParser.Column_based_update_set_clauseContext x : ctx.column_based_update_set_clause()) { columns.put(formatName(x.column_name(0).getText().trim()), formatValue(x.expression().getText().trim())); } } private void extractTableAndSchema(String tableSchema) { Matcher m = tableSchemaPattern.matcher(tableSchema); if (m.matches()) { schema = m.group(1); table = m.group(2); } else { table = format(table); // no schema name, only table } } @Override public void enterUpdate_statement(plsqlParser.Update_statementContext ctx) { if (table == null) { String tableSchema = ctx.general_table_ref().getText(); extractTableAndSchema(tableSchema); } } @Override public void enterDelete_statement(plsqlParser.Delete_statementContext ctx) { if (table == null) { String tableSchema = ctx.general_table_ref().getText(); extractTableAndSchema(tableSchema); } } @Override public void enterInsert_into_clause(plsqlParser.Insert_into_clauseContext ctx) { if (table == null) { String tableSchema = ctx.general_table_ref().getText(); extractTableAndSchema(tableSchema); } this.columnNames = ctx.column_name(); } @Override public void enterValues_clause(plsqlParser.Values_clauseContext ctx) { List<plsqlParser.ExpressionContext> expressions = ctx.expression_list().expression(); for (int i = 0; i < expressions.size(); i++) { columns.put(formatName(columnNames.get(i).getText().trim()), 
formatValue(expressions.get(i).getText().trim())); } } @Override public void enterWhere_clause(plsqlParser.Where_clauseContext ctx) { insideStatement = true; } @Override public void exitWhere_clause(plsqlParser.Where_clauseContext ctx) { insideStatement = false; } @Override public void enterEquality_expression(plsqlParser.Equality_expressionContext ctx) { if (insideStatement) { // This is pretty horrible, but after some experimentation, I figured that the // third level of the tree contained the actual data. I am assuming it is because // top level is actually empty root, 2nd level contains the actual node, and 3rd level // has its individual tokens -> 0 is key, 1 is = and 2 is the value. String key = null; String val = null; ParseTree level0 = ctx.getChild(0); if (level0 != null) { ParseTree level1 = level0.getChild(0); if (level1 != null) { ParseTree keyNode = level1.getChild(0); if (keyNode != null) { key = formatName(keyNode.getText()); } ParseTree valNode = level1.getChild(2); if (valNode != null) { val = valNode.getText(); } } } // Why check the table's column names? Because stuff like TO_DATE(<something>) will also come in here // with each token as a key with null value. if (key != null && (val != null || (allowNulls && columnsExpected != null && columnsExpected.contains(key))) && !columns.containsKey(key)) { columns.put(key, formatValue(val)); } } } /** * Format column names based on whether they are case-sensitive */ private String formatName(String columnName) { String returnValue = format(columnName); if (caseSensitive) { return returnValue; } return returnValue.toUpperCase(); } /** * Unescapes strings and returns them. 
*/ private String formatValue(String value) { // The value can either be null (if the IS keyword is present before it or just a NULL string with no quotes) if (value == null || NULL_STRING.equalsIgnoreCase(value)) { return null; } String returnValue = format(value); return returnValue.replaceAll("''", "'"); } @VisibleForTesting public String format(String columnName) { int stripCount; if (columnName.startsWith("\"\'")) { stripCount = 2; } else if (columnName.startsWith("\"") || columnName.startsWith("\'")) { stripCount = 1; } else { return columnName; } return columnName.substring(stripCount, columnName.length() - stripCount); } /** * Reset the listener to use with the next statement. All column information is cleared. */ public void reset(){ columns.clear(); this.columnsExpected = null; columnNames = null; table = null; schema = null; insideStatement = false; } public Map<String, String> getColumns() { return columns; } public String getTable() { return table; } public String getSchema() { return schema; } public void setCaseSensitive() { this.caseSensitive = true; } public void allowNulls() { this.allowNulls = true; } public void setColumns(Set<String> columns) { this.columnsExpected = columns; } }
/*
 * Copyright 2015 The SageTV Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package sage;

import sage.Wizard.MaintenanceType;

import java.util.List;

/**
 * A single EPG data source: persists its configuration under the
 * EPG.EPG_DATA_SOURCES_KEY preference tree and coordinates over-the-air
 * data scans with the capture devices for its provider.
 * NOTE(review): several fields used below (wiz, epgSourceID, prefsRoot,
 * providerID, updateIDMap, scannedUntil, dataScanRequested, ...) are
 * declared further down this class, outside this excerpt.
 */
public class EPGDataSource
{
  public static final long MILLIS_PER_HOUR = 1000L * 60L * 60L;
  // Preference property names, stored relative to prefsRoot.
  protected static final String PROVIDER_ID = "provider_id";
  protected static final String ENABLED = "enabled";
  protected static final String LAST_RUN = "last_run";
  protected static final String EXPANDED_UNTIL = "expanded_until";
  protected static final String EPG_NAME = "epg_name";
  protected static final String DUMP_DIR = "dump_dir";
  protected static final String UNAVAILABLE_STATIONS = "unavailable_stations";
  protected static final String UNAVAILABLE_CHANNEL_NUMS = "unavailable_channel_nums";
  protected static final String APPLIED_SERVICE_LEVEL = "applied_service_level";
  protected static final String CHAN_DOWNLOAD_COMPLETE = "chan_download_complete";
  protected static final String SERVER_UPDATE_ID = "server_update_id";
  protected static final String DISABLE_DATA_SCANNING = "disable_data_scanning";
  protected static final String EPG_DATA_SCAN_PERIOD = "epg_data_scan_period";
  protected static final long CONFIRMATION_AHEAD_TIME = 24*MILLIS_PER_HOUR;
  // Millisecond offset of the GPS epoch (Jan 6, 1980 GMT); set in the
  // static initializer below.
  private static long GPS_OFFSET;
  // NOTE(review): SimpleDateFormat instances are not thread-safe; these are
  // shared statics — confirm all callers serialize access.
  private static final java.text.DateFormat utcDateFormat = new java.text.SimpleDateFormat("MM/dd/yyyy HH:mm:ss");
  private static final java.text.DateFormat localDateFormat = new java.text.SimpleDateFormat("MM/dd/yyyy hh:mm:ss");
  static
  {
    // start time (GPS time), 0 is at Jan 6, 1980
    java.util.GregorianCalendar gcal = new java.util.GregorianCalendar(1980, java.util.Calendar.JANUARY, 6, 0, 0, 0);
    gcal.setTimeZone(java.util.TimeZone.getTimeZone("GMT"));
    GPS_OFFSET = gcal.getTimeInMillis();
    utcDateFormat.setTimeZone(java.util.TimeZone.getTimeZone("GMT"));
  }

  /**
   * Loads this data source's persisted state from the preference tree
   * rooted at EPG.EPG_DATA_SOURCES_KEY/&lt;id&gt;/.
   *
   * @param inEPGSourceID preference-tree id of this source
   */
  public EPGDataSource(int inEPGSourceID)
  {
    wiz = Wizard.getInstance();
    epgSourceID = inEPGSourceID;
    //Wizard.getInstance().notifyOfID(epgSourceID);
    prefsRoot = EPG.EPG_DATA_SOURCES_KEY + '/' + epgSourceID + '/';
    lastRun = Sage.getLong(prefsRoot + LAST_RUN, 0);
    expandedUntil = Sage.getLong(prefsRoot + EXPANDED_UNTIL, 0);
    providerID = Sage.getLong(prefsRoot + PROVIDER_ID, 0);
    enabled = Sage.getBoolean(prefsRoot + ENABLED, true);
    name = Sage.get(prefsRoot + EPG_NAME, "Undefined Source Name");
    Sage.put(prefsRoot + EPG.EPG_CLASS, "Basic");
    unavailStations = Sage.parseCommaDelimIntSet(Sage.get(prefsRoot + UNAVAILABLE_STATIONS, ""));
    unavailChanNums = Sage.parseCommaDelimSet(Sage.get(prefsRoot + UNAVAILABLE_CHANNEL_NUMS, ""));
    appliedServiceLevel = Sage.getInt(prefsRoot + APPLIED_SERVICE_LEVEL, 0);
    chanDownloadComplete = Sage.getBoolean(prefsRoot + CHAN_DOWNLOAD_COMPLETE, false);
    dataScanAllowed = !Sage.getBoolean(prefsRoot + DISABLE_DATA_SCANNING, false);
    dumpDir = Sage.get(prefsRoot + DUMP_DIR, null);
    if (dumpDir != null)
    {
      new java.io.File(dumpDir).mkdirs();
    }
    // Persisted as "stationID,updateTime;stationID,updateTime;...".
    updateIDMap = java.util.Collections.synchronizedMap(new java.util.HashMap());
    String updateStr = Sage.get(prefsRoot + SERVER_UPDATE_ID, "");
    java.util.StringTokenizer toker = new java.util.StringTokenizer(updateStr, ";");
    while (toker.hasMoreTokens())
    {
      String toke = toker.nextToken();
      int idx = toke.indexOf(',');
      if (idx != -1)
      {
        try
        {
          updateIDMap.put(new Integer(toke.substring(0, idx)), new Long(toke.substring(idx + 1)));
        }
        // Malformed entries in the persisted string are skipped silently.
        catch (Exception e) {}
      }
    }
  }

  /**
   * Returns true if the given station should be covered by an
   * over-the-air data scan.
   */
  public boolean doesStationIDWantScan(int statID)
  {
    // These are the station IDs we assign ourselves in the channel scan. Exclude the ones that map to default channels and
    // also ones that map to tribune station IDs
    return statID > 0 && statID < 10000 && dataScanAllowed;
  }

  /**
   * Checks whether a data scan is due for this provider and, if so,
   * requests one from the capture devices.
   *
   * @return true if a scan was due (regardless of whether any device
   *     accepted the request), false otherwise or on a client/disabled source
   */
  public boolean initDataScanInfo()
  {
    if (Sage.client || !dataScanAllowed) return false;
    // Get our CDI and find out if it does data scans, and if it does then sync our update times with when
    // we're expanded until so the EPG will tell us to update when the next scan can be done
    CaptureDeviceInput[] cdis = MMC.getInstance().getInputsForProvider(providerID);
    scannedUntil = Long.MAX_VALUE;
    if (cdis.length > 0 && cdis[0].doesDataScanning())
    {
      int[] allStations = EPG.getInstance().getAllStations(providerID);
      // scannedUntil becomes the earliest per-station scan time; any
      // never-scanned station forces it to 0.
      for (int i = 0; i < allStations.length; i++)
      {
        if (canViewStation(allStations[i]) && doesStationIDWantScan(allStations[i]))
        {
          Long stationUpdateTime = updateIDMap.get(new Integer(allStations[i]));
          if (stationUpdateTime != null)
          {
            scannedUntil = Math.min(stationUpdateTime.longValue(), scannedUntil);
          }
          else
            scannedUntil = 0; // haven't scanned yet for this station!
        }
      }
    }
    if (scannedUntil <= Sage.time())
    {
      // Scan is due: ask every input; kick the Seeker if anyone accepted.
      boolean foundOne = false;
      for (int i = 0; i < cdis.length; i++)
      {
        if (cdis[i].getCaptureDevice().requestDataScan(cdis[i]))
          foundOne = true;
      }
      if (foundOne)
      {
        dataScanRequested = true;
        SeekerSelector.getInstance().kick();
      }
      return true;
    }
    else return false;
  }

  protected void doDataScan()
  {
    long dataScanPeriod = Sage.getLong(EPG_DATA_SCAN_PERIOD, 4*Sage.MILLIS_PER_HR);
    // This does the data scan if it needs to be done
    CaptureDeviceInput[] cdis = MMC.getInstance().getInputsForProvider(providerID);
    CaptureDeviceInput cdi = null;
    boolean kickSeekNow = false;
    // Keep the first actively-scanning input; cancel scan requests on the rest.
    for (int i = 0; i < cdis.length; i++)
    {
      if (cdi != null)
      {
        cdis[i].getCaptureDevice().cancelDataScanRequest(cdis[i]);
        kickSeekNow = true;
      }
      else if (cdis[i].isActive() && cdis[i].getCaptureDevice().isDataScanning())
        cdi = cdis[i];
    }
    if (kickSeekNow)
      SeekerSelector.getInstance().kick();
    if (dataScanAllowed && cdi != null && cdi.isActive() && cdi.getCaptureDevice().isDataScanning())
    {
      if (Sage.DBG) System.out.println("EPGDS " + name + " found a capture device to start data scanning with:" + cdi);
      // Now we need to find the actual stations we want to scan for and go to it!
int[] allStations = EPG.getInstance().getAllStations(providerID); long newScannedUntil = Long.MAX_VALUE; java.util.Map<String, List<Integer>> majorToChannelMap = new java.util.HashMap<String, List<Integer>>(); for (int i = 0; i < allStations.length; i++) { if (abort || !enabled) return; if (canViewStation(allStations[i]) && doesStationIDWantScan(allStations[i])) { Long stationUpdateTime = updateIDMap.get(new Integer(allStations[i])); if (stationUpdateTime != null) { if (stationUpdateTime.longValue() > Sage.time()) { newScannedUntil = Math.min(stationUpdateTime.longValue(), newScannedUntil); continue; } } String currChan = EPG.getInstance().getPhysicalChannel(providerID, allStations[i]); java.util.StringTokenizer toker = new java.util.StringTokenizer(currChan, "-"); String majChan = currChan; java.util.List<Integer> currStatList = null; if (toker.countTokens() > 1) { if (toker.countTokens() > 2) majChan = toker.nextToken() + "-" + toker.nextToken(); else majChan = toker.nextToken(); } /* else { // This probably isn't a digital TV channel which means it'll screw us up, so skip it // This could also be a channel which doesn't have a major-minor identifier!! continue; }*/ currStatList = majorToChannelMap.get(majChan); if (currStatList == null) { currStatList = new java.util.ArrayList<Integer>(); majorToChannelMap.put(majChan, currStatList); } currStatList.add(new Integer(allStations[i])); } } java.util.Iterator<java.util.Map.Entry<String, List<Integer>>> walker = majorToChannelMap.entrySet().iterator(); while (walker.hasNext()) { if (abort || !enabled) return; java.util.Map.Entry<String, List<Integer>> ent = walker.next(); String currMajor = ent.getKey(); java.util.List<Integer> currStatList = ent.getValue(); // If we're here then we want to scan! 
synchronized (SeekerSelector.getInstance()) { if (cdi.getCaptureDevice().isDataScanning()) { cdi.tuneToChannel(EPG.getInstance().getPhysicalChannel(providerID, currStatList.get(0).intValue())); } else // our data scanning has been stopped, so just return return; } // Now we wait until we think we have all of the data for this channel try { if (Sage.DBG) System.out.println("EPGDS waiting for data scan on major channel " + currMajor + "...."); Thread.sleep(Sage.getLong("epg/data_scan_channel_dwell_new", 2*Sage.MILLIS_PER_MIN)); } catch (Exception e) {} // We should do a scan for DTV data every 4 hours, so mark it as done for the next 4 hours // But round this up to the next hour so we don't do a bunch of incremental scans // when that timer runs out long newval = Sage.time() + dataScanPeriod; newval = (newval - (newval % (Sage.MILLIS_PER_HR))) + Sage.MILLIS_PER_HR; for (int i = 0; i < currStatList.size(); i++) { updateIDMap.put(currStatList.get(i), new Long(newval)); } newScannedUntil = Math.min(newval, newScannedUntil); } if (newScannedUntil < Long.MAX_VALUE) { saveUpdateMap(); scannedUntil = newScannedUntil; } for (int i = 0; i < cdis.length; i++) cdis[i].getCaptureDevice().cancelDataScanRequest(cdis[i]); dataScanRequested = false; SeekerSelector.getInstance().kick(); } } public void processEPGDataMsg(sage.msg.SageMsg msg) { if (!dataScanAllowed) return; /* * The EPG message data format is as follows: * EPG-0|major-minor AN/DT|startTimeGPS|durationSeconds|language|title|description|rating| */ String msgString; try { msgString = new String((byte[])msg.getData(), Sage.BYTE_CHARSET); } catch (java.io.UnsupportedEncodingException e) { msgString = new String((byte[])msg.getData()); } if (((byte[])msg.getData()).length != msgString.length()) throw new InternalError("Byte array length is not the same length as string and we used a byte charset!!!"); if (msgString.length() == 0) return; try { int offset = 0; java.util.StringTokenizer toker = new 
java.util.StringTokenizer(msgString, "|", true); offset += toker.nextToken().length(); // First token is "EPG-0" offset += toker.nextToken().length(); // delimiter String chanInfo = toker.nextToken(); // Channel number and DT or AN offset += chanInfo.length(); String chanNum = chanInfo.substring(0, chanInfo.indexOf(' ')); int stationID = sage.EPG.getInstance().guessStationIDFromPhysicalChannel(providerID, chanNum, chanNum.indexOf('-') != -1); if (stationID == 0) stationID = sage.EPG.getInstance().guessStationID(providerID, chanNum); if (stationID > 10000) { // It has TMS EPG data, so do NOT overwrite it with what we have found here // For TVTV they're station IDs overlap with the generated ones so don't take anything if using their data //if (sage.Sage.DBG) System.out.println("Skipping EPG data message because we have that channel's EPG data from a better source"); return; } if (stationID == 0) { //if (sage.Sage.DBG) System.out.println("Skipping EPG data message because we don't have a station ID for this channel"); return; } offset += toker.nextToken().length(); // delimiter String timeStr = toker.nextToken(); offset += timeStr.length(); long startTime; try { if (timeStr.startsWith("GPS:")) { startTime = Long.parseLong(timeStr.substring(4)) * 1000; startTime += GPS_OFFSET; // Fix issues with leap second differences between GPS & UTC time startTime -= startTime % 60000; } else if (timeStr.startsWith("UTC:")) { startTime = utcDateFormat.parse(timeStr.substring(4)).getTime(); } else if (timeStr.startsWith("LOCAL:")) { localDateFormat.setTimeZone(java.util.TimeZone.getDefault()); startTime = localDateFormat.parse(timeStr.substring(6)).getTime(); } else startTime = Long.parseLong(timeStr) * 1000; } catch (Exception e) { System.out.println("ERROR parsing EPG message start time of:" + e); return; } offset += toker.nextToken().length(); // delimiter String durStr = toker.nextToken(); offset += durStr.length(); int duration = Integer.parseInt(durStr); // duration offset += 
toker.nextToken().length(); // delimiter String language = toker.nextToken(); offset += language.length(); if (!"|".equals(language)) offset += toker.nextToken().length(); // delimiter else language = ""; if (language.length() > 0) { if ("eng".equalsIgnoreCase(language)) language = "English"; else if ("spa".equalsIgnoreCase(language)) language = "Spanish"; else if ("dan".equalsIgnoreCase(language)) language = "Danish"; else if ("swe".equalsIgnoreCase(language)) language = "Swedish"; else if ("fra".equalsIgnoreCase(language)) language = "French"; } // Now we need to check for alternate character sets which means we'd need to switch to the byte arrays String title="", description=""; for (int i = 0; i < 2 && offset < msgString.length(); i++) { if (msgString.charAt(offset) != '[') { if (i == 0) { title = toker.nextToken(); // title offset += title.length(); if ("|".equals(title)) title = ""; else if (toker.hasMoreTokens()) offset += toker.nextToken().length(); // delimiter } else { description = toker.nextToken(); // description offset += description.length(); if ("|".equals(description)) description = ""; else if (toker.hasMoreTokens()) offset += toker.nextToken().length(); // delimiter } } else { String charset = Sage.BYTE_CHARSET; int len = msgString.indexOf('|', offset + 1) - offset; int fullLen = len; int baseOffset = offset; do { int brack1 = offset; do { int brack2 = msgString.indexOf(']', brack1); if (brack2 == -1) break; int eqIdx = msgString.indexOf('=', brack1); if (eqIdx > brack2 || eqIdx == -1) break; String attName = msgString.substring(brack1 + 1, eqIdx); String attValue = msgString.substring(eqIdx + 1, brack2); if ("set".equals(attName)) charset = attValue; else if ("len".equals(attName)) { try { len = Integer.parseInt(attValue); } catch (NumberFormatException e){ if (Sage.DBG) System.out.println("Formatting error with EPG data:" + e); } } offset += brack2 - offset + 1; brack1 = msgString.indexOf('[', brack2); } while (brack1 != -1 && brack1 < offset + 
len); try { if (i == 0) title += new String((byte[])msg.getData(), offset, len, charset); else description += new String((byte[])msg.getData(), offset, len, charset); } catch (java.io.UnsupportedEncodingException e) { if (Sage.DBG) System.out.println("Unsupported encoding for EPG data of:" + charset + " err=" + e); if (i == 0) title += new String((byte[])msg.getData(), offset, len); else description += new String((byte[])msg.getData(), offset, len); } //if (Sage.DBG) System.out.println("Parsing EPG data w/ charset=" + charset + " len=" + len + ((i == 0) ? (" title=" + title) : (" desc=" + description))); offset += len + 1; } while (baseOffset + fullLen > offset); do { baseOffset += toker.nextToken().length(); } while (baseOffset < offset); } } String rating = (toker.hasMoreTokens() ? toker.nextToken() : ""); byte prByte = (byte)0; String[] ers = null; String rated = null; if (rating.length() > 0) { if (rating.indexOf("PG-13") != -1) rated = "PG-13"; else if (rating.indexOf("NC-17") != -1) rated = "NC-17"; // Extract the portion of interest int pidx1 = rating.indexOf('('); int pidx2 = rating.indexOf(')'); if (pidx1 != -1 && pidx2 > pidx1) { // Break down the rating information into the parts we care about. 
java.util.StringTokenizer ratToker = new java.util.StringTokenizer(rating.substring(pidx1 + 1, pidx2), "-;"); if (ratToker.countTokens() > 1) { String tvRating = ratToker.nextToken() + ratToker.nextToken(); for (int i = 1; i < sage.Airing.PR_NAMES.length; i++) { if (tvRating.equalsIgnoreCase(sage.Airing.PR_NAMES[i])) { prByte = (byte) i; break; } } java.util.ArrayList erList = Pooler.getPooledArrayList(); while (ratToker.hasMoreTokens()) { // Now extract the other specific rating information String currRate = ratToker.nextToken(); if ("V".equals(currRate)) { if (prByte == Airing.TVMA_VALUE) erList.add("Graphic Violence"); else if (prByte == Airing.TV14_VALUE) erList.add("Violence"); else erList.add("Mild Violence"); } else if ("S".equals(currRate)) { if (prByte == Airing.TVMA_VALUE) erList.add("Strong Sexual Content"); else if (!erList.contains("Adult Situations")) erList.add("Adult Situations"); } else if ("D".equals(currRate)) { if (!erList.contains("Adult Situations")) erList.add("Adult Situations"); if (!erList.contains("Language")) erList.add("Language"); } else if ("L".equals(currRate)) { if (!erList.contains("Language")) erList.add("Language"); } else if (rated == null && ("G".equals(currRate) || "PG".equals(currRate) || "R".equals(currRate))) rated = currRate; else if (rated == null && "X".equals(currRate)) rated = "AO"; else if (rated == null && "NR".equals(currRate)) rated = "NR"; } if (!erList.isEmpty()) ers = (String[]) erList.toArray(Pooler.EMPTY_STRING_ARRAY); Pooler.returnPooledArrayList(erList); } } } if (!"|".equals(rating) && toker.hasMoreTokens()) toker.nextToken(); // delimiter String category = (toker.hasMoreTokens() ? 
toker.nextToken() : null); String subcategory = null; if ("|".equals(category)) category = null; if (category != null) { int idx = category.indexOf('/'); if (idx != -1) { subcategory = category.substring(idx + 1); category = category.substring(0, idx); } } title = title.trim(); description = description.trim(); String extID = "DT" + Math.abs((title + "-" + duration + "-" + description).hashCode()); String[] categories = new String[(category == null ? 0 : 1) + (subcategory == null ? 0 : 1)]; if (category != null) categories[0] = category; if (subcategory != null) categories[1] = subcategory; sage.Show myShow = sage.Wizard.getInstance().addShow(title, null, null, description, 0, categories, null, null, rated, ers, null, null, null, extID, language, 0, DBObject.MEDIA_MASK_TV, (short)0, (short)0, false, (byte)0, (byte)0, (byte)0, (byte)0, (byte)0, (byte)0, (byte)0, (byte)0); //System.out.println("Added show:" + myShow); sage.Airing myAir = sage.Wizard.getInstance().addAiring(extID, stationID, startTime, duration*1000L, (byte)0, (byte)0, prByte, DBObject.MEDIA_MASK_TV); //System.out.println("Added air:" + myAir + " start=" + sage.Sage.dfFull(myAir.getStartTime())); } catch (RuntimeException re) { if (Sage.DBG) System.out.println("ERROR processing EPG data message \"" + msgString + "\" of:" + re); if (Sage.DBG) re.printStackTrace(); } } public String getName() { return name; } public void setName(String s) { if (s == null) s = ""; name = s; Sage.put(prefsRoot + EPG_NAME, name); } public final long getLastRun() { return lastRun; } public boolean usesPlugin() { return EPG.getInstance().hasEPGPlugin() && !Sage.getBoolean(prefsRoot + "disable_plugin", false); } public long getExpandedUntil() { return expandedUntil; } protected void setExpandedUntil(long x) { expandedUntil = x; Sage.putLong(prefsRoot + EXPANDED_UNTIL, expandedUntil); } public void reset() { setExpandedUntil(0); lastRun = 0; chanDownloadComplete = false; Sage.putLong(prefsRoot + LAST_RUN, 0); } public final 
void setEnabled(boolean x) { enabled = x; Sage.putBoolean(prefsRoot + ENABLED, enabled); } public final boolean getEnabled() { return enabled; } public final long getProviderID() { return providerID; } public final void setProviderID(long id) { Sage.putLong(prefsRoot + PROVIDER_ID, providerID = id); } public void abortUpdate() { abort = true; } public final void clearAbort() { abort = false; } // Formerly abstract protected boolean extractGuide(long guideTime) { int defaultStationID = Long.toString(providerID).hashCode(); if (defaultStationID > 0) defaultStationID *= -1; boolean[] didAdd = new boolean[1]; MMC mmc = MMC.getInstance(); CaptureDeviceInput cdi = mmc.getInputForProvider(providerID); // We're no longer needed, we'll get cleaned up soon. if (cdi == null) return true; // Don't automatically insert the default channels for digital tuners; let them // be found from a scan instead if (cdi.getType() == CaptureDeviceInput.DIGITAL_TUNER_CROSSBAR_INDEX) { // We still need to put empty maps in there so it thinks there's actually lineup // data for this source...and there is in the overrides. 
EPG.getInstance().setLineup(providerID, new java.util.HashMap<Integer, String[]>()); EPG.getInstance().setServiceLevels(providerID, new java.util.HashMap<Integer, Integer>()); return true; } int minChan = cdi.getMinChannel(); int maxChan = cdi.getMaxChannel(); if ((cdi.getType() != 1 || cdi.weirdRF()) && Sage.getBoolean("epg/dont_create_full_channel_list_for_non_tuner_inputs", true)) { // Not a tv tuner, so just set the min and max equal so it only creates a single channel maxChan = minChan; } java.util.HashMap<Integer, String[]> lineMap = new java.util.HashMap<Integer, String[]>(); for (int i = minChan; i <= maxChan; i++) { wiz.addChannel(cdi.getCrossName(), name, null, defaultStationID + i, 0, didAdd); if (didAdd[0]) wiz.resetAirings(defaultStationID + i); lineMap.put(new Integer(defaultStationID + i), new String[] { Integer.toString(i) }); } EPG.getInstance().setLineup(providerID, lineMap); EPG.getInstance().setServiceLevels(providerID, new java.util.HashMap<Integer, Integer>()); return true; } public final boolean expand() { String arf = "expand called on " + getName() + " at " + Sage.df() + " expandedUntil=" + Sage.df(expandedUntil) + " scannedUntil=" + Sage.df(scannedUntil); if (Sage.DBG) System.out.println(arf); errorText += arf + "\r\n"; if (!enabled || (getTimeTillUpdate() > 0)) { return true; } else if (abort) return false; lastRun = Sage.time(); Sage.putLong(prefsRoot + LAST_RUN, lastRun); // Reload this info so we cna figure out who needs a scan initDataScanInfo(); if ((getTimeTillExpand() == 0) && !abort && enabled) { if (Sage.DBG) System.out.println("EPG Expanding " + getName() + " at " + Sage.df()); errorText += "EPG Expanding " + getName() + " at " + Sage.df() + "\r\n"; // We're expanded into the present at least boolean needsExpand = expandedUntil < Sage.time(); if (needsExpand) setExpandedUntil(Math.max(Sage.time(), expandedUntil)); // Log our request for a data scan if we need one if (dataScanAllowed && scannedUntil <= Sage.time() && 
!Sage.client) { CaptureDeviceInput[] cdis = MMC.getInstance().getInputsForProvider(providerID); boolean foundOne = false; for (int i = 0; i < cdis.length; i++) { if (cdis[i].getCaptureDevice().requestDataScan(cdis[i])) foundOne = true; } if (foundOne) { dataScanRequested = true; SeekerSelector.getInstance().kick(); } } if (Sage.client || !needsExpand || extractGuide(expandedUntil)) { Sage.putBoolean(prefsRoot + CHAN_DOWNLOAD_COMPLETE, chanDownloadComplete = true); if (!abort && enabled && needsExpand) setExpandedUntil(expandedUntil + getGuideWidth()); if (!abort && enabled && !Sage.client && scannedUntil <= Sage.time() && dataScanAllowed) doDataScan(); } else { if (!abort && enabled && scannedUntil <= Sage.time() && dataScanAllowed) doDataScan(); return false; } } return true; } // Formerly abstract protected long getGuideWidth() { return Sage.MILLIS_PER_DAY; } // Formerly abstract protected long getDesiredExpand() { return 0; } public final long getTimeTillUpdate() { return getTimeTillExpand(); } public long getTimeTillExpand() { if (!enabled) return Long.MAX_VALUE; // We only factor in the scanning time if the device is available for scanning, // or if we haven't submitted the scan request to the device yet if (dataScanAllowed) { CaptureDeviceInput[] cdis = MMC.getInstance().getInputsForProvider(providerID); for (int i = 0; i < cdis.length; i++) { if ((cdis[i].isActive() && cdis[i].getCaptureDevice().isDataScanning()) || (cdis[i].doesDataScanning() && !dataScanRequested)) { return Math.max(0, Math.min(expandedUntil - Sage.time(), scannedUntil - Sage.time())); } } } return Math.max(0, expandedUntil - Sage.time()); } public String getErrorText() { return errorText; } protected void appendExceptionError(Throwable t) { java.io.StringWriter sw = new java.io.StringWriter(); java.io.PrintWriter pw = new java.io.PrintWriter(sw); if (Sage.DBG) t.printStackTrace(pw); pw.flush(); errorText += sw.toString(); } public final int getEPGSourceID() { return epgSourceID; } 
/** Returns error text accumulated since the previous call (incremental read). */
protected String getNewErrorText()
{
  if (errTextPos < errorText.length())
  {
    String rv = errorText.substring(errTextPos);
    errTextPos = errorText.length();
    return rv;
  }
  else
    return "";
}

/*public int[] getUnavailableStations()
{
  int[] rv = new int[unavailStations.size()];
  java.util.Iterator walker = unavailStations.iterator();
  int idx = 0;
  while (walker.hasNext())
    rv[idx++] = ((Integer) walker.next()).intValue();
  return rv;
}*/

/** True unless the station has been marked unavailable for this source. */
public boolean canViewStation(int x)
{
  return !unavailStations.contains(new Integer(x));
}

/** True when neither the station nor this specific channel number is marked unavailable. */
public boolean canViewStationOnChannel(int statID, String chanNum)
{
  return !unavailStations.contains(new Integer(statID)) && !unavailChanNums.contains(chanNum);
}

/**
 * Marks a station (un)available. On an actual change this persists the set,
 * forces a full re-expand when re-enabling, prunes the scan-update map, and
 * propagates the property change to clients.
 */
public void setCanViewStation(int stationID, boolean good)
{
  int startSize = unavailStations.size();
  if (good)
    unavailStations.remove(new Integer(stationID));
  else
    unavailStations.add(new Integer(stationID));
  if (startSize != unavailStations.size())
  {
    Sage.put(prefsRoot + UNAVAILABLE_STATIONS, Sage.createCommaDelimSetString(unavailStations));
    if (good)
      setExpandedUntil(0); // re-enable: redo the guide expansion from scratch
    synchronized (updateIDMap)
    {
      if (updateIDMap.keySet().removeAll(unavailStations))
      {
        saveUpdateMap();
      }
    }
    NetworkClient.distributeRecursivePropertyChange(EPG.EPG_DATA_SOURCES_KEY);
    EPG.getInstance().resetViewableStationsCache();
  }
}

/**
 * Marks one channel number of a station (un)available. A station is only
 * considered fully unavailable when every one of its channel numbers is bad;
 * this keeps unavailStations and unavailChanNums consistent with that rule and
 * reorders the lineup's channel array so a good channel stays first.
 */
public void setCanViewStationOnChannel(int stationID, String chanNum, boolean good)
{
  String[] possChans = EPG.getInstance().getChannels(providerID, stationID);
  int startSize1 = unavailStations.size();
  int startSize2 = unavailChanNums.size();
  if (possChans.length <= 1)
  {
    // Single (or no) channel: station-level flag is authoritative
    if (good)
      unavailStations.remove(new Integer(stationID));
    else
      unavailStations.add(new Integer(stationID));
    if (possChans.length == 1)
      unavailChanNums.remove(possChans[0]);
  }
  else if (good)
  {
    if (unavailStations.contains(new Integer(stationID)))
    {
      // All chans were bad for this station, now we're marking one of them good
      unavailStations.remove(new Integer(stationID));
      for (int i = 0; i < possChans.length; i++)
      {
        if (chanNum.equals(possChans[i]))
        {
          if (i != 0)
          {
            possChans[i] = possChans[0];
            possChans[0] = chanNum;
            // This changes the actual storage array, so we can just update it
            EPG.getInstance().setLineup(providerID, EPG.getInstance().getLineup(providerID));
          }
        }
        else
          unavailChanNums.add(possChans[i]);
      }
    }
    else // Just remove this one from the bad num list
      unavailChanNums.remove(chanNum);
  }
  else
  {
    if (!unavailStations.contains(new Integer(stationID)) && !unavailChanNums.contains(chanNum))
    {
      // Not all were bad before, they may be now so check it out
      int goodChanIdx = -1;
      for (int i = 0; i < possChans.length; i++)
      {
        if (!chanNum.equals(possChans[i]) && !unavailChanNums.contains(possChans[i]))
        {
          goodChanIdx = i;
          break;
        }
      }
      if (goodChanIdx != -1)
      {
        // Another good channel exists; promote it to the front and mark this one bad
        String swap = possChans[0];
        possChans[0] = possChans[goodChanIdx];
        possChans[goodChanIdx] = swap;
        unavailChanNums.add(chanNum);
      }
      else
      {
        // No good channels remain; flag the whole station instead of individual numbers
        for (int i = 0; i < possChans.length; i++)
          unavailChanNums.remove(possChans[i]);
        unavailStations.add(new Integer(stationID));
      }
    }
  }
  if (startSize1 != unavailStations.size())
  {
    Sage.put(prefsRoot + UNAVAILABLE_STATIONS, Sage.createCommaDelimSetString(unavailStations));
    if (good)
      setExpandedUntil(0);
    synchronized (updateIDMap)
    {
      if (updateIDMap.keySet().removeAll(unavailStations))
      {
        saveUpdateMap();
      }
    }
  }
  if (startSize2 != unavailChanNums.size())
    Sage.put(prefsRoot + UNAVAILABLE_CHANNEL_NUMS, Sage.createCommaDelimSetString(unavailChanNums));
  if (startSize1 != unavailStations.size() || startSize2 != unavailChanNums.size())
  {
    NetworkClient.distributeRecursivePropertyChange(EPG.EPG_DATA_SOURCES_KEY);
    EPG.getInstance().resetViewableStationsCache();
  }
}

/**
 * Rebuilds the unavailable-station set as all stations above the given
 * service level, persists it, and forces a full re-expand.
 * No-op when the level is unchanged.
 */
public void applyServiceLevel(int newLevel)
{
  if (appliedServiceLevel == newLevel) return;
  java.util.Set<Integer> badStations = new java.util.HashSet<Integer>();
  EPG epg = EPG.getInstance();
  int[] stations = epg.getAllStations(providerID);
  for (int i = 0; i < stations.length; i++)
  {
    if (epg.getServiceLevel(providerID, stations[i]) > newLevel)
      badStations.add(new Integer(stations[i]));
  }
  unavailStations = badStations;
  Sage.put(prefsRoot + UNAVAILABLE_STATIONS, Sage.createCommaDelimSetString(unavailStations));
  Sage.putInt(prefsRoot + APPLIED_SERVICE_LEVEL, appliedServiceLevel = newLevel);
  setExpandedUntil(0);
  synchronized (updateIDMap)
  {
    if (updateIDMap.keySet().removeAll(unavailStations))
    {
      saveUpdateMap();
    }
  }
  NetworkClient.distributeRecursivePropertyChange(EPG.EPG_DATA_SOURCES_KEY);
  EPG.getInstance().resetViewableStationsCache();
}

/** Serializes updateIDMap as "stationID,updateTime;..." into the SERVER_UPDATE_ID property. */
protected void saveUpdateMap()
{
  java.util.Iterator<java.util.Map.Entry<Integer, Long>> walker = updateIDMap.entrySet().iterator();
  StringBuilder sb = new StringBuilder();
  while (walker.hasNext())
  {
    java.util.Map.Entry ent = (java.util.Map.Entry) walker.next();
    sb.append(ent.getKey());
    sb.append(',');
    sb.append(ent.getValue());
    sb.append(';');
  }
  Sage.put(prefsRoot + SERVER_UPDATE_ID, sb.toString());
}

public boolean isChanDownloadComplete() { return chanDownloadComplete; }

public int getAppliedServiceLevel() { return appliedServiceLevel; }

protected String prefsRoot;                           // property-tree prefix for this source's settings
protected final int epgSourceID;                      // unique ID of this data source
protected Wizard wiz;                                 // database facade
protected boolean enabled;
protected String errorText = "";                      // accumulated status/error log
private int errTextPos;                               // read cursor for getNewErrorText()
protected String name;
protected boolean abort;                              // set by abortUpdate() to cancel in-progress work
protected String dumpDir;                             // optional debug dump directory
protected long providerID;
private long lastRun;                                 // last time expand() did real work
private long expandedUntil;                           // guide data is valid up to this time
private long scannedUntil;                            // DTV data scans are fresh up to this time
protected int appliedServiceLevel;
protected boolean chanDownloadComplete;
protected java.util.Set<Integer> unavailStations;     // stations the user disabled
protected java.util.Set<String> unavailChanNums;      // individual channel numbers the user disabled
protected java.util.Map<Integer, Long> updateIDMap;   // stationID -> time its scan data is valid until
protected boolean dataScanAllowed;
protected boolean dataScanRequested;                  // a scan request is pending with a capture device

/**
 * Called when removing data source from the EPG
 */
public void destroySelf()
{
  // do nothing by default.
}

/**
 * wake up this data source to perform any background updates
 */
public void kick()
{
  // do nothing by default
}

/*
 * Get the type of Maintenance that Wizard should apply
 * based on this EPG update
 */
public MaintenanceType getRequiredMaintenanceType()
{
  // Use the default daily timing for full maintenance.
  return MaintenanceType.NONE;
}
}
/*
 * Copyright 2015 xinjunli (micromagic@sina.com).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package self.micromagic.util.converter;

import java.text.NumberFormat;

import self.micromagic.cg.ClassGenerator;
import self.micromagic.eterna.share.EternaException;
import self.micromagic.eterna.share.TypeManager;
import self.micromagic.util.FormatTool;
import self.micromagic.util.container.RequestParameterMap;
import self.micromagic.util.ref.ObjectRef;
import self.micromagic.util.ref.StringRef;

/**
 * Converter that coerces arbitrary values (Number, String, Date, arrays,
 * ObjectRef, or anything a registered PropertyEditor can handle) into a
 * {@code long} / {@link Long}.
 */
public class LongConverter extends AbstractNumericalConverter
{
	// Shared boxed default returned by convert() when conversion fails and
	// needThrow is false. final + valueOf: it was a mutable non-final static
	// built with the deprecated Long constructor.
	private static final Long DEFAULT_VALUE = Long.valueOf(0L);

	// Optional format used when parsing String values; null means Long.parseLong.
	private NumberFormat numberFormat;

	public void setNumberFormat(NumberFormat numberFormat)
	{
		this.numberFormat = numberFormat;
	}

	/**
	 * Reports the eterna type handled by this converter.
	 *
	 * @param typeName receives the type name "long" when non-null
	 * @return {@link TypeManager#TYPE_LONG}
	 */
	public int getConvertType(StringRef typeName)
	{
		if (typeName != null)
		{
			typeName.setString("long");
		}
		return TypeManager.TYPE_LONG;
	}

	/**
	 * Converts, translating any conversion failure into the framework's
	 * standard type error.
	 *
	 * @throws EternaException if the value cannot be converted to long
	 */
	public long getResult(Object result) throws EternaException
	{
		try
		{
			return this.convertToLong(result);
		}
		catch (Exception ex)
		{
			throw getErrorTypeException(result, "long");
		}
	}

	public long convertToLong(Object value)
	{
		return this.convertToLong(value, this.numberFormat);
	}

	/**
	 * Converts an arbitrary value to long. Null-like values yield 0; Numbers
	 * are truncated; Dates yield their epoch millis; Strings are parsed;
	 * arrays use their first parameter value; ObjectRefs are unwrapped.
	 *
	 * @throws ClassCastException when no conversion applies
	 */
	public long convertToLong(Object value, NumberFormat format)
	{
		if (this.isNull(value))
		{
			return 0L;
		}
		if (value instanceof Number)
		{
			return ((Number) value).longValue();
		}
		if (value instanceof String)
		{
			return this.convertToLong((String) value, format);
		}
		if (value instanceof java.util.Date)
		{
			return ((java.util.Date) value).getTime();
		}
		// Give any registered PropertyEditor a chance before the structural cases
		Object tmpObj = this.changeByPropertyEditor(value);
		if (tmpObj instanceof Long)
		{
			return ((Long) tmpObj).longValue();
		}
		if (ClassGenerator.isArray(value.getClass()))
		{
			String str = RequestParameterMap.getFirstParam(value);
			return this.convertToLong(str, format);
		}
		if (value instanceof ObjectRef)
		{
			ObjectRef ref = (ObjectRef) value;
			if (ref.isNumber())
			{
				return ref.longValue();
			}
			else if (ref.isString())
			{
				return this.convertToLong(ref.toString(), format);
			}
			else
			{
				return this.convertToLong(ref.getObject(), format);
			}
		}
		throw new ClassCastException(getCastErrorMessage(value, "long"));
	}

	public long convertToLong(String value)
	{
		return this.convertToLong(value, this.numberFormat);
	}

	/**
	 * Parses a String to long, via the given format when present, otherwise
	 * via a PropertyEditor or {@link Long#parseLong}.
	 *
	 * @throws ClassCastException when parsing fails
	 */
	public long convertToLong(String value, NumberFormat format)
	{
		if (this.isNull(value))
		{
			return 0L;
		}
		try
		{
			if (format == null)
			{
				Object tmpObj = this.changeByPropertyEditor(value);
				if (tmpObj instanceof Long)
				{
					return ((Long) tmpObj).longValue();
				}
				return Long.parseLong(value);
			}
			else
			{
				// Thread-confined copy of the format: NumberFormat isn't thread-safe
				return FormatTool.getThreadFormat(format).parse(value).longValue();
			}
		}
		catch (Exception ex) {} // fall through to the cast error below
		throw new ClassCastException(getCastErrorMessage(value, "long"));
	}

	/**
	 * Boxing variant: null-like input yields null; failures are handled per
	 * the needThrow flag (rethrow vs. shared default).
	 */
	public Object convert(Object value)
	{
		if (this.isNull(value))
		{
			return null;
		}
		if (value instanceof Long)
		{
			return value;
		}
		try
		{
			return Long.valueOf(this.convertToLong(value));
		}
		catch (Exception ex)
		{
			return this.failureResult(value, ex);
		}
	}

	/**
	 * Boxing variant for Strings; same failure policy as {@link #convert(Object)}.
	 */
	public Object convert(String value)
	{
		if (this.isNull(value))
		{
			return null;
		}
		try
		{
			return Long.valueOf(this.convertToLong(value));
		}
		catch (Exception ex)
		{
			return this.failureResult(value, ex);
		}
	}

	/**
	 * Shared failure handling for the convert() overloads (previously duplicated
	 * inline): rethrow runtime exceptions when needThrow is set, otherwise report
	 * a cast error or fall back to the default value.
	 */
	private Object failureResult(Object value, Exception ex)
	{
		if (this.needThrow)
		{
			if (ex instanceof RuntimeException)
			{
				throw (RuntimeException) ex;
			}
			throw new ClassCastException(getCastErrorMessage(value, "long"));
		}
		return DEFAULT_VALUE;
	}

}
package edu.unl.act.rma.firm.streamflow.component;

import java.rmi.RemoteException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.NoSuchElementException;

import javax.ejb.EJB;
import javax.ejb.Remote;
import javax.ejb.Stateless;
import javax.jws.WebMethod;
import javax.sql.DataSource;

import edu.unl.act.rma.firm.core.DataSourceInjector;
import edu.unl.act.rma.firm.core.DataSourceTypes;
import edu.unl.act.rma.firm.core.LogManager;
import edu.unl.act.rma.firm.core.Logger;
import edu.unl.act.rma.firm.core.Loggers;
import edu.unl.act.rma.firm.core.StationList;
import edu.unl.act.rma.firm.core.component.SpatialQuery;
import edu.unl.act.rma.firm.core.spatial.BoundingBox;
import edu.unl.act.rma.firm.core.spatial.USCounty;
import edu.unl.act.rma.firm.core.spatial.USRegion;
import edu.unl.act.rma.firm.core.spatial.USState;

/**
 * Stateless EJB exposing spatial lookups over the stream-flow station table:
 * stations by zip code radius, by state / region / bounding box, by distance
 * from a point, and the county a station belongs to.
 */
@Stateless
@Remote( { StreamFlowSpatialExtension.class })
public class StreamFlowSpatialExtensionBean implements
		StreamFlowSpatialExtension {

	private static final Logger LOG = LogManager.getLogger(Loggers.COMPONENT_LOG,
			StreamFlowSpatialExtensionBean.class);

	/** Looks up the centroid of a zip code in the FIRM system database. */
	private static final String ZIP_QUERY = "select lat, lon from ZipCodes where zip_code = ?";

	/**
	 * Great-circle distance (miles) filter; the literal LAT/LON placeholders
	 * are substituted textually before preparing the statement because they
	 * appear inside function expressions, not value positions. The values
	 * come from our own ZipCodes table, not from user input.
	 */
	private static final String STATION_QUERY = "select station_id FROM station WHERE 1=1 AND 3963.191 * ACOS( (SIN(PI()* LAT /180)*SIN(PI() * latitude/180)) + "
			+ "(COS(PI()* LAT /180)*cos(PI()*latitude/180)*COS(PI() * longitude/180-PI()* LON/180))) <= ? ORDER BY 3963.191 * ACOS((SIN(PI()* LAT /180)*SIN(PI()*latitude/180)) "
			+ "+ (COS(PI()* LAT /180)*cos(PI()*latitude/180)*COS(PI() * longitude/180-PI()* LON/180)))";

	/** Stream-flow database (station table). */
	private DataSource source = DataSourceInjector
			.injectDataSource(DataSourceTypes.STREAM_FLOW);

	/** FIRM system database (ZipCodes table). */
	private DataSource firm_source = DataSourceInjector
			.injectDataSource(DataSourceTypes.SYSTEM);

	@EJB(name = "SpatialQuery", beanInterface = SpatialQuery.class)
	private SpatialQuery spatialQuery;

	/**
	 * Returns the ids of all stations within {@code distance} miles of the
	 * centroid of {@code zipCode}, nearest first. Unknown zip codes yield an
	 * empty list.
	 */
	@WebMethod
	public List<String> getStationsByZipCode(String zipCode, int distance)
			throws RemoteException {

		Connection zip_conn = null;
		Connection station_conn = null;
		ArrayList<String> stations = new ArrayList<String>();

		PreparedStatement zip_stmt = null;
		ResultSet zip_code_result = null;
		PreparedStatement station_query = null;
		ResultSet station_results = null;

		try {
			zip_conn = firm_source.getConnection();
			station_conn = source.getConnection();

			zip_stmt = zip_conn.prepareStatement(ZIP_QUERY);
			zip_stmt.setString(1, zipCode);

			zip_code_result = zip_stmt.executeQuery();
			if (zip_code_result.next()) {
				String lat = zip_code_result.getString(1);
				String lon = zip_code_result.getString(2);

				// Substitute the centroid coordinates into the distance
				// expression; the radius itself is bound as a parameter.
				String station_query_string = STATION_QUERY.replaceAll("LAT",
						lat);
				station_query_string = station_query_string.replaceAll("LON",
						lon);

				station_query = station_conn
						.prepareStatement(station_query_string);
				station_query.setInt(1, distance);

				// Diagnostic output; previously logged at ERROR level.
				LOG.debug("query string: " + station_query_string);
				station_results = station_query.executeQuery();
				while (station_results.next()) {
					stations.add(station_results.getString(1));
				}
			}
		} catch (SQLException sqe) {
			LOG.error("sql exception querying stations", sqe);
			RemoteException re = new RemoteException();
			re.initCause(sqe);
			throw re;
		} catch (Exception e) {
			LOG.error("unknown exception creating meta result", e);
			RemoteException re = new RemoteException();
			re.initCause(e);
			throw re;
		} finally {
			try {
				if (zip_code_result != null) {
					zip_code_result.close();
				}
				if (station_results != null) {
					station_results.close();
				}
				if (zip_stmt != null) {
					zip_stmt.close();
				}
				if (station_query != null) {
					station_query.close();
				}
				if (zip_conn != null) {
					zip_conn.close();
				}
				if (station_conn != null) {
					station_conn.close();
				}
			} catch (SQLException sqe2) {
				LOG.error("could not close the connection", sqe2);
				throw new RemoteException("could not close connection");
			}
		}

		return stations;
	}

	/**
	 * Returns the {@link USCounty} a station lies in, or null when no match
	 * can be found (unknown station or county not in the spatial index).
	 */
	@WebMethod
	public USCounty getCounty(String stationID) throws RemoteException {

		Connection station_conn = null;
		PreparedStatement station_query = null;
		ResultSet station_results = null;
		String county_name = null;
		String state_name = null;

		try {
			station_conn = source.getConnection();
			station_query = station_conn
					.prepareStatement("select county, state from station where station.station_id = ?");
			station_query.setString(1, stationID);

			station_results = station_query.executeQuery();
			if (station_results.next()) {
				county_name = station_results.getString(1);
				state_name = station_results.getString(2);
			} else {
				// Previously fell through with null names and failed inside
				// USState.valueOf(null); report the miss and return null.
				LOG.warn("no station found for id: " + stationID);
				return null;
			}

			try {
				return spatialQuery.searchCountiesByState(county_name,
						USState.valueOf(state_name)).iterator().next();
			} catch (NoSuchElementException nse) {
				LOG.warn("No county could be found for: " + county_name
						+ " in state " + state_name);
				return null;
			}
		} catch (SQLException sqe) {
			LOG.error("sql exception querying stations", sqe);
			RemoteException re = new RemoteException();
			re.initCause(sqe);
			throw re;
		} catch (Exception e) {
			LOG.error("unknown exception creating meta result", e);
			RemoteException re = new RemoteException();
			re.initCause(e);
			throw re;
		} finally {
			try {
				if (station_results != null) {
					station_results.close();
				}
				if (station_query != null) {
					station_query.close();
				}
				if (station_conn != null) {
					station_conn.close();
				}
			} catch (SQLException sqe2) {
				LOG.error("could not close the connection", sqe2);
				throw new RemoteException("could not close connection");
			}
		}
	}

	/**
	 * Wraps {@link #getStationsForDefinedRegion(BoundingBox)} in a
	 * {@link StationList} for the web-service layer.
	 */
	@WebMethod
	public StationList queryStations(BoundingBox region)
			throws RemoteException {
		List<String> stations = this.getStationsForDefinedRegion(region);
		StationList list = new StationList();
		list.setStations(stations);
		return list;
	}

	/**
	 * Returns the ids of all stations in {@code state}, ordered by station
	 * name.
	 */
	@WebMethod
	public List<String> getStationsForState(USState state)
			throws RemoteException {
		Connection conn = null;
		ArrayList<String> station_ids = new ArrayList<String>();
		PreparedStatement stmt = null;
		ResultSet station_query = null;
		try {
			conn = source.getConnection();
			stmt = conn
					.prepareStatement("select station_id from station where state = ? order by station_name");
			stmt.setString(1, state.name());

			station_query = stmt.executeQuery();
			while (station_query.next()) {
				station_ids.add(station_query.getString(1));
			}
		} catch (SQLException sqe) {
			LOG.error("sql exception querying meta data", sqe);
			throw new RemoteException(
					"unable to query metadata from datasource");
		} finally {
			try {
				if (station_query != null) {
					station_query.close();
				}
				if (stmt != null) {
					stmt.close();
				}
				if (conn != null) {
					conn.close();
				}
			} catch (SQLException sqe2) {
				LOG.error("could not close the connection", sqe2);
				throw new RemoteException("could not close connection");
			}
		}
		return station_ids;
	}

	/**
	 * Returns the ids of all stations in every state of the geographic
	 * {@code region}, ordered by state then station name within each state.
	 */
	@WebMethod
	public List<String> getStationsForGeographicRegion(USRegion region)
			throws RemoteException {
		Connection conn = null;
		ArrayList<String> station_ids = new ArrayList<String>();
		PreparedStatement stmt = null;
		ResultSet station_query = null;
		try {
			conn = source.getConnection();
			stmt = conn
					.prepareStatement("select station_id from station where state = ? order by state, station_name");
			// One statement, re-executed per state; executeQuery closes the
			// previous ResultSet per the JDBC contract.
			for (USState state : USState.getStatesByRegion(region)) {
				stmt.setString(1, state.name());

				station_query = stmt.executeQuery();
				while (station_query.next()) {
					station_ids.add(station_query.getString(1));
				}
			}
		} catch (SQLException sqe) {
			LOG.error("sql exception querying meta data", sqe);
			throw new RemoteException(
					"unable to query metadata from datasource");
		} finally {
			try {
				if (station_query != null) {
					station_query.close();
				}
				if (stmt != null) {
					stmt.close();
				}
				if (conn != null) {
					conn.close();
				}
			} catch (SQLException sqe2) {
				LOG.error("could not close the connection", sqe2);
				throw new RemoteException("could not close connection");
			}
		}
		return station_ids;
	}

	/**
	 * Returns the ids of all stations whose coordinates fall inside the
	 * bounding box, ordered by state then station name.
	 */
	@WebMethod
	public List<String> getStationsForDefinedRegion(BoundingBox region)
			throws RemoteException {
		Connection conn = null;
		ArrayList<String> station_ids = new ArrayList<String>();
		PreparedStatement stmt = null;
		ResultSet station_query = null;
		try {
			conn = source.getConnection();
			stmt = conn
					.prepareStatement("select station_id from station where longitude >= ? and "
							+ "longitude <= ? and latitude >= ? and latitude <= ? order by state, station_name");
			stmt.setDouble(1, region.getWest());
			stmt.setDouble(2, region.getEast());
			stmt.setDouble(3, region.getSouth());
			stmt.setDouble(4, region.getNorth());

			station_query = stmt.executeQuery();
			while (station_query.next()) {
				station_ids.add(station_query.getString(1));
			}
		} catch (SQLException sqe) {
			LOG.error("sql exception querying meta data", sqe);
			throw new RemoteException(
					"unable to query metadata from datasource");
		} finally {
			try {
				if (station_query != null) {
					station_query.close();
				}
				if (stmt != null) {
					stmt.close();
				}
				if (conn != null) {
					conn.close();
				}
			} catch (SQLException sqe2) {
				LOG.error("could not close the connection", sqe2);
				throw new RemoteException("could not close connection");
			}
		}
		return station_ids;
	}

	/**
	 * Returns the ids of all stations within {@code distance} miles (3959 mi
	 * earth radius haversine) of the given point.
	 */
	@Override
	public List<String> getStationsFromPoint(float lat, float lon, int distance)
			throws RemoteException {

		Connection conn = null;
		PreparedStatement stmt = null;
		ResultSet station_ids = null;

		try {
			conn = source.getConnection();
		} catch (Exception e) {
			LOG.error("could not get the connection", e);
			RemoteException re = new RemoteException(
					"could not get a connection");
			re.initCause(e);
			throw re;
		}

		try {
			stmt = conn
					.prepareStatement("SELECT station_id, ( 3959 * acos( cos( radians(?) ) * cos( radians( latitude ) ) * "
							+ "cos( radians( longitude ) - radians(?) ) + sin( radians(?) ) * sin( radians( latitude ) ) ) ) AS distance FROM "
							+ "station HAVING distance < ?;");
			stmt.setFloat(1, lat);
			stmt.setFloat(2, lon);
			stmt.setFloat(3, lat);
			stmt.setFloat(4, distance);

			ArrayList<String> results = new ArrayList<String>();
			station_ids = stmt.executeQuery();
			while (station_ids.next()) {
				results.add(station_ids.getString(1));
			}
			return results;
		} catch (Exception e) {
			LOG.error("could not query the data", e);
			RemoteException re = new RemoteException();
			re.initCause(e);
			throw re;
		} finally {
			// Close statement and result set explicitly instead of relying
			// on Connection.close() to cascade.
			try {
				if (station_ids != null) {
					station_ids.close();
				}
				if (stmt != null) {
					stmt.close();
				}
				if (conn != null) {
					conn.close();
				}
			} catch (Exception e) {
				LOG.warn("could not close a connection", e);
			}
		}
	}

}
package org.leibnizcenter.rechtspraak.tokens.tokentree;

import com.google.common.collect.Maps;
import org.leibnizcenter.rechtspraak.tagging.DeterministicTagger;
import org.leibnizcenter.rechtspraak.tagging.Label;
import org.leibnizcenter.rechtspraak.tokens.LabeledToken;
import org.leibnizcenter.rechtspraak.tokens.RechtspraakElement;
import org.leibnizcenter.rechtspraak.tokens.TokenList;
import org.leibnizcenter.rechtspraak.tokens.numbering.ListMarking;
import org.leibnizcenter.rechtspraak.tokens.numbering.Numbering;
import org.leibnizcenter.rechtspraak.tokens.quote.Quote;
import org.leibnizcenter.rechtspraak.tokens.text.Newline;
import org.leibnizcenter.rechtspraak.tokens.text.TextElement;
import org.leibnizcenter.rechtspraak.tokens.text.TokenTreeLeaf;
import org.leibnizcenter.util.Collections3;
import org.leibnizcenter.util.Regex;
import org.leibnizcenter.util.Xml;
import org.leibnizcenter.rechtspraak.tokens.text.IgnoreElement;
import org.w3c.dom.*;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static org.leibnizcenter.rechtspraak.tokens.tokentree.leibniztags.LeibnizTags.*;

/**
 * Tree of tokens built from a Rechtspraak XML document fragment. Interior
 * vertices wrap XML elements; leaves are {@link TokenTreeLeaf} instances
 * (text blocks, numberings, quotes, newlines). Construction MUTATES the
 * underlying DOM (wrapping text runs in synthetic elements), so the order
 * of operations below is significant.
 * <p>
 * Created by maarten on 27-3-16.
 */
public class TokenTree implements TokenTreeVertex {
    // Child vertices in document order.
    private final List<TokenTreeVertex> children;
    // The DOM node this vertex wraps.
    private final Node element;

    public static final String TAG_SECTION = "section";
    public static final String TAG_TITLE = "title";
    public static final String TAG_NR = "nr";
    public static final String TAG_PARA = "para";

    // Element names we know how to handle; anything else is reported to
    // stderr in fromElement(Element).
    private static final Pattern KNOWN_ELEMENTS = Pattern.compile("(itemized|ordered)list" +
            "|footnote" +
            "|footnote-ref" +
            "|listmarking" +
            "|listitem" +
            "|link" +
            "|" + TAG_POTENTIAL_NR +
            "|" + TAG_NR +
            "|" + TAG_TEXT +
            "|" + TAG_TEXTGROUP +
            "|" + TAG_QUOTE +
            "|emphasis" +
            "|section" +
            "|bridgehead" +
            "|(uitspraak|conclusie)\\.info" +
            "|(rs:)?para(block|group)?" +
            "|" + TAG_TITLE +
            "|(informal)?table" +
            "|(inline)?mediaobject" +
            "");

    /**
     * Builds a token tree from {@code root}, converting each child node to
     * a vertex. Whitespace-only text children are skipped.
     */
    public TokenTree(Element root) {
        element = root;
        children = new ArrayList<>();

        // Make sure we have a reference to all children as they are now; the XML tree might change.
        Node[] originalChildren = Xml.getChildren(root);

        for (Node child : originalChildren) {
            switch (child.getNodeType()) {
                case Node.TEXT_NODE:
                    String text = child.getTextContent();
                    if (text.length() != 0
                            && !Regex.CONSECUTIVE_WHITESPACE.matcher(text).matches()) {
                        // Only process non-whitespace blocks of text
                        children.add(fromTextNode((Text) child));
                    }
                    break;
                case Node.ELEMENT_NODE:
                    children.add(fromElement((Element) child));
                    break;
                case Node.PROCESSING_INSTRUCTION_NODE:
                    children.add(fromProcessingInstruction((ProcessingInstruction) child));
                    break;
                default:
                    throw new IllegalStateException("Unknown node type found");
            }
        }
    }

    /**
     * Wraps an existing node with pre-built children (no DOM traversal).
     */
    public TokenTree(Node e, List<TokenTreeVertex> children) {
        this.element = e;
        this.children = children;
    }

    /**
     * Converts a processing instruction to a vertex; only line-break PIs
     * are recognized.
     */
    private static TokenTreeVertex fromProcessingInstruction(ProcessingInstruction pi) {
        switch (pi.getTarget()) {
            case "linebreak":
            case "breakline":
                return new Newline(pi);
            default:
                throw new InvalidParameterException("Unknown PI found: " + pi.getTarget());
        }
    }

    /**
     * Maps each leaf to the {@link Label} recorded in its
     * {@code manualAnnotation} attribute.
     */
    public static List<Label> labelFromAnnotation(List<TokenTreeLeaf> l) {
        return l.stream()
                .map(TokenTree::getFromAnnotation)
                .collect(Collectors.toList());
    }

    /**
     * Reads the label from a leaf's manual annotation. Unannotated list
     * markings, quotes and footnote refs default to TEXT_BLOCK; any other
     * unannotated element is a hard error (NullPointerException).
     */
    private static Label getFromAnnotation(TokenTreeLeaf el) {
        // TODO inline label based on whether a text node follows element
        if (el instanceof RechtspraakElement) {
            Label l = Label.fromString.get(((RechtspraakElement) el).getAttribute("manualAnnotation"));
            if (l == null)
                if (el instanceof ListMarking ||
                        // el instanceof IgnoreElement ||
                        ((RechtspraakElement) el).getTagName().matches("quote|footnote\\-ref"))
                    return Label.TEXT_BLOCK;
                else throw new NullPointerException();
            return l;
        } else if (el instanceof Newline) {
            return Label.NEWLINE;
        } else {
            throw new IllegalStateException();
        }
    }

    /**
     * Labels every token by inferring its label from the XML structure
     * (see {@link #inferLabelFromXmlStructure}).
     */
    public static List<LabeledToken> labelFromXmlTags(List<TokenTreeLeaf> tokens) {
        ArrayList<LabeledToken> ll = new ArrayList<>(tokens.size());
        for (int i = 0; i < tokens.size(); i++) {
            ll.add(
                    new LabeledToken(tokens.get(i), inferLabelFromXmlStructure(tokens, i))
            );
        }
        return ll;
    }

    /**
     * Infers a label for token {@code i} from its Java type; defaults to
     * TEXT_BLOCK.
     */
    private static Label inferLabelFromXmlStructure(
            List<TokenTreeLeaf> tokens, int i) {
        TokenTreeLeaf el = tokens.get(i);
        Label label = Label.TEXT_BLOCK; // Default: out

        if (el instanceof Newline) label = Label.NEWLINE;
            // else if (el instanceof Quote) label = Label.QUOTE;
        else if (el instanceof Numbering) {
            label = Label.getNumberingType(tokens, i);
        }//todo
        return label;
    }

    /**
     * Whether {@code el} sits inside a &lt;title&gt; that is itself inside
     * a &lt;section&gt;.
     */
    private static boolean isTitleInSection(TokenTreeLeaf el) {
        Element title = Xml.getParentWithTagName(el, TAG_TITLE);
        if (title != null) {
            Element section = Xml.getParentWithTagName(title, TAG_SECTION);
            return section != null;
        } else {
            return false;
        }
    }

    /**
     * Converts an element to a vertex. A {@code manualAnnotation} attribute
     * overrides everything; otherwise dispatch is on the tag name. Empty
     * para/text/emphasis elements count as newlines.
     */
    private static TokenTreeVertex fromElement(Element e) {
        if (!KNOWN_ELEMENTS.matcher(e.getNodeName()).matches())
            System.err.println("? " + e.getNodeName() + " ?");
        if (e.hasAttribute("manualAnnotation")) {
            switch (Label.fromString.get(e.getAttribute("manualAnnotation"))) {
                case NEWLINE:
                    return new Newline(e);
                case NR:
                    return new Numbering(e, true);
                case SECTION_TITLE:
                    return new TextElement(e);
                case TEXT_BLOCK:
                    return new TextElement(e);
                default:
                    throw new Error();
            }
        }
        switch (e.getTagName()) {
            case "para":
            case "text":
            case "emphasis":
                // If it's empty, this counts as a newline
                String textContent1 = e.getTextContent();
                if (textContent1 == null
                        || textContent1.length() == 0
                        || Regex.CONSECUTIVE_WHITESPACE.matcher(textContent1).matches())
                    return new Newline(e);

                // If this is a node with a numbering, create a <potentialnumber/> in front
                return fromPotentiallyMixed(e);
            case "nr":
                if (e.getTextContent() == null || e.getTextContent().length() == 0)
                    return new TextElement(e);
                // NOTE: intentional fall-through — a non-empty <nr> is
                // handled exactly like <potentialnr>, with isNr == true.
            case "potentialnr":
                return new Numbering(e, e.getTagName().equals("nr"));
            case "listmarking":
                return new TextElement(e);
            case "quote":
                return new Quote(e);
            default:
                if (IgnoreElement.dontTokenize(e.getTagName()))
                    return new IgnoreElement(e);
                return new TokenTree(e);
        }
    }

//    /**
//     * Parse all docs
//     */
//    public static void main(String[] args) throws IOException, SAXException, ParserConfigurationException {
//        List<File> xmlFiles = Xml.listXmlFiles(new File(Xml.OUT_FOLDER_AUTOMATIC_TAGGING), -1, false);
//        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
//        DocumentBuilder builder = factory.newDocumentBuilder();
//
//        int i = 0;
//        for (File file : xmlFiles) {
//            FileInputStream is = new FileInputStream(file);
//            builder.parse(new InputSource(new InputStreamReader(is)));
//        }
//    }

    /**
     * Converts an element with mixed content (elements, text, PIs) to a
     * vertex. Leading/trailing whitespace-only text is dropped; text runs
     * are scanned for numberings and wrapped in synthetic elements. An
     * element that yields no token children becomes a {@link Newline}.
     */
    private static TokenTreeVertex fromPotentiallyMixed(Element e) {
        Node[] children = Xml.getChildren(e);
        ArrayList<TokenTreeVertex> tokenSiblings = new ArrayList<>(children.length);
        for (int i = 0; i < children.length; i++) {
            Node c = children[i];
            ///////////////////////////
            switch (c.getNodeType()) {
                case Element.ELEMENT_NODE:
                    Element childE = (Element) c;
                    tokenSiblings.add(fromElement(childE));
                    break;
                case Element.TEXT_NODE:
                    String text = c.getTextContent();
                    // If we're not trailing or leading whitespace
                    if (!((i == 0 || i == children.length - 1)
                            && Regex.CONSECUTIVE_WHITESPACE.matcher(text).matches())) {
                        List<TokenTreeVertex> numberings = findNumberings((Text) c);
                        if (numberings.size() > 0) {
                            tokenSiblings.addAll(numberings);
                        } else {
                            // Sole text child: the whole element is one text block.
                            if (children.length == 1) return new TextElement(e);
                            else {
                                if (text.trim().length() > 0) {
                                    Element wrapped = Xml.wrapNodeInElement(c, TAG_TEXT);
                                    tokenSiblings.add(new TextElement(wrapped));
                                }
                            }
                        }
                    }
                    break;
                case Element.PROCESSING_INSTRUCTION_NODE:
                    tokenSiblings.add(fromProcessingInstruction((ProcessingInstruction) c));
                    break;
                default:
                    throw new IllegalStateException();
            }
            //////////////////////////////////////////////////////
        }
        if (tokenSiblings.size() > 0) {
            return new TokenTree(e, tokenSiblings);
        } else {
            return new Newline(e);
        }
    }

    /**
     * Splits a text node into quote / list-marking / numbering / text
     * tokens, wrapping each recognized span in a synthetic element and
     * advancing {@code txt} to the remainder after each wrap. Returns an
     * empty list when nothing was recognized.
     */
    private static List<TokenTreeVertex> findNumberings(Text txt) {
        List<TokenTreeVertex> children = new ArrayList<>();
        int index = Quote.startsWithQuoteAtChar(txt.getTextContent());
        if (index > -1) {
            Element quoteElement = Xml.wrapSubstringInElement(txt, index, 1, TAG_QUOTE);
            txt = (Text) quoteElement.getNextSibling();
            children.add(new Quote(quoteElement));
        }

        int listMarking = ListMarking.startsWithListMarkingAtChar(txt.getTextContent());
        if (listMarking > -1) {
            Element element = Xml.wrapSubstringInElement(txt, 0, listMarking + 1, TAG_LIST_MARKING);
            txt = (Text) element.getNextSibling();
            children.add(new ListMarking(element));
        }

        CharSequence textContent = txt.getTextContent();
        Matcher numberMatcher = Regex.START_WITH_NUM.matcher(textContent);
        // NOTE(review): the date guards keep testing the ORIGINAL
        // textContent even though the matcher is re-created from the
        // shrinking txt inside the loop — looks intentional but confirm.
        while (numberMatcher.find()
                && !Regex.YYYY_MM_DD.matcher(textContent).find()
                && !Regex.DD_MON_YYYY.matcher(textContent).find()) {
            Element potentialNr = Xml.wrapSubstringInElement(txt,
                    numberMatcher.start(1), numberMatcher.end(1) - numberMatcher.start(1),
                    TAG_POTENTIAL_NR);
            txt = (Text) potentialNr.getNextSibling();
            children.add(new Numbering(potentialNr, false));
            numberMatcher = Regex.START_WITH_NUM.matcher(txt.getTextContent());
        }
        if (children.size() > 0) {
            // Whatever text remains after the recognized tokens becomes a
            // trailing text element.
            if (txt.getTextContent().length() > 0)
                children.add(new TextElement(Xml.wrapNodeInElement(txt, TAG_TEXT)));
        }
        return children;
    }

    /**
     * Converts a free-standing text node: a leading quote makes it a
     * subtree, recognized numberings make it a token group, otherwise it is
     * wrapped in a &lt;text&gt; element.
     */
    private static TokenTreeVertex fromTextNode(Text child) {
        String textContent = child.getTextContent();
        Matcher quoteMatcher = Quote.START_WITH_QUOTE.matcher(textContent);
        if (quoteMatcher.find()) {
            Element wrappedText = Xml.wrapNodeInElement(child, TAG_TEXTGROUP);
            if (quoteMatcher.groupCount() < 2) System.err.println("No group 1");
            Xml.wrapSubstringInElement(child,
                    quoteMatcher.start(1), quoteMatcher.end(1) - quoteMatcher.start(1),
                    null, TAG_QUOTE, null);
            return new TokenTree(wrappedText);
        }
        List<TokenTreeVertex> nums = findNumberings(child);
        if (nums.size() > 0) {
            return new TokenTree(child, nums);
        } else {
            // Wrap textnode in <text>
            Element wrappedText = Xml.wrapNodeInElement(child, TAG_TEXT);
            return new TextElement(wrappedText);
        }
    }

    /**
     * Return the pre-processed list of elements.
     * Collects the leaves in pre-order, links adjacent / same-profile /
     * plausible numbering sequences, and flags implausible numberings
     * (tainting any sequence they belong to).
     */
    public List<org.leibnizcenter.rechtspraak.tokens.text.TokenTreeLeaf> leafsInPreOrder() {
        List<TokenTreeLeaf> list = leafsInPreOrderRecursive();

        List<Map.Entry<Integer, Numbering>> allNumberings = new ArrayList<>(list.size());
        for (int i = 0; i < list.size(); i++) {
            if (list.get(i) instanceof Numbering)
                allNumberings.add(Maps.immutableEntry(i, (Numbering) list.get(i)));
        }

        // Get all sequences of adjacent numberings
        setAdjacentNumbers(list);
        setSameProfileNumberingsPreAndSuccessors(allNumberings);
        setPlausiblePreAndSuccessors(allNumberings);

        // Set whether numbers are plausible
        for (int i = 0; i < list.size(); i++) {
            org.leibnizcenter.rechtspraak.tokens.text.TokenTreeLeaf element = list.get(i);
            if (element instanceof Numbering) {
                boolean isImplausible = DeterministicTagger
                        .looksLikeNumberingButProbablyIsnt(list, i);
                if (isImplausible) {
                    // An implausible member taints every sequence it is in.
                    for (SameKindOfNumbering p : SameKindOfNumbering.values()) {
                        SameKindOfNumbering.List sequence = ((Numbering) element).getSequence(p);
                        if (!Collections3.isNullOrEmpty(sequence))
                            sequence.taintByImplausibleNumbering();
                    }
                }
                ((Numbering) element).isPlausibleNumbering = !isImplausible;
            }
        }
        return list;
    }

    /**
     * Depth-first collection of all leaves under this vertex.
     */
    private List<org.leibnizcenter.rechtspraak.tokens.text.TokenTreeLeaf> leafsInPreOrderRecursive() {
        List<org.leibnizcenter.rechtspraak.tokens.text.TokenTreeLeaf> list = new ArrayList<>();
        for (TokenTreeVertex child : this.children) {
            if (child instanceof TokenTreeLeaf) {
                list.add((TokenTreeLeaf) child);
            } else if (child instanceof TokenTree) {
                list.addAll(((TokenTree) child).leafsInPreOrderRecursive());
            } else {
                throw new IllegalStateException();
            }
        }
        return list;
    }

    /**
     * For every kind of numbering, groups maximal runs of adjacent tokens
     * that pass {@code p.test(prev, token)} into a shared sequence list
     * registered on each member. The loop runs one past the end so the
     * final run is closed.
     */
    private void setAdjacentNumbers(List<TokenTreeLeaf> list) {
        for (SameKindOfNumbering p : SameKindOfNumbering.values()) {
            SameKindOfNumbering.List l = new SameKindOfNumbering.List(p);
            for (int i = 1; i <= list.size(); i++) {
                TokenTreeLeaf prev = list.get(i - 1);

                if (i == list.size()) {
                    if (l.size() > 0) {
                        // Add last match
                        l.add((Numbering) prev);
                        ((Numbering) prev).setSequence(p, l);
                    }
                } else {
                    TokenTreeLeaf token = list.get(i);
                    if (p.test(prev, token)) {
                        l.add((Numbering) prev);// We add 'token' in the next iteration
                        ((Numbering) prev).setSequence(p, l);
                    } else if (l.size() > 0) {
                        // Add last match
                        l.add((Numbering) prev);
                        ((Numbering) prev).setSequence(p, l);
                        l = new SameKindOfNumbering.List(p); // Start with fresh list
                    }
                }
            }
        }
    }

    /**
     * Links every pair of numberings that form a same-profile succession
     * (O(n^2) pairwise scan).
     */
    public void setSameProfileNumberingsPreAndSuccessors(List<Map.Entry<Integer, Numbering>> allNumberings) {
        allNumberings.stream().forEach((numbering1) -> allNumberings.stream()
                .filter(numbering2 -> SameKindOfNumbering.isSameProfileSuccession(numbering1, numbering2))
                .forEach(numbering2 -> {
                    numbering1.getValue().addSameProfileSuccessor(numbering2);
                    numbering2.getValue().addSameProfilePredecessor(numbering1);
                }));
    }

    /**
     * Links every numbering to the later numberings that plausibly succeed
     * it (O(n^2) pairwise scan, ordered by position).
     */
    public void setPlausiblePreAndSuccessors(List<Map.Entry<Integer, Numbering>> allNumberings) {
        allNumberings.stream()
                .forEach((numbering1) -> allNumberings.stream()
                        .filter(numbering2 -> numbering2.getKey().compareTo(numbering1.getKey()) > 0
                                && numbering2.getValue().isSuccedentOf(numbering1.getValue()))
                        .forEach(numbering2 -> {
                            numbering1.getValue().addPlausibleSuccessor(numbering2);
                            numbering2.getValue().addPlausiblePredecessor(numbering1);
                        }));
    }
}
package com.millerjb.jenkins.plugin;

import com.millerjb.jenkins.plugin.event.PullRequestBuilder;
import com.millerjb.jenkins.plugin.event.PullRequestNotifyEvent;
import com.millerjb.stash.api.StashHttpConnection;
import com.millerjb.stash.api.StashHttpConnectionException;
import com.millerjb.stash.domain.PullRequest;
import com.millerjb.stash.response.StashResponse;
import com.millerjb.stash.response.StashResponseException;
import edu.umd.cs.findbugs.annotations.Nullable;
import hudson.Extension;
import hudson.model.*;
import hudson.plugins.git.GitSCM;
import hudson.plugins.git.GitStatus;
import hudson.plugins.git.UserRemoteConfig;
import hudson.scm.SCM;
import hudson.triggers.Trigger;
import hudson.triggers.TriggerDescriptor;
import hudson.util.FormValidation;
import jenkins.model.Jenkins;
import org.apache.http.HttpStatus;
import org.eclipse.jgit.transport.URIish;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;

/**
 * Jenkins trigger that schedules builds for Stash pull requests when a git
 * commit notification arrives. Configuration (base URL, credentials) is
 * data-bound from the job config page; runtime state (the owning project
 * and the event builder) is transient and re-created in {@link #start}.
 */
public class PullRequestTrigger extends Trigger<AbstractProject> implements PullRequestNotifyListener {

    private static final Logger logger = LoggerFactory.getLogger(PullRequestTrigger.class);

    // Package-private, non-final: persisted by Jenkins' XStream
    // serialization of the trigger configuration.
    String baseUrl;
    String username;
    String password;

    // Re-initialized in start(); not persisted.
    private transient AbstractProject myProject;
    private transient PullRequestBuilder eventBuilder;

    /**
     * Data-bound from the job configuration form.
     */
    @DataBoundConstructor
    public PullRequestTrigger(String baseUrl, String username, String password) {
        super();
        this.baseUrl = baseUrl;
        this.username = username;
        this.password = password;
    }

    /**
     * Captures the owning project and builds the Stash-backed event builder.
     * NOTE(review): createConnection() can return null on a bad base URL,
     * which would surface later as an NPE inside eventBuilder — confirm.
     */
    @Override
    public void start(AbstractProject project, boolean newInstance) {
        super.start(project, newInstance);
        myProject = project;
        eventBuilder = new PullRequestBuilder(createConnection());
    }

    @Override
    public void stop() {
        super.stop();
    }

    public String getBaseUrl() {
        return baseUrl;
    }

    public String getUsername() {
        return username;
    }

    public String getPassword() {
        return password;
    }

    /**
     * Builds a Stash connection from the configured base URL and
     * credentials; returns null when the base URL does not parse.
     */
    private StashHttpConnection createConnection() {
        StashHttpConnection conn = new StashHttpConnection();
        try {
            conn.setBaseUrl(baseUrl);
        } catch (URISyntaxException e) {
            logger.error("BaseUrl is invalid", e);
            return null;
        }
        conn.setCredentials(username, password);
        return conn;
    }

    /**
     * Handles a notify event: when it matches this job's git remote,
     * schedules one build per open pull request newer than the last build
     * and returns the causes of the scheduled builds.
     */
    @Override
    public List<Cause> pullRequestNotifyEvent(PullRequestNotifyEvent event) {
        List<Cause> causes = new ArrayList<>();
        if (isInteresting(event)) {
            List<PullRequest> pullRequests = eventBuilder.generateEvents(event, getLastBuildInMillis());
            for (PullRequest pullRequest : pullRequests) {
                causes.add(scheduleBuild(pullRequest));
            }
        }
        return causes;
    }

    /**
     * @return the timestamp of the last build (in millis) or <tt>0</tt>
     */
    private long getLastBuildInMillis() {
        return myProject.getLastBuild() != null ? myProject.getLastBuild().getTimeInMillis() : 0;
    }

    /**
     * Schedules a build with environment variables for the pull request.
     *
     * @return The {@link Cause} given to the scheduling engine.
     */
    private Cause scheduleBuild(PullRequest request) {
        List<ParameterValue> values = new ArrayList<>();
        // Exposed to the build as STASH_* environment variables.
        values.add(new StringParameterValue("STASH_PULL_REQUEST_ID", String.valueOf(request.getId())));
        values.add(new StringParameterValue("STASH_PULL_REQUEST_TO_REF", request.getToRef().getDisplayId()));
        values.add(new StringParameterValue("STASH_PULL_REQUEST_FROM_CHANGESET", request.getFromRef().getLatestChangeset()));
        Cause cause = new PullRequestCause(request.getFromRef().getRepository().getProject().getKey(),
                request.getFromRef().getRepository().getSlug(), request.getId());
        myProject.scheduleBuild2(0, cause, new ParametersAction(values));
        return cause;
    }

    /**
     * Determine if a {@link PullRequestNotifyEvent} is interesting to this trigger. An event is considered "interesting"
     * if the following conditions are met:
     * - the trigger is enabled
     * - the trigger contains a {@link GitSCM} with a git URL that matches {@link PullRequestNotifyEvent#getUri()}
     */
    private boolean isInteresting(PullRequestNotifyEvent event) {
        if (!myProject.isBuildable()) {
            logger.trace("Disabled.");
            return false;
        }
        SCM scm = myProject.getScm();
        if (scm == null) {
            logger.trace("No SCM configured");
            return false;
        }
        if (scm instanceof GitSCM) {
            GitSCM gitScm = (GitSCM) scm;
            // Exact string match between the configured remote URL and the
            // URL in the notification.
            for (UserRemoteConfig userRemoteConfig : gitScm.getUserRemoteConfigs()) {
                if (userRemoteConfig.getUrl().equals(event.getUri().toString())) {
                    return true;
                }
            }
        }
        logger.trace("No matching GitSCM configured");
        return false;
    }

    /**
     * Add support for unverified SSL certificates
     */
    @Extension
    public static final class DescriptorImpl extends TriggerDescriptor {

        // NOTE(review): logs under the outer class's name, not
        // DescriptorImpl — possibly intentional, confirm.
        private static final Logger logger = LoggerFactory.getLogger(PullRequestTrigger.class);

        public DescriptorImpl() {
            super(PullRequestTrigger.class);
        }

        @Override
        public boolean isApplicable(Item item) {
            return true;
        }

        @Override
        public String getDisplayName() {
            return "Stash Pull Request";
        }

        /**
         * Form validation: the base URL must parse and the Stash projects
         * endpoint must answer 200.
         */
        public FormValidation doCheckBaseUrl(@QueryParameter("baseUrl") String value) {
            if (value.length() == 0) {
                return FormValidation.error("You must specify a base URL");
            }
            StashHttpConnection conn = new StashHttpConnection();
            try {
                conn.setBaseUrl(value);
            } catch (URISyntaxException e) {
                logger.error("Unable to parse base URL", e);
                return FormValidation.error("Not a valid URL");
            }
            try {
                StashResponse projects = conn.getProjects();
                if (projects.getStatusCode() != HttpStatus.SC_OK) {
                    logger.warn("API returned an unexpected response {}", projects.getEntity());
                    return FormValidation.error("API returned unexpected response %s", projects.getStatusCode());
                }
            } catch (StashHttpConnectionException e) {
                logger.error("Error accessing API", e);
                return FormValidation.error("Unable to parse response from Stash API: %s", e.getMessage());
            } catch (StashResponseException e) {
                logger.error("Error parsing response", e);
                return FormValidation.error("Could not access API. Response: %s - %s", e.getStatusCode(), e.getMessage());
            }
            return FormValidation.ok();
        }

        /**
         * Form validation: the username must resolve via the Stash user
         * endpoint. NOTE(review): unlike doCheckPassword, no credentials
         * are set on the connection before the lookup — confirm whether
         * anonymous access is expected here.
         */
        public FormValidation doCheckUsername(@QueryParameter("username") String value,
                                              @QueryParameter("baseUrl") String baseUrl) {
            if (value.length() == 0) {
                return FormValidation.error("You must specify a username");
            }
            StashHttpConnection conn = new StashHttpConnection();
            try {
                conn.setBaseUrl(baseUrl);
            } catch (URISyntaxException e) {
                logger.error("Unable to parse base URL", e);
                return FormValidation.error("Could not find user");
            }
            try {
                StashResponse projects = conn.getUser(value);
                if (projects.getStatusCode() != HttpStatus.SC_OK) {
                    logger.warn("API returned an unexpected response {}", projects.getEntity());
                    return FormValidation.error("Could not find user");
                }
            } catch (StashHttpConnectionException e) {
                logger.error("Error accessing API", e);
                return FormValidation.error("Could not find user: %s", e.getMessage());
            } catch (StashResponseException e) {
                logger.error("Error parsing response", e);
                return FormValidation.error("Could not find user. Response: %s - %s", e.getStatusCode(), e.getMessage());
            }
            return FormValidation.ok();
        }

        /**
         * Form validation: authenticates against the Stash user endpoint
         * with the supplied username/password.
         */
        public FormValidation doCheckPassword(@QueryParameter("password") String value,
                                              @QueryParameter("baseUrl") String baseUrl,
                                              @QueryParameter("username") String username) {
            if (value.length() == 0) {
                return FormValidation.error("You must specify a password");
            }
            StashHttpConnection conn = new StashHttpConnection();
            try {
                conn.setBaseUrl(baseUrl);
            } catch (URISyntaxException e) {
                logger.error("Unable to parse base URL", e);
                return FormValidation.error("Could not authenticate");
            }
            conn.setCredentials(username, value);
            try {
                StashResponse projects = conn.getUser(username);
                if (projects.getStatusCode() != HttpStatus.SC_OK) {
                    // TODO: see if it is an error, and grab the error message
                    logger.warn("API returned an unexpected response {}", projects.getEntity());
                    return FormValidation.error("Could not authenticate");
                }
            } catch (StashHttpConnectionException e) {
                logger.error("Error accessing API", e);
                return FormValidation.error("Could not authenticate: %s", e.getMessage());
            } catch (StashResponseException e) {
                logger.error("Error parsing response", e);
                return FormValidation.error("Could not authenticate. Response: %s - %s", e.getStatusCode(), e.getMessage());
            }
            return FormValidation.ok();
        }
    }

    /**
     * Bridges git commit notifications to every project that has a
     * {@link PullRequestTrigger} configured.
     */
    @Extension
    public static class ListenerImpl extends GitStatus.Listener {

        private static final Logger logger = LoggerFactory.getLogger(ListenerImpl.class);

        @Override
        public List<GitStatus.ResponseContributor> onNotifyCommit(URIish uri, @Nullable String sha1, String... branches) {
            logger.trace("Triggering PullRequestNotifyEvent on projects with a PullRequestTrigger scheduleBuild configured");
            List<GitStatus.ResponseContributor> responses = new ArrayList<>();
            List<AbstractProject> projects = Jenkins.getInstance().getAllItems(AbstractProject.class);
            for (AbstractProject ap : projects) {
                PullRequestTrigger trigger = (PullRequestTrigger) ap.getTrigger(PullRequestTrigger.class);
                if (trigger != null) {
                    logger.trace("Found PullRequestTrigger on {}", ap.getDisplayName());
                    List<Cause> causes = trigger.pullRequestNotifyEvent(new PullRequestNotifyEvent(uri, null));
                    for (Cause cause : causes) {
                        responses.add(new GitStatus.MessageResponseContributor(cause.getShortDescription()));
                    }
                }
            }
            return responses;
        }
    }
}
package uk.bl.wa.interject.type;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UncheckedIOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.io.IOUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.tika.Tika;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.mime.MediaType;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import uk.bl.wa.interject.factory.InterjectionFactory;
import uk.bl.wa.interject.util.ProcessRunner;
import uk.bl.wa.interject.util.ProcessRunner.ProcessRunnerException;
import uk.bl.wa.interject.util.ProcessRunnerImpl;

/**
 * Tests for MIME-type identification of obsolete formats (BMP, TIFF, ZX
 * Spectrum TAP, VRML) via Apache Tika, plus a smoke test for running an
 * external conversion tool.
 *
 * Fix: the original tests caught {@link IOException} and only printed the
 * stack trace, so any I/O failure made them pass vacuously. They now declare
 * {@code throws IOException} (JUnit reports an error), and streams are closed
 * via try-with-resources.
 */
public class InterjectionTest {

	protected static Logger logger = LogManager
			.getLogger(InterjectionTest.class);

	// Shared detector, re-created for every test in setUp().
	private Tika tika = null;
	// Expected media-type string for ZX Spectrum TAP files,
	// e.g. "application/x-spectrum-tap; version=basic".
	private String spectrumResult;

	@Before
	public void setUp() throws Exception {
		tika = new Tika();
		Map<String, String> params = new HashMap<String, String>();
		params.put("version", "basic");
		MediaType spectrumType = new MediaType("application", "x-spectrum-tap",
				params);
		spectrumResult = spectrumType.toString();
	}

	/** Name-based detection: ".bmp" must map to image/x-ms-bmp. */
	@Test
	public void testTika() {
		String strMime = tika.detect("test.bmp");
		Assert.assertEquals("image/x-ms-bmp", strMime);
		Assert.assertNotSame("Mime doesn't match", "image/x-ms-png", strMime);
	}

	/** Content-based detection of a BMP resource. */
	@Test
	public void testContentTypeBmp() throws IOException {
		String filename = "/test.bmp";
		try (InputStream inputStream = getClass().getResourceAsStream(filename)) {
			String mimeType = tika.detect(inputStream);
			String expected = MediaType.image("x-ms-bmp").toString();
			Assert.assertEquals(expected, mimeType);
			System.out.println(String.format(
					"detected media type for given file %s: %s", filename,
					mimeType));
		}
	}

	/** Content-based detection of a TIFF resource. */
	@Test
	public void testContentTypeTiff() throws IOException {
		String filename = "/test.tiff";
		try (InputStream inputStream = getClass().getResourceAsStream(filename)) {
			String mimeType = tika.detect(inputStream);
			String expected = MediaType.image("tiff").toString();
			Assert.assertEquals(expected, mimeType);
			System.out.println(String.format(
					"detected media type for given file %s: %s", filename,
					mimeType));
		}
	}

	/** The detected type must round-trip through the problem-type registry. */
	@Test
	public void testEnumerationType() throws IOException {
		String filename = "/test.tiff";
		try (InputStream inputStream = getClass().getResourceAsStream(filename)) {
			String sourceType = tika.detect(inputStream);
			String problemType = InterjectionFactory.INSTANCE.findProblemType(
					sourceType).getMimeType();
			String expected = MediaType.image("tiff").toString();
			Assert.assertEquals(expected, problemType);
			System.out.println(String.format(
					"detected media type for given file %s: %s", filename,
					problemType));
		}
	}

	/** Spectrum TAP detection from a File (name + content available). */
	@Test
	public void testSpectrumFile() throws IOException {
		String f = getClass().getResource("/Wheelie.tap").getFile();
		File file = new File(f);
		String mimeType = tika.detect(file);
		Assert.assertNotNull(mimeType);
		Assert.assertEquals(spectrumResult, mimeType);
		System.out.println("Type : " + mimeType);
	}

	/** Spectrum TAP detection from a raw stream with empty metadata. */
	@Test
	public void testSpectrumStream() throws IOException {
		try (InputStream inputStream = getClass().getResourceAsStream(
				"/ZZOOM.tap")) {
			String mimeType = tika.detect(inputStream, new Metadata());
			Assert.assertNotNull(mimeType);
			Assert.assertEquals(spectrumResult, mimeType);
			System.out.println("Type via InputStream: " + mimeType);
		}
	}

	/** Reads a VRML resource fully; fails (unchecked) on I/O error. */
	@Test
	public void testInputStream() {
		InputStream inputStream = getClass().getResourceAsStream(
				"/penguin3.wrl");
		getBytesFromStream(inputStream);
	}

	/**
	 * Drains the given stream into a byte array and echoes it as UTF-8.
	 *
	 * @param inputStream stream to read; fully consumed but not closed
	 * @return the stream contents
	 * @throws UncheckedIOException if reading fails (previously the exception
	 *         was swallowed and {@code null} could be returned)
	 */
	public byte[] getBytesFromStream(InputStream inputStream) {
		try {
			byte[] bytes = IOUtils.toByteArray(inputStream);
			System.out.println("Result via InputStream: "
					+ new String(bytes, "UTF8"));
			return bytes;
		} catch (IOException e) {
			throw new UncheckedIOException("Failed to read stream", e);
		}
	}

	/** VRML 1.0 detection must include the version parameter. */
	@Test
	public void testVrmlVersion1() throws IOException {
		String filename = "/penguin1.wrl";
		try (InputStream inputStream = getClass().getResourceAsStream(filename)) {
			String mimeType = tika.detect(inputStream);
			System.out.println(filename + " mimeType : " + mimeType);
			Assert.assertEquals("model/vrml; version=1.0", mimeType);
		}
	}

	/** VRML 2.0 detection must include the version parameter. */
	@Test
	public void testVrmlVersion2() throws IOException {
		String filename = "/penguin2.wrl";
		try (InputStream inputStream = getClass().getResourceAsStream(filename)) {
			String mimeType = tika.detect(inputStream);
			System.out.println(filename + " mimeType : " + mimeType);
			Assert.assertEquals("model/vrml; version=2.0", mimeType);
		}
	}

	/**
	 * Smoke test: convert a VRML 1.0 file to 2.0 with an external tool run
	 * under wine. Deliberately best-effort — a failure is reported on stderr
	 * but does not fail the build, since the tool is not present in every
	 * environment.
	 */
	@Test
	public void testProcess() {
		ProcessRunner runner = new ProcessRunnerImpl();
		List<String> commands = new ArrayList<String>();
		try {
			String path = "src/test/resources";
			commands.add("/usr/local/bin/wine");
			commands.add("../interject-access-external-tools/ivTools-3.0/ivvrml.exe");
			commands.add("-2");
			commands.add(path + "/penguin1.wrl");
			commands.add("-o");
			commands.add(path + "/penguin4.wrl");
			runner.setStartingDir(new File("."));
			runner.setCommand(commands);
			runner.setCollection(true);
			runner.execute();
			System.out.println("output : " + runner.getProcessOutputAsString());
			System.out.println("stderr : " + runner.getProcessErrorAsString());
			System.out.println("Working Directory = "
					+ System.getProperty("user.dir"));
		} catch (ProcessRunnerException e) {
			System.err.println("ERROR: " + e);
			e.printStackTrace();
			System.err.println("Commands: " + commands);
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.streams.state.internals;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.InvalidStateStoreException;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.TopologyBuilder;
import org.apache.kafka.streams.processor.internals.MockStreamsMetrics;
import org.apache.kafka.streams.processor.internals.ProcessorTopology;
import org.apache.kafka.streams.processor.internals.StateDirectory;
import org.apache.kafka.streams.processor.internals.StoreChangelogReader;
import org.apache.kafka.streams.processor.internals.StreamTask;
import org.apache.kafka.streams.processor.internals.StreamThread;
import org.apache.kafka.streams.processor.internals.StreamsMetadataState;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.apache.kafka.streams.state.ReadOnlyWindowStore;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.test.MockClientSupplier;
import org.apache.kafka.test.MockProcessorSupplier;
import org.apache.kafka.test.TestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;

import static org.apache.kafka.streams.state.QueryableStoreTypes.windowStore;
import static org.junit.Assert.assertEquals;

/**
 * Tests for {@code StreamThreadStateStoreProvider}: two tasks over partitions
 * 0 and 1 of one topic, each hosting a "kv-store" and a "window-store". The
 * provider is backed by an anonymous {@link StreamThread} whose {@code tasks()}
 * and {@code isInitialized()} are stubbed so the tests control availability.
 */
public class StreamThreadStateStoreProviderTest {

    private StreamTask taskOne;
    private StreamTask taskTwo;
    private StreamThreadStateStoreProvider provider;
    private StateDirectory stateDirectory;
    private File stateDir;
    // Toggled by tests; the stubbed StreamThread.isInitialized() returns it,
    // so setting it false simulates stores not yet being available.
    private boolean storesAvailable;
    private final String topicName = "topic";

    @Before
    public void before() throws IOException {
        // Topology: one source, one processor, one in-memory KV store and one
        // persistent windowed store attached to the processor.
        final TopologyBuilder builder = new TopologyBuilder();
        builder.addSource("the-source", topicName);
        builder.addProcessor("the-processor", new MockProcessorSupplier(), "the-source");
        builder.addStateStore(Stores.create("kv-store")
                                  .withStringKeys()
                                  .withStringValues().inMemory().build(), "the-processor");
        builder.addStateStore(Stores.create("window-store")
                                  .withStringKeys()
                                  .withStringValues()
                                  .persistent()
                                  .windowed(10, 10, 2, false).build(), "the-processor");

        final Properties properties = new Properties();
        final String applicationId = "applicationId";
        properties.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        stateDir = TestUtils.tempDirectory();
        final String stateConfigDir = stateDir.getPath();
        properties.put(StreamsConfig.STATE_DIR_CONFIG,
            stateConfigDir);
        final StreamsConfig streamsConfig = new StreamsConfig(properties);
        final MockClientSupplier clientSupplier = new MockClientSupplier();
        // The restore consumer needs partitions/offsets for both changelogs so
        // persistent-store restoration can run against the mock.
        configureRestoreConsumer(clientSupplier, "applicationId-kv-store-changelog");
        configureRestoreConsumer(clientSupplier, "applicationId-window-store-changelog");

        builder.setApplicationId(applicationId);
        final ProcessorTopology topology = builder.build(null);
        final Map<TaskId, StreamTask> tasks = new HashMap<>();
        stateDirectory = new StateDirectory(applicationId, stateConfigDir, new MockTime());
        // Two tasks over the same topology give each store name two instances.
        taskOne = createStreamsTask(applicationId, streamsConfig, clientSupplier, topology,
                                    new TaskId(0, 0));
        tasks.put(new TaskId(0, 0),
                  taskOne);
        taskTwo = createStreamsTask(applicationId, streamsConfig, clientSupplier, topology,
                                    new TaskId(0, 1));
        tasks.put(new TaskId(0, 1),
                  taskTwo);
        storesAvailable = true;
        provider = new StreamThreadStateStoreProvider(
            new StreamThread(
                builder,
                streamsConfig,
                clientSupplier,
                applicationId,
                "clientId",
                UUID.randomUUID(),
                new Metrics(),
                Time.SYSTEM,
                new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST),
                0) {
                @Override
                public Map<TaskId, StreamTask> tasks() {
                    return tasks;
                }

                @Override
                public boolean isInitialized() {
                    return storesAvailable;
                }
            });
    }

    @After
    public void cleanUp() throws IOException {
        Utils.delete(stateDir);
    }

    @Test
    public void shouldFindKeyValueStores() throws Exception {
        // One store instance per task.
        final List<ReadOnlyKeyValueStore<String, String>> kvStores =
            provider.stores("kv-store", QueryableStoreTypes.<String, String>keyValueStore());
        assertEquals(2, kvStores.size());
    }

    @Test
    public void shouldFindWindowStores() throws Exception {
        final List<ReadOnlyWindowStore<Object, Object>> windowStores =
            provider.stores("window-store", windowStore());
        assertEquals(2, windowStores.size());
    }

    @Test(expected = InvalidStateStoreException.class)
    public void shouldThrowInvalidStoreExceptionIfWindowStoreClosed() throws Exception {
        taskOne.getStore("window-store").close();
        provider.stores("window-store", QueryableStoreTypes.windowStore());
    }

    @Test(expected = InvalidStateStoreException.class)
    public void shouldThrowInvalidStoreExceptionIfKVStoreClosed() throws Exception {
        taskOne.getStore("kv-store").close();
        provider.stores("kv-store", QueryableStoreTypes.keyValueStore());
    }

    @Test
    public void shouldReturnEmptyListIfNoStoresFoundWithName() throws Exception {
        assertEquals(Collections.emptyList(), provider.stores("not-a-store", QueryableStoreTypes
            .keyValueStore()));
    }

    @Test
    public void shouldReturnEmptyListIfStoreExistsButIsNotOfTypeValueStore() throws Exception {
        // Type mismatch (window store queried as KV) yields empty, not an error.
        assertEquals(Collections.emptyList(), provider.stores("window-store",
                                                              QueryableStoreTypes.keyValueStore()));
    }

    @Test(expected = InvalidStateStoreException.class)
    public void shouldThrowInvalidStoreExceptionIfNotAllStoresAvailable() throws Exception {
        storesAvailable = false;
        provider.stores("kv-store", QueryableStoreTypes.keyValueStore());
    }

    /**
     * Builds a StreamTask for one partition of the test topic, with offset
     * limit updates disabled (no real consumer group behind the mock).
     */
    private StreamTask createStreamsTask(final String applicationId,
                                         final StreamsConfig streamsConfig,
                                         final MockClientSupplier clientSupplier,
                                         final ProcessorTopology topology,
                                         final TaskId taskId) {
        return new StreamTask(
            taskId,
            applicationId,
            Collections.singletonList(new TopicPartition(topicName, taskId.partition)),
            topology,
            clientSupplier.consumer,
            new StoreChangelogReader(clientSupplier.restoreConsumer, Time.SYSTEM, 5000),
            streamsConfig,
            new MockStreamsMetrics(new Metrics()),
            stateDirectory,
            null,
            new MockTime(),
            clientSupplier.getProducer(new HashMap<String, Object>())) {
            @Override
            protected void updateOffsetLimits() {}
        };
    }

    /**
     * Registers two partitions for the given changelog topic on the mock
     * restore consumer, assigns them, and pins both begin/end offsets to 0 so
     * restoration completes immediately.
     */
    private void configureRestoreConsumer(final MockClientSupplier clientSupplier,
                                          final String topic) {
        clientSupplier.restoreConsumer
            .updatePartitions(topic, Arrays.asList(
                new PartitionInfo(topic, 0, null, null, null),
                new PartitionInfo(topic, 1, null, null, null)));
        final TopicPartition tp1 = new TopicPartition(topic, 0);
        final TopicPartition tp2 = new TopicPartition(topic, 1);

        clientSupplier.restoreConsumer
            .assign(Arrays.asList(
                tp1,
                tp2));

        final Map<TopicPartition, Long> offsets = new HashMap<>();
        offsets.put(tp1, 0L);
        offsets.put(tp2, 0L);

        clientSupplier.restoreConsumer
            .updateBeginningOffsets(offsets);
        clientSupplier.restoreConsumer
            .updateEndOffsets(offsets);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.waveprotocol.wave.concurrencycontrol.channel; import static org.waveprotocol.wave.model.wave.Constants.NO_VERSION; import org.waveprotocol.wave.common.logging.LoggerBundle; import org.waveprotocol.wave.concurrencycontrol.client.ConcurrencyControl; import org.waveprotocol.wave.concurrencycontrol.common.ChannelException; import org.waveprotocol.wave.concurrencycontrol.common.CorruptionDetail; import org.waveprotocol.wave.concurrencycontrol.common.Recoverable; import org.waveprotocol.wave.concurrencycontrol.common.ResponseCode; import org.waveprotocol.wave.concurrencycontrol.common.TurbulenceListener; import org.waveprotocol.wave.concurrencycontrol.common.UnsavedDataListenerFactory; import org.waveprotocol.wave.model.id.IdFilter; import org.waveprotocol.wave.model.id.WaveId; import org.waveprotocol.wave.model.id.WaveletId; import org.waveprotocol.wave.model.id.WaveletName; import org.waveprotocol.wave.model.operation.wave.TransformedWaveletDelta; import org.waveprotocol.wave.model.operation.wave.WaveletDelta; import org.waveprotocol.wave.model.operation.wave.WaveletOperation; import org.waveprotocol.wave.model.util.CollectionUtils; import 
org.waveprotocol.wave.model.util.FuzzingBackOffScheduler; import org.waveprotocol.wave.model.util.Preconditions; import org.waveprotocol.wave.model.util.Scheduler; import org.waveprotocol.wave.model.version.HashedVersion; import org.waveprotocol.wave.model.version.HashedVersionFactory; import org.waveprotocol.wave.model.wave.ParticipantId; import org.waveprotocol.wave.model.wave.data.ObservableWaveletData; import org.waveprotocol.wave.model.wave.data.impl.EmptyWaveletSnapshot; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; /** * Multiplexes several {@link OperationChannel operation channels} over one * {@link ViewChannel view channel}. * * * |- OperationChannelMultiplexer -----------------------------------------| * | | * | |-Stacklet---------------------------------| | * | | OperationChannel <-> WaveletDeltaChannel |-| | * <-> | |------------------------------------------| |-| <=> View Channel | <-> WaveService * | |------------------------------------------| | | * | |------------------------------------------| | * | | * | All exceptions are directed here | * |-----------------------------------------------------------------------| * * Note: * * All exceptions that are emitted from using the OperationChannel or * OperationChannelMultiplexer interfaces are caught in this class. * i.e. when the client calls methods from the left part of the diagram. * * All exceptions generated as a result of handling server messages in ViewChannel * are routed here through onException(). i.e. when the WaveService calls methods on * the right part of the diagram through call backs. * * This class is responsible for reporting all the exceptions to the user. * */ public class OperationChannelMultiplexerImpl implements OperationChannelMultiplexer { /** * Binds together both ends of a delta channel. 
*/
  interface MultiplexedDeltaChannel extends WaveletDeltaChannel, WaveletChannel.Listener {
  }

  /**
   * Factory for creating delta channels.
   */
  interface DeltaChannelFactory {
    /**
     * Creates a delta channel.
     *
     * @param waveletChannel channel through which the delta channel
     *        communicates
     */
    MultiplexedDeltaChannel create(WaveletChannel waveletChannel);
  }

  /**
   * Factory for operation channels.
   */
  interface OperationChannelFactory {
    /**
     * Creates an operation channel.
     *
     * @param deltaChannel channel through which the op channel communicates
     * @param waveletId wavelet id for the new operation channel
     * @param startVersion the version to start from
     * @param accessibility accessibility of the new channel
     * @return a new operation channel.
     */
    InternalOperationChannel create(WaveletDeltaChannel deltaChannel, WaveletId waveletId,
        HashedVersion startVersion, Accessibility accessibility);
  }

  /**
   * A per-wavelet stack above this multiplexer. A stacklet forwards message
   * from the server to a listener at the bottom of the stacklet (a delta
   * channel). When communications fail a stacklet fetches reconnection version
   * from the contained operation channel.
   */
  private static class Stacklet implements WaveletChannel.Listener {
    private final MultiplexedDeltaChannel deltaChannel;
    private final InternalOperationChannel opChannel;
    // True once any snapshot or update has been seen on this stacklet.
    private boolean firstMessageReceived;
    // True while a server-sent duplicate of the locally-faked snapshot is
    // still expected (locally-created wavelets only); cleared when dropped.
    private boolean dropAdditionalSnapshot;

    /**
     * Creates a stacklet.
     *
     * @param deltaChannel delta channel at the bottom of the stacklet
     * @param opChannel operation channel at the top of the stacklet
     * @param dropSnapshot whether to expect and drop an additional snapshot
     *        after the first message.
     */
    private Stacklet(MultiplexedDeltaChannel deltaChannel, InternalOperationChannel opChannel,
        boolean dropSnapshot) {
      this.deltaChannel = deltaChannel;
      this.opChannel = opChannel;
      this.firstMessageReceived = false;
      this.dropAdditionalSnapshot = dropSnapshot;
    }

    // NOTE(review): not annotated @Override though the class implements
    // WaveletChannel.Listener — confirm whether the interface declares it.
    public void onWaveletSnapshot(ObservableWaveletData wavelet,
        HashedVersion lastCommittedVersion, HashedVersion currentVersion)
        throws ChannelException {
      // When a channel is created locally we fake an initial empty
      // snapshot. The server still sends one when it creates the wavelet
      // though, so it's dropped it here if that's expected.
      // See createOperationChannel().
      if (!firstMessageReceived) {
        firstMessageReceived = true;
      } else if (dropAdditionalSnapshot) {
        // TODO(anorth): check the snapshot is as expected, even though
        // it's dropped.
        dropAdditionalSnapshot = false;
        return;
      }
      deltaChannel.onWaveletSnapshot(wavelet, lastCommittedVersion, currentVersion);
    }

    @Override
    public void onWaveletUpdate(List<TransformedWaveletDelta> deltas,
        HashedVersion lastCommittedVersion, HashedVersion currentVersion)
        throws ChannelException {
      if (!firstMessageReceived) {
        firstMessageReceived = true;
      }
      deltaChannel.onWaveletUpdate(deltas, lastCommittedVersion, currentVersion);
    }

    /**
     * Resets this stacklet ready for reconnection.
     */
    public void reset() {
      deltaChannel.reset(opChannel);
      opChannel.reset();
    }

    /**
     * Closes this stacklet permanently.
     */
    public void close() {
      deltaChannel.reset(null);
      opChannel.close();
    }

    /** The operation channel at the top of this stacklet. */
    public OperationChannel getOperationChannel() {
      return opChannel;
    }

    /** Whether a server-sent duplicate snapshot is still expected. */
    public boolean isExpectingSnapshot() {
      return dropAdditionalSnapshot;
    }
  }

  /**
   * Holder class for the copious number of loggers.
   */
  public static class LoggerContext {
    public final LoggerBundle ops;
    public final LoggerBundle delta;
    public final LoggerBundle cc;
    public final LoggerBundle view;

    public LoggerContext(LoggerBundle ops, LoggerBundle delta, LoggerBundle cc,
        LoggerBundle view) {
      this.ops = ops;
      this.delta = delta;
      this.cc = cc;
      this.view = view;
    }
  }

  /** Multiplexer state. */
  private static enum State {
    NOT_CONNECTED, CONNECTED, RECONNECTING
  }

  /** Wave id for channels in this mux. */
  private final WaveId waveId;

  /** Multiplexed channels, indexed by wavelet id. */
  private final Map<WaveletId, Stacklet> channels = CollectionUtils.newHashMap();

  /** Factory for creating delta channels. */
  private final DeltaChannelFactory deltaChannelFactory;

  /** Factory for creating operation-channel stacks on top of wave services. */
  private final OperationChannelFactory opChannelFactory;

  /** Factory for creating a view channel */
  private final ViewChannelFactory viewFactory;

  /** Logger. */
  private final LoggerBundle logger;

  /** A stateful manager/factory for unsaved data listeners */
  private UnsavedDataListenerFactory unsavedDataListenerFactory;

  /** Synthesizer of initial wavelet snapshots for locally-created wavelets. */
  private final ObservableWaveletData.Factory<?> dataFactory;

  /** Produces hashed versions. */
  private final HashedVersionFactory hashFactory;

  /** List of commands to run when the underlying view becomes connected. */
  private final List<Runnable> onConnected = CollectionUtils.newArrayList();

  //
  // Mutable state.
  //

  /** Connection state of the mux. */
  private State state;

  /** Whether the initial open of the mux has finished. */
  private boolean openFinished = false;

  /**
   * Underlying multiplexed view channel; created on reconnection, set null on
   * close.
   */
  private ViewChannel viewChannel;

  /**
   * Tag identifying which view connection is current. Changes on each
   * reconnection.
   */
  private int connectionTag = 0;

  /** Filter specifying wavelets to open.
   */
  private IdFilter waveletFilter;

  /** Listener for handling new operation channels. */
  private Listener muxListener;

  /** Used to backoff when reconnecting. */
  private final Scheduler scheduler;

  /** A listener for turbulences in the channel or protocol */
  private TurbulenceListener turbulenceListener;

  /**
   * Creates factory for building delta channels.
   *
   * @param logger logger to use for created channels
   */
  private static DeltaChannelFactory createDeltaChannelFactory(final LoggerBundle logger) {
    return new DeltaChannelFactory() {
      @Override
      public MultiplexedDeltaChannel create(WaveletChannel waveletChannel) {
        return new WaveletDeltaChannelImpl(waveletChannel, logger);
      }
    };
  }

  /**
   * Creates a factory for building operation channels on a wave.
   *
   * @param waveId wave id
   * @param unsavedDataListenerFactory factory for unsaved data listeners
   * @param loggers logger bundle
   * @return a new operation channel factory
   */
  private static OperationChannelFactory createOperationChannelFactory(final WaveId waveId,
      final UnsavedDataListenerFactory unsavedDataListenerFactory, final LoggerContext loggers) {
    return new OperationChannelFactory() {
      @Override
      public InternalOperationChannel create(WaveletDeltaChannel deltaChannel,
          WaveletId waveletId, HashedVersion startVersion, Accessibility accessibility) {
        // Each channel gets its own concurrency-control instance, seeded at
        // the reconnect version; the unsaved-data listener is optional.
        ConcurrencyControl cc = new ConcurrencyControl(loggers.cc, startVersion);
        if (unsavedDataListenerFactory != null) {
          cc.setUnsavedDataListener(unsavedDataListenerFactory.create(waveletId));
        }
        return new OperationChannelImpl(loggers.ops, deltaChannel, cc, accessibility);
      }
    };
  }

  /**
   * Creates a multiplexer.
   *
   * WARNING: the scheduler should provide back-off. Providing a scheduler which
   * executes immediately or does not back off may cause denial-of-service-like
   * reconnection attempts against the servers. Use something like
   * {@link FuzzingBackOffScheduler}.
   *
   * @param waveId wave id to open
   * @param viewFactory factory for opening view channels
   * @param dataFactory factory for making snapshots of empty wavelets
   * @param loggers log targets
   * @param unsavedDataListenerFactory a factory for adding listeners
   * @param scheduler scheduler for reconnection
   * @param hashFactory factory for hashed versions
   * @param turbulenceListener a listener to report turbulences to users
   */
  public OperationChannelMultiplexerImpl(WaveId waveId, ViewChannelFactory viewFactory,
      ObservableWaveletData.Factory<?> dataFactory, LoggerContext loggers,
      UnsavedDataListenerFactory unsavedDataListenerFactory, Scheduler scheduler,
      HashedVersionFactory hashFactory, TurbulenceListener turbulenceListener) {
    // Construct default dependency implementations, based on given arguments.
    this(waveId,
        createDeltaChannelFactory(loggers.delta),
        createOperationChannelFactory(waveId, unsavedDataListenerFactory, loggers),
        viewFactory, dataFactory, scheduler, loggers.view, unsavedDataListenerFactory,
        hashFactory, turbulenceListener);
    // NOTE(review): this null check runs after the delegated constructor has
    // already stored dataFactory — consider checking before delegation.
    Preconditions.checkNotNull(dataFactory, "null dataFactory");
  }

  /**
   * Creates a multiplexer (direct dependency arguments only). Exposed as
   * package-private for testing.
   *
   * @param opChannelFactory factory for creating operation-channel stacks
   * @param channelFactory factory for creating the underlying view channel
   * @param dataFactory factory for creating wavelet snapshots
   * @param scheduler used to back off when reconnecting. assumed not null.
   * @param logger log target
   * @param unsavedDataListenerFactory
   * @param hashFactory factory for hashed versions
   * @param turbulenceListener a listener to report turbulences to users
   */
  OperationChannelMultiplexerImpl(
      WaveId waveId,
      DeltaChannelFactory deltaChannelFactory,
      OperationChannelFactory opChannelFactory,
      ViewChannelFactory channelFactory,
      ObservableWaveletData.Factory<?> dataFactory,
      Scheduler scheduler,
      LoggerBundle logger,
      UnsavedDataListenerFactory unsavedDataListenerFactory,
      HashedVersionFactory hashFactory,
      TurbulenceListener turbulenceListener) {
    this.waveId = waveId;
    this.deltaChannelFactory = deltaChannelFactory;
    this.opChannelFactory = opChannelFactory;
    this.viewFactory = channelFactory;
    this.dataFactory = dataFactory;
    this.logger = logger;
    this.unsavedDataListenerFactory = unsavedDataListenerFactory;
    this.state = State.NOT_CONNECTED;
    this.scheduler = scheduler;
    this.hashFactory = hashFactory;
    this.turbulenceListener = turbulenceListener;
  }

  @Override
  public void open(Listener listener, IdFilter waveletFilter,
      Collection<KnownWavelet> knownWavelets) {
    this.muxListener = listener;
    this.waveletFilter = waveletFilter;

    try {
      if (!knownWavelets.isEmpty()) {
        // Known wavelets get local channels immediately, before connecting.
        for (KnownWavelet knownWavelet : knownWavelets) {
          Preconditions.checkNotNull(knownWavelet.snapshot, "Snapshot has no wavelet");
          Preconditions.checkNotNull(knownWavelet.committedVersion,
              "Known wavelet has null committed version");
          boolean dropAdditionalSnapshot = false;
          addOperationChannel(knownWavelet.snapshot.getWaveletId(), knownWavelet.snapshot,
              knownWavelet.committedVersion, knownWavelet.accessibility, dropAdditionalSnapshot);
        }
        // consider the wave as if open has finished.
        maybeOpenFinished();
      }

      Map<WaveletId, List<HashedVersion>> knownSignatures = signaturesFromWavelets(knownWavelets);
      connect(knownSignatures);
    } catch (ChannelException e) {
      shutdown("Multiplexer open failed.", e);
    }
  }

  @Override
  public void open(Listener listener, IdFilter waveletFilter) {
    // Convenience overload: open with no known wavelets.
    open(listener, waveletFilter, Collections.<KnownWavelet>emptyList());
  }

  @Override
  public void close() {
    shutdown(ResponseCode.OK, "View closed.", null);
    // Remove listeners to avoid unexcepted notifications
    turbulenceListener = null;
    unsavedDataListenerFactory = null;
  }

  @Override
  public void createOperationChannel(WaveletId waveletId, ParticipantId creator) {
    if (channels.containsKey(waveletId)) {
      Preconditions.illegalArgument("Operation channel already exists for: " + waveletId);
    }

    // Create the new channel, and fake an initial snapshot.
    // TODO(anorth): inject a clock for providing timestamps.
    HashedVersion v0 = hashFactory.createVersionZero(WaveletName.of(waveId, waveletId));
    final ObservableWaveletData emptySnapshot =
        dataFactory.create(new EmptyWaveletSnapshot(waveId, waveletId, creator, v0,
            System.currentTimeMillis()));
    try {
      // The server will also send a snapshot for this wavelet; the stacklet
      // is told to drop that duplicate (see Stacklet.onWaveletSnapshot()).
      boolean dropAdditionalSnapshot = true;
      addOperationChannel(waveletId, emptySnapshot, v0, Accessibility.READ_WRITE,
          dropAdditionalSnapshot);
    } catch (ChannelException e) {
      shutdown("Creating operation channel failed.", e);
    }
  }

  /**
   * Creates a view channel listener. The listener will forward messages to
   * stacklets while {@link #connectionTag} has the value it had at creation
   * time. When a channel (re)connects the tag changes.
   *
   * @param expectedWavelets wavelets and reconnection versions we expect to
   *        receive a message for before
   *        {@link ViewChannel.Listener#onOpenFinished()}
   */
  private ViewChannel.Listener createViewListener(
      final Map<WaveletId, List<HashedVersion>> expectedWavelets) {
    // Capture the tag; every callback below is a no-op once the mux has
    // moved on to a newer view connection.
    final int expectedTag = connectionTag;
    return new ViewChannel.Listener() {
      /**
       * Wavelets for which we have not yet seen a message, or null after
       * onOpenFinished.
       */
      Set<WaveletId> missingWavelets = CollectionUtils.newHashSet(expectedWavelets.keySet());

      @Override
      public void onSnapshot(WaveletId waveletId, ObservableWaveletData wavelet,
          HashedVersion lastCommittedVersion, HashedVersion currentVersion)
          throws ChannelException {
        if (connectionTag == expectedTag) {
          removeMissingWavelet(waveletId);
          try {
            // Forward message to the appropriate stacklet, creating it if
            // needed.
            Stacklet stacklet = channels.get(waveletId);
            boolean dropAdditionalSnapshot = false;
            // TODO(anorth): Do better than guessing at accessibility here.
            if (stacklet == null) {
              createStacklet(waveletId, wavelet, Accessibility.READ_WRITE,
                  dropAdditionalSnapshot);
              stacklet = channels.get(waveletId);
            } else if (!stacklet.isExpectingSnapshot()) {
              // Replace the existing stacklet by first removing the wavelet
              // and then adding the newly connected one.
              channels.remove(waveletId);
              unsavedDataListenerFactory.destroy(waveletId);
              muxListener.onOperationChannelRemoved(stacklet.getOperationChannel(), waveletId);
              createStacklet(waveletId, wavelet, Accessibility.READ_WRITE,
                  dropAdditionalSnapshot);
              stacklet = channels.get(waveletId);
            }
            stacklet.onWaveletSnapshot(wavelet, lastCommittedVersion, currentVersion);
          } catch (ChannelException e) {
            throw exceptionWithContext(e, waveletId);
          }
        }
      }

      @Override
      public void onUpdate(WaveletId waveletId, List<TransformedWaveletDelta> deltas,
          HashedVersion lastCommittedVersion, HashedVersion currentVersion)
          throws ChannelException {
        if (connectionTag == expectedTag) {
          removeMissingWavelet(waveletId);
          maybeResetScheduler(deltas);
          try {
            Stacklet stacklet = channels.get(waveletId);
            if (stacklet == null) {
              //TODO(user): Figure out the right exception to throw here.
              throw new IllegalStateException("Received deltas with no stacklet present!");
            }
            stacklet.onWaveletUpdate(deltas, lastCommittedVersion, currentVersion);
          } catch (ChannelException e) {
            throw exceptionWithContext(e, waveletId);
          }
        } else {
          logger.trace().log("Mux dropping update from defunct view");
        }
      }

      @Override
      public void onOpenFinished() throws ChannelException {
        if (connectionTag == expectedTag) {
          if (missingWavelets == null) {
            // TODO(anorth): Add an error code for a protocol error and use
            // it here.
            throw new ChannelException(ResponseCode.INTERNAL_ERROR,
                "Multiplexer received openFinished twice", null, Recoverable.NOT_RECOVERABLE,
                waveId, null);
          }
          // If a missing wavelet could be reconnected at version zero then
          // fake the resync message here. The server no longer knows about
          // the wavelet so we should resubmit changes from version zero.
          Iterator<WaveletId> itr = missingWavelets.iterator();
          while (itr.hasNext()) {
            WaveletId maybeMissing = itr.next();
            List<HashedVersion> resyncVersions = expectedWavelets.get(maybeMissing);
            Preconditions.checkState(!resyncVersions.isEmpty(),
                "Empty resync versions for wavelet " + maybeMissing);
            if (resyncVersions.get(0).getVersion() == 0) {
              Stacklet stacklet = channels.get(maybeMissing);
              if (stacklet == null) {
                Preconditions.illegalState("Resync wavelet has no stacklet. Channels: "
                    + channels.keySet() + ", resync: " + expectedWavelets.keySet());
              }
              WaveletName wavelet = WaveletName.of(waveId, maybeMissing);
              List<TransformedWaveletDelta> resyncDeltaList = createVersionZeroResync(wavelet);
              HashedVersion v0 = hashFactory.createVersionZero(wavelet);
              stacklet.onWaveletUpdate(resyncDeltaList, v0, v0);
              itr.remove();
            }
          }
          // Check we received a message for each expected wavelet.
          if (!missingWavelets.isEmpty()) {
            throw new ChannelException(ResponseCode.NOT_AUTHORIZED,
                "Server didn't acknowledge known wavelets; perhaps access has been lost: "
                    + missingWavelets, null, Recoverable.NOT_RECOVERABLE, waveId, null);
          }
          missingWavelets = null;
          maybeOpenFinished();
        } else {
          logger.trace().log("Mux dropping openFinished from defunct view");
        }
      }

      @Override
      public void onConnected() {
        if (connectionTag == expectedTag) {
          OperationChannelMultiplexerImpl.this.onConnected();
        } else {
          logger.trace().log("Mux dropping onConnected from defunct view");
        }
      }

      @Override
      public void onClosed() {
        if (connectionTag == expectedTag) {
          reconnect(null);
        } else {
          logger.trace().log("Mux dropping onClosed from defunct view");
        }
      }

      @Override
      public void onException(ChannelException e) {
        if (connectionTag == expectedTag) {
          onChannelException(e);
        } else {
          logger.trace().log("Mux dropping failure from defunct view");
        }
      }

      /**
       * Adds a wavelet id to the set of seen ids if they are being tracked.
       */
      private void removeMissingWavelet(WaveletId id) {
        if (missingWavelets != null) {
          missingWavelets.remove(id);
        }
      }

      /**
       * Resets the reconnection scheduler if a message indicates
       * the connection is somewhat ok.
       */
      private void maybeResetScheduler(List<TransformedWaveletDelta> deltas) {
        // The connection is probably ok if we receive a delta. A snapshot
        // is not sufficient since some are locally generated. The delta need
        // not have ops; a reconnection delta is enough.
        if ((deltas.size() > 0)) {
          scheduler.reset();
        }
      }
    };
  }

  /**
   * Creates a stacklet and (optionally) initialises it with a snapshot.
* * @param waveletId the wavelet id of the channel to create * @param snapshot the wavelet container for the new channel * @param committedVersion the committed version for the new channel * @param accessibility accessibility the user currently has to the wavelet * @param initialiseLocalChannel whether to send the snapshot through the * stacklet, in which case it should expect and drop an additional * snapshot from the network */ private void addOperationChannel(final WaveletId waveletId, ObservableWaveletData snapshot, HashedVersion committedVersion, Accessibility accessibility, boolean initialiseLocalChannel) throws ChannelException { final Stacklet stacklet = createStacklet(waveletId, snapshot, accessibility, initialiseLocalChannel); if (initialiseLocalChannel) { final HashedVersion currentVersion = snapshot.getHashedVersion(); initialiseLocallyCreatedStacklet(stacklet, waveletId, snapshot, committedVersion, currentVersion); } } /** * This is an ugly work-around the lack of ability to add channels to a view * in the view service API. We need to send some message through the stacklet * so it's connected but the server can't send us any message until we submit * the first delta, which requires a connected stacklet... */ private void initialiseLocallyCreatedStacklet(final Stacklet stacklet, final WaveletId waveletId, final ObservableWaveletData snapshot, final HashedVersion committedVersion, final HashedVersion currentVersion) throws ChannelException { if (state == State.CONNECTED) { try { stacklet.onWaveletSnapshot(snapshot, committedVersion, currentVersion); } catch (ChannelException e) { throw exceptionWithContext(e, waveletId); } } else { // Delay connecting the stacklet until the underlying view is connected. 
onConnected.add(new Runnable() { public void run() { try { stacklet.onWaveletSnapshot(snapshot, committedVersion, currentVersion); } catch (ChannelException e) { shutdown("Fake snapshot for wavelet channel " + waveId + "/" + waveletId + "failed", exceptionWithContext(e, waveletId)); } } }); } } /** * Adds a new operation-channel stacklet to this multiplexer and notifies the * listener of the new channel's creation. * * @param waveletId id of the concurrency domain for the new channel * @param snapshot wavelet initial state snapshot * @param accessibility accessibility of the stacklet; if not * {@link Accessibility#READ_WRITE} then * the stacklet will fail on send * @param dropSnapshot whether to expect and drop an additional snapshot from * the view */ private Stacklet createStacklet(final WaveletId waveletId, ObservableWaveletData snapshot, Accessibility accessibility, boolean dropSnapshot) { if (channels.containsKey(waveletId)) { Preconditions.illegalArgument("Cannot create duplicate channel for wavelet: " + waveId + "/" + waveletId); } WaveletChannel waveletChannel = createWaveletChannel(waveletId); MultiplexedDeltaChannel deltaChannel = deltaChannelFactory.create(waveletChannel); InternalOperationChannel opChannel = opChannelFactory.create(deltaChannel, waveletId, snapshot.getHashedVersion(), accessibility); Stacklet stacklet = new Stacklet(deltaChannel, opChannel, dropSnapshot); stacklet.reset(); channels.put(waveletId, stacklet); if (muxListener != null) { muxListener.onOperationChannelCreated(stacklet.getOperationChannel(), snapshot, accessibility); } return stacklet; } /** * Executes any pending commands in the {@link #onConnected} queue. */ private void onConnected() { state = State.CONNECTED; // Connect all channels created before now. for (Runnable command : onConnected) { command.run(); } onConnected.clear(); } /** * Handles failure of the view channel or an operation channel. * * @param e The exception that caused the channel to fail. 
*/ private void onChannelException(ChannelException e) { if (turbulenceListener != null) { turbulenceListener.onFailure(e); } if (e.getRecoverable() != Recoverable.RECOVERABLE) { shutdown(e.getResponseCode(), "Channel Exception", e); } else { reconnect(e); } } private void connect(Map<WaveletId, List<HashedVersion>> knownWavelets) { Preconditions.checkState(state != State.CONNECTED, "Cannot connect already-connected channel"); checkConnectVersions(knownWavelets); logger.trace().log("Multiplexer reconnecting wave " + waveId); viewChannel = viewFactory.create(waveId); viewChannel.open(createViewListener(knownWavelets), waveletFilter, knownWavelets); } /** * Checks that reconnect versions are strictly increasing and removes any * that are not accepted by the connection's wavelet filter. */ private void checkConnectVersions(Map<WaveletId, List<HashedVersion>> knownWavelets) { Iterator<Map.Entry<WaveletId, List<HashedVersion>>> itr = knownWavelets.entrySet().iterator(); while (itr.hasNext()) { Map.Entry<WaveletId, List<HashedVersion>> entry = itr.next(); WaveletId id = entry.getKey(); if (IdFilter.accepts(waveletFilter, id)) { long prevVersion = NO_VERSION; for (HashedVersion v : entry.getValue()) { if ((prevVersion != NO_VERSION) && (v.getVersion() <= prevVersion)) { throw new IllegalArgumentException("Invalid reconnect versions for " + waveId + id + ": " + entry.getValue()); } prevVersion = v.getVersion(); } } else { // TODO(anorth): throw an IllegalArgumentException here after fixing // all callers to avoid this. logger.error().log( "Mux for " + waveId + " dropping resync versions for filtered wavelet " + id + ", filter " + waveletFilter); itr.remove(); } } } /** * Terminates all stacklets then reconnects with the known versions * provided by them. 
* @param exception The exception that caused the reconnection */ private void reconnect(ChannelException exception) { logger.trace().logLazyObjects("Multiplexer disconnected in state ", state , ", reconnecting."); state = State.RECONNECTING; // NOTE(zdwang): don't clear this as we'll lose wavelets if we've never // been connected. This is a reminder. // onConnected.clear(); // Reset each stacklet, collecting the reconnect versions. final Map<WaveletId, List<HashedVersion>> knownWavelets = CollectionUtils.newHashMap(); for (final WaveletId wavelet : channels.keySet()) { final Stacklet stacklet = channels.get(wavelet); stacklet.reset(); knownWavelets.put(wavelet, stacklet.getOperationChannel().getReconnectVersions()); } // Close the view channel and ignore future messages from it. connectionTag++; viewChannel.close(); // Run the connect part in the scheduler scheduler.schedule(new Scheduler.Command() { int tag = connectionTag; @Override public void execute() { if (tag == connectionTag) { // Reconnect by creating another view channel. connect(knownWavelets); } } }); } /** * Shuts down this multiplexer permanently. * * @param reasonCode code representing failure reason. If the value is not * {@code ResponseCode.OK} then the listener will be notified of connection failure. * @param description reason for failure * @param exception any exception that caused the shutdown. */ private void shutdown(ResponseCode reasonCode, String description, Throwable exception) { if (description == null) { description = "(No error description provided)"; } boolean notifyFailure = (reasonCode != ResponseCode.OK); // We are telling the user through UI that the wave is corrupt, so we must also report it // to the server. TODO(pablojan) Keep this until clarify proper error handling if (notifyFailure) { if (exception == null) { logger.error().log(description); } else { logger.error().log(description, exception); } } if (viewChannel != null) { // Ignore future messages. 
connectionTag++; state = State.NOT_CONNECTED; for (Stacklet stacklet : channels.values()) { stacklet.close(); } channels.clear(); viewChannel.close(); viewChannel = null; if (muxListener != null && notifyFailure) { muxListener.onFailed(new CorruptionDetail(reasonCode, description, exception)); } muxListener = null; } } /** * Shuts down this multiplexer permanently after an exception. */ private void shutdown(String message, ChannelException e) { shutdown(e.getResponseCode(), message, e); } /** * Creates a wavelet channel for submissions against a wavelet. * * @param waveletId wavelet id for the channel */ private WaveletChannel createWaveletChannel(final WaveletId waveletId) { return new WaveletChannel() { @Override public void submit(WaveletDelta delta, final SubmitCallback callback) { viewChannel.submitDelta(waveletId, delta, callback); } @Override public String debugGetProfilingInfo() { return viewChannel.debugGetProfilingInfo(waveletId); } }; } private void maybeOpenFinished() { // Forward message to the mux's open listener. if (!openFinished) { openFinished = true; muxListener.onOpenFinished(); } } /** * Wraps a channel exception in another providing wave and wavelet id context. */ private ChannelException exceptionWithContext(ChannelException e, WaveletId waveletId) { return new ChannelException(e.getResponseCode(), "Nested ChannelException", e, e.getRecoverable(), waveId, waveletId); } /** * Constructs a maps of list of wavelet signatures from a collection of * wavelet snapshots. * * Package-private for testing. 
*/ static Map<WaveletId, List<HashedVersion>> signaturesFromWavelets( Collection<KnownWavelet> knownWavelets) { Map<WaveletId, List<HashedVersion>> signatures = new HashMap<WaveletId, List<HashedVersion>>(); for (KnownWavelet knownWavelet : knownWavelets) { if (knownWavelet.accessibility.isReadable()) { ObservableWaveletData snapshot = knownWavelet.snapshot; WaveletId waveletId = snapshot.getWaveletId(); List<HashedVersion> sigs = Collections.singletonList(snapshot.getHashedVersion()); signatures.put(waveletId, sigs); } } return signatures; } /** * Creates a container message mimicking a resync message for a wavelet at * version zero. */ private List<TransformedWaveletDelta> createVersionZeroResync(WaveletName wavelet) { return Collections.singletonList(new TransformedWaveletDelta((ParticipantId) null, hashFactory.createVersionZero(wavelet), 0L, Collections.<WaveletOperation> emptyList())); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.lang3.exception;

import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.ClassUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.SystemUtils;

/**
 * <p>Provides utilities for manipulating and examining
 * <code>Throwable</code> objects.</p>
 *
 * @since 1.0
 * @version $Id$
 */
public class ExceptionUtils {

    /**
     * <p>Used when printing stack frames to denote the start of a
     * wrapped exception.</p>
     *
     * <p>Package private for accessibility by test suite.</p>
     */
    static final String WRAPPED_MARKER = " [wrapped] ";

    /**
     * <p>The names of methods commonly used to access a wrapped exception.</p>
     */
    // TODO: Remove in Lang 4.0
    // Probed in declaration order by getCause(Throwable, String[]); the first
    // accessor that exists and returns a non-null Throwable wins.
    private static final String[] CAUSE_METHOD_NAMES = {
        "getCause",
        "getNextException",
        "getTargetException",
        "getException",
        "getSourceException",
        "getRootCause",
        "getCausedByException",
        "getNested",
        "getLinkedException",
        "getNestedException",
        "getLinkedCause",
        "getThrowable",
    };

    /**
     * <p>
     * Public constructor allows an instance of <code>ExceptionUtils</code> to be created, although that is not
     * normally necessary.
     * </p>
     */
    // All functionality is static; the public constructor exists only for
    // tools (e.g. JavaBean/scripting frameworks) that require an instance.
    public ExceptionUtils() {
        super();
    }

    //-----------------------------------------------------------------------
    /**
     * <p>Returns the default names used when searching for the cause of an exception.</p>
     *
     * <p>This may be modified and used in the overloaded getCause(Throwable, String[]) method.</p>
     *
     * @return cloned array of the default method names
     * @since 3.0
     * @deprecated This feature will be removed in Lang 4.0
     */
    @Deprecated
    public static String[] getDefaultCauseMethodNames() {
        // Return a defensive copy so callers cannot mutate the shared default.
        return ArrayUtils.clone(CAUSE_METHOD_NAMES);
    }

    //-----------------------------------------------------------------------
    /**
     * <p>Introspects the <code>Throwable</code> to obtain the cause.</p>
     *
     * <p>The method searches for methods with specific names that return a
     * <code>Throwable</code> object. This will pick up most wrapping exceptions,
     * including those from JDK 1.4.
* * <p>The default list searched for are:</p> * <ul> * <li><code>getCause()</code></li> * <li><code>getNextException()</code></li> * <li><code>getTargetException()</code></li> * <li><code>getException()</code></li> * <li><code>getSourceException()</code></li> * <li><code>getRootCause()</code></li> * <li><code>getCausedByException()</code></li> * <li><code>getNested()</code></li> * </ul> * * <p>If none of the above is found, returns <code>null</code>.</p> * * @param throwable the throwable to introspect for a cause, may be null * @return the cause of the <code>Throwable</code>, * <code>null</code> if none found or null throwable input * @since 1.0 * @deprecated This feature will be removed in Lang 4.0 */ @Deprecated public static Throwable getCause(final Throwable throwable) { return getCause(throwable, CAUSE_METHOD_NAMES); } /** * <p>Introspects the <code>Throwable</code> to obtain the cause.</p> * * <p>A <code>null</code> set of method names means use the default set. * A <code>null</code> in the set of method names will be ignored.</p> * * @param throwable the throwable to introspect for a cause, may be null * @param methodNames the method names, null treated as default set * @return the cause of the <code>Throwable</code>, * <code>null</code> if none found or null throwable input * @since 1.0 * @deprecated This feature will be removed in Lang 4.0 */ @Deprecated public static Throwable getCause(final Throwable throwable, String[] methodNames) { if (throwable == null) { return null; } if (methodNames == null) { methodNames = CAUSE_METHOD_NAMES; } for (final String methodName : methodNames) { if (methodName != null) { final Throwable cause = getCauseUsingMethodName(throwable, methodName); if (cause != null) { return cause; } } } return null; } /** * <p>Introspects the <code>Throwable</code> to obtain the root cause.</p> * * <p>This method walks through the exception chain to the last element, * "root" of the tree, using {@link #getCause(Throwable)}, and * returns 
that exception.</p> * * <p>From version 2.2, this method handles recursive cause structures * that might otherwise cause infinite loops. If the throwable parameter * has a cause of itself, then null will be returned. If the throwable * parameter cause chain loops, the last element in the chain before the * loop is returned.</p> * * @param throwable the throwable to get the root cause for, may be null * @return the root cause of the <code>Throwable</code>, * <code>null</code> if none found or null throwable input */ public static Throwable getRootCause(final Throwable throwable) { final List<Throwable> list = getThrowableList(throwable); return list.size() < 2 ? null : (Throwable)list.get(list.size() - 1); } /** * <p>Finds a <code>Throwable</code> by method name.</p> * * @param throwable the exception to examine * @param methodName the name of the method to find and invoke * @return the wrapped exception, or <code>null</code> if not found */ // TODO: Remove in Lang 4.0 private static Throwable getCauseUsingMethodName(final Throwable throwable, final String methodName) { Method method = null; try { method = throwable.getClass().getMethod(methodName); } catch (final NoSuchMethodException ignored) { // NOPMD // exception ignored } catch (final SecurityException ignored) { // NOPMD // exception ignored } if (method != null && Throwable.class.isAssignableFrom(method.getReturnType())) { try { return (Throwable) method.invoke(throwable); } catch (final IllegalAccessException ignored) { // NOPMD // exception ignored } catch (final IllegalArgumentException ignored) { // NOPMD // exception ignored } catch (final InvocationTargetException ignored) { // NOPMD // exception ignored } } return null; } //----------------------------------------------------------------------- /** * <p>Counts the number of <code>Throwable</code> objects in the * exception chain.</p> * * <p>A throwable without cause will return <code>1</code>. 
* A throwable with one cause will return <code>2</code> and so on. * A <code>null</code> throwable will return <code>0</code>.</p> * * <p>From version 2.2, this method handles recursive cause structures * that might otherwise cause infinite loops. The cause chain is * processed until the end is reached, or until the next item in the * chain is already in the result set.</p> * * @param throwable the throwable to inspect, may be null * @return the count of throwables, zero if null input */ public static int getThrowableCount(final Throwable throwable) { return getThrowableList(throwable).size(); } /** * <p>Returns the list of <code>Throwable</code> objects in the * exception chain.</p> * * <p>A throwable without cause will return an array containing * one element - the input throwable. * A throwable with one cause will return an array containing * two elements. - the input throwable and the cause throwable. * A <code>null</code> throwable will return an array of size zero.</p> * * <p>From version 2.2, this method handles recursive cause structures * that might otherwise cause infinite loops. The cause chain is * processed until the end is reached, or until the next item in the * chain is already in the result set.</p> * * @see #getThrowableList(Throwable) * @param throwable the throwable to inspect, may be null * @return the array of throwables, never null */ public static Throwable[] getThrowables(final Throwable throwable) { final List<Throwable> list = getThrowableList(throwable); return list.toArray(new Throwable[list.size()]); } /** * <p>Returns the list of <code>Throwable</code> objects in the * exception chain.</p> * * <p>A throwable without cause will return a list containing * one element - the input throwable. * A throwable with one cause will return a list containing * two elements. - the input throwable and the cause throwable. 
* A <code>null</code> throwable will return a list of size zero.</p> * * <p>This method handles recursive cause structures that might * otherwise cause infinite loops. The cause chain is processed until * the end is reached, or until the next item in the chain is already * in the result set.</p> * * @param throwable the throwable to inspect, may be null * @return the list of throwables, never null * @since Commons Lang 2.2 */ public static List<Throwable> getThrowableList(Throwable throwable) { final List<Throwable> list = new ArrayList<Throwable>(); while (throwable != null && list.contains(throwable) == false) { list.add(throwable); throwable = ExceptionUtils.getCause(throwable); } return list; } //----------------------------------------------------------------------- /** * <p>Returns the (zero based) index of the first <code>Throwable</code> * that matches the specified class (exactly) in the exception chain. * Subclasses of the specified class do not match - see * {@link #indexOfType(Throwable, Class)} for the opposite.</p> * * <p>A <code>null</code> throwable returns <code>-1</code>. * A <code>null</code> type returns <code>-1</code>. * No match in the chain returns <code>-1</code>.</p> * * @param throwable the throwable to inspect, may be null * @param clazz the class to search for, subclasses do not match, null returns -1 * @return the index into the throwable chain, -1 if no match or null input */ public static int indexOfThrowable(final Throwable throwable, final Class<?> clazz) { return indexOf(throwable, clazz, 0, false); } /** * <p>Returns the (zero based) index of the first <code>Throwable</code> * that matches the specified type in the exception chain from * a specified index. * Subclasses of the specified class do not match - see * {@link #indexOfType(Throwable, Class, int)} for the opposite.</p> * * <p>A <code>null</code> throwable returns <code>-1</code>. * A <code>null</code> type returns <code>-1</code>. 
* No match in the chain returns <code>-1</code>. * A negative start index is treated as zero. * A start index greater than the number of throwables returns <code>-1</code>.</p> * * @param throwable the throwable to inspect, may be null * @param clazz the class to search for, subclasses do not match, null returns -1 * @param fromIndex the (zero based) index of the starting position, * negative treated as zero, larger than chain size returns -1 * @return the index into the throwable chain, -1 if no match or null input */ public static int indexOfThrowable(final Throwable throwable, final Class<?> clazz, final int fromIndex) { return indexOf(throwable, clazz, fromIndex, false); } //----------------------------------------------------------------------- /** * <p>Returns the (zero based) index of the first <code>Throwable</code> * that matches the specified class or subclass in the exception chain. * Subclasses of the specified class do match - see * {@link #indexOfThrowable(Throwable, Class)} for the opposite.</p> * * <p>A <code>null</code> throwable returns <code>-1</code>. * A <code>null</code> type returns <code>-1</code>. * No match in the chain returns <code>-1</code>.</p> * * @param throwable the throwable to inspect, may be null * @param type the type to search for, subclasses match, null returns -1 * @return the index into the throwable chain, -1 if no match or null input * @since 2.1 */ public static int indexOfType(final Throwable throwable, final Class<?> type) { return indexOf(throwable, type, 0, true); } /** * <p>Returns the (zero based) index of the first <code>Throwable</code> * that matches the specified type in the exception chain from * a specified index. * Subclasses of the specified class do match - see * {@link #indexOfThrowable(Throwable, Class)} for the opposite.</p> * * <p>A <code>null</code> throwable returns <code>-1</code>. * A <code>null</code> type returns <code>-1</code>. * No match in the chain returns <code>-1</code>. 
* A negative start index is treated as zero. * A start index greater than the number of throwables returns <code>-1</code>.</p> * * @param throwable the throwable to inspect, may be null * @param type the type to search for, subclasses match, null returns -1 * @param fromIndex the (zero based) index of the starting position, * negative treated as zero, larger than chain size returns -1 * @return the index into the throwable chain, -1 if no match or null input * @since 2.1 */ public static int indexOfType(final Throwable throwable, final Class<?> type, final int fromIndex) { return indexOf(throwable, type, fromIndex, true); } /** * <p>Worker method for the <code>indexOfType</code> methods.</p> * * @param throwable the throwable to inspect, may be null * @param type the type to search for, subclasses match, null returns -1 * @param fromIndex the (zero based) index of the starting position, * negative treated as zero, larger than chain size returns -1 * @param subclass if <code>true</code>, compares with {@link Class#isAssignableFrom(Class)}, otherwise compares * using references * @return index of the <code>type</code> within throwables nested within the specified <code>throwable</code> */ private static int indexOf(final Throwable throwable, final Class<?> type, int fromIndex, final boolean subclass) { if (throwable == null || type == null) { return -1; } if (fromIndex < 0) { fromIndex = 0; } final Throwable[] throwables = ExceptionUtils.getThrowables(throwable); if (fromIndex >= throwables.length) { return -1; } if (subclass) { for (int i = fromIndex; i < throwables.length; i++) { if (type.isAssignableFrom(throwables[i].getClass())) { return i; } } } else { for (int i = fromIndex; i < throwables.length; i++) { if (type.equals(throwables[i].getClass())) { return i; } } } return -1; } //----------------------------------------------------------------------- /** * <p>Prints a compact stack trace for the root cause of a throwable * to <code>System.err</code>.</p> * * 
<p>The compact stack trace starts with the root cause and prints * stack frames up to the place where it was caught and wrapped. * Then it prints the wrapped exception and continues with stack frames * until the wrapper exception is caught and wrapped again, etc.</p> * * <p>The output of this method is consistent across JDK versions. * Note that this is the opposite order to the JDK1.4 display.</p> * * <p>The method is equivalent to <code>printStackTrace</code> for throwables * that don't have nested causes.</p> * * @param throwable the throwable to output * @since 2.0 */ public static void printRootCauseStackTrace(final Throwable throwable) { printRootCauseStackTrace(throwable, System.err); } /** * <p>Prints a compact stack trace for the root cause of a throwable.</p> * * <p>The compact stack trace starts with the root cause and prints * stack frames up to the place where it was caught and wrapped. * Then it prints the wrapped exception and continues with stack frames * until the wrapper exception is caught and wrapped again, etc.</p> * * <p>The output of this method is consistent across JDK versions. 
* Note that this is the opposite order to the JDK1.4 display.</p> * * <p>The method is equivalent to <code>printStackTrace</code> for throwables * that don't have nested causes.</p> * * @param throwable the throwable to output, may be null * @param stream the stream to output to, may not be null * @throws IllegalArgumentException if the stream is <code>null</code> * @since 2.0 */ public static void printRootCauseStackTrace(final Throwable throwable, final PrintStream stream) { if (throwable == null) { return; } if (stream == null) { throw new IllegalArgumentException("The PrintStream must not be null"); } final String trace[] = getRootCauseStackTrace(throwable); for (final String element : trace) { stream.println(element); } stream.flush(); } /** * <p>Prints a compact stack trace for the root cause of a throwable.</p> * * <p>The compact stack trace starts with the root cause and prints * stack frames up to the place where it was caught and wrapped. * Then it prints the wrapped exception and continues with stack frames * until the wrapper exception is caught and wrapped again, etc.</p> * * <p>The output of this method is consistent across JDK versions. 
* Note that this is the opposite order to the JDK1.4 display.</p> * * <p>The method is equivalent to <code>printStackTrace</code> for throwables * that don't have nested causes.</p> * * @param throwable the throwable to output, may be null * @param writer the writer to output to, may not be null * @throws IllegalArgumentException if the writer is <code>null</code> * @since 2.0 */ public static void printRootCauseStackTrace(final Throwable throwable, final PrintWriter writer) { if (throwable == null) { return; } if (writer == null) { throw new IllegalArgumentException("The PrintWriter must not be null"); } final String trace[] = getRootCauseStackTrace(throwable); for (final String element : trace) { writer.println(element); } writer.flush(); } //----------------------------------------------------------------------- /** * <p>Creates a compact stack trace for the root cause of the supplied * <code>Throwable</code>.</p> * * <p>The output of this method is consistent across JDK versions. * It consists of the root exception followed by each of its wrapping * exceptions separated by '[wrapped]'. 
Note that this is the opposite * order to the JDK1.4 display.</p> * * @param throwable the throwable to examine, may be null * @return an array of stack trace frames, never null * @since 2.0 */ public static String[] getRootCauseStackTrace(final Throwable throwable) { if (throwable == null) { return ArrayUtils.EMPTY_STRING_ARRAY; } final Throwable throwables[] = getThrowables(throwable); final int count = throwables.length; final List<String> frames = new ArrayList<String>(); List<String> nextTrace = getStackFrameList(throwables[count - 1]); for (int i = count; --i >= 0;) { final List<String> trace = nextTrace; if (i != 0) { nextTrace = getStackFrameList(throwables[i - 1]); removeCommonFrames(trace, nextTrace); } if (i == count - 1) { frames.add(throwables[i].toString()); } else { frames.add(WRAPPED_MARKER + throwables[i].toString()); } for (int j = 0; j < trace.size(); j++) { frames.add(trace.get(j)); } } return frames.toArray(new String[frames.size()]); } /** * <p>Removes common frames from the cause trace given the two stack traces.</p> * * @param causeFrames stack trace of a cause throwable * @param wrapperFrames stack trace of a wrapper throwable * @throws IllegalArgumentException if either argument is null * @since 2.0 */ public static void removeCommonFrames(final List<String> causeFrames, final List<String> wrapperFrames) { if (causeFrames == null || wrapperFrames == null) { throw new IllegalArgumentException("The List must not be null"); } int causeFrameIndex = causeFrames.size() - 1; int wrapperFrameIndex = wrapperFrames.size() - 1; while (causeFrameIndex >= 0 && wrapperFrameIndex >= 0) { // Remove the frame from the cause trace if it is the same // as in the wrapper trace final String causeFrame = causeFrames.get(causeFrameIndex); final String wrapperFrame = wrapperFrames.get(wrapperFrameIndex); if (causeFrame.equals(wrapperFrame)) { causeFrames.remove(causeFrameIndex); } causeFrameIndex--; wrapperFrameIndex--; } } 
//-----------------------------------------------------------------------
/**
 * <p>Gets the stack trace from a Throwable as a String.</p>
 *
 * <p>The result of this method vary by JDK version as this method
 * uses {@link Throwable#printStackTrace(java.io.PrintWriter)}.
 * On JDK1.3 and earlier, the cause exception will not be shown
 * unless the specified throwable alters printStackTrace.</p>
 *
 * @param throwable  the <code>Throwable</code> to be examined
 * @return the stack trace as generated by the exception's
 *  <code>printStackTrace(PrintWriter)</code> method
 */
public static String getStackTrace(final Throwable throwable) {
    final StringWriter sw = new StringWriter();
    // autoFlush=true so the trace is fully written before we read it back
    final PrintWriter pw = new PrintWriter(sw, true);
    throwable.printStackTrace(pw);
    // StringWriter.toString() is equivalent to getBuffer().toString() and clearer.
    return sw.toString();
}

/**
 * <p>Captures the stack trace associated with the specified
 * <code>Throwable</code> object, decomposing it into a list of
 * stack frames.</p>
 *
 * <p>The result of this method vary by JDK version as this method
 * uses {@link Throwable#printStackTrace(java.io.PrintWriter)}.
 * On JDK1.3 and earlier, the cause exception will not be shown
 * unless the specified throwable alters printStackTrace.</p>
 *
 * @param throwable  the <code>Throwable</code> to examine, may be null
 * @return an array of strings describing each stack frame, never null
 */
public static String[] getStackFrames(final Throwable throwable) {
    if (throwable == null) {
        return ArrayUtils.EMPTY_STRING_ARRAY;
    }
    return getStackFrames(getStackTrace(throwable));
}

//-----------------------------------------------------------------------
/**
 * <p>Returns an array where each element is a line from the argument.</p>
 *
 * <p>The end of line is determined by the value of {@link SystemUtils#LINE_SEPARATOR}.</p>
 *
 * @param stackTrace  a stack trace String
 * @return an array where each element is a line from the argument
 */
static String[] getStackFrames(final String stackTrace) {
    final String linebreak = SystemUtils.LINE_SEPARATOR;
    // StringTokenizer (rather than String.split) deliberately skips empty lines.
    final StringTokenizer frames = new StringTokenizer(stackTrace, linebreak);
    final List<String> list = new ArrayList<String>();
    while (frames.hasMoreTokens()) {
        list.add(frames.nextToken());
    }
    // new String[0] lets the JIT allocate the exact-size array; preferred idiom.
    return list.toArray(new String[0]);
}

/**
 * <p>Produces a <code>List</code> of stack frames - the message
 * is not included. Only the trace of the specified exception is
 * returned, any caused by trace is stripped.</p>
 *
 * <p>This works in most cases - it will only fail if the exception
 * message contains a line that starts with:
 * <code>&quot;&nbsp;&nbsp;&nbsp;at&quot;.</code></p>
 *
 * @param t is any throwable
 * @return List of stack frames
 */
static List<String> getStackFrameList(final Throwable t) {
    final String stackTrace = getStackTrace(t);
    final String linebreak = SystemUtils.LINE_SEPARATOR;
    final StringTokenizer frames = new StringTokenizer(stackTrace, linebreak);
    final List<String> list = new ArrayList<String>();
    boolean traceStarted = false;
    while (frames.hasMoreTokens()) {
        final String token = frames.nextToken();
        // Determine if the line starts with <whitespace>at
        final int at = token.indexOf("at");
        if (at != -1 && token.substring(0, at).trim().isEmpty()) {
            traceStarted = true;
            list.add(token);
        } else if (traceStarted) {
            // First non-frame line after the frames marks the start of the
            // "Caused by" section, which this method deliberately strips.
            break;
        }
    }
    return list;
}

//-----------------------------------------------------------------------
/**
 * Gets a short message summarising the exception.
 * <p>
 * The message returned is of the form
 * {ClassNameWithoutPackage}: {ThrowableMessage}
 *
 * @param th  the throwable to get a message for, null returns empty string
 * @return the message, non-null
 * @since Commons Lang 2.2
 */
public static String getMessage(final Throwable th) {
    if (th == null) {
        return "";
    }
    final String clsName = ClassUtils.getShortClassName(th, null);
    final String msg = th.getMessage();
    // defaultString turns a null message into "" rather than "null".
    return clsName + ": " + StringUtils.defaultString(msg);
}

//-----------------------------------------------------------------------
/**
 * Gets a short message summarising the root cause exception.
 * <p>
 * The message returned is of the form
 * {ClassNameWithoutPackage}: {ThrowableMessage}
 *
 * @param th  the throwable to get a message for, null returns empty string
 * @return the message, non-null
 * @since Commons Lang 2.2
 */
public static String getRootCauseMessage(final Throwable th) {
    Throwable root = ExceptionUtils.getRootCause(th);
    // Fall back to the throwable itself when it has no nested cause.
    root = root == null ? th : root;
    return getMessage(root);
}
}
/*
 * $Id$
 *
 * SARL is an general-purpose agent programming language.
 * More details on http://www.sarl.io
 *
 * Copyright (C) 2014-2016 the original authors or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.sarl.lang.mwe2.codebuilder.fragments;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import javax.inject.Inject;
import javax.inject.Provider;

import com.google.inject.Injector;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.xtend2.lib.StringConcatenationClient;
import org.eclipse.xtext.common.types.JvmDeclaredType;
import org.eclipse.xtext.common.types.JvmType;
import org.eclipse.xtext.generator.IFileSystemAccess2;
import org.eclipse.xtext.util.Strings;
import org.eclipse.xtext.xbase.compiler.ImportManager;
import org.eclipse.xtext.xbase.lib.Pure;
import org.eclipse.xtext.xtext.generator.model.GuiceModuleAccess.BindingFactory;
import org.eclipse.xtext.xtext.generator.model.JavaFileAccess;
import org.eclipse.xtext.xtext.generator.model.TypeReference;
import org.eclipse.xtext.xtext.generator.model.XtendFileAccess;
import org.eclipse.xtext.xtype.XImportDeclaration;
import org.eclipse.xtext.xtype.XImportSection;
import org.eclipse.xtext.xtype.XtypeFactory;

import io.sarl.lang.mwe2.codebuilder.extractor.CodeElementExtractor;

/** Generator of the script builder types.
 *
 * <p>Emits (as generated source text) the script-builder interface, its default
 * implementation, user-customization stubs, and factory-method contributions.
 * NOTE: every {@code it.append(...)} call below writes literal generated code,
 * so the exact append sequence IS the behavior of this class.
 *
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 */
public class ScriptBuilderFragment extends AbstractSubCodeBuilderFragment {

	// Collector for the factory methods contributed by this fragment.
	@Inject
	private BuilderFactoryContributions builderFactoryContributions;

	/** Replies the custom implementation for the script builder.
	 *
	 * @return the implementation.
	 */
	@Pure
	public TypeReference getScriptBuilderImplCustom() {
		return new TypeReference(getCodeElementExtractor().getBuilderPackage() + ".ScriptBuilderImplCustom"); //$NON-NLS-1$
	}

	/** Replies the implementation for the builder of scripts.
	 *
	 * @return the implementation.
	 */
	@Pure
	public TypeReference getScriptBuilderImpl() {
		return new TypeReference(getCodeElementExtractor().getBuilderPackage() + ".ScriptBuilderImpl"); //$NON-NLS-1$
	}

	@Override
	protected Collection<AbstractSubCodeBuilderFragment> initializeSubGenerators(Injector injector) {
		return Arrays.asList(
				injector.getInstance(ExpressionBuilderFragment.class),
				injector.getInstance(BlockExpressionBuilderFragment.class),
				injector.getInstance(FormalParameterBuilderFragment.class),
				injector.getInstance(TopElementBuilderFragment.class));
	}

	@Override
	public void generate() {
		generateIScriptBuilder();
		generateScriptBuilderImpl();
		generateBuilderFactoryContributions();
		super.generate();
	}

	@Override
	public void generateBindings(BindingFactory factory) {
		super.generateBindings(factory);
		// Bind interface -> impl -> custom impl so the custom stub wins when present.
		bindTypeReferences(factory,
				getScriptBuilderInterface(),
				getScriptBuilderImpl(),
				getScriptBuilderImplCustom());
	}

	/** Generate the contributions for the BuildFactory.
	 *
	 * <p>Contributes two {@code createScript} factory methods: one taking a
	 * {@link ResourceSet} (which delegates after creating a resource) and one
	 * taking a {@link Resource} directly.
	 */
	protected void generateBuilderFactoryContributions() {
		this.builderFactoryContributions.addContribution(new StringConcatenationClient() {
			@Override
			protected void appendTo(TargetStringConcatenation it) {
				it.append("\t/** Create the factory for a " + getLanguageName() + " script."); //$NON-NLS-1$ //$NON-NLS-2$
				it.newLine();
				it.append("\t * @param packageName the name of the package of the script."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @param resourceSet the resource set in which the script is created."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the factory."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\tpublic "); //$NON-NLS-1$
				it.append(getScriptBuilderInterface());
				it.append(" createScript(String packageName, "); //$NON-NLS-1$
				it.append(ResourceSet.class);
				it.append(" resourceSet) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn createScript(packageName, createResource(resourceSet));"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				it.append("\t/** Create the factory for a " + getLanguageName() + " script."); //$NON-NLS-1$ //$NON-NLS-2$
				it.newLine();
				it.append("\t * @param packageName the name of the package of the script."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @param resource the resource in which the script is created."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the factory."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\tpublic "); //$NON-NLS-1$
				it.append(getScriptBuilderInterface());
				it.append(" createScript(String packageName, "); //$NON-NLS-1$
				it.append(Resource.class);
				it.append(" resource) {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t"); //$NON-NLS-1$
				it.append(getScriptBuilderInterface());
				it.append(" builder = getProvider("); //$NON-NLS-1$
				it.append(getScriptBuilderInterface());
				it.append(".class).get();"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\tbuilder.eInit(resource, packageName);"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn builder;"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
			}
		});
	}

	@Override
	public void generateXtendStubs() {
		super.generateXtendStubs();
		final TypeReference stub = getScriptBuilderImplCustom();
		final StringConcatenationClient content = new StringConcatenationClient() {
			@Override
			protected void appendTo(TargetStringConcatenation it) {
				it.append("/** User-defined builder of the " + getLanguageName() + " scripts."); //$NON-NLS-1$//$NON-NLS-2$
				it.newLine();
				it.append(" */"); //$NON-NLS-1$
				it.newLine();
				it.append("class "); //$NON-NLS-1$
				it.append(stub);
				it.append(" extends "); //$NON-NLS-1$
				it.append(getScriptBuilderImpl());
				it.append(" {"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				it.append("}"); //$NON-NLS-1$
				it.newLine();
			}
		};
		final XtendFileAccess xtendFile = getFileAccessFactory().createXtendFile(stub, content);
		final IFileSystemAccess2 fileSystem = getSrc();
		// Stub files are user-editable: never overwrite an existing one.
		if (!fileSystem.isFile(xtendFile.getPath())) {
			xtendFile.writeTo(fileSystem);
		}
	}

	@Override
	public void generateJavaStubs() {
		super.generateJavaStubs();
		final TypeReference stub = getScriptBuilderImplCustom();
		final StringConcatenationClient content = new StringConcatenationClient() {
			@Override
			protected void appendTo(TargetStringConcatenation it) {
				it.append("/** User-defined builder of the " + getLanguageName() + " scripts."); //$NON-NLS-1$//$NON-NLS-2$
				it.newLine();
				it.append(" */"); //$NON-NLS-1$
				it.newLine();
				it.append("public class "); //$NON-NLS-1$
				it.append(stub);
				it.append(" extends "); //$NON-NLS-1$
				it.append(getScriptBuilderImpl());
				it.append(" {"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				it.append("}"); //$NON-NLS-1$
				it.newLine();
			}
		};
		final JavaFileAccess javaFile = getFileAccessFactory().createJavaFile(stub, content);
		final IFileSystemAccess2 fileSystem = getSrc();
		// Stub files are user-editable: never overwrite an existing one.
		if (!fileSystem.isFile(javaFile.getPath())) {
			javaFile.writeTo(fileSystem);
		}
	}

	/** Extract a top element from the grammar.
	 *
	 * <p>Generates an {@code addXxx(String name)} builder method (declaration
	 * only when {@code forInterface} is {@code true}, full body plus the
	 * injected provider field otherwise).
	 *
	 * @param description the description of the top element.
	 * @param forInterface indicates if the generated code is for interfaces.
	 * @return the top element.
	 */
	@SuppressWarnings("checkstyle:all")
	protected StringConcatenationClient generateTopElement(CodeElementExtractor.ElementDescription description,
			boolean forInterface) {
		final String topElementName = Strings.toFirstUpper(description.getName());
		final TypeReference builderType = getCodeElementExtractor().getElementBuilderInterface(topElementName);
		return new StringConcatenationClient() {
			@Override
			protected void appendTo(TargetStringConcatenation it) {
				if (!forInterface) {
					it.append("\t@"); //$NON-NLS-1$
					it.append(Inject.class);
					it.newLine();
					it.append("\tprivate "); //$NON-NLS-1$
					it.append(Provider.class);
					it.append("<"); //$NON-NLS-1$
					it.append(builderType);
					it.append("> "); //$NON-NLS-1$
					it.append(Strings.toFirstLower(topElementName));
					it.append("Provider;"); //$NON-NLS-1$
					it.newLineIfNotEmpty();
					it.newLine();
				}
				it.append("\t/** Create " + getAorAnArticle(topElementName) //$NON-NLS-1$
						+ " "+ topElementName + " builder."); //$NON-NLS-1$//$NON-NLS-2$
				it.newLine();
				it.append("\t * @param name - the name of the " + topElementName + "."); //$NON-NLS-1$ //$NON-NLS-2$
				it.newLine();
				it.append("\t * @return the builder."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t"); //$NON-NLS-1$
				if (!forInterface) {
					it.append("public "); //$NON-NLS-1$
				}
				it.append(builderType);
				it.append(" add"); //$NON-NLS-1$
				it.append(topElementName);
				it.append("(String name)"); //$NON-NLS-1$
				if (forInterface) {
					it.append(";"); //$NON-NLS-1$
					it.newLineIfNotEmpty();
				} else {
					it.append(" {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t"); //$NON-NLS-1$
					it.append(builderType);
					it.append(" builder = this."); //$NON-NLS-1$
					it.append(Strings.toFirstLower(topElementName));
					it.append("Provider.get();"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\tbuilder.eInit(getScript(), name);"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\treturn builder;"); //$NON-NLS-1$
					it.newLine();
					it.append("\t}"); //$NON-NLS-1$
					it.newLineIfNotEmpty();
				}
				it.newLine();
			}
		};
	}

	/** Extract top elements from the grammar.
	 *
	 * @param forInterface indicates if the generated code is for interfaces.
	 * @return the top elements.
	 */
	protected List<StringConcatenationClient> generateTopElements(boolean forInterface) {
		final List<StringConcatenationClient> topElements = new ArrayList<>();
		for (final CodeElementExtractor.ElementDescription description : getCodeElementExtractor().getTopElements(
				getGrammar(), getCodeBuilderConfig())) {
			topElements.add(generateTopElement(description, forInterface));
		}
		return topElements;
	}

	/** Generate the script builder interface.
	 */
	protected void generateIScriptBuilder() {
		final List<StringConcatenationClient> topElements = generateTopElements(true);
		final TypeReference builder = getScriptBuilderInterface();
		final StringConcatenationClient content = new StringConcatenationClient() {
			@Override
			protected void appendTo(TargetStringConcatenation it) {
				it.append("/** Builder of " + getLanguageName() + " scripts."); //$NON-NLS-1$ //$NON-NLS-2$
				it.newLine();
				it.append(" *"); //$NON-NLS-1$
				it.newLine();
				it.append(" * <p>This builder is provided for helping to create " //$NON-NLS-1$
						+ getLanguageName() + " Ecore elements."); //$NON-NLS-1$
				it.newLine();
				it.append(" *"); //$NON-NLS-1$
				it.newLine();
				it.append(" * <p>Do not forget to invoke {@link #finalizeScript()} for creating imports, etc."); //$NON-NLS-1$
				it.newLine();
				it.append(" */"); //$NON-NLS-1$
				it.newLine();
				it.append("@SuppressWarnings(\"all\")"); //$NON-NLS-1$
				it.newLine();
				it.append("public interface "); //$NON-NLS-1$
				it.append(builder.getSimpleName());
				it.append(" {"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				it.append(generateFieldsAndMethods(true));
				for (final StringConcatenationClient element : topElements) {
					it.append(element);
				}
				it.append("}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
			}
		};
		final JavaFileAccess javaFile = getFileAccessFactory().createJavaFile(builder, content);
		javaFile.writeTo(getSrcGen());
	}

	/** Generate the script builder default implementation.
	 */
	protected void generateScriptBuilderImpl() {
		final List<StringConcatenationClient> topElements = generateTopElements(false);
		final TypeReference script = getScriptBuilderImpl();
		final TypeReference scriptInterface = getScriptBuilderInterface();
		final StringConcatenationClient content = new StringConcatenationClient() {
			@Override
			protected void appendTo(TargetStringConcatenation it) {
				it.append("@SuppressWarnings(\"all\")"); //$NON-NLS-1$
				it.newLine();
				it.append("public class "); //$NON-NLS-1$
				it.append(script.getSimpleName());
				it.append(" extends "); //$NON-NLS-1$
				it.append(getAbstractBuilderImpl());
				it.append(" implements "); //$NON-NLS-1$
				it.append(scriptInterface);
				it.append(" {"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				it.append(generateFieldsAndMethods(false));
				for (final StringConcatenationClient element : topElements) {
					it.append(element);
				}
				it.append("}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
			}
		};
		final JavaFileAccess javaFile = getFileAccessFactory().createJavaFile(script, content);
		javaFile.writeTo(getSrcGen());
	}

	/** Generate the fields and the methods.
	 *
	 * <p>Generates the shared members of the builder: the {@code script} field
	 * (impl only) and the {@code eInit}, {@code getScript}, {@code eResource}
	 * and {@code finalizeScript} operations (declarations only when
	 * {@code forInterface} is {@code true}).
	 *
	 * @param forInterface indicates if the generated code is for interfaces.
	 * @return the fields and methods.
	 */
	@SuppressWarnings("checkstyle:all")
	protected StringConcatenationClient generateFieldsAndMethods(boolean forInterface) {
		TypeReference scriptInterface = getCodeElementExtractor().getLanguageScriptInterface();
		return new StringConcatenationClient() {
			@Override
			protected void appendTo(TargetStringConcatenation it) {
				// Fields
				if (!forInterface) {
					it.append("\tprivate "); //$NON-NLS-1$
					it.append(scriptInterface);
					it.append(" script;"); //$NON-NLS-1$
					it.newLineIfNotEmpty();
					it.newLine();
				}
				it.append("\t/** Create the internal " + getLanguageName() + " script."); //$NON-NLS-1$ //$NON-NLS-2$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t"); //$NON-NLS-1$
				if (!forInterface) {
					it.append("public "); //$NON-NLS-1$
				}
				it.append("void eInit("); //$NON-NLS-1$
				it.append(Resource.class);
				it.append(" resource, String packageName)"); //$NON-NLS-1$
				if (forInterface) {
					it.append(";"); //$NON-NLS-1$
				} else {
					it.append(" {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\tif (this.script == null) {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tthis.script = "); //$NON-NLS-1$
					it.append(getXFactoryFor(scriptInterface));
					it.append(".eINSTANCE.create"); //$NON-NLS-1$
					it.append(Strings.toFirstUpper(scriptInterface.getSimpleName()));
					it.append("();"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t"); //$NON-NLS-1$
					it.append(EList.class);
					it.append("<"); //$NON-NLS-1$
					it.append(EObject.class);
					it.append("> content = resource.getContents();"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tif (!content.isEmpty()) {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t\tcontent.clear();"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t}"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tcontent.add(this.script);"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tif (!"); //$NON-NLS-1$
					it.append(Strings.class);
					it.append(".isEmpty(packageName)) {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t\tscript.setPackage(packageName);"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t}"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t}"); //$NON-NLS-1$
					it.newLine();
					it.append("\t}"); //$NON-NLS-1$
				}
				it.newLineIfNotEmpty();
				it.newLine();
				it.append("\t/** Replies the " + getLanguageName() + " script."); //$NON-NLS-1$ //$NON-NLS-2$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\t"); //$NON-NLS-1$
				if (!forInterface) {
					it.append("public "); //$NON-NLS-1$
				}
				it.append(scriptInterface);
				it.append(" getScript()"); //$NON-NLS-1$
				if (forInterface) {
					it.append(";"); //$NON-NLS-1$
				} else {
					it.append(" {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\treturn this.script;"); //$NON-NLS-1$
					it.newLine();
					it.append("\t}"); //$NON-NLS-1$
				}
				it.newLineIfNotEmpty();
				it.newLine();
				it.append("\t/** Replies the resource to which the script is attached."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\t"); //$NON-NLS-1$
				if (!forInterface) {
					it.append("public "); //$NON-NLS-1$
				}
				it.append(Resource.class);
				it.append(" eResource()"); //$NON-NLS-1$
				if (forInterface) {
					it.append(";"); //$NON-NLS-1$
				} else {
					it.append(" {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\treturn getScript().eResource();"); //$NON-NLS-1$
					it.newLine();
					it.append("\t}"); //$NON-NLS-1$
				}
				it.newLineIfNotEmpty();
				it.newLine();
				it.append("\t/** Finalize the script."); //$NON-NLS-1$
				it.newLine();
				it.append("\t *"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * <p>The finalization includes: <ul>"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * <li>The import section is created.</li>"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * </ul>"); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t"); //$NON-NLS-1$
				if (!forInterface) {
					it.append("public "); //$NON-NLS-1$
				}
				it.append("void finalizeScript()"); //$NON-NLS-1$
				if (forInterface) {
					it.append(";"); //$NON-NLS-1$
				} else {
					it.append(" {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t"); //$NON-NLS-1$
					it.append(ImportManager.class);
					it.append(" concreteImports = new "); //$NON-NLS-1$
					it.append(ImportManager.class);
					it.append("();"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t"); //$NON-NLS-1$
					it.append(XImportSection.class);
					it.append(" importSection = getScript().getImportSection();"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\tif (importSection != null) {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tfor ("); //$NON-NLS-1$
					it.append(XImportDeclaration.class);
					it.append(" decl : importSection.getImportDeclarations()) {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t\tconcreteImports.addImportFor(decl.getImportedType());"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t}"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t}"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\tfor (String importName : getImportManager().getImports()) {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t"); //$NON-NLS-1$
					it.append(JvmType.class);
					it.append(" type = getTypeReferences().findDeclaredType(importName, getScript());"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tif (type instanceof "); //$NON-NLS-1$
					it.append(JvmDeclaredType.class);
					it.newLine();
					it.append("\t\t\t\t\t&& concreteImports.addImportFor(type)) {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t\t"); //$NON-NLS-1$
					it.append(XImportDeclaration.class);
					it.append(" declaration = "); //$NON-NLS-1$
					it.append(XtypeFactory.class);
					it.append(".eINSTANCE.createXImportDeclaration();"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t\tdeclaration.setImportedType(("); //$NON-NLS-1$
					it.append(JvmDeclaredType.class);
					it.append(") type);"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t\tif (importSection == null) {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t\t\timportSection = "); //$NON-NLS-1$
					it.append(XtypeFactory.class);
					it.append(".eINSTANCE.createXImportSection();"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t\t\tgetScript().setImportSection(importSection);"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t\t}"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t\timportSection.getImportDeclarations().add(declaration);"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t}"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t}"); //$NON-NLS-1$
					it.newLine();
					it.append("\t}"); //$NON-NLS-1$
				}
				it.newLineIfNotEmpty();
				it.newLine();
			}
		};
	}

}
/*
 * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.test.compatibility;

import com.hazelcast.cluster.ClusterState;
import com.hazelcast.cp.internal.persistence.CPPersistenceService;
import com.hazelcast.cp.internal.persistence.NopCPPersistenceService;
import com.hazelcast.hotrestart.HotRestartService;
import com.hazelcast.instance.EndpointQualifier;
import com.hazelcast.instance.impl.HazelcastInstanceImpl;
import com.hazelcast.instance.impl.NodeExtension;
import com.hazelcast.internal.ascii.TextCommandService;
import com.hazelcast.internal.auditlog.AuditlogService;
import com.hazelcast.internal.auditlog.impl.NoOpAuditlogService;
import com.hazelcast.internal.cluster.impl.JoinMessage;
import com.hazelcast.internal.diagnostics.Diagnostics;
import com.hazelcast.internal.dynamicconfig.DynamicConfigListener;
import com.hazelcast.internal.hotrestart.InternalHotRestartService;
import com.hazelcast.internal.jmx.ManagementService;
import com.hazelcast.internal.management.TimedMemberStateFactory;
import com.hazelcast.internal.memory.MemoryStats;
import com.hazelcast.internal.networking.ChannelInitializerProvider;
import com.hazelcast.internal.networking.InboundHandler;
import com.hazelcast.internal.networking.OutboundHandler;
import com.hazelcast.internal.nio.IOService;
import com.hazelcast.internal.nio.tcp.TcpIpConnection;
import com.hazelcast.internal.serialization.InternalSerializationService;
import com.hazelcast.internal.serialization.SerializationService;
import com.hazelcast.internal.util.ByteArrayProcessor;
import com.hazelcast.nio.MemberSocketInterceptor;
import com.hazelcast.security.SecurityContext;
import com.hazelcast.security.SecurityService;
import com.hazelcast.version.Version;

import java.util.Map;
import java.util.UUID;

/**
 * Node extension that instantiates a {@link SamplingSerializationService} when asked to create
 * {@link SerializationService} instance.
 *
 * <p>All other operations are delegated unchanged to the wrapped {@link NodeExtension},
 * except where noted ({@link #registerPlugins}, {@link #sendPhoneHome},
 * {@link #isClientFailoverSupported}, {@link #getAuditlogService},
 * {@link #getCPPersistenceService}).
 */
public class SamplingNodeExtension implements NodeExtension {

    /** The real extension every call is forwarded to. */
    private final NodeExtension nodeExtension;

    public SamplingNodeExtension(NodeExtension nodeExtension) {
        this.nodeExtension = nodeExtension;
    }

    @Override
    public InternalSerializationService createSerializationService() {
        // The only behavioral difference of this extension: wrap the real
        // serialization service so serialized samples can be captured.
        InternalSerializationService serializationService = nodeExtension.createSerializationService();
        return new SamplingSerializationService(serializationService);
    }

    @Override
    public SecurityService getSecurityService() {
        return nodeExtension.getSecurityService();
    }

    @Override
    public void beforeStart() {
        nodeExtension.beforeStart();
    }

    @Override
    public void printNodeInfo() {
        nodeExtension.printNodeInfo();
    }

    @Override
    public void beforeJoin() {
        nodeExtension.beforeJoin();
    }

    @Override
    public void afterStart() {
        nodeExtension.afterStart();
    }

    @Override
    public boolean isStartCompleted() {
        return nodeExtension.isStartCompleted();
    }

    @Override
    public void beforeShutdown() {
        nodeExtension.beforeShutdown();
    }

    @Override
    public void shutdown() {
        nodeExtension.shutdown();
    }

    @Override
    public SecurityContext getSecurityContext() {
        return nodeExtension.getSecurityContext();
    }

    @Override
    public <T> T createService(Class<T> type) {
        return nodeExtension.createService(type);
    }

    @Override
    public Map<String, Object> createExtensionServices() {
        return nodeExtension.createExtensionServices();
    }

    @Override
    public MemberSocketInterceptor getSocketInterceptor(EndpointQualifier endpointQualifier) {
        return nodeExtension.getSocketInterceptor(endpointQualifier);
    }

    @Override
    public InboundHandler[] createInboundHandlers(EndpointQualifier qualifier, TcpIpConnection connection,
                                                  IOService ioService) {
        return nodeExtension.createInboundHandlers(qualifier, connection, ioService);
    }

    @Override
    public OutboundHandler[] createOutboundHandlers(EndpointQualifier qualifier, TcpIpConnection connection,
                                                    IOService ioService) {
        return nodeExtension.createOutboundHandlers(qualifier, connection, ioService);
    }

    @Override
    public ChannelInitializerProvider createChannelInitializerProvider(IOService ioService) {
        return nodeExtension.createChannelInitializerProvider(ioService);
    }

    @Override
    public void onThreadStart(Thread thread) {
        nodeExtension.onThreadStart(thread);
    }

    @Override
    public void onThreadStop(Thread thread) {
        nodeExtension.onThreadStop(thread);
    }

    @Override
    public MemoryStats getMemoryStats() {
        return nodeExtension.getMemoryStats();
    }

    @Override
    public void validateJoinRequest(JoinMessage joinMessage) {
        nodeExtension.validateJoinRequest(joinMessage);
    }

    @Override
    public void beforeClusterStateChange(ClusterState currState, ClusterState requestedState, boolean isTransient) {
        nodeExtension.beforeClusterStateChange(currState, requestedState, isTransient);
    }

    // Was missing @Override in the original; it delegates an interface method
    // like every other member, so the annotation is added for consistency and
    // so the compiler catches future signature drift.
    @Override
    public void onInitialClusterState(ClusterState initialState) {
        nodeExtension.onInitialClusterState(initialState);
    }

    @Override
    public void onClusterStateChange(ClusterState newState, boolean isTransient) {
        nodeExtension.onClusterStateChange(newState, isTransient);
    }

    @Override
    public void afterClusterStateChange(ClusterState oldState, ClusterState newState, boolean isTransient) {
        nodeExtension.afterClusterStateChange(oldState, newState, isTransient);
    }

    @Override
    public void onPartitionStateChange() {
        nodeExtension.onPartitionStateChange();
    }

    @Override
    public void onMemberListChange() {
        nodeExtension.onMemberListChange();
    }

    @Override
    public void onClusterVersionChange(Version newVersion) {
        nodeExtension.onClusterVersionChange(newVersion);
    }

    @Override
    public boolean isNodeVersionCompatibleWith(Version clusterVersion) {
        return nodeExtension.isNodeVersionCompatibleWith(clusterVersion);
    }

    @Override
    public boolean registerListener(Object listener) {
        return nodeExtension.registerListener(listener);
    }

    @Override
    public HotRestartService getHotRestartService() {
        return nodeExtension.getHotRestartService();
    }

    @Override
    public InternalHotRestartService getInternalHotRestartService() {
        return nodeExtension.getInternalHotRestartService();
    }

    @Override
    public UUID createMemberUuid() {
        return nodeExtension.createMemberUuid();
    }

    @Override
    public TimedMemberStateFactory createTimedMemberStateFactory(HazelcastInstanceImpl instance) {
        return nodeExtension.createTimedMemberStateFactory(instance);
    }

    @Override
    public ByteArrayProcessor createMulticastInputProcessor(IOService ioService) {
        return nodeExtension.createMulticastInputProcessor(ioService);
    }

    @Override
    public ByteArrayProcessor createMulticastOutputProcessor(IOService ioService) {
        return nodeExtension.createMulticastOutputProcessor(ioService);
    }

    @Override
    public DynamicConfigListener createDynamicConfigListener() {
        return nodeExtension.createDynamicConfigListener();
    }

    @Override
    public void registerPlugins(Diagnostics diagnostics) {
        // Intentionally a no-op: diagnostics plugins are not registered while sampling.
    }

    @Override
    public ManagementService createJMXManagementService(HazelcastInstanceImpl instance) {
        return nodeExtension.createJMXManagementService(instance);
    }

    @Override
    public TextCommandService createTextCommandService() {
        return nodeExtension.createTextCommandService();
    }

    @Override
    public void sendPhoneHome() {
        // Intentionally a no-op: no phone-home calls from the sampling/test extension.
    }

    @Override
    public void scheduleClusterVersionAutoUpgrade() {
        nodeExtension.scheduleClusterVersionAutoUpgrade();
    }

    @Override
    public boolean isClientFailoverSupported() {
        // Deliberately NOT delegated: failover support is disabled in this test extension.
        return false;
    }

    @Override
    public AuditlogService getAuditlogService() {
        // Deliberately NOT delegated: auditing is suppressed in this test extension.
        return NoOpAuditlogService.INSTANCE;
    }

    // Was missing @Override in the original; added for the same consistency
    // reason as onInitialClusterState above.
    @Override
    public CPPersistenceService getCPPersistenceService() {
        // Deliberately NOT delegated: CP persistence is a no-op in this test extension.
        return NopCPPersistenceService.INSTANCE;
    }
}
/* * Copyright 2013 Bazaarvoice, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.bazaarvoice.jolt.modifier.spec; import com.bazaarvoice.jolt.common.ComputedKeysComparator; import com.bazaarvoice.jolt.common.ExecutionStrategy; import com.bazaarvoice.jolt.common.Optional; import com.bazaarvoice.jolt.common.pathelement.ArrayPathElement; import com.bazaarvoice.jolt.common.pathelement.LiteralPathElement; import com.bazaarvoice.jolt.common.pathelement.PathElement; import com.bazaarvoice.jolt.common.pathelement.StarAllPathElement; import com.bazaarvoice.jolt.common.pathelement.StarDoublePathElement; import com.bazaarvoice.jolt.common.pathelement.StarRegexPathElement; import com.bazaarvoice.jolt.common.pathelement.StarSinglePathElement; import com.bazaarvoice.jolt.common.spec.BaseSpec; import com.bazaarvoice.jolt.common.spec.OrderedCompositeSpec; import com.bazaarvoice.jolt.common.tree.ArrayMatchedElement; import com.bazaarvoice.jolt.common.tree.MatchedElement; import com.bazaarvoice.jolt.common.tree.WalkedPath; import com.bazaarvoice.jolt.exception.SpecException; import com.bazaarvoice.jolt.modifier.DataType; import com.bazaarvoice.jolt.modifier.OpMode; import com.bazaarvoice.jolt.modifier.TemplatrSpecBuilder; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Composite spec is non-leaf level spec that contains one or many child specs and processes * 
them based on a pre-determined execution strategy */
public class ModifierCompositeSpec extends ModifierSpec implements OrderedCompositeSpec {

    // Relative sort priority per path-element class; used to order computed
    // (wildcard) children so that matching is deterministic.
    private static final HashMap<Class, Integer> orderMap;
    private static final ComputedKeysComparator computedKeysComparator;

    static {
        orderMap = new HashMap<>();
        orderMap.put( ArrayPathElement.class, 1 );
        orderMap.put( StarRegexPathElement.class, 2 );
        orderMap.put( StarDoublePathElement.class, 3 );
        orderMap.put( StarSinglePathElement.class, 4 );
        orderMap.put( StarAllPathElement.class, 5 );
        computedKeysComparator = ComputedKeysComparator.fromOrder(orderMap);
    }

    // Children addressed by a literal key (or stringified explicit array index).
    private final Map<String, ModifierSpec> literalChildren;
    // Children with wildcard/computed keys, kept sorted by computedKeysComparator.
    private final List<ModifierSpec> computedChildren;
    private final ExecutionStrategy executionStrategy;
    // Whether this node's input must be a map, a list, or is decided at runtime.
    private final DataType specDataType;

    /**
     * Builds a composite spec node from a raw spec map, partitioning children
     * into literal and computed groups and validating that map keys and array
     * indexes are not mixed under the same parent.
     *
     * @param key         the raw key of this spec node
     * @param spec        raw child spec map to be recursively parsed
     * @param opMode      the modifier operation mode (used in error messages and strategy choice)
     * @param specBuilder builder used to create the child specs
     * @throws SpecException if a non-explicit array index is used, or if array
     *                       indexes and map keys are mixed under this node
     */
    public ModifierCompositeSpec( final String key, final Map<String, Object> spec, final OpMode opMode, TemplatrSpecBuilder specBuilder ) {
        super(key, opMode);

        Map<String, ModifierSpec> literals = new LinkedHashMap<>();
        ArrayList<ModifierSpec> computed = new ArrayList<>();

        List<ModifierSpec> children = specBuilder.createSpec( spec );

        // remember max explicit index from spec to expand input array at runtime
        // need to validate spec such that it does not specify both array and literal path element
        int maxExplicitIndexFromSpec = -1, confirmedMapAtIndex = -1, confirmedArrayAtIndex = -1;

        for(int i=0; i<children.size(); i++) {
            ModifierSpec childSpec = children.get( i );
            PathElement childPathElement = childSpec.pathElement;

            // for every child,
            //  a) mark current index as either must be map or must be array
            //  b) mark it as literal or computed
            //  c) if arrayPathElement,
            //      - make sure its an explicit index type
            //      - save the max explicit index in spec
            if(childPathElement instanceof LiteralPathElement) {
                confirmedMapAtIndex = i;
                literals.put(childPathElement.getRawKey(), childSpec );
            }
            else if(childPathElement instanceof ArrayPathElement) {
                confirmedArrayAtIndex = i;
                ArrayPathElement childArrayPathElement = (ArrayPathElement) childPathElement;
                if(!childArrayPathElement.isExplicitArrayIndex()) {
                    throw new SpecException( opMode.name() + " RHS only supports explicit Array path element" );
                }
                int explicitIndex = childArrayPathElement.getExplicitArrayIndex();
                // if explicit index from spec also enforces "[...]?" don't bother using that as max index
                if ( !childSpec.checkValue ) {
                    maxExplicitIndexFromSpec = Math.max( maxExplicitIndexFromSpec, explicitIndex );
                }
                // explicit indexes are stored as string keys alongside literals
                literals.put( String.valueOf( explicitIndex ), childSpec );
            }
            else {
                // StarPathElements evaluates to String keys in a Map, EXCEPT StarAllPathElement
                // which can be both all keys in a map or all indexes in a list
                if(!(childPathElement instanceof StarAllPathElement)) {
                    confirmedMapAtIndex = i;
                }
                computed.add( childSpec );
            }

            // Bail as soon as both confirmedMapAtIndex & confirmedArrayAtIndex is set
            if(confirmedMapAtIndex > -1 && confirmedArrayAtIndex > -1) {
                throw new SpecException( opMode.name() + " RHS cannot mix int array index and string map key, defined spec for " + key + " contains: " + children.get( confirmedMapAtIndex ).pathElement.getCanonicalForm() + " conflicting " + children.get( confirmedArrayAtIndex ).pathElement.getCanonicalForm() );
            }
        }

        // set the dataType from calculated indexes
        specDataType = DataType.determineDataType( confirmedArrayAtIndex, confirmedMapAtIndex, maxExplicitIndexFromSpec );

        // Only the computed children need to be sorted
        Collections.sort( computed, computedKeysComparator );

        computed.trimToSize();
        literalChildren = Collections.unmodifiableMap( literals );
        computedChildren = Collections.unmodifiableList( computed );

        // extract generic execution strategy
        executionStrategy = determineExecutionStrategy();
    }

    /**
     * Applies this composite spec to one matched element of the input: validates
     * input/spec compatibility, materializes a null input if the data type allows
     * it, wraps list inputs in an {@link ArrayMatchedElement}, then walks the
     * children via the pre-computed execution strategy.
     *
     * @param inputKey      key under which the input was found
     * @param inputOptional the (possibly absent/null) input value
     * @param thisLevel     matched element for this level of the tree walk
     * @param walkedPath    path walked so far; this level is pushed and popped here
     * @param context       external context map available to child specs
     */
    @Override
    @SuppressWarnings( "unchecked" )
    public void applyElement( final String inputKey, Optional<Object> inputOptional, MatchedElement thisLevel, final WalkedPath walkedPath, final Map<String, Object> context ) {

        Object input = inputOptional.get();
        // sanity checks, cannot work on a list spec with map input and vice versa, and runtime with null input
        if(!specDataType.isCompatible( input )) {
            return;
        }

        // create input if it is null
        if( input == null ) {
            input = specDataType.create( inputKey, walkedPath, opMode );
            // if input has changed, wrap
            if ( input != null ) {
                inputOptional = Optional.of( input );
            }
        }

        // if input is List, create special ArrayMatchedElement, which tracks the original size of the input array
        if(input instanceof List) {
            // LIST means spec had array index explicitly specified, hence expand if needed
            if( specDataType instanceof DataType.LIST ) {
                int origSize = specDataType.expand( input );
                thisLevel = new ArrayMatchedElement( thisLevel.getRawKey(), origSize );
            }
            else {
                // specDataType is RUNTIME, so spec had no array index explicitly specified, no need to expand
                thisLevel = new ArrayMatchedElement( thisLevel.getRawKey(), ((List) input).size() );
            }
        }

        // add self to walked path
        walkedPath.add( input, thisLevel );
        // Handle the rest of the children
        executionStrategy.process( this, inputOptional, walkedPath, null, context );
        // We are done, so remove ourselves from the walkedPath
        walkedPath.removeLast();
    }

    /** @return unmodifiable map of children keyed by literal key / explicit index. */
    @Override
    public Map<String, ? extends BaseSpec> getLiteralChildren() {
        return literalChildren;
    }

    /** @return unmodifiable, comparator-ordered list of computed (wildcard) children. */
    @Override
    public List<? extends BaseSpec> getComputedChildren() {
        return computedChildren;
    }

    /**
     * Chooses the child-processing strategy from the literal/computed split:
     * all-literals, all-computed, a DEFINER-on-list conflict, or the mixed case.
     */
    @Override
    public ExecutionStrategy determineExecutionStrategy() {
        if ( computedChildren.isEmpty() ) {
            return ExecutionStrategy.ALL_LITERALS;
        }
        else if ( literalChildren.isEmpty() ) {
            return ExecutionStrategy.COMPUTED;
        }
        else if(opMode.equals( OpMode.DEFINER ) && specDataType instanceof DataType.LIST ) {
            return ExecutionStrategy.CONFLICT;
        }
        else {
            return ExecutionStrategy.ALL_LITERALS_WITH_COMPUTED;
        }
    }
}
package org.apache.mesos.hdfs.scheduler;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.mesos.hdfs.config.HdfsFrameworkConfig;
import org.apache.mesos.hdfs.state.IPersistentStateStore;
import org.apache.mesos.hdfs.state.LiveState;
import org.apache.mesos.hdfs.util.HDFSConstants;
import org.apache.mesos.Protos.CommandInfo;
import org.apache.mesos.Protos.Environment;
import org.apache.mesos.Protos.ExecutorID;
import org.apache.mesos.Protos.ExecutorInfo;
import org.apache.mesos.Protos.Offer;
import org.apache.mesos.Protos.OfferID;
import org.apache.mesos.Protos.Resource;
import org.apache.mesos.Protos.TaskID;
import org.apache.mesos.Protos.TaskInfo;
import org.apache.mesos.SchedulerDriver;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

/**
 * HdfsNode base class. Common offer-evaluation and task-launching machinery for
 * the concrete HDFS node types; subclasses supply the executor name and the
 * task types to launch.
 */
public abstract class HdfsNode implements IOfferEvaluator, ILauncher {
  private final Log log = LogFactory.getLog(HdfsNode.class);
  private final LiveState liveState;
  private final ResourceFactory resourceFactory;

  protected final HdfsFrameworkConfig config;
  protected final IPersistentStateStore persistenceStore;
  protected final String name;

  public HdfsNode(LiveState liveState, IPersistentStateStore persistentStore, HdfsFrameworkConfig config, String name) {
    this.liveState = liveState;
    this.persistenceStore = persistentStore;
    this.config = config;
    this.name = name;
    this.resourceFactory = new ResourceFactory(config.getHdfsRole());
  }

  public String getName() {
    return name;
  }

  /** Name of the executor class (relative to org.apache.mesos.hdfs.executor). */
  protected abstract String getExecutorName();

  /** Task types (e.g. namenode/journalnode ids) this node launches per offer. */
  protected abstract List<String> getTaskTypes();

  // Builds the tasks for this offer, records them, and launches them all at once.
  private void launch(SchedulerDriver driver, Offer offer) {
    List<Task> tasks = createTasks(offer);
    List<TaskInfo> taskInfos = getTaskInfos(tasks);

    recordTasks(tasks);
    driver.launchTasks(Arrays.asList(offer.getId()), taskInfos);
  }

  /**
   * Evaluates an offer and either launches this node's tasks on it or declines it.
   *
   * @return {@code true} if the offer was accepted and tasks were launched
   */
  public boolean tryLaunch(SchedulerDriver driver, Offer offer) {
    String nodeName = getName();
    OfferID offerId = offer.getId();

    log.info(String.format("Node: %s, evaluating offer: %s", nodeName, offerId));
    boolean acceptOffer = evaluate(offer);

    if (acceptOffer) {
      log.info(String.format("Node: %s, accepting offer: %s", nodeName, offerId));
      launch(driver, offer);
    } else {
      log.info(String.format("Node: %s, declining offer: %s", nodeName, offerId));
      driver.declineOffer(offerId);
    }

    return acceptOffer;
  }

  // Extracts the protobuf TaskInfo from each Task wrapper.
  private List<TaskInfo> getTaskInfos(List<Task> tasks) {
    List<TaskInfo> taskInfos = new ArrayList<TaskInfo>();

    for (Task task : tasks) {
      taskInfos.add(task.getInfo());
    }

    return taskInfos;
  }

  // Registers each launched task in both the live state (as staging) and the
  // persistent store, so state survives scheduler restarts.
  private void recordTasks(List<Task> tasks) {
    for (Task task : tasks) {
      TaskID taskId = task.getId();
      liveState.addStagingTask(taskId);
      persistenceStore.addHdfsNode(taskId, task.getHostname(), task.getType(), task.getName());
    }
  }

  /**
   * Builds the ExecutorInfo for this node: a shell command that points
   * JAVA_HOME at the fetched JRE and execs the named executor class, plus URIs
   * for the HDFS binary, the HDFS config, and the JRE (all fetched by Mesos),
   * plus the JVM-sizing environment variables.
   */
  private ExecutorInfo createExecutor(String taskIdName, String nodeName, String executorName) {
    int confServerPort = config.getConfigServerPort();

    String cmd = "export JAVA_HOME=$MESOS_DIRECTORY/" + config.getJreVersion()
        + " && env ; cd hdfs-mesos-* && "
        + "exec `if [ -z \"$JAVA_HOME\" ]; then echo java; "
        + "else echo $JAVA_HOME/bin/java; fi` "
        + "$HADOOP_OPTS "
        + "$EXECUTOR_OPTS "
        + "-cp lib/*.jar org.apache.mesos.hdfs.executor." + executorName;

    return ExecutorInfo
        .newBuilder()
        .setName(nodeName + " executor")
        .setExecutorId(ExecutorID.newBuilder().setValue("executor." + taskIdName).build())
        .addAllResources(getExecutorResources())
        .setCommand(
            CommandInfo
                .newBuilder()
                .addAllUris(
                    Arrays.asList(
                        CommandInfo.URI
                            .newBuilder()
                            .setValue(
                                String.format("http://%s:%d/%s", config.getFrameworkHostAddress(), confServerPort,
                                    HDFSConstants.HDFS_BINARY_FILE_NAME))
                            .build(),
                        CommandInfo.URI
                            .newBuilder()
                            .setValue(
                                String.format("http://%s:%d/%s", config.getFrameworkHostAddress(), confServerPort,
                                    HDFSConstants.HDFS_CONFIG_FILE_NAME))
                            .build(),
                        CommandInfo.URI
                            .newBuilder()
                            .setValue(config.getJreUrl())
                            .build()))
                .setEnvironment(Environment.newBuilder()
                    .addAllVariables(Arrays.asList(
                        Environment.Variable.newBuilder()
                            .setName("LD_LIBRARY_PATH")
                            .setValue(config.getLdLibraryPath()).build(),
                        Environment.Variable.newBuilder()
                            .setName("HADOOP_OPTS")
                            .setValue(config.getJvmOpts()).build(),
                        Environment.Variable.newBuilder()
                            .setName("HADOOP_HEAPSIZE")
                            .setValue(String.format("%d", config.getHadoopHeapSize())).build(),
                        Environment.Variable.newBuilder()
                            .setName("HADOOP_NAMENODE_OPTS")
                            .setValue("-Xmx" + config.getNameNodeHeapSize()
                                + "m -Xms" + config.getNameNodeHeapSize() + "m").build(),
                        Environment.Variable.newBuilder()
                            .setName("HADOOP_DATANODE_OPTS")
                            .setValue("-Xmx" + config.getDataNodeHeapSize()
                                + "m -Xms" + config.getDataNodeHeapSize() + "m").build(),
                        Environment.Variable.newBuilder()
                            .setName("EXECUTOR_OPTS")
                            .setValue("-Xmx" + config.getExecutorHeap()
                                + "m -Xms" + config.getExecutorHeap() + "m").build())))
                .setValue(cmd).build())
        .build();
  }

  // Per-task cpu/mem resources; heap is scaled by the configured JVM overhead.
  private List<Resource> getTaskResources(String taskType) {
    double cpu = config.getTaskCpus(taskType);
    double mem = config.getTaskHeapSize(taskType) * config.getJvmOverhead();

    List<Resource> resources = new ArrayList<Resource>();
    resources.add(resourceFactory.createCpuResource(cpu));
    resources.add(resourceFactory.createMemResource(mem));

    return resources;
  }

  /**
   * Picks the next free numbered task name for namenodes and journalnodes
   * (e.g. "namenode2"); other task types use the type itself as the name.
   *
   * @throws SchedulerException if all slots for the type are already taken,
   *         which indicates an inconsistent cluster state
   */
  private String getNextTaskName(String taskType) {
    if (taskType.equals(HDFSConstants.NAME_NODE_ID)) {
      Collection<String> nameNodeTaskNames = persistenceStore.getNameNodeTaskNames().values();
      for (int i = 1; i <= HDFSConstants.TOTAL_NAME_NODES; i++) {
        if (!nameNodeTaskNames.contains(HDFSConstants.NAME_NODE_ID + i)) {
          return HDFSConstants.NAME_NODE_ID + i;
        }
      }

      String errorStr = "Cluster is in inconsistent state. "
          + "Trying to launch more namenodes, but they are all already running.";
      log.error(errorStr);
      throw new SchedulerException(errorStr);
    }

    if (taskType.equals(HDFSConstants.JOURNAL_NODE_ID)) {
      Collection<String> journalNodeTaskNames = persistenceStore.getJournalNodeTaskNames().values();
      for (int i = 1; i <= config.getJournalNodeCount(); i++) {
        if (!journalNodeTaskNames.contains(HDFSConstants.JOURNAL_NODE_ID + i)) {
          return HDFSConstants.JOURNAL_NODE_ID + i;
        }
      }

      String errorStr = "Cluster is in inconsistent state. "
          + "Trying to launch more journalnodes, but they all are already running.";
      log.error(errorStr);
      throw new SchedulerException(errorStr);
    }

    return taskType;
  }

  // Executor-level cpu/mem resources, separate from per-task resources.
  private List<Resource> getExecutorResources() {
    double cpu = config.getExecutorCpus();
    double mem = config.getExecutorHeap() * config.getJvmOverhead();

    return Arrays.asList(
        resourceFactory.createCpuResource(cpu),
        resourceFactory.createMemResource(mem));
  }

  /**
   * Returns {@code true} when the offer does NOT have enough cpu or mem for the
   * requested task resources plus the executor's own overhead-scaled resources.
   */
  protected boolean offerNotEnoughResources(Offer offer, double cpus, int mem) {
    for (Resource offerResource : offer.getResourcesList()) {
      if (offerResource.getName().equals("cpus")
          && cpus + config.getExecutorCpus() > offerResource.getScalar().getValue()) {
        return true;
      }

      if (offerResource.getName().equals("mem")
          && (mem * config.getJvmOverhead())
          + (config.getExecutorHeap() * config.getJvmOverhead()) > offerResource.getScalar().getValue()) {
        return true;
      }
    }

    return false;
  }

  // One Task per task type; all tasks of a launch share a timestamped id base
  // and a freshly built (identical) executor description.
  private List<Task> createTasks(Offer offer) {
    String executorName = getExecutorName();
    String taskIdName = String.format("%s.%s.%d", name, executorName, System.currentTimeMillis());
    List<Task> tasks = new ArrayList<Task>();

    for (String type : getTaskTypes()) {
      List<Resource> resources = getTaskResources(type);
      ExecutorInfo execInfo = createExecutor(taskIdName, name, executorName);
      String taskName = getNextTaskName(type);

      tasks.add(new Task(resources, execInfo, offer, taskName, type, taskIdName));
    }

    return tasks;
  }
}
package com.psddev.dari.util;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.Iterator;
import java.util.Map;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/** Debug servlet that reports application {@link Stats}. */
@DebugFilter.Path("stats")
@SuppressWarnings("serial")
public class StatsDebugServlet extends HttpServlet {

    // Which measurement series a data request asks for.
    private enum Type {
        COUNT,
        DURATION
    }

    /**
     * Serves two things from the same URL: when a {@code type} parameter is
     * present, a JSON(P) array of averages for one stats operation (consumed by
     * the charts below); otherwise the full HTML debug page with cubism.js
     * charts and an averages table per {@link Stats} instance.
     */
    @Override
    protected void doGet(
            HttpServletRequest request,
            HttpServletResponse response)
            throws IOException, ServletException {

        @SuppressWarnings("all")
        WebPageContext page = new WebPageContext(getServletContext(), request, response);
        Type type = page.param(Type.class, "type");

        // --- Data endpoint branch: emit a JSON(P) array of averages. ---
        if (type != null) {
            String statsName = page.param(String.class, "stats");
            if (statsName != null) {
                for (Stats stats : Stats.Static.getAll()) {
                    if (statsName.equals(stats.getName())) {
                        Stats.Measurement measurement = stats.getMeasurements().get(page.param(String.class, "operation"));
                        if (measurement != null) {
                            response.setContentType("text/javascript");
                            PrintWriter writer = response.getWriter();
                            String callback = page.param(String.class, "callback");

                            // Optional JSONP wrapper.
                            if (!ObjectUtils.isBlank(callback)) {
                                writer.write(callback);
                                writer.write("(");
                            }

                            long begin = page.param(long.class, "begin");
                            long end = page.paramOrDefault(long.class, "end", System.currentTimeMillis());
                            writer.write("[");

                            if (Type.COUNT.equals(type)) {
                                for (Iterator<Double> i = measurement.getCountAverages(page.param(int.class, "interval"), begin, end).iterator(); i.hasNext();) {
                                    double average = i.next();
                                    // NaN (no data) is serialized as JSON null.
                                    writer.write(Double.isNaN(average) ? "null" : String.valueOf(average));
                                    if (i.hasNext()) {
                                        writer.write(",");
                                    }
                                }

                            } else {
                                for (Iterator<Double> i = measurement.getDurationAverages(0, begin, end).iterator(); i.hasNext();) {
                                    double average = i.next();
                                    // Durations are scaled by 5e3 — matches the 5e3 step
                                    // used by the cubism context and the ms rendering
                                    // in writeCountAndDuration below; TODO confirm units.
                                    writer.write(Double.isNaN(average) ? "null" : String.valueOf(average * 5e3));
                                    if (i.hasNext()) {
                                        writer.write(",");
                                    }
                                }
                            }

                            writer.write("]");
                            if (!ObjectUtils.isBlank(callback)) {
                                writer.write(");");
                            }

                            return;
                        }
                    }
                }
            }
        }

        // --- HTML page branch: render charts + averages tables. ---
        new DebugFilter.PageWriter(getServletContext(), request, response) { {
            startPage("Stats");

            // Inline CSS for the cubism horizon charts.
            writeStart("style", "type", "text/css");
                write(".chart { float: left; margin: 0 20px 10px 0; }");
                write("hr { border-color: black; border-top: none; margin-left: -20px; margin-right: -20px; }");
                write(".axis {");
                write("font: 10px sans-serif;");
                write("}");
                write(".axis text {");
                write("-webkit-transition: fill-opacity 50ms linear;");
                write("}");
                write(".axis path {");
                write("display: none;");
                write("}");
                write(".axis line {");
                write("stroke: #000;");
                write("shape-rendering: crispEdges;");
                write("}");
                write(".horizon {");
                write("border-color: black;");
                write("border-style: solid;");
                write("border-width: 1px 0;");
                write("overflow: hidden;");
                write("position: relative;");
                write("width: 400px;");
                write("}");
                write(".horizon canvas {");
                write("display: block;");
                write("}");
                write(".horizon .title,");
                write(".horizon .value {");
                write("bottom: 0;");
                write("line-height: 30px;");
                write("margin: 0 6px;");
                write("position: absolute;");
                write("text-shadow: 0 1px 0 rgba(255, 255, 255, .5);");
                write("white-space: nowrap;");
                write("}");
                write(".horizon .title {");
                write("left: 0;");
                write("top: 0;");
                write("}");
                write(".horizon .value {");
                write("right: 0;");
                write("}");
                write(".line {");
                write("background: #000;");
                write("opacity: .2;");
                write("z-index: 2;");
                write("}");
            writeEnd();

            // d3 + cubism libraries and the shared chart context (5s step).
            writeStart("script", "type", "text/javascript", "src", "/_resource/d3/d3.v2.min.js").writeEnd();
            writeStart("script", "type", "text/javascript", "src", "/_resource/d3/cubism.v1.min.js").writeEnd();
            writeStart("script", "type", "text/javascript");
                write("var maxDataSize = 400;");
                write("var context = cubism.context().serverDelay(0).clientDelay(0).step(5e3).size(maxDataSize);");
            writeEnd();

            // One section per registered Stats instance.
            for (Iterator<Stats> i = Stats.Static.getAll().iterator(); i.hasNext();) {
                Stats stats = i.next();
                String statsName = stats.getName();
                String operation = page.paramOrDefault(String.class, statsName + "/operation", "Total");
                int intervalIndex = page.param(int.class, statsName + "/interval");

                writeStart("h2").writeHtml(statsName).writeEnd();

                // Two charts per stats: throughput (COUNT) and latency (DURATION).
                for (Type type : Type.values()) {
                    String divId = JspUtils.createId(page.getRequest());

                    writeStart("div", "class", "chart");
                        writeStart("h3").writeHtml(Type.COUNT.equals(type) ? "Throughput (/s)" : "Latency (ms)").writeEnd();
                        writeStart("div", "id", divId).writeEnd();
                    writeEnd();

                    // Client-side metric that polls this servlet's data branch
                    // (above) and left-pads the first response with zeros.
                    writeStart("script", "type", "text/javascript");
                        write("d3.select('#");
                        write(divId);
                        write("').call(function(div) {");
                        write("div.datum(function() {");
                        write("var last;");
                        write("return context.metric(function(begin, end, step, callback) {");
                        write("begin = +begin, end= +end;");
                        write("var first = isNaN(last);");
                        write("if (first) last = begin;");
                        write("$.getJSON('/_debug/stats', {");
                        write("'stats': '");
                        write(page.js(statsName));
                        write("',");
                        write("'operation': '");
                        write(page.js(operation));
                        write("',");
                        write("'interval': '");
                        write(page.js(intervalIndex));
                        write("',");
                        write("'type': '");
                        write(page.js(type));
                        write("',");
                        write("'begin': begin,");
                        write("'end': end");
                        write("}, function(data) {");
                        write("if (first) {");
                        write("var padding = maxDataSize - data.length;");
                        write("if (padding > 0) {");
                        write("var newData = [ ];");
                        write("for (var i = 0; i < padding; ++ i) newData.push(0.0);");
                        write("data = newData.concat(data);");
                        write("}");
                        write("}");
                        write("callback(null, data);");
                        write("});");
                        write("}, '");
                        write(page.js(operation + " over " + stats.getAverageIntervals().get(intervalIndex).intValue() + "s"));
                        write("');");
                        write("});");
                        write("div.append('div')");
                        write(".attr('class', 'axis')");
                        write(".call(context.axis().orient('top'));");
                        write("div.append('div')");
                        write(".attr('class', 'horizon')");
                        write(".call(context.horizon().height(60));");
                        write("});");
                    writeEnd();
                }

                // Averages table: one row per operation, columns per interval.
                writeStart("h3", "style", "clear: left;").writeHtml("Averages").writeEnd();
                writeStart("table", "class", "table table-condensed");
                    writeStart("thead");
                        writeStart("tr");
                            writeStart("th").writeHtml("Operation").writeEnd();
                            writeStart("th").writeHtml("Total").writeEnd();
                            for (double averageInterval : stats.getAverageIntervals()) {
                                writeStart("th", "colspan", 2).writeHtml("Over ").writeObject((int) averageInterval).writeHtml("s").writeEnd();
                            }
                            writeStart("th", "colspan", 2).writeHtml("Over All").writeEnd();
                        writeEnd();
                    writeEnd();
                    writeStart("tbody");
                        for (Map.Entry<String, Stats.Measurement> entry : stats.getMeasurements().entrySet()) {
                            writeStatsMeasurement(stats, entry.getKey(), entry.getValue());
                        }
                    writeEnd();
                writeEnd();

                if (i.hasNext()) {
                    writeElement("hr");
                }
            }

            // Synchronize the hover value labels across all charts.
            writeStart("script", "type", "text/javascript");
                write("context.on('focus', function(i) {");
                write("d3.selectAll('.value').style('right', i == null ? null : context.size() - i + 'px');");
                write("});");
            writeEnd();

            endPage();
        }

            // Writes individual stats measurement.
            private void writeStatsMeasurement(Stats stats, String operation, Stats.Measurement measurement) throws IOException {
                writeStart("tr");
                    writeStart("th").writeHtml(operation).writeEnd();
                    writeStart("td").writeObject(measurement.getOverallTotalCount()).writeEnd();
                    for (int i = 0, size = stats.getAverageIntervals().size(); i < size; ++ i) {
                        writeCountAndDuration(stats, operation, i, measurement.getCurrentCountAverage(i), measurement.getCurrentDurationAverage(i));
                    }
                    // intervalIndex -1 marks the "Over All" (non-linked) columns.
                    writeCountAndDuration(stats, operation, -1, measurement.getOverallCountAverage(), measurement.getOverallDurationAverage());
                writeEnd();
            }

            // Writes one count cell and one duration cell; interval cells link
            // back to this page with the operation/interval pre-selected.
            private void writeCountAndDuration(Stats stats, String operation, int intervalIndex, double count, double duration) throws IOException {
                boolean link = intervalIndex >= 0;
                String statsName = stats.getName();
                String href = page.url(null, statsName + "/operation", operation, statsName + "/interval", intervalIndex);

                writeStart("td");
                    if (link) {
                        writeStart("a", "href", href);
                    }
                    writeObject(count).writeHtml("/s");
                    if (link) {
                        writeEnd();
                    }
                writeEnd();

                writeStart("td");
                    if (link) {
                        writeStart("a", "href", href);
                    }
                    if (Double.isNaN(duration)) {
                        writeStart("span", "class", "label").writeHtml("N/A").writeEnd();
                    } else {
                        writeObject(duration * 5e3).writeHtml("ms");
                    }
                    if (link) {
                        writeEnd();
                    }
                writeEnd();
            }
        };
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.plugins.tree; import org.apache.jackrabbit.oak.api.PropertyState; import org.apache.jackrabbit.oak.api.Root; import org.apache.jackrabbit.oak.api.Tree; import org.apache.jackrabbit.oak.commons.PathUtils; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import static com.google.common.base.Objects.toStringHelper; import static com.google.common.base.Preconditions.checkArgument; import static org.apache.jackrabbit.oak.commons.PathUtils.elements; import static org.apache.jackrabbit.oak.commons.PathUtils.isAbsolute; /** * A {@code TreeLocation} denotes a location inside a tree. * <p> * It can either refer to a inner node (that is a {@link Tree}), to a leaf (that is a * {@link PropertyState}) or to an invalid location which refers to neither of the former. * {@code TreeLocation} instances provide methods for navigating trees such that navigation * always results in new {@code TreeLocation} instances. Navigation never fails. Errors are * deferred until the underlying item itself is accessed. That is, if a {@code TreeLocation} * points to an item which does not exist or is unavailable otherwise (i.e. 
due to access
 * control restrictions) accessing the tree will return {@code null} at this point.
 */
public abstract class TreeLocation {

    /**
     * Factory for a {@code TreeLocation} pointing at the given {@code tree}.
     */
    public static TreeLocation create(@NotNull Tree tree) {
        return new NodeLocation(tree);
    }

    /**
     * Factory for a {@code TreeLocation} pointing at the item found by walking
     * the absolute {@code path} down from the root of {@code root}, one name
     * segment at a time.
     */
    public static TreeLocation create(Root root, String path) {
        checkArgument(isAbsolute(path));
        TreeLocation current = create(root.getTree(PathUtils.ROOT_PATH));
        for (String segment : elements(path)) {
            current = current.getChild(segment);
        }
        return current;
    }

    /**
     * Shorthand for {@code create(root, "/")}.
     */
    public static TreeLocation create(Root root) {
        return create(root, PathUtils.ROOT_PATH);
    }

    /**
     * Navigate to the parent location, or to an invalid location when this is
     * the root of the hierarchy.
     * @return a {@code TreeLocation} for the parent of this location.
     */
    @NotNull
    public abstract TreeLocation getParent();

    /**
     * Whether the underlying {@link org.apache.jackrabbit.oak.api.Tree} or
     * {@link org.apache.jackrabbit.oak.api.PropertyState} backing this location
     * can actually be reached.
     * @return {@code true} if the underlying item is available and has not been disconnected.
     * @see org.apache.jackrabbit.oak.api.Tree#exists()
     */
    public abstract boolean exists();

    /**
     * Name of this location.
     * @return name
     */
    @NotNull
    public abstract String getName();

    /**
     * Path of this location.
     * @return path
     */
    @NotNull
    public abstract String getPath();

    /**
     * Remove the underlying item.
     *
     * @return {@code true} if the item was removed, {@code false} otherwise.
     */
    public abstract boolean remove();

    /**
     * Navigate to a child of the given {@code name}.
     * @param name name of the child
     * @return this default implementation return a non existing location
     */
    @NotNull
    public TreeLocation getChild(String name) {
        return new NullLocation(this, name);
    }

    /**
     * Underlying {@link org.apache.jackrabbit.oak.api.Tree} of this location.
     * @return this default implementation return {@code null}.
     */
    @Nullable
    public Tree getTree() {
        return null;
    }

    /**
     * Underlying {@link org.apache.jackrabbit.oak.api.PropertyState} of this location.
     * @return this default implementation return {@code null}.
     */
    @Nullable
    public PropertyState getProperty() {
        return null;
    }

    @Override
    public String toString() {
        return toStringHelper(this).add("path", getPath()).toString();
    }

    /**
     * A {@code TreeLocation} backed by a child tree of a {@code Tree}.
     */
    private static class NodeLocation extends TreeLocation {
        private final Tree tree;

        public NodeLocation(Tree tree) {
            this.tree = tree;
        }

        @NotNull
        @Override
        public TreeLocation getParent() {
            if (tree.isRoot()) {
                return NullLocation.NULL;
            }
            return new NodeLocation(tree.getParent());
        }

        @NotNull
        @Override
        public TreeLocation getChild(String name) {
            // A property of the same name shadows a child node.
            if (!tree.hasProperty(name)) {
                return new NodeLocation(tree.getChild(name));
            }
            return new PropertyLocation(tree, name);
        }

        @Override
        public boolean exists() {
            return tree.exists();
        }

        @NotNull
        @Override
        public String getName() {
            return tree.getName();
        }

        @Override
        public Tree getTree() {
            if (exists()) {
                return tree;
            }
            return null;
        }

        @NotNull
        @Override
        public String getPath() {
            return tree.getPath();
        }

        @Override
        public boolean remove() {
            if (exists()) {
                return tree.remove();
            }
            return false;
        }
    }

    /**
     * A {@code TreeLocation} backed by a property of a {@code Tree}.
     */
    private static class PropertyLocation extends TreeLocation {
        private final Tree parent;
        private final String name;

        public PropertyLocation(Tree parent, String name) {
            this.parent = parent;
            this.name = name;
        }

        @NotNull
        @Override
        public TreeLocation getParent() {
            return new NodeLocation(parent);
        }

        @Override
        public boolean exists() {
            return parent.hasProperty(name);
        }

        @NotNull
        @Override
        public String getName() {
            return name;
        }

        @Override
        public PropertyState getProperty() {
            return parent.getProperty(name);
        }

        @NotNull
        @Override
        public String getPath() {
            return PathUtils.concat(parent.getPath(), name);
        }

        @Override
        public boolean remove() {
            parent.removeProperty(name);
            return true;
        }
    }

    /**
     * A {@code TreeLocation} for an invalid spot in a tree, i.e. one where no
     * item resides.
     */
    private static final class NullLocation extends TreeLocation {
        public static final NullLocation NULL = new NullLocation();

        private final TreeLocation parent;
        private final String name;

        public NullLocation(TreeLocation parent, String name) {
            this.parent = parent;
            this.name = name;
        }

        private NullLocation() {
            this.parent = this;
            this.name = "";
        }

        @NotNull
        @Override
        public TreeLocation getParent() {
            return parent;
        }

        /**
         * @return {@code false}
         */
        @Override
        public boolean exists() {
            return false;
        }

        @NotNull
        @Override
        public String getName() {
            return name;
        }

        @NotNull
        @Override
        public String getPath() {
            // The singleton NULL is its own parent; it has the empty path.
            if (parent == this) {
                return "";
            }
            return PathUtils.concat(parent.getPath(), name);
        }

        /**
         * @return Always {@code false}.
         */
        @Override
        public boolean remove() {
            return false;
        }
    }
}
/* Copyright 2022 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package io.kubernetes.client.openapi.models;

import java.util.Iterator;
import java.util.List;

/**
 * Generated fluent-builder implementation backing
 * {@code V2beta1HorizontalPodAutoscalerStatusFluent}. Holds the status fields
 * (conditions, currentMetrics, currentReplicas, desiredReplicas, lastScaleTime,
 * observedGeneration) as mutable builder state. List-typed properties are kept
 * twice, in lockstep: in a local {@code ArrayList} of builders and in the
 * inherited {@code _visitables} registry (keyed by property name). Do not edit
 * by hand — regenerate instead.
 */
public class V2beta1HorizontalPodAutoscalerStatusFluentImpl<
        A extends io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent<A>>
    extends io.kubernetes.client.fluent.BaseFluent<A>
    implements io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent<A> {
  public V2beta1HorizontalPodAutoscalerStatusFluentImpl() {}

  // Copy-constructor: seeds every builder property from an existing status instance.
  public V2beta1HorizontalPodAutoscalerStatusFluentImpl(
      io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatus instance) {
    this.withConditions(instance.getConditions());
    this.withCurrentMetrics(instance.getCurrentMetrics());
    this.withCurrentReplicas(instance.getCurrentReplicas());
    this.withDesiredReplicas(instance.getDesiredReplicas());
    this.withLastScaleTime(instance.getLastScaleTime());
    this.withObservedGeneration(instance.getObservedGeneration());
  }

  // List properties store nested *builders*, not built model objects; built values
  // are materialized on demand via build*() methods. Lazily initialized (null until used).
  private java.util.ArrayList<
          io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder>
      conditions;
  private java.util.ArrayList<io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder>
      currentMetrics;
  private java.lang.Integer currentReplicas;
  private java.lang.Integer desiredReplicas;
  private java.time.OffsetDateTime lastScaleTime;
  private java.lang.Long observedGeneration;

  // ---------------------------------------------------------------- conditions

  // Inserts at index; a negative index means "append at the end".
  public A addToConditions(
      java.lang.Integer index,
      io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition item) {
    if (this.conditions == null) {
      this.conditions =
          new java.util.ArrayList<
              io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder>();
    }
    io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder builder =
        new io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder(
            item);
    _visitables
        .get("conditions")
        .add(index >= 0 ? index : _visitables.get("conditions").size(), builder);
    this.conditions.add(index >= 0 ? index : conditions.size(), builder);
    return (A) this;
  }

  // Replaces the element at index; out-of-range indices append instead of throwing.
  public A setToConditions(
      java.lang.Integer index,
      io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition item) {
    if (this.conditions == null) {
      this.conditions =
          new java.util.ArrayList<
              io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder>();
    }
    io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder builder =
        new io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder(
            item);
    if (index < 0 || index >= _visitables.get("conditions").size()) {
      _visitables.get("conditions").add(builder);
    } else {
      _visitables.get("conditions").set(index, builder);
    }
    if (index < 0 || index >= conditions.size()) {
      conditions.add(builder);
    } else {
      conditions.set(index, builder);
    }
    return (A) this;
  }

  public A addToConditions(
      io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition... items) {
    if (this.conditions == null) {
      this.conditions =
          new java.util.ArrayList<
              io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder>();
    }
    for (io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition item : items) {
      io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder builder =
          new io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder(
              item);
      _visitables.get("conditions").add(builder);
      this.conditions.add(builder);
    }
    return (A) this;
  }

  public A addAllToConditions(
      java.util.Collection<
              io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition>
          items) {
    if (this.conditions == null) {
      this.conditions =
          new java.util.ArrayList<
              io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder>();
    }
    for (io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition item : items) {
      io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder builder =
          new io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder(
              item);
      _visitables.get("conditions").add(builder);
      this.conditions.add(builder);
    }
    return (A) this;
  }

  // Removal relies on builder equality: a fresh builder wrapping an equal item
  // matches the stored builder.
  public A removeFromConditions(
      io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition... items) {
    for (io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition item : items) {
      io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder builder =
          new io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder(
              item);
      _visitables.get("conditions").remove(builder);
      if (this.conditions != null) {
        this.conditions.remove(builder);
      }
    }
    return (A) this;
  }

  public A removeAllFromConditions(
      java.util.Collection<
              io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition>
          items) {
    for (io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition item : items) {
      io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder builder =
          new io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder(
              item);
      _visitables.get("conditions").remove(builder);
      if (this.conditions != null) {
        this.conditions.remove(builder);
      }
    }
    return (A) this;
  }

  public A removeMatchingFromConditions(
      java.util.function.Predicate<
              io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder>
          predicate) {
    if (conditions == null) return (A) this;
    final Iterator<
            io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder>
        each = conditions.iterator();
    final List visitables = _visitables.get("conditions");
    while (each.hasNext()) {
      io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder builder =
          each.next();
      if (predicate.test(builder)) {
        visitables.remove(builder);
        each.remove();
      }
    }
    return (A) this;
  }

  /**
   * This method has been deprecated, please use method buildConditions instead.
   *
   * @return The buildable object.
   */
  @java.lang.Deprecated
  public java.util.List<io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition>
      getConditions() {
    return conditions != null ? build(conditions) : null;
  }

  public java.util.List<io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition>
      buildConditions() {
    return conditions != null ? build(conditions) : null;
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition buildCondition(
      java.lang.Integer index) {
    return this.conditions.get(index).build();
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition
      buildFirstCondition() {
    return this.conditions.get(0).build();
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition
      buildLastCondition() {
    return this.conditions.get(conditions.size() - 1).build();
  }

  // Returns the first match, or null when nothing matches.
  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition
      buildMatchingCondition(
          java.util.function.Predicate<
                  io.kubernetes.client.openapi.models
                      .V2beta1HorizontalPodAutoscalerConditionBuilder>
              predicate) {
    for (io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder item :
        conditions) {
      if (predicate.test(item)) {
        return item.build();
      }
    }
    return null;
  }

  public java.lang.Boolean hasMatchingCondition(
      java.util.function.Predicate<
              io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder>
          predicate) {
    for (io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder item :
        conditions) {
      if (predicate.test(item)) {
        return true;
      }
    }
    return false;
  }

  // Replaces the whole list; null clears the property.
  public A withConditions(
      java.util.List<io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition>
          conditions) {
    if (this.conditions != null) {
      _visitables.get("conditions").removeAll(this.conditions);
    }
    if (conditions != null) {
      this.conditions = new java.util.ArrayList();
      for (io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition item :
          conditions) {
        this.addToConditions(item);
      }
    } else {
      this.conditions = null;
    }
    return (A) this;
  }

  public A withConditions(
      io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition... conditions) {
    if (this.conditions != null) {
      this.conditions.clear();
    }
    if (conditions != null) {
      for (io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition item :
          conditions) {
        this.addToConditions(item);
      }
    }
    return (A) this;
  }

  public java.lang.Boolean hasConditions() {
    return conditions != null && !conditions.isEmpty();
  }

  // Nested-builder entry points: each returns a ConditionsNestedImpl whose and()/
  // endCondition() writes the built item back via setToConditions.
  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
          .ConditionsNested<
          A>
      addNewCondition() {
    return new io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluentImpl
        .ConditionsNestedImpl();
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
          .ConditionsNested<
          A>
      addNewConditionLike(
          io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition item) {
    return new io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluentImpl
        .ConditionsNestedImpl(-1, item);
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
          .ConditionsNested<
          A>
      setNewConditionLike(
          java.lang.Integer index,
          io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition item) {
    return new io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluentImpl
        .ConditionsNestedImpl(index, item);
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
          .ConditionsNested<
          A>
      editCondition(java.lang.Integer index) {
    if (conditions.size() <= index)
      throw new RuntimeException("Can't edit conditions. Index exceeds size.");
    return setNewConditionLike(index, buildCondition(index));
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
          .ConditionsNested<
          A>
      editFirstCondition() {
    if (conditions.size() == 0)
      throw new RuntimeException("Can't edit first conditions. The list is empty.");
    return setNewConditionLike(0, buildCondition(0));
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
          .ConditionsNested<
          A>
      editLastCondition() {
    int index = conditions.size() - 1;
    if (index < 0) throw new RuntimeException("Can't edit last conditions. The list is empty.");
    return setNewConditionLike(index, buildCondition(index));
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
          .ConditionsNested<
          A>
      editMatchingCondition(
          java.util.function.Predicate<
                  io.kubernetes.client.openapi.models
                      .V2beta1HorizontalPodAutoscalerConditionBuilder>
              predicate) {
    int index = -1;
    for (int i = 0; i < conditions.size(); i++) {
      if (predicate.test(conditions.get(i))) {
        index = i;
        break;
      }
    }
    if (index < 0) throw new RuntimeException("Can't edit matching conditions. No match found.");
    return setNewConditionLike(index, buildCondition(index));
  }

  // ------------------------------------------------------------ currentMetrics
  // Same accessor suite as "conditions", mirrored for the currentMetrics property.

  public A addToCurrentMetrics(
      java.lang.Integer index, io.kubernetes.client.openapi.models.V2beta1MetricStatus item) {
    if (this.currentMetrics == null) {
      this.currentMetrics =
          new java.util.ArrayList<io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder>();
    }
    io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder builder =
        new io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder(item);
    _visitables
        .get("currentMetrics")
        .add(index >= 0 ? index : _visitables.get("currentMetrics").size(), builder);
    this.currentMetrics.add(index >= 0 ? index : currentMetrics.size(), builder);
    return (A) this;
  }

  public A setToCurrentMetrics(
      java.lang.Integer index, io.kubernetes.client.openapi.models.V2beta1MetricStatus item) {
    if (this.currentMetrics == null) {
      this.currentMetrics =
          new java.util.ArrayList<io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder>();
    }
    io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder builder =
        new io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder(item);
    if (index < 0 || index >= _visitables.get("currentMetrics").size()) {
      _visitables.get("currentMetrics").add(builder);
    } else {
      _visitables.get("currentMetrics").set(index, builder);
    }
    if (index < 0 || index >= currentMetrics.size()) {
      currentMetrics.add(builder);
    } else {
      currentMetrics.set(index, builder);
    }
    return (A) this;
  }

  public A addToCurrentMetrics(io.kubernetes.client.openapi.models.V2beta1MetricStatus... items) {
    if (this.currentMetrics == null) {
      this.currentMetrics =
          new java.util.ArrayList<io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder>();
    }
    for (io.kubernetes.client.openapi.models.V2beta1MetricStatus item : items) {
      io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder builder =
          new io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder(item);
      _visitables.get("currentMetrics").add(builder);
      this.currentMetrics.add(builder);
    }
    return (A) this;
  }

  public A addAllToCurrentMetrics(
      java.util.Collection<io.kubernetes.client.openapi.models.V2beta1MetricStatus> items) {
    if (this.currentMetrics == null) {
      this.currentMetrics =
          new java.util.ArrayList<io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder>();
    }
    for (io.kubernetes.client.openapi.models.V2beta1MetricStatus item : items) {
      io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder builder =
          new io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder(item);
      _visitables.get("currentMetrics").add(builder);
      this.currentMetrics.add(builder);
    }
    return (A) this;
  }

  public A removeFromCurrentMetrics(
      io.kubernetes.client.openapi.models.V2beta1MetricStatus... items) {
    for (io.kubernetes.client.openapi.models.V2beta1MetricStatus item : items) {
      io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder builder =
          new io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder(item);
      _visitables.get("currentMetrics").remove(builder);
      if (this.currentMetrics != null) {
        this.currentMetrics.remove(builder);
      }
    }
    return (A) this;
  }

  public A removeAllFromCurrentMetrics(
      java.util.Collection<io.kubernetes.client.openapi.models.V2beta1MetricStatus> items) {
    for (io.kubernetes.client.openapi.models.V2beta1MetricStatus item : items) {
      io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder builder =
          new io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder(item);
      _visitables.get("currentMetrics").remove(builder);
      if (this.currentMetrics != null) {
        this.currentMetrics.remove(builder);
      }
    }
    return (A) this;
  }

  public A removeMatchingFromCurrentMetrics(
      java.util.function.Predicate<io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder>
          predicate) {
    if (currentMetrics == null) return (A) this;
    final Iterator<io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder> each =
        currentMetrics.iterator();
    final List visitables = _visitables.get("currentMetrics");
    while (each.hasNext()) {
      io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder builder = each.next();
      if (predicate.test(builder)) {
        visitables.remove(builder);
        each.remove();
      }
    }
    return (A) this;
  }

  /**
   * This method has been deprecated, please use method buildCurrentMetrics instead.
   *
   * @return The buildable object.
   */
  @java.lang.Deprecated
  public java.util.List<io.kubernetes.client.openapi.models.V2beta1MetricStatus>
      getCurrentMetrics() {
    return currentMetrics != null ? build(currentMetrics) : null;
  }

  public java.util.List<io.kubernetes.client.openapi.models.V2beta1MetricStatus>
      buildCurrentMetrics() {
    return currentMetrics != null ? build(currentMetrics) : null;
  }

  public io.kubernetes.client.openapi.models.V2beta1MetricStatus buildCurrentMetric(
      java.lang.Integer index) {
    return this.currentMetrics.get(index).build();
  }

  public io.kubernetes.client.openapi.models.V2beta1MetricStatus buildFirstCurrentMetric() {
    return this.currentMetrics.get(0).build();
  }

  public io.kubernetes.client.openapi.models.V2beta1MetricStatus buildLastCurrentMetric() {
    return this.currentMetrics.get(currentMetrics.size() - 1).build();
  }

  public io.kubernetes.client.openapi.models.V2beta1MetricStatus buildMatchingCurrentMetric(
      java.util.function.Predicate<io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder>
          predicate) {
    for (io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder item : currentMetrics) {
      if (predicate.test(item)) {
        return item.build();
      }
    }
    return null;
  }

  public java.lang.Boolean hasMatchingCurrentMetric(
      java.util.function.Predicate<io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder>
          predicate) {
    for (io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder item : currentMetrics) {
      if (predicate.test(item)) {
        return true;
      }
    }
    return false;
  }

  public A withCurrentMetrics(
      java.util.List<io.kubernetes.client.openapi.models.V2beta1MetricStatus> currentMetrics) {
    if (this.currentMetrics != null) {
      _visitables.get("currentMetrics").removeAll(this.currentMetrics);
    }
    if (currentMetrics != null) {
      this.currentMetrics = new java.util.ArrayList();
      for (io.kubernetes.client.openapi.models.V2beta1MetricStatus item : currentMetrics) {
        this.addToCurrentMetrics(item);
      }
    } else {
      this.currentMetrics = null;
    }
    return (A) this;
  }

  public A withCurrentMetrics(
      io.kubernetes.client.openapi.models.V2beta1MetricStatus... currentMetrics) {
    if (this.currentMetrics != null) {
      this.currentMetrics.clear();
    }
    if (currentMetrics != null) {
      for (io.kubernetes.client.openapi.models.V2beta1MetricStatus item : currentMetrics) {
        this.addToCurrentMetrics(item);
      }
    }
    return (A) this;
  }

  public java.lang.Boolean hasCurrentMetrics() {
    return currentMetrics != null && !currentMetrics.isEmpty();
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
          .CurrentMetricsNested<
          A>
      addNewCurrentMetric() {
    return new io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluentImpl
        .CurrentMetricsNestedImpl();
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
          .CurrentMetricsNested<
          A>
      addNewCurrentMetricLike(io.kubernetes.client.openapi.models.V2beta1MetricStatus item) {
    return new io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluentImpl
        .CurrentMetricsNestedImpl(-1, item);
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
          .CurrentMetricsNested<
          A>
      setNewCurrentMetricLike(
          java.lang.Integer index, io.kubernetes.client.openapi.models.V2beta1MetricStatus item) {
    return new io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluentImpl
        .CurrentMetricsNestedImpl(index, item);
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
          .CurrentMetricsNested<
          A>
      editCurrentMetric(java.lang.Integer index) {
    if (currentMetrics.size() <= index)
      throw new RuntimeException("Can't edit currentMetrics. Index exceeds size.");
    return setNewCurrentMetricLike(index, buildCurrentMetric(index));
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
          .CurrentMetricsNested<
          A>
      editFirstCurrentMetric() {
    if (currentMetrics.size() == 0)
      throw new RuntimeException("Can't edit first currentMetrics. The list is empty.");
    return setNewCurrentMetricLike(0, buildCurrentMetric(0));
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
          .CurrentMetricsNested<
          A>
      editLastCurrentMetric() {
    int index = currentMetrics.size() - 1;
    if (index < 0) throw new RuntimeException("Can't edit last currentMetrics. The list is empty.");
    return setNewCurrentMetricLike(index, buildCurrentMetric(index));
  }

  public io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
          .CurrentMetricsNested<
          A>
      editMatchingCurrentMetric(
          java.util.function.Predicate<
                  io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder>
              predicate) {
    int index = -1;
    for (int i = 0; i < currentMetrics.size(); i++) {
      if (predicate.test(currentMetrics.get(i))) {
        index = i;
        break;
      }
    }
    if (index < 0)
      throw new RuntimeException("Can't edit matching currentMetrics. No match found.");
    return setNewCurrentMetricLike(index, buildCurrentMetric(index));
  }

  // ---------------------------------------------------------- scalar properties

  public java.lang.Integer getCurrentReplicas() {
    return this.currentReplicas;
  }

  public A withCurrentReplicas(java.lang.Integer currentReplicas) {
    this.currentReplicas = currentReplicas;
    return (A) this;
  }

  public java.lang.Boolean hasCurrentReplicas() {
    return this.currentReplicas != null;
  }

  public java.lang.Integer getDesiredReplicas() {
    return this.desiredReplicas;
  }

  public A withDesiredReplicas(java.lang.Integer desiredReplicas) {
    this.desiredReplicas = desiredReplicas;
    return (A) this;
  }

  public java.lang.Boolean hasDesiredReplicas() {
    return this.desiredReplicas != null;
  }

  public java.time.OffsetDateTime getLastScaleTime() {
    return this.lastScaleTime;
  }

  public A withLastScaleTime(java.time.OffsetDateTime lastScaleTime) {
    this.lastScaleTime = lastScaleTime;
    return (A) this;
  }

  public java.lang.Boolean hasLastScaleTime() {
    return this.lastScaleTime != null;
  }

  public java.lang.Long getObservedGeneration() {
    return this.observedGeneration;
  }

  public A withObservedGeneration(java.lang.Long observedGeneration) {
    this.observedGeneration = observedGeneration;
    return (A) this;
  }

  public java.lang.Boolean hasObservedGeneration() {
    return this.observedGeneration != null;
  }

  // Field-by-field equality over the builder state (compares builder lists, not
  // built objects).
  public boolean equals(java.lang.Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    V2beta1HorizontalPodAutoscalerStatusFluentImpl that =
        (V2beta1HorizontalPodAutoscalerStatusFluentImpl) o;
    if (conditions != null ? !conditions.equals(that.conditions) : that.conditions != null)
      return false;
    if (currentMetrics != null
        ? !currentMetrics.equals(that.currentMetrics)
        : that.currentMetrics != null) return false;
    if (currentReplicas != null
        ? !currentReplicas.equals(that.currentReplicas)
        : that.currentReplicas != null) return false;
    if (desiredReplicas != null
        ? !desiredReplicas.equals(that.desiredReplicas)
        : that.desiredReplicas != null) return false;
    if (lastScaleTime != null
        ? !lastScaleTime.equals(that.lastScaleTime)
        : that.lastScaleTime != null) return false;
    if (observedGeneration != null
        ? !observedGeneration.equals(that.observedGeneration)
        : that.observedGeneration != null) return false;
    return true;
  }

  public int hashCode() {
    return java.util.Objects.hash(
        conditions,
        currentMetrics,
        currentReplicas,
        desiredReplicas,
        lastScaleTime,
        observedGeneration,
        super.hashCode());
  }

  // Nested builder for a single conditions entry; and() commits the built item
  // back into the owning fluent at the captured index (-1 means append).
  public class ConditionsNestedImpl<N>
      extends io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionFluentImpl<
          io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
                  .ConditionsNested<
              N>>
      implements io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
              .ConditionsNested<
          N>,
          io.kubernetes.client.fluent.Nested<N> {
    ConditionsNestedImpl(
        java.lang.Integer index,
        io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerCondition item) {
      this.index = index;
      this.builder =
          new io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder(
              this, item);
    }

    ConditionsNestedImpl() {
      this.index = -1;
      this.builder =
          new io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder(
              this);
    }

    io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerConditionBuilder builder;
    java.lang.Integer index;

    public N and() {
      return (N)
          V2beta1HorizontalPodAutoscalerStatusFluentImpl.this.setToConditions(
              index, builder.build());
    }

    public N endCondition() {
      return and();
    }
  }

  // Nested builder for a single currentMetrics entry; same commit-on-and()
  // contract as ConditionsNestedImpl.
  public class CurrentMetricsNestedImpl<N>
      extends io.kubernetes.client.openapi.models.V2beta1MetricStatusFluentImpl<
          io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
                  .CurrentMetricsNested<
              N>>
      implements io.kubernetes.client.openapi.models.V2beta1HorizontalPodAutoscalerStatusFluent
              .CurrentMetricsNested<
          N>,
          io.kubernetes.client.fluent.Nested<N> {
    CurrentMetricsNestedImpl(
        java.lang.Integer index, io.kubernetes.client.openapi.models.V2beta1MetricStatus item) {
      this.index = index;
      this.builder = new io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder(this, item);
    }

    CurrentMetricsNestedImpl() {
      this.index = -1;
      this.builder = new io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder(this);
    }

    io.kubernetes.client.openapi.models.V2beta1MetricStatusBuilder builder;
    java.lang.Integer index;

    public N and() {
      return (N)
          V2beta1HorizontalPodAutoscalerStatusFluentImpl.this.setToCurrentMetrics(
              index, builder.build());
    }

    public N endCurrentMetric() {
      return and();
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.stratos.common.client;

import org.apache.axis2.AxisFault;
import org.apache.axis2.transport.http.HTTPConstants;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.stratos.cloud.controller.stub.*;
import org.apache.stratos.cloud.controller.stub.domain.*;
import org.apache.stratos.cloud.controller.stub.domain.kubernetes.KubernetesCluster;
import org.apache.stratos.cloud.controller.stub.domain.kubernetes.KubernetesHost;
import org.apache.stratos.cloud.controller.stub.domain.kubernetes.KubernetesMaster;
import org.apache.stratos.common.constants.StratosConstants;

import java.rmi.RemoteException;

/**
 * Thin singleton client around the Axis2 {@code CloudControllerServiceStub}.
 * Each public method is a one-to-one delegation to the remote cloud controller
 * service; checked stub exceptions are propagated to the caller unchanged
 * (except {@link #createClusterInstance}, which wraps its stub fault in a
 * {@link RuntimeException}).
 *
 * <p>Thread-safety: the shared instance is published via double-checked locking
 * on a {@code volatile} field.
 */
public class CloudControllerServiceClient {

    private CloudControllerServiceStub stub;

    private static final Log log = LogFactory.getLog(CloudControllerServiceClient.class);
    private static volatile CloudControllerServiceClient instance;

    /**
     * Builds the Axis2 stub for the given endpoint and applies the socket and
     * connection timeouts, taken from system properties with library defaults
     * as fallback.
     *
     * @param epr endpoint reference URL of the cloud controller service
     * @throws AxisFault if the stub cannot be initialized
     */
    private CloudControllerServiceClient(String epr) throws AxisFault {
        // Timeouts are configurable via system properties; fall back to defaults.
        String ccSocketTimeout =
                System.getProperty(StratosConstants.CLOUD_CONTROLLER_CLIENT_SOCKET_TIMEOUT) == null
                        ? StratosConstants.DEFAULT_CLIENT_SOCKET_TIMEOUT
                        : System.getProperty(StratosConstants.CLOUD_CONTROLLER_CLIENT_SOCKET_TIMEOUT);
        String ccConnectionTimeout =
                System.getProperty(StratosConstants.CLOUD_CONTROLLER_CLIENT_CONNECTION_TIMEOUT) == null
                        ? StratosConstants.DEFAULT_CLIENT_CONNECTION_TIMEOUT
                        : System.getProperty(StratosConstants.CLOUD_CONTROLLER_CLIENT_CONNECTION_TIMEOUT);

        try {
            stub = new CloudControllerServiceStub(epr);
            stub._getServiceClient().getOptions()
                    .setProperty(HTTPConstants.SO_TIMEOUT, Integer.valueOf(ccSocketTimeout));
            // Integer.valueOf instead of the deprecated new Integer(String),
            // consistent with the socket-timeout line above.
            stub._getServiceClient().getOptions()
                    .setProperty(HTTPConstants.CONNECTION_TIMEOUT, Integer.valueOf(ccConnectionTimeout));
        } catch (AxisFault axisFault) {
            String msg = "Could not initialize cloud controller service client";
            log.error(msg, axisFault);
            throw new AxisFault(msg, axisFault);
        }
    }

    /**
     * Returns the shared client, lazily created from the
     * {@code StratosConstants.CLOUD_CONTROLLER_SERVICE_URL} system property.
     *
     * @throws AxisFault if stub creation fails
     * @throws RuntimeException if the service-URL system property is not set
     */
    public static CloudControllerServiceClient getInstance() throws AxisFault {
        if (instance == null) {
            synchronized (CloudControllerServiceClient.class) {
                if (instance == null) {
                    String cloudControllerServiceUrl =
                            System.getProperty(StratosConstants.CLOUD_CONTROLLER_SERVICE_URL);
                    if (StringUtils.isBlank(cloudControllerServiceUrl)) {
                        throw new RuntimeException(String.format("System property not found: %s",
                                StratosConstants.CLOUD_CONTROLLER_SERVICE_URL));
                    }
                    instance = new CloudControllerServiceClient(cloudControllerServiceUrl);
                }
            }
        }
        return instance;
    }

    // ------------------------------------------------------------- cartridges

    public void addCartridge(Cartridge cartridgeConfig)
            throws RemoteException, CloudControllerServiceCartridgeAlreadyExistsExceptionException,
            CloudControllerServiceInvalidCartridgeDefinitionExceptionException,
            CloudControllerServiceInvalidIaasProviderExceptionException {
        stub.addCartridge(cartridgeConfig);
    }

    public void updateCartridge(Cartridge cartridgeConfig)
            throws RemoteException, CloudControllerServiceInvalidCartridgeDefinitionExceptionException,
            CloudControllerServiceInvalidIaasProviderExceptionException,
            CloudControllerServiceCartridgeDefinitionNotExistsExceptionException {
        stub.updateCartridge(cartridgeConfig);
    }

    public void removeCartridge(String cartridgeType)
            throws RemoteException, CloudControllerServiceInvalidCartridgeTypeExceptionException {
        stub.removeCartridge(cartridgeType);
    }

    public String[] getServiceGroupSubGroups(String name)
            throws RemoteException, CloudControllerServiceInvalidServiceGroupExceptionException {
        return stub.getServiceGroupSubGroups(name);
    }

    public String[] getServiceGroupCartridges(String name)
            throws RemoteException, CloudControllerServiceInvalidServiceGroupExceptionException {
        return stub.getServiceGroupCartridges(name);
    }

    public Dependencies getServiceGroupDependencies(String name)
            throws RemoteException, CloudControllerServiceInvalidServiceGroupExceptionException {
        return stub.getServiceGroupDependencies(name);
    }

    public ServiceGroup getServiceGroup(String name)
            throws RemoteException, CloudControllerServiceInvalidServiceGroupExceptionException {
        return stub.getServiceGroup(name);
    }

    public String[] getRegisteredCartridges() throws RemoteException {
        return stub.getCartridges();
    }

    public Cartridge getCartridge(String cartridgeType)
            throws RemoteException, CloudControllerServiceCartridgeNotFoundExceptionException {
        return stub.getCartridge(cartridgeType);
    }

    public ClusterContext getClusterContext(String clusterId) throws RemoteException {
        return stub.getClusterContext(clusterId);
    }

    // ------------------------------------------------------------- kubernetes

    public boolean updateKubernetesCluster(KubernetesCluster kubernetesCluster)
            throws RemoteException, CloudControllerServiceInvalidKubernetesClusterExceptionException {
        return stub.updateKubernetesCluster(kubernetesCluster);
    }

    public boolean deployKubernetesCluster(KubernetesCluster kubernetesCluster)
            throws RemoteException, CloudControllerServiceInvalidKubernetesClusterExceptionException,
            CloudControllerServiceKubernetesClusterAlreadyExistsExceptionException {
        return stub.addKubernetesCluster(kubernetesCluster);
    }

    public boolean addKubernetesHost(String kubernetesClusterId, KubernetesHost kubernetesHost)
            throws RemoteException, CloudControllerServiceInvalidKubernetesHostExceptionException,
            CloudControllerServiceNonExistingKubernetesClusterExceptionException {
        return stub.addKubernetesHost(kubernetesClusterId, kubernetesHost);
    }

    public boolean updateKubernetesMaster(KubernetesMaster kubernetesMaster)
            throws RemoteException, CloudControllerServiceInvalidKubernetesMasterExceptionException,
            CloudControllerServiceNonExistingKubernetesMasterExceptionException {
        return stub.updateKubernetesMaster(kubernetesMaster);
    }

    public KubernetesCluster[] getAvailableKubernetesClusters() throws RemoteException {
        return stub.getKubernetesClusters();
    }

    public KubernetesCluster getKubernetesCluster(String kubernetesClusterId)
            throws RemoteException, CloudControllerServiceNonExistingKubernetesClusterExceptionException {
        return stub.getKubernetesCluster(kubernetesClusterId);
    }

    public void undeployKubernetesCluster(String kubernetesClusterId)
            throws RemoteException, CloudControllerServiceNonExistingKubernetesClusterExceptionException,
            CloudControllerServiceKubernetesClusterAlreadyUsedExceptionException {
        stub.removeKubernetesCluster(kubernetesClusterId);
    }

    public boolean undeployKubernetesHost(String kubernetesHostId)
            throws RemoteException, CloudControllerServiceNonExistingKubernetesHostExceptionException {
        return stub.removeKubernetesHost(kubernetesHostId);
    }

    public KubernetesHost[] getKubernetesHosts(String kubernetesClusterId)
            throws RemoteException, CloudControllerServiceNonExistingKubernetesClusterExceptionException {
        return stub.getHostsForKubernetesCluster(kubernetesClusterId);
    }

    public KubernetesMaster getKubernetesMaster(String kubernetesClusterId)
            throws RemoteException, CloudControllerServiceNonExistingKubernetesClusterExceptionException {
        return stub.getMasterForKubernetesCluster(kubernetesClusterId);
    }

    public boolean updateKubernetesHost(KubernetesHost kubernetesHost)
            throws RemoteException, CloudControllerServiceInvalidKubernetesHostExceptionException,
            CloudControllerServiceNonExistingKubernetesHostExceptionException {
        return stub.updateKubernetesHost(kubernetesHost);
    }

    // ------------------------------------------------------ network partitions

    public void validateNetworkPartitionOfDeploymentPolicy(String cartridgeType, String networkPartitionId)
            throws RemoteException, CloudControllerServiceInvalidPartitionExceptionException,
            CloudControllerServiceInvalidCartridgeTypeExceptionException {
        stub.validateDeploymentPolicyNetworkPartition(cartridgeType, networkPartitionId);
    }

    public void addNetworkPartition(NetworkPartition networkPartition)
            throws RemoteException, CloudControllerServiceNetworkPartitionAlreadyExistsExceptionException,
            CloudControllerServiceInvalidNetworkPartitionExceptionException {
        stub.addNetworkPartition(networkPartition);
    }

    public void removeNetworkPartition(String networkPartitionId)
            throws RemoteException, CloudControllerServiceNetworkPartitionNotExistsExceptionException {
        stub.removeNetworkPartition(networkPartitionId);
    }

    public void updateNetworkPartition(NetworkPartition networkPartition)
            throws RemoteException, CloudControllerServiceNetworkPartitionNotExistsExceptionException {
        stub.updateNetworkPartition(networkPartition);
    }

    public NetworkPartition[] getNetworkPartitions() throws RemoteException {
        return stub.getNetworkPartitions();
    }

    public NetworkPartition getNetworkPartition(String networkPartitionId) throws RemoteException {
        return stub.getNetworkPartition(networkPartitionId);
    }

    /**
     * Creates a cluster instance; the stub's checked creation fault is logged
     * and rethrown as an unchecked {@link RuntimeException} carrying the fault
     * message (existing caller contract — kept as is).
     */
    public void createClusterInstance(String serviceType, String clusterId, String alias,
                                      String instanceId, String partitionId,
                                      String networkPartitionId) throws RemoteException {
        try {
            stub.createClusterInstance(serviceType, clusterId, alias, instanceId, partitionId,
                    networkPartitionId);
        } catch (CloudControllerServiceClusterInstanceCreationExceptionException e) {
            String msg = e.getFaultMessage().getClusterInstanceCreationException().getMessage();
            log.error(msg, e);
            throw new RuntimeException(msg, e);
        }
    }

    public String[] getIaasProviders() throws RemoteException {
        return stub.getIaasProviders();
    }
}
/* Copyright 2021 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package io.kubernetes.client.openapi.models;

import com.google.gson.annotations.SerializedName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.time.OffsetDateTime;
import java.util.Objects;

/**
 * Model for a single condition in a StatefulSet's status: a type/status pair
 * plus optional transition timestamp, reason, and human-readable message.
 * Plain mutable POJO with fluent setters; serialized field names are fixed by
 * the {@code SERIALIZED_NAME_*} constants.
 */
@ApiModel(
    description = "StatefulSetCondition describes the state of a statefulset at a certain point.")
@javax.annotation.Generated(
    value = "org.openapitools.codegen.languages.JavaClientCodegen",
    date = "2021-12-10T19:11:23.904Z[Etc/UTC]")
public class V1StatefulSetCondition {
  public static final String SERIALIZED_NAME_LAST_TRANSITION_TIME = "lastTransitionTime";

  @SerializedName(SERIALIZED_NAME_LAST_TRANSITION_TIME)
  private OffsetDateTime lastTransitionTime;

  public static final String SERIALIZED_NAME_MESSAGE = "message";

  @SerializedName(SERIALIZED_NAME_MESSAGE)
  private String message;

  public static final String SERIALIZED_NAME_REASON = "reason";

  @SerializedName(SERIALIZED_NAME_REASON)
  private String reason;

  public static final String SERIALIZED_NAME_STATUS = "status";

  @SerializedName(SERIALIZED_NAME_STATUS)
  private String status;

  public static final String SERIALIZED_NAME_TYPE = "type";

  @SerializedName(SERIALIZED_NAME_TYPE)
  private String type;

  /** Fluent setter; returns {@code this} for chaining. */
  public V1StatefulSetCondition lastTransitionTime(OffsetDateTime lastTransitionTime) {
    this.lastTransitionTime = lastTransitionTime;
    return this;
  }

  /**
   * Last time the condition transitioned from one status to another.
   *
   * @return lastTransitionTime
   */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "Last time the condition transitioned from one status to another.")
  public OffsetDateTime getLastTransitionTime() {
    return lastTransitionTime;
  }

  public void setLastTransitionTime(OffsetDateTime lastTransitionTime) {
    this.lastTransitionTime = lastTransitionTime;
  }

  /** Fluent setter; returns {@code this} for chaining. */
  public V1StatefulSetCondition message(String message) {
    this.message = message;
    return this;
  }

  /**
   * A human readable message indicating details about the transition.
   *
   * @return message
   */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "A human readable message indicating details about the transition.")
  public String getMessage() {
    return message;
  }

  public void setMessage(String message) {
    this.message = message;
  }

  /** Fluent setter; returns {@code this} for chaining. */
  public V1StatefulSetCondition reason(String reason) {
    this.reason = reason;
    return this;
  }

  /**
   * The reason for the condition&#39;s last transition.
   *
   * @return reason
   */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "The reason for the condition's last transition.")
  public String getReason() {
    return reason;
  }

  public void setReason(String reason) {
    this.reason = reason;
  }

  /** Fluent setter; returns {@code this} for chaining. */
  public V1StatefulSetCondition status(String status) {
    this.status = status;
    return this;
  }

  /**
   * Status of the condition, one of True, False, Unknown.
   *
   * @return status
   */
  @ApiModelProperty(
      required = true,
      value = "Status of the condition, one of True, False, Unknown.")
  public String getStatus() {
    return status;
  }

  public void setStatus(String status) {
    this.status = status;
  }

  /** Fluent setter; returns {@code this} for chaining. */
  public V1StatefulSetCondition type(String type) {
    this.type = type;
    return this;
  }

  /**
   * Type of statefulset condition.
   *
   * @return type
   */
  @ApiModelProperty(required = true, value = "Type of statefulset condition.")
  public String getType() {
    return type;
  }

  public void setType(String type) {
    this.type = type;
  }

  /** Value equality over all five fields; null-safe via {@link Objects#equals}. */
  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    V1StatefulSetCondition other = (V1StatefulSetCondition) o;
    return Objects.equals(lastTransitionTime, other.lastTransitionTime)
        && Objects.equals(message, other.message)
        && Objects.equals(reason, other.reason)
        && Objects.equals(status, other.status)
        && Objects.equals(type, other.type);
  }

  @Override
  public int hashCode() {
    return Objects.hash(lastTransitionTime, message, reason, status, type);
  }

  @Override
  public String toString() {
    StringBuilder out = new StringBuilder("class V1StatefulSetCondition {\n");
    out.append("    lastTransitionTime: ").append(toIndentedString(lastTransitionTime)).append("\n");
    out.append("    message: ").append(toIndentedString(message)).append("\n");
    out.append("    reason: ").append(toIndentedString(reason)).append("\n");
    out.append("    status: ").append(toIndentedString(status)).append("\n");
    out.append("    type: ").append(toIndentedString(type)).append("\n");
    return out.append("}").toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    return o == null ? "null" : o.toString().replace("\n", "\n    ");
  }
}
/*
 * Copyright 2000-2005 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.lang.javascript.psi.impl;

import javax.annotation.Nonnull;
import org.jetbrains.annotations.NonNls;
import com.intellij.javascript.documentation.JSDocumentationUtils;
import com.intellij.lang.ASTNode;
import com.intellij.lang.javascript.JSElementTypes;
import com.intellij.lang.javascript.JSTokenTypes;
import com.intellij.lang.javascript.psi.JSAttributeList;
import com.intellij.lang.javascript.psi.JSClass;
import com.intellij.lang.javascript.psi.JSFile;
import com.intellij.lang.javascript.psi.JSFunction;
import com.intellij.lang.javascript.psi.JSReferenceExpression;
import com.intellij.lang.javascript.psi.JSReferenceList;
import com.intellij.lang.javascript.psi.JSStubElementType;
import com.intellij.lang.javascript.psi.JSSuppressionHolder;
import com.intellij.lang.javascript.psi.resolve.JSImportHandlingUtil;
import com.intellij.lang.javascript.psi.resolve.JSResolveUtil;
import com.intellij.lang.javascript.psi.stubs.JSClassStub;
import com.intellij.psi.PsiElement;
import com.intellij.psi.ResolveState;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.util.IncorrectOperationException;
import consulo.annotation.access.RequiredReadAction;
import consulo.javascript.lang.JavaScriptTokenSets;

/**
 * PSI implementation of a JavaScript class. Most accessors consult the stub
 * ({@link JSClassStub}) first when one is present, and fall back to the AST otherwise.
 *
 * @by Maxim.Mossienko
 */
public class JSClassImpl extends JSClassBase implements JSSuppressionHolder {
  /** AST-backed constructor. */
  public JSClassImpl(final ASTNode node) {
    super(node);
  }

  /** Stub-backed constructor. */
  public JSClassImpl(final JSClassStub stub, JSStubElementType<JSClassStub, JSClass> elementType) {
    super(stub, elementType);
  }

  // Anchor navigation/selection on the class name when one exists, otherwise
  // fall back to the element's own start offset.
  @RequiredReadAction
  @Override
  public int getTextOffset() {
    PsiElement nameIdentifier = getNameIdentifier();
    return nameIdentifier == null ? super.getTextOffset() : nameIdentifier.getTextOffset();
  }

  @Override
  public JSAttributeList getAttributeList() {
    return getStubOrPsiChild(JSElementTypes.ATTRIBUTE_LIST);
  }

  /**
   * Returns the class name: from the stub when available, otherwise from the
   * name identifier in the AST ({@code null} when the class is anonymous/unnamed).
   */
  @Override
  @RequiredReadAction
  public String getName() {
    final JSClassStub classStub = getStub();
    if(classStub != null) {
      return classStub.getName();
    }
    PsiElement nameIdentifier = getNameIdentifier();
    if(nameIdentifier instanceof JSReferenceExpression) {
      return ((JSReferenceExpression) nameIdentifier).getReferencedName();
    }
    else if(nameIdentifier != null) {
      return nameIdentifier.getText();
    }
    return null;
  }

  /**
   * Renames the class. Strips any qualifier from {@code newName}, replaces the
   * name identifier node, renames the same-named constructor function (if any),
   * and updates the containing file name to match.
   *
   * @throws IncorrectOperationException if the underlying PSI mutation fails
   */
  @Override
  @RequiredReadAction
  public PsiElement setName(@NonNls @Nonnull String newName) throws IncorrectOperationException {
    // Only the last segment of a (possibly qualified) name is used.
    newName = newName.substring(newName.lastIndexOf('.') + 1);
    final String oldName = getName();
    if(newName.equals(oldName)) {
      return this;
    }
    // Look the constructor up BEFORE replacing the identifier, while it still matches oldName.
    final JSFunction constructor = findFunctionByName(oldName);
    PsiElement nameIdentifier = getNameIdentifier();
    assert nameIdentifier != null;
    getNode().replaceChild(nameIdentifier.getNode(), JSChangeUtil.createExpressionFromText(getProject(), newName).getNode());
    if(constructor != null) {
      constructor.setName(newName);
    }
    JSPsiImplUtils.updateFileName(this, newName, oldName);
    return this;
  }

  @Override
  @RequiredReadAction
  public PsiElement getNameIdentifier() {
    return findChildByType(JavaScriptTokenSets.NAME_TOKEN_TYPES);
  }

  @Override
  public JSReferenceList getExtendsList() {
    return getStubOrPsiChild(JSElementTypes.EXTENDS_LIST);
  }

  @Override
  public JSReferenceList getImplementsList() {
    return getStubOrPsiChild(JSElementTypes.IMPLEMENTS_LIST);
  }

  /** Qualified name from the stub when available; otherwise computed from the PSI. */
  @Override
  public @NonNls String getQualifiedName() {
    final JSClassStub classStub = getStub();
    if(classStub != null) {
      return classStub.getQualifiedName();
    }
    return JSPsiImplUtils.getQName(this);
  }

  /** True when this declaration uses the {@code interface} keyword (stub fast path first). */
  @Override
  public boolean isInterface() {
    final JSClassStub classStub = getStub();
    if(classStub != null) {
      return classStub.isInterface();
    }
    return getNode().findChildByType(JSTokenTypes.INTERFACE_KEYWORD) != null;
  }

  /** Removes this class's node from its parent in the AST. */
  @Override
  public void delete() throws IncorrectOperationException {
    getNode().getTreeParent().removeChild(getNode());
  }

  /** Deprecation flag: from the stub when available, otherwise derived from doc comments. */
  @Override
  public boolean isDeprecated() {
    final JSClassStub stub = getStub();
    if(stub != null) {
      return stub.isDeprecated();
    }
    return JSDocumentationUtils.calculateDeprecated(this);
  }

  @Override
  protected boolean processMembers(final PsiScopeProcessor processor, final ResolveState substitutor, final PsiElement lastParent, final PsiElement place) {
    return JSResolveUtil.processDeclarationsInScope(this, processor, substitutor, lastParent, place);
  }

  /**
   * Standard declaration processing, plus import resolution when the walk-up
   * started inside this class and the class sits directly in a file.
   */
  @Override
  public boolean processDeclarations(@Nonnull final PsiScopeProcessor processor, @Nonnull final ResolveState substitutor, final PsiElement lastParent, @Nonnull final PsiElement place) {
    boolean b = super.processDeclarations(processor, substitutor, lastParent, place);
    if(b && lastParent != null && lastParent.getParent() == this && getParent() instanceof JSFile) {
      b = JSImportHandlingUtil.tryResolveImports(processor, this, place);
    }
    return b;
  }

  /**
   * Adds {@code element} after {@code anchor}; with a null anchor the element is
   * inserted just before the closing brace. The new node is reformatted either way.
   */
  @Override
  public PsiElement addAfter(@Nonnull final PsiElement element, PsiElement anchor) throws IncorrectOperationException {
    if(anchor == null) {
      ASTNode node = getNode().findChildByType(JSTokenTypes.RBRACE);
      if(node != null) {
        // Anchor on the element immediately preceding the '}' so we insert inside the body.
        PsiElement psiElement = super.addAfter(element, node.getTreePrev().getPsi());
        CodeStyleManager.getInstance(getProject()).reformatNewlyAddedElement(getNode(), psiElement.getNode());
        return psiElement;
      }
    }
    final PsiElement psiElement = super.addAfter(element, anchor);
    CodeStyleManager.getInstance(getProject()).reformatNewlyAddedElement(getNode(), psiElement.getNode());
    return psiElement;
  }

  /** Adds {@code element} before {@code anchor} and reformats the newly added node. */
  @Override
  public PsiElement addBefore(@Nonnull final PsiElement element, final PsiElement anchor) throws IncorrectOperationException {
    final PsiElement superElement = super.addBefore(element, anchor);
    CodeStyleManager.getInstance(getProject()).reformatNewlyAddedElement(getNode(), superElement.getNode());
    return superElement;
  }

  /**
   * Equivalent when PSI equality holds, when {@code another} is the containing file
   * whose (extension-less) name matches this class's name, or when the helper deems
   * the two the same class.
   * NOTE(review): {@code getVirtualFile()} is dereferenced without a null check —
   * presumably callers only pass physical files here; confirm.
   */
  @Override
  public boolean isEquivalentTo(PsiElement another) {
    return super.isEquivalentTo(another) || (another instanceof JSFile && ((JSFile) another).getVirtualFile().getNameWithoutExtension().equals(getName()) && another == getParent().getParent()) || JSPsiImplUtils.isTheSameClass(another, this);
  }

  @Override
  public PsiElement getNavigationElement() {
    return JSPsiImplUtils.findTopLevelNavigatableElement(this);
  }
}
/******************************************************************************* * * Pentaho Data Integration * * Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.textfileoutput; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.util.ArrayList; import java.util.List; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.pentaho.di.core.Const; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.ResultFile; import org.pentaho.di.core.WriterOutputStream; import org.pentaho.di.core.compress.CompressionProvider; import org.pentaho.di.core.compress.CompressionProviderFactory; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleValueException; import org.pentaho.di.core.fileinput.CharsetToolkit; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.util.EnvUtil; import org.pentaho.di.core.util.StreamLogger; 
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;

/**
 * Converts input rows to text and then writes this text to one or more files.
 *
 * @author Matt
 * @since 4-apr-2003
 */
public class TextFileOutput extends BaseStep implements StepInterface {
  private static Class<?> PKG = TextFileOutputMeta.class; // for i18n purposes, needed by Translator2!!

  // Code of the "None" compression provider, used when no compression is configured.
  private static final String FILE_COMPRESSION_TYPE_NONE =
      TextFileOutputMeta.fileCompressionTypeCodes[TextFileOutputMeta.FILE_COMPRESSION_TYPE_NONE];

  // Legacy switch: when the system property is "Y", appended files never get a header
  // (pre-PDI-15650 behaviour; see isNeedWriteHeader).
  private static final boolean COMPATIBILITY_APPEND_NO_HEADER = "Y".equals(
    Const.NVL( System.getProperty( Const.KETTLE_COMPATIBILITY_TEXT_FILE_OUTPUT_APPEND_NO_HEADER ), "N" ) );

  public TextFileOutputMeta meta;
  public TextFileOutputData data;

  public TextFileOutput( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
    Trans trans ) {
    super( stepMeta, stepDataInterface, copyNr, transMeta, trans );
  }

  /**
   * Processes one input row: on the first row it resolves output metadata, opens the
   * target file (or the per-row file when "filename in field" is on) and writes the
   * header; on every row it writes the row's text; on end-of-input (or when the split
   * size is reached) it writes footer/ended line and closes/rotates the file.
   *
   * @return true while more rows are expected, false once output is done
   */
  public synchronized boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
    meta = (TextFileOutputMeta) smi;
    data = (TextFileOutputData) sdi;

    /**
     * Set default encoding if not set already
     */
    if ( ( meta.getEncoding() == null ) || ( meta.getEncoding().isEmpty() ) ) {
      meta.setEncoding( CharsetToolkit.getDefaultSystemCharset().name() );
    }

    boolean result = true;
    boolean bEndedLineWrote = false;
    boolean fileExist;
    Object[] r = getRow(); // This also waits for a row to be finished.

    if ( r != null && meta.getOutputFields().length == 0 ) {
      data.outputRowMeta = getInputRowMeta().clone();
    }

    if ( r != null && first ) {
      data.outputRowMeta = getInputRowMeta().clone();
      first = false;
      meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );

      // if file name in field is enabled then set field name and open file
      //
      if ( meta.isFileNameInField() ) {
        // find and set index of file name field in input stream
        //
        data.fileNameFieldIndex = getInputRowMeta().indexOfValue( meta.getFileNameField() );

        // set the file name for this row
        //
        if ( data.fileNameFieldIndex < 0 ) {
          throw new KettleStepException( BaseMessages.getString(
            PKG, "TextFileOutput.Exception.FileNameFieldNotFound", meta.getFileNameField() ) );
        }

        data.fileNameMeta = getInputRowMeta().getValueMeta( data.fileNameFieldIndex );
        data.fileName = data.fileNameMeta.getString( r[data.fileNameFieldIndex] );
        fileExist = isFileExist( data.fileName );
        setDataWriterForFilename( data.fileName, fileExist );
      } else if ( meta.isDoNotOpenNewFileInit() && !meta.isFileNameInField() ) {
        fileExist = isFileExist( meta.getFileName() );
        // Open a new file here
        //
        openNewFile( meta.getFileName() );
        data.oneFileOpened = true;
        initBinaryDataFields();
      } else {
        fileExist = isFileExist( meta.getFileName() );
      }

      if ( isNeedWriteHeader( fileExist ) ) {
        writeHeader();
      }

      // Map each configured output field to its index in the input row.
      data.fieldnrs = new int[meta.getOutputFields().length];
      for ( int i = 0; i < meta.getOutputFields().length; i++ ) {
        data.fieldnrs[i] = data.outputRowMeta.indexOfValue( meta.getOutputFields()[i].getName() );
        if ( data.fieldnrs[i] < 0 ) {
          throw new KettleStepException( "Field ["
            + meta.getOutputFields()[i].getName() + "] couldn't be found in the input stream!" );
        }
      }
    }

    // Footer / file-split handling: either end-of-input with a footer enabled, or
    // the configured split size has been reached on a live row.
    if ( ( r == null && data.outputRowMeta != null && meta.isFooterEnabled() )
      || ( r != null && getLinesOutput() > 0 && meta.getSplitEvery() > 0
        && ( ( getLinesOutput() + meta.getFooterShift() ) % meta.getSplitEvery() ) == 0 ) ) {
      if ( data.outputRowMeta != null ) {
        if ( meta.isFooterEnabled() ) {
          // Footer content is the same field-name line as the header.
          writeHeader();
        }
      }

      if ( r == null ) {
        // add tag to last line if needed
        writeEndedLine();
        bEndedLineWrote = true;
      }

      // Done with this part or with everything.
      closeFile();

      // Not finished: open another file...
      if ( r != null ) {
        openNewFile( meta.getFileName() );

        if ( meta.isHeaderEnabled() && data.outputRowMeta != null ) {
          writeHeader();
        }
      }
    }

    if ( r == null ) { // no more input to be expected...
      if ( !bEndedLineWrote && !Utils.isEmpty( meta.getEndedLine() ) ) {
        if ( data.writer == null ) {
          // Nothing was ever written: open the file just for the ended line.
          openNewFile( meta.getFileName() );
          data.oneFileOpened = true;
          initBinaryDataFields();
        }
        // add tag to last line if needed
        writeEndedLine();
        bEndedLineWrote = true;
      }

      setOutputDone();
      return false;
    }

    // First handle the file name in field
    // Write a header line as well if needed
    //
    if ( meta.isFileNameInField() ) {
      String baseFilename = data.fileNameMeta.getString( r[data.fileNameFieldIndex] );
      setDataWriterForFilename( baseFilename, isFileExist( baseFilename ) );
    }
    writeRowToFile( data.outputRowMeta, r );
    putRow( data.outputRowMeta, r ); // in case we want it to go further...

    if ( checkFeedback( getLinesOutput() ) ) {
      logBasic( "linenr " + getLinesOutput() );
    }

    return result;
  }

  /**
   * Checks (via VFS) whether the fully-built output filename already exists.
   *
   * @param fileName base filename, before variable substitution and name building
   * @throws KettleException when the VFS existence check fails
   */
  boolean isFileExist( String fileName ) throws KettleException {
    boolean fileExist;
    try {
      fileExist = getFileObject( buildFilename( environmentSubstitute( fileName ), true ), getTransMeta() ).exists();
    } catch ( FileSystemException e ) {
      throw new KettleException( e );
    }
    return fileExist;
  }

  /**
   * Decides whether a header line must be written for the current file.
   * Never writes one in filename-in-field mode (headers are handled per file
   * in setDataWriterForFilename).
   */
  private boolean isNeedWriteHeader( boolean fileExist ) {
    if ( meta.isFileNameInField() ) {
      return false;
    }

    if ( !meta.isFileAppended() && ( meta.isHeaderEnabled() || meta.isFooterEnabled() ) ) { // See if we have to write a header-line)
      if ( meta.isHeaderEnabled() && data.outputRowMeta != null ) {
        return true;
      }
    }

    //PDI-15650
    //File Exists=N Flag Set=N Add Header=Y Append=Y
    //Result = File is created, header is written at top of file (this changed by the fix)
    return meta.isHeaderEnabled() && !fileExist && meta.isFileAppended() && !COMPATIBILITY_APPEND_NO_HEADER;
  }

  /**
   * This method should only be used when you have a filename in the input stream.
   * Looks the writer up in the per-filename cache, opening (and header-writing)
   * the file on first use.
   *
   * @param filename
   *          the filename to set the data.writer field for
   * @throws KettleException
   */
  private void setDataWriterForFilename( String filename, boolean fileExist ) throws KettleException {
    // First handle the writers themselves.
    // If we didn't have a writer yet, we create one.
    // Basically we open a new file
    //
    data.writer = data.fileWriterMap.get( filename );
    if ( data.writer == null ) {
      openNewFile( filename );
      data.oneFileOpened = true;
      data.fileWriterMap.put( filename, data.writer );

      boolean isNeedWriteHeader = ( !meta.isFileAppended() && meta.isHeaderEnabled() )
        || ( meta.isHeaderEnabled() && !fileExist && meta.isFileAppended() && !COMPATIBILITY_APPEND_NO_HEADER );

      // If it's the first time we open it and we have a header, we write a header...
      //
      if ( isNeedWriteHeader ) {
        if ( writeHeader() ) {
          incrementLinesOutput();
        }
      }
    }
  }

  /**
   * Writes one row to the current writer: all stream values when no output
   * fields are configured, otherwise only the configured fields in order.
   */
  public void writeRowToFile( RowMetaInterface rowMeta, Object[] r ) throws KettleStepException {
    try {
      if ( meta.getOutputFields() == null || meta.getOutputFields().length == 0 ) {
        /*
         * Write all values in stream to text file.
         */
        for ( int i = 0; i < rowMeta.size(); i++ ) {
          if ( i > 0 && data.binarySeparator.length > 0 ) {
            data.writer.write( data.binarySeparator );
          }
          ValueMetaInterface v = rowMeta.getValueMeta( i );
          Object valueData = r[i];

          // no special null value default was specified since no fields are specified at all
          // As such, we pass null
          //
          writeField( v, valueData, null );
        }
        data.writer.write( data.binaryNewline );
      } else {
        /*
         * Only write the fields specified!
         */
        for ( int i = 0; i < meta.getOutputFields().length; i++ ) {
          if ( i > 0 && data.binarySeparator.length > 0 ) {
            data.writer.write( data.binarySeparator );
          }

          ValueMetaInterface v = rowMeta.getValueMeta( data.fieldnrs[i] );
          Object valueData = r[data.fieldnrs[i]];
          writeField( v, valueData, data.binaryNullValue[i] );
        }
        data.writer.write( data.binaryNewline );
      }

      incrementLinesOutput();

      // flush every 4k lines
      // if (linesOutput>0 && (linesOutput&0xFFF)==0) data.writer.flush();
    } catch ( Exception e ) {
      throw new KettleStepException( "Error writing line", e );
    }
  }

  /**
   * Converts one value to its byte representation, applying trim/pad rules for
   * strings; binary-stored strings with no formatting needs pass through as-is.
   */
  private byte[] formatField( ValueMetaInterface v, Object valueData ) throws KettleValueException {
    if ( v.isString() ) {
      if ( v.isStorageBinaryString() && v.getTrimType() == ValueMetaInterface.TRIM_TYPE_NONE && v.getLength() < 0
        && Utils.isEmpty( v.getStringEncoding() ) ) {
        return (byte[]) valueData;
      } else {
        String svalue = ( valueData instanceof String ) ? (String) valueData : v.getString( valueData );

        // trim or cut to size if needed.
        //
        return convertStringToBinaryString( v, Const.trimToType( svalue, v.getTrimType() ) );
      }
    } else {
      return v.getBinaryString( valueData );
    }
  }

  /**
   * Encodes a string to bytes honouring the field's (or step's) encoding, truncating
   * to the field length or space-padding up to it (multi-byte aware, see PDI-170).
   */
  private byte[] convertStringToBinaryString( ValueMetaInterface v, String string ) throws KettleValueException {
    int length = v.getLength();

    if ( string == null ) {
      return new byte[] {};
    }

    if ( length > -1 && length < string.length() ) {
      // we need to truncate
      String tmp = string.substring( 0, length );
      if ( Utils.isEmpty( v.getStringEncoding() ) ) {
        return tmp.getBytes();
      } else {
        try {
          return tmp.getBytes( v.getStringEncoding() );
        } catch ( UnsupportedEncodingException e ) {
          throw new KettleValueException( "Unable to convert String to Binary with specified string encoding ["
            + v.getStringEncoding() + "]", e );
        }
      }
    } else {
      byte[] text;
      if ( Utils.isEmpty( meta.getEncoding() ) ) {
        text = string.getBytes();
      } else {
        try {
          text = string.getBytes( meta.getEncoding() );
        } catch ( UnsupportedEncodingException e ) {
          throw new KettleValueException( "Unable to convert String to Binary with specified string encoding ["
            + v.getStringEncoding() + "]", e );
        }
      }
      if ( length > string.length() ) {
        // we need to pad this

        // Also for PDI-170: not all encoding use single characters, so we need to cope
        // with this.
        int size = 0;
        byte[] filler = null;
        try {
          if ( !Utils.isEmpty( meta.getEncoding() ) ) {
            filler = " ".getBytes( meta.getEncoding() );
          } else {
            filler = " ".getBytes();
          }
          size = text.length + filler.length * ( length - string.length() );
        } catch ( UnsupportedEncodingException uee ) {
          throw new KettleValueException( uee );
        }
        byte[] bytes = new byte[size];
        System.arraycopy( text, 0, bytes, 0, text.length );
        if ( filler.length == 1 ) {
          java.util.Arrays.fill( bytes, text.length, size, filler[0] );
        } else {
          // Multi-byte padding: copy the filler sequence once per missing character.
          int currIndex = text.length;
          for ( int i = 0; i < ( length - string.length() ); i++ ) {
            for ( int j = 0; j < filler.length; j++ ) {
              bytes[currIndex++] = filler[j];
            }
          }
        }
        return bytes;
      } else {
        // do not need to pad or truncate
        return text;
      }
    }
  }

  /** Encodes a string using the step's configured encoding (platform default otherwise). */
  private byte[] getBinaryString( String string ) throws KettleStepException {
    try {
      if ( data.hasEncoding ) {
        return string.getBytes( meta.getEncoding() );
      } else {
        return string.getBytes();
      }
    } catch ( Exception e ) {
      throw new KettleStepException( e );
    }
  }

  /**
   * Writes a single field value, substituting the configured null representation,
   * adding enclosures when forced or when the value contains separator/enclosure
   * bytes, and doubling embedded enclosures.
   */
  private void writeField( ValueMetaInterface v, Object valueData, byte[] nullString ) throws KettleStepException {
    try {
      byte[] str;

      // First check whether or not we have a null string set
      // These values should be set when a null value passes
      //
      if ( nullString != null && v.isNull( valueData ) ) {
        str = nullString;
      } else {
        if ( meta.isFastDump() ) {
          if ( valueData instanceof byte[] ) {
            str = (byte[]) valueData;
          } else {
            str = getBinaryString( ( valueData == null ) ? "" : valueData.toString() );
          }
        } else {
          str = formatField( v, valueData );
        }
      }

      if ( str != null && str.length > 0 ) {
        List<Integer> enclosures = null;
        boolean writeEnclosures = false;

        if ( v.isString() ) {
          if ( meta.isEnclosureForced() && !meta.isPadded() ) {
            writeEnclosures = true;
          } else if ( !meta.isEnclosureFixDisabled()
            && containsSeparatorOrEnclosure( str, data.binarySeparator, data.binaryEnclosure ) ) {
            writeEnclosures = true;
          }
        }

        if ( writeEnclosures ) {
          data.writer.write( data.binaryEnclosure );
          enclosures = getEnclosurePositions( str );
        }

        if ( enclosures == null ) {
          data.writer.write( str );
        } else {
          // Skip the enclosures, double them instead...
          int from = 0;
          for ( int i = 0; i < enclosures.size(); i++ ) {
            int position = enclosures.get( i );
            data.writer.write( str, from, position + data.binaryEnclosure.length - from );
            data.writer.write( data.binaryEnclosure ); // write enclosure a second time
            from = position + data.binaryEnclosure.length;
          }
          if ( from < str.length ) {
            data.writer.write( str, from, str.length - from );
          }
        }

        if ( writeEnclosures ) {
          data.writer.write( data.binaryEnclosure );
        }
      }
    } catch ( Exception e ) {
      throw new KettleStepException( "Error writing field content to file", e );
    }
  }

  /**
   * Finds the start index of every occurrence of the enclosure byte sequence in
   * {@code str}; returns null when there are none (or no enclosure is configured).
   */
  private List<Integer> getEnclosurePositions( byte[] str ) {
    List<Integer> positions = null;
    if ( data.binaryEnclosure != null && data.binaryEnclosure.length > 0 ) {
      // +1 because otherwise we will not find it at the end
      for ( int i = 0, len = str.length - data.binaryEnclosure.length + 1; i < len; i++ ) {
        // verify if on position i there is an enclosure
        //
        boolean found = true;
        for ( int x = 0; found && x < data.binaryEnclosure.length; x++ ) {
          if ( str[ i + x ] != data.binaryEnclosure[ x ] ) {
            found = false;
          }
        }
        if ( found ) {
          if ( positions == null ) {
            positions = new ArrayList<Integer>();
          }
          positions.add( i );
        }
      }
    }
    return positions;
  }

  /**
   * Writes the configured "ended" tag line, if any.
   *
   * @return true when an error occurred (logged, not thrown)
   */
  protected boolean writeEndedLine() {
    boolean retval = false;
    try {
      String sLine = meta.getEndedLine();
      if ( sLine != null ) {
        if ( sLine.trim().length() > 0 ) {
          data.writer.write( getBinaryString( sLine ) );
          incrementLinesOutput();
        }
      }
    } catch ( Exception e ) {
      logError( "Error writing ended tag line: " + e.toString() );
      logError( Const.getStackTracker( e ) );
      retval = true;
    }

    return retval;
  }

  /**
   * Writes the header line (field names, separated/enclosed like data rows).
   * Also used for footers, which have the same content.
   *
   * @return true when an error occurred (logged, not thrown)
   */
  protected boolean writeHeader() {
    boolean retval = false;
    RowMetaInterface r = data.outputRowMeta;

    try {
      // If we have fields specified: list them in this order!
      if ( meta.getOutputFields() != null && meta.getOutputFields().length > 0 ) {
        for ( int i = 0; i < meta.getOutputFields().length; i++ ) {
          String fieldName = meta.getOutputFields()[i].getName();
          ValueMetaInterface v = r.searchValueMeta( fieldName );

          if ( i > 0 && data.binarySeparator.length > 0 ) {
            data.writer.write( data.binarySeparator );
          }
          boolean writeEnclosure =
            ( meta.isEnclosureForced() && data.binaryEnclosure.length > 0 && v != null && v.isString() )
            || ( ( !meta.isEnclosureFixDisabled() && containsSeparatorOrEnclosure(
              fieldName.getBytes(), data.binarySeparator, data.binaryEnclosure ) ) );

          if ( writeEnclosure ) {
            data.writer.write( data.binaryEnclosure );
          }
          data.writer.write( getBinaryString( fieldName ) );
          if ( writeEnclosure ) {
            data.writer.write( data.binaryEnclosure );
          }
        }
        data.writer.write( data.binaryNewline );
      } else if ( r != null ) {
        // Just put all field names in the header/footer
        for ( int i = 0; i < r.size(); i++ ) {
          if ( i > 0 && data.binarySeparator.length > 0 ) {
            data.writer.write( data.binarySeparator );
          }
          ValueMetaInterface v = r.getValueMeta( i );

          boolean writeEnclosure =
            ( meta.isEnclosureForced() && data.binaryEnclosure.length > 0 && v != null && v.isString() )
            || ( ( !meta.isEnclosureFixDisabled() && containsSeparatorOrEnclosure(
              v.getName().getBytes(), data.binarySeparator, data.binaryEnclosure ) ) );

          if ( writeEnclosure ) {
            data.writer.write( data.binaryEnclosure );
          }
          data.writer.write( getBinaryString( v.getName() ) );
          if ( writeEnclosure ) {
            data.writer.write( data.binaryEnclosure );
          }
        }
        data.writer.write( data.binaryNewline );
      } else {
        data.writer.write( getBinaryString( "no rows selected" + Const.CR ) );
      }
    } catch ( Exception e ) {
      logError( "Error writing header line: " + e.toString() );
      logError( Const.getStackTracker( e ) );
      retval = true;
    }
    // NOTE(review): the line counter is incremented even on error — presumably intentional; confirm.
    incrementLinesOutput();
    return retval;
  }

  /** Builds the final output filename (extension, copy/partition/split numbers) via the meta. */
  public String buildFilename( String filename, boolean ziparchive ) {
    return meta.buildFilename( filename, meta.getExtension(), this, getCopy(), getPartitionID(), data.splitnr, ziparchive, meta );
  }

  /**
   * Opens the next output target for {@code baseFilename}: a servlet writer, an
   * external command's stdin, or a (possibly compressed) file stream. Bumps the
   * split number and registers a result file when configured.
   *
   * @throws KettleException on any failure to open the target
   */
  public void openNewFile( String baseFilename ) throws KettleException {
    if ( baseFilename == null ) {
      throw new KettleFileException( BaseMessages.getString( PKG, "TextFileOutput.Exception.FileNameNotSet" ) );
    }

    data.writer = null;

    String filename = buildFilename( environmentSubstitute( baseFilename ), true );

    try {
      if ( meta.isServletOutput() ) {
        Writer writer = getTrans().getServletPrintWriter();
        if ( Utils.isEmpty( meta.getEncoding() ) ) {
          data.writer = new WriterOutputStream( writer );
        } else {
          data.writer = new WriterOutputStream( writer, meta.getEncoding() );
        }
      } else if ( meta.isFileAsCommand() ) {
        if ( log.isDebug() ) {
          logDebug( "Spawning external process" );
        }
        if ( data.cmdProc != null ) {
          logError( "Previous command not correctly terminated" );
          setErrors( 1 );
        }
        String cmdstr = environmentSubstitute( meta.getFileName() );
        if ( Const.getOS().equals( "Windows 95" ) ) {
          cmdstr = "command.com /C " + cmdstr;
        } else {
          if ( Const.getOS().startsWith( "Windows" ) ) {
            cmdstr = "cmd.exe /C " + cmdstr;
          }
        }
        if ( isDetailed() ) {
          logDetailed( "Starting: " + cmdstr );
        }
        Runtime r = Runtime.getRuntime();
        data.cmdProc = r.exec( cmdstr, EnvUtil.getEnvironmentVariablesForRuntimeExec() );
        data.writer = data.cmdProc.getOutputStream();
        // Drain stdout/stderr on background threads so the child process can't block.
        StreamLogger stdoutLogger = new StreamLogger( log, data.cmdProc.getInputStream(), "(stdout)" );
        StreamLogger stderrLogger = new StreamLogger( log, data.cmdProc.getErrorStream(), "(stderr)" );
        new Thread( stdoutLogger ).start();
        new Thread( stderrLogger ).start();
      } else {
        // Check for parent folder creation only if the user asks for it
        //
        if ( meta.isCreateParentFolder() ) {
          createParentFolder( filename );
        }

        String compressionType = meta.getFileCompression();

        // If no file compression is specified, use the "None" provider
        if ( Utils.isEmpty( compressionType ) ) {
          compressionType = FILE_COMPRESSION_TYPE_NONE;
        }

        CompressionProvider compressionProvider =
          CompressionProviderFactory.getInstance().getCompressionProviderByName( compressionType );

        if ( compressionProvider == null ) {
          throw new KettleException( "No compression provider found with name = " + compressionType );
        }

        if ( !compressionProvider.supportsOutput() ) {
          throw new KettleException( "Compression provider " + compressionType + " does not support output streams!" );
        }

        if ( log.isDetailed() ) {
          logDetailed( "Opening output stream using provider: " + compressionProvider.getName() );
        }

        // Previously-opened files are reopened in append mode so earlier content survives.
        if ( checkPreviouslyOpened( filename ) ) {
          data.fos = getOutputStream( filename, getTransMeta(), true );
        } else {
          data.fos = getOutputStream( filename, getTransMeta(), meta.isFileAppended() );
        }

        data.out = compressionProvider.createOutputStream( data.fos );

        // The compression output stream may also archive entries. For this we create the filename
        // (with appropriate extension) and add it as an entry to the output stream. For providers
        // that do not archive entries, they should use the default no-op implementation.
        data.out.addEntry( filename, environmentSubstitute( meta.getExtension() ) );

        // NOTE(review): both branches below create the identical stream — the encoding is
        // actually applied later in getBinaryString(); the branch only affects logging.
        if ( !Utils.isEmpty( meta.getEncoding() ) ) {
          if ( log.isDetailed() ) {
            logDetailed( "Opening output stream in encoding: " + meta.getEncoding() );
          }
          data.writer = new BufferedOutputStream( data.out, 5000 );
        } else {
          if ( log.isDetailed() ) {
            logDetailed( "Opening output stream in default encoding" );
          }
          data.writer = new BufferedOutputStream( data.out, 5000 );
        }

        if ( log.isDetailed() ) {
          logDetailed( "Opened new file with name [" + KettleVFS.getFriendlyURI( filename ) + "]" );
        }
      }
    } catch ( Exception e ) {
      throw new KettleException( "Error opening new file : " + e.toString() );
    }

    data.splitnr++;

    if ( meta.isAddToResultFiles() ) {
      // Add this to the result file names...
      ResultFile resultFile =
        new ResultFile( ResultFile.FILE_TYPE_GENERAL, getFileObject( filename, getTransMeta() ), getTransMeta()
          .getName(), getStepname() );
      if ( resultFile != null ) {
        resultFile.setComment( BaseMessages.getString( PKG, "TextFileOutput.AddResultFile" ) );
        addResultFile( resultFile );
      }
    }
  }

  /**
   * Flushes and closes the current output target: waits for and cleans up an
   * external command process, or closes the compression and file streams.
   *
   * @return true on success, false when an error occurred (logged, resources nulled)
   */
  protected boolean closeFile() {
    boolean retval = false;

    try {
      if ( data.writer != null ) {
        data.writer.flush();

        // If writing a ZIP or GZIP file not from a command, do not close the writer or else
        // the closing of the ZipOutputStream below will throw an "already closed" exception.
        // Rather than checking for compression types, it is easier to check for cmdProc != null
        // because if that check fails, we know we will get into the ZIP/GZIP processing below.
        if ( data.cmdProc != null ) {
          if ( log.isDebug() ) {
            logDebug( "Closing output stream" );
          }
          data.writer.close();
          if ( log.isDebug() ) {
            logDebug( "Closed output stream" );
          }
        }
      }
      data.writer = null;

      if ( data.cmdProc != null ) {
        if ( log.isDebug() ) {
          logDebug( "Ending running external command" );
        }
        int procStatus = data.cmdProc.waitFor();
        // close the streams
        // otherwise you get "Too many open files, java.io.IOException" after a lot of iterations
        try {
          data.cmdProc.getErrorStream().close();
          data.cmdProc.getOutputStream().flush();
          data.cmdProc.getOutputStream().close();
          data.cmdProc.getInputStream().close();
        } catch ( IOException e ) {
          if ( log.isDetailed() ) {
            logDetailed( "Warning: Error closing streams: " + e.getMessage() );
          }
        }
        data.cmdProc = null;
        if ( log.isBasic() && procStatus != 0 ) {
          logBasic( "Command exit status: " + procStatus );
        }
      } else {
        if ( log.isDebug() ) {
          logDebug( "Closing normal file ..." );
        }

        if ( data.out != null ) {
          data.out.close();
        }
        if ( data.fos != null ) {
          data.fos.close();
          data.fos = null;
        }
      }

      retval = true;
    } catch ( Exception e ) {
      logError( "Exception trying to close file: " + e.toString() );
      setErrors( 1 );
      //Clean resources
      data.writer = null;
      data.out = null;
      data.fos = null;
      retval = false;
    }

    return retval;
  }

  /** True when this step already opened {@code filename} earlier in the run. */
  public boolean checkPreviouslyOpened( String filename ) {
    return data.getPreviouslyOpenedFiles().contains( filename );
  }

  /**
   * Step initialization: propagates the embedded metastore key for VFS, resets the
   * split counter and optionally opens the first file.
   * NOTE(review): this method is truncated in the visible source; the remainder of
   * its body lies outside this chunk.
   */
  public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (TextFileOutputMeta) smi;
    data = (TextFileOutputData) sdi;

    //Set Embedded NamedCluter MetatStore Provider Key so that it can be passed to VFS
    if ( getTransMeta().getNamedClusterEmbedManager() != null ) {
      getTransMeta().getNamedClusterEmbedManager()
        .passEmbeddedMetastoreKey( getTransMeta(), getTransMeta().getEmbeddedMetastoreProviderKey() );
    }

    if ( super.init( smi, sdi ) ) {
      data.splitnr = 0;
      // In case user want to create file at first row
      // In that case, DO NOT create file at Init
      if ( !meta.isDoNotOpenNewFileInit() ) {
        try
{ if ( !meta.isFileNameInField() ) { openNewFile( meta.getFileName() ); } data.oneFileOpened = true; } catch ( Exception e ) { logError( "Couldn't open file " + KettleVFS.getFriendlyURI( getParentVariableSpace().environmentSubstitute( meta.getFileName() ) ) + "." + getParentVariableSpace().environmentSubstitute( meta.getExtension() ), e ); setErrors( 1L ); stopAll(); } } try { initBinaryDataFields(); } catch ( Exception e ) { logError( "Couldn't initialize binary data fields", e ); setErrors( 1L ); stopAll(); } return true; } return false; } private void initBinaryDataFields() throws KettleException { try { data.hasEncoding = !Utils.isEmpty( meta.getEncoding() ); data.binarySeparator = new byte[] {}; data.binaryEnclosure = new byte[] {}; data.binaryNewline = new byte[] {}; if ( data.hasEncoding ) { if ( !Utils.isEmpty( meta.getSeparator() ) ) { data.binarySeparator = environmentSubstitute( meta.getSeparator() ).getBytes( meta.getEncoding() ); } if ( !Utils.isEmpty( meta.getEnclosure() ) ) { data.binaryEnclosure = environmentSubstitute( meta.getEnclosure() ).getBytes( meta.getEncoding() ); } if ( !Utils.isEmpty( meta.getNewline() ) ) { data.binaryNewline = meta.getNewline().getBytes( meta.getEncoding() ); } } else { if ( !Utils.isEmpty( meta.getSeparator() ) ) { data.binarySeparator = environmentSubstitute( meta.getSeparator() ).getBytes(); } if ( !Utils.isEmpty( meta.getEnclosure() ) ) { data.binaryEnclosure = environmentSubstitute( meta.getEnclosure() ).getBytes(); } if ( !Utils.isEmpty( meta.getNewline() ) ) { data.binaryNewline = environmentSubstitute( meta.getNewline() ).getBytes(); } } data.binaryNullValue = new byte[meta.getOutputFields().length][]; for ( int i = 0; i < meta.getOutputFields().length; i++ ) { data.binaryNullValue[i] = null; String nullString = meta.getOutputFields()[i].getNullString(); if ( !Utils.isEmpty( nullString ) ) { if ( data.hasEncoding ) { data.binaryNullValue[i] = nullString.getBytes( meta.getEncoding() ); } else { 
data.binaryNullValue[i] = nullString.getBytes(); } } } } catch ( Exception e ) { throw new KettleException( "Unexpected error while encoding binary fields", e ); } } public void dispose( StepMetaInterface smi, StepDataInterface sdi ) { meta = (TextFileOutputMeta) smi; data = (TextFileOutputData) sdi; if ( meta.isFileNameInField() ) { for ( OutputStream outputStream : data.fileWriterMap.values() ) { try { outputStream.close(); } catch ( IOException e ) { logError( "Unexpected error closing file", e ); setErrors( 1 ); } } } else { if ( data.oneFileOpened ) { closeFile(); } try { if ( data.fos != null ) { data.fos.close(); } } catch ( Exception e ) { data.fos = null; logError( "Unexpected error closing file", e ); setErrors( 1 ); } } super.dispose( smi, sdi ); } public boolean containsSeparatorOrEnclosure( byte[] source, byte[] separator, byte[] enclosure ) { boolean result = false; boolean enclosureExists = enclosure != null && enclosure.length > 0; boolean separatorExists = separator != null && separator.length > 0; // Skip entire test if neither separator nor enclosure exist if ( separatorExists || enclosureExists ) { // Search for the first occurrence of the separator or enclosure for ( int index = 0; !result && index < source.length; index++ ) { if ( enclosureExists && source[index] == enclosure[0] ) { // Potential match found, make sure there are enough bytes to support a full match if ( index + enclosure.length <= source.length ) { // First byte of enclosure found result = true; // Assume match for ( int i = 1; i < enclosure.length; i++ ) { if ( source[index + i] != enclosure[i] ) { // Enclosure match is proven false result = false; break; } } } } else if ( separatorExists && source[index] == separator[0] ) { // Potential match found, make sure there are enough bytes to support a full match if ( index + separator.length <= source.length ) { // First byte of separator found result = true; // Assume match for ( int i = 1; i < separator.length; i++ ) { if ( 
source[index + i] != separator[i] ) { // Separator match is proven false result = false; break; } } } } } } return result; } // public boolean containsSeparator(byte[] source, byte[] separator) { // boolean result = false; // // // Is the string long enough to contain the separator // if(source.length > separator.length) { // int index = 0; // // Search for the first occurrence of the separator // do { // index = ArrayUtils.indexOf(source, separator[0], index); // if(index >= 0 && (source.length - index >= separator.length)) { // // Compare the bytes at the index to the contents of the separator // byte[] potentialMatch = ArrayUtils.subarray(source, index, index + separator.length); // // if(Arrays.equals(separator, potentialMatch)) { // result = true; // } // } // } while(!result && ++index > 0); // } // return result; // } // // public boolean containsEnclosure(byte[] source, byte[] enclosure) { // boolean result = false; // // // Is the string long enough to contain the enclosure // if(source.length > enclosure.length) { // int index = 0; // // Search for the first occurrence of the enclosure // do { // index = ArrayUtils.indexOf(source, enclosure[0], index); // if(index >= 0 && (source.length - index >= enclosure.length)) { // // Compare the bytes at the index to the contents of the enclosure // byte[] potentialMatch = ArrayUtils.subarray(source, index, index + enclosure.length); // // if(Arrays.equals(enclosure, potentialMatch)) { // result = true; // } // } // } while(!result && ++index > 0); // } // return result; // } private void createParentFolder( String filename ) throws Exception { // Check for parent folder FileObject parentfolder = null; try { // Get parent folder parentfolder = getFileObject( filename, getTransMeta() ).getParent(); if ( parentfolder.exists() ) { if ( isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "TextFileOutput.Log.ParentFolderExist", KettleVFS.getFriendlyURI( parentfolder ) ) ); } } else { if ( isDetailed() ) { 
logDetailed( BaseMessages.getString( PKG, "TextFileOutput.Log.ParentFolderNotExist", KettleVFS.getFriendlyURI( parentfolder ) ) ); } if ( meta.isCreateParentFolder() ) { parentfolder.createFolder(); if ( isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "TextFileOutput.Log.ParentFolderCreated", KettleVFS.getFriendlyURI( parentfolder ) ) ); } } else { throw new KettleException( BaseMessages.getString( PKG, "TextFileOutput.Log.ParentFolderNotExistCreateIt", KettleVFS.getFriendlyURI( parentfolder ), KettleVFS.getFriendlyURI( filename ) ) ); } } } finally { if ( parentfolder != null ) { try { parentfolder.close(); } catch ( Exception ex ) { // Ignore } } } } protected FileObject getFileObject( String vfsFilename ) throws KettleFileException { return KettleVFS.getFileObject( vfsFilename ); } protected FileObject getFileObject( String vfsFilename, VariableSpace space ) throws KettleFileException { return KettleVFS.getFileObject( vfsFilename, space ); } protected OutputStream getOutputStream( String vfsFilename, VariableSpace space, boolean append ) throws KettleFileException { return KettleVFS.getOutputStream( vfsFilename, space, append ); } }
/*===========================================================================
 * Licensed Materials - Property of IBM
 * "Restricted Materials of IBM"
 *
 * IBM SDK, Java(tm) Technology Edition, v8
 * (C) Copyright IBM Corp. 1999, 2013. All Rights Reserved
 *
 * US Government Users Restricted Rights - Use, duplication or disclosure
 * restricted by GSA ADP Schedule Contract with IBM Corp.
 *===========================================================================
 */
/*
 * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */

package javax.naming;

import java.util.Hashtable;
import javax.naming.spi.NamingManager;
import com.sun.naming.internal.ResourceManager;

/**
 * This class is the starting context for performing naming operations.
 *<p>
 * All naming operations are relative to a context.
 * The initial context implements the Context interface and
 * provides the starting point for resolution of names.
 *<p>
 * <a name=ENVIRONMENT></a>
 * When the initial context is constructed, its environment
 * is initialized with properties defined in the environment parameter
 * passed to the constructor, and in any
 * <a href=Context.html#RESOURCEFILES>application resource files</a>.
 * In addition, a small number of standard JNDI properties may
 * be specified as system properties or as applet parameters
 * (through the use of {@link Context#APPLET}).
 * These special properties are listed in the field detail sections of the
 * <a href=Context.html#field_detail><tt>Context</tt></a> and
 * <a href=ldap/LdapContext.html#field_detail><tt>LdapContext</tt></a>
 * interface documentation.
 *<p>
 * JNDI determines each property's value by merging
 * the values from the following two sources, in order:
 * <ol>
 * <li>
 * The first occurrence of the property from the constructor's
 * environment parameter and (for appropriate properties) the applet
 * parameters and system properties.
 * <li>
 * The application resource files (<tt>jndi.properties</tt>).
 * </ol>
 * For each property found in both of these two sources, or in
 * more than one application resource file, the property's value
 * is determined as follows.  If the property is
 * one of the standard JNDI properties that specify a list of JNDI
 * factories (see <a href=Context.html#LISTPROPS><tt>Context</tt></a>),
 * all of the values are
 * concatenated into a single colon-separated list.  For other
 * properties, only the first value found is used.
 *<p>
 * The initial context implementation is determined at runtime.
 * The default policy uses the environment property
 * "{@link Context#INITIAL_CONTEXT_FACTORY java.naming.factory.initial}",
 * which contains the class name of the initial context factory.
 * An exception to this policy is made when resolving URL strings, as described
 * below.
 *<p>
 * When a URL string (a <tt>String</tt> of the form
 * <em>scheme_id:rest_of_name</em>) is passed as a name parameter to
 * any method, a URL context factory for handling that scheme is
 * located and used to resolve the URL.  If no such factory is found,
 * the initial context specified by
 * <tt>"java.naming.factory.initial"</tt> is used.  Similarly, when a
 * <tt>CompositeName</tt> object whose first component is a URL string is
 * passed as a name parameter to any method, a URL context factory is
 * located and used to resolve the first name component.
 * See {@link NamingManager#getURLContext
 * <tt>NamingManager.getURLContext()</tt>} for a description of how URL
 * context factories are located.
 *<p>
 * This default policy of locating the initial context and URL context
 * factories may be overridden
 * by calling
 * <tt>NamingManager.setInitialContextFactoryBuilder()</tt>.
 *<p>
 * NoInitialContextException is thrown when an initial context cannot
 * be instantiated. This exception can be thrown during any interaction
 * with the InitialContext, not only when the InitialContext is constructed.
 * For example, the implementation of the initial context might lazily
 * retrieve the context only when actual methods are invoked on it.
 * The application should not have any dependency on when the existence
 * of an initial context is determined.
 *<p>
 * When the environment property "java.naming.factory.initial" is
 * non-null, the InitialContext constructor will attempt to create the
 * initial context specified therein. At that time, the initial context factory
 * involved might throw an exception if a problem is encountered. However,
 * it is provider implementation-dependent when it verifies and indicates
 * to the users of the initial context any environment property- or
 * connection- related problems. It can do so lazily--delaying until
 * an operation is performed on the context, or eagerly, at the time
 * the context is constructed.
 *<p>
 * An InitialContext instance is not synchronized against concurrent
 * access by multiple threads. Multiple threads each manipulating a
 * different InitialContext instance need not synchronize.
 * Threads that need to access a single InitialContext instance
 * concurrently should synchronize amongst themselves and provide the
 * necessary locking.
 *
 * @author Rosanna Lee
 * @author Scott Seligman
 *
 * @see Context
 * @see NamingManager#setInitialContextFactoryBuilder
 *      NamingManager.setInitialContextFactoryBuilder
 * @since JNDI 1.1 / Java 2 Platform, Standard Edition, v 1.3
 */

public class InitialContext implements Context {

    /**
     * The environment associated with this InitialContext.
     * It is initialized to null and is updated by the constructor
     * that accepts an environment or by the <tt>init()</tt> method.
     * @see #addToEnvironment
     * @see #removeFromEnvironment
     * @see #getEnvironment
     */
    protected Hashtable<Object,Object> myProps = null;

    /**
     * Field holding the result of calling NamingManager.getInitialContext().
     * It is set by getDefaultInitCtx() the first time getDefaultInitCtx()
     * is called. Subsequent invocations of getDefaultInitCtx() return
     * the value of defaultInitCtx.
     * @see #getDefaultInitCtx
     */
    protected Context defaultInitCtx = null;

    /**
     * Field indicating whether the initial context has been obtained
     * by calling NamingManager.getInitialContext().
     * If true, its result is in <code>defaultInitCtx</code>.
     */
    protected boolean gotDefault = false;

    /**
     * Constructs an initial context with the option of not
     * initializing it.  This may be used by a constructor in
     * a subclass when the value of the environment parameter
     * is not yet known at the time the <tt>InitialContext</tt>
     * constructor is called.  The subclass's constructor will
     * call this constructor, compute the value of the environment,
     * and then call <tt>init()</tt> before returning.
     *
     * @param lazy
     *          true means do not initialize the initial context; false
     *          is equivalent to calling <tt>new InitialContext()</tt>
     * @throws  NamingException if a naming exception is encountered
     *
     * @see #init(Hashtable)
     * @since 1.3
     */
    protected InitialContext(boolean lazy) throws NamingException {
        if (!lazy) {
            init(null);
        }
    }

    /**
     * Constructs an initial context.
     * No environment properties are supplied.
     * Equivalent to <tt>new InitialContext(null)</tt>.
     *
     * @throws  NamingException if a naming exception is encountered
     *
     * @see #InitialContext(Hashtable)
     */
    public InitialContext() throws NamingException {
        init(null);
    }

    /**
     * Constructs an initial context using the supplied environment.
     * Environment properties are discussed in the class description.
     *
     * <p> This constructor will not modify <tt>environment</tt>
     * or save a reference to it, but may save a clone.
     * Caller should not modify mutable keys and values in
     * <tt>environment</tt> after it has been passed to the constructor.
     *
     * @param environment
     *          environment used to create the initial context.
     *          Null indicates an empty environment.
     *
     * @throws  NamingException if a naming exception is encountered
     */
    public InitialContext(Hashtable<?,?> environment)
        throws NamingException
    {
        // Defensive clone so later caller mutations cannot affect this context.
        if (environment != null) {
            environment = (Hashtable)environment.clone();
        }
        init(environment);
    }

    /**
     * Initializes the initial context using the supplied environment.
     * Environment properties are discussed in the class description.
     *
     * <p> This method will modify <tt>environment</tt> and save
     * a reference to it.  The caller may no longer modify it.
     *
     * @param environment
     *          environment used to create the initial context.
     *          Null indicates an empty environment.
     *
     * @throws  NamingException if a naming exception is encountered
     *
     * @see #InitialContext(boolean)
     * @since 1.3
     */
    @SuppressWarnings("unchecked")
    protected void init(Hashtable<?,?> environment)
        throws NamingException
    {
        // Merge the caller's environment with application resource files.
        myProps = (Hashtable<Object,Object>)
                ResourceManager.getInitialEnvironment(environment);

        if (myProps.get(Context.INITIAL_CONTEXT_FACTORY) != null) {
            // user has specified initial context factory; try to get it
            getDefaultInitCtx();
        }
    }

    /**
     * A static method to retrieve the named object.
     * This is a shortcut method equivalent to invoking:
     * <p>
     * <code>
     *        InitialContext ic = new InitialContext();
     *        Object obj = ic.lookup(name);
     * </code>
     * <p> If <tt>name</tt> is empty, returns a new instance of this context
     * (which represents the same naming context as this context, but its
     * environment may be modified independently and it may be accessed
     * concurrently).
     *
     * @param <T> the type of the returned object
     * @param name
     *          the name of the object to look up
     * @return  the object bound to <tt>name</tt>
     * @throws  NamingException if a naming exception is encountered
     *
     * @see #doLookup(String)
     * @see #lookup(Name)
     * @since 1.6
     */
    @SuppressWarnings("unchecked")
    public static <T> T doLookup(Name name)
        throws NamingException {
        return (T) (new InitialContext()).lookup(name);
    }

   /**
     * A static method to retrieve the named object.
     * See {@link #doLookup(Name)} for details.
     * @param <T> the type of the returned object
     * @param name
     *          the name of the object to look up
     * @return  the object bound to <tt>name</tt>
     * @throws  NamingException if a naming exception is encountered
     * @since 1.6
     */
    @SuppressWarnings("unchecked")
    public static <T> T doLookup(String name)
        throws NamingException {
        return (T) (new InitialContext()).lookup(name);
    }

    // Extracts the URL scheme ("scheme_id" in "scheme_id:rest_of_name"),
    // or returns null when the string is not a URL (no colon, or the first
    // '/' precedes the first ':').
    private static String getURLScheme(String str) {
        int colon_posn = str.indexOf(':');
        int slash_posn = str.indexOf('/');

        if (colon_posn > 0 && (slash_posn == -1 || colon_posn < slash_posn))
            return str.substring(0, colon_posn);
        return null;
    }

    /**
     * Retrieves the initial context by calling
     * <code>NamingManager.getInitialContext()</code>
     * and cache it in defaultInitCtx.
     * Set <code>gotDefault</code> so that we know we've tried this before.
     * @return The non-null cached initial context.
     * @exception NoInitialContextException If cannot find an initial context.
     * @exception NamingException If a naming exception was encountered.
     */
    protected Context getDefaultInitCtx() throws NamingException{
        if (!gotDefault) {
            defaultInitCtx = NamingManager.getInitialContext(myProps);
            gotDefault = true;
        }
        if (defaultInitCtx == null)
            throw new NoInitialContextException();

        return defaultInitCtx;
    }

    /**
     * Retrieves a context for resolving the string name <code>name</code>.
     * If <code>name</code> is a URL string, then attempt
     * to find a URL context for it. If none is found, or if
     * <code>name</code> is not a URL string, then return
     * <code>getDefaultInitCtx()</code>.
     *<p>
     * See getURLOrDefaultInitCtx(Name) for description
     * of how a subclass should use this method.
     * @param name The non-null name for which to get the context.
     * @return A URL context for <code>name</code> or the cached
     *         initial context. The result cannot be null.
     * @exception NoInitialContextException If cannot find an initial context.
     * @exception NamingException If a naming exception is encountered.
     * @see javax.naming.spi.NamingManager#getURLContext
     */
    protected Context getURLOrDefaultInitCtx(String name)
        throws NamingException {
        if (NamingManager.hasInitialContextFactoryBuilder()) {
            // An installed builder takes precedence over URL resolution.
            return getDefaultInitCtx();
        }
        String scheme = getURLScheme(name);
        if (scheme != null) {
            Context ctx = NamingManager.getURLContext(scheme, myProps);
            if (ctx != null) {
                return ctx;
            }
        }
        return getDefaultInitCtx();
    }

    /**
     * Retrieves a context for resolving <code>name</code>.
     * If the first component of <code>name</code> is a URL string,
     * then attempt to find a URL context for it. If none is found, or if
     * the first component of <code>name</code> is not a URL string,
     * then return <code>getDefaultInitCtx()</code>.
     *<p>
     * When creating a subclass of InitialContext, use this method as
     * follows.
     * Define a new method that uses this method to get an initial
     * context of the desired subclass.
     * <blockquote><pre>
     * protected XXXContext getURLOrDefaultInitXXXCtx(Name name)
     * throws NamingException {
     *  Context answer = getURLOrDefaultInitCtx(name);
     *  if (!(answer instanceof XXXContext)) {
     *    if (answer == null) {
     *      throw new NoInitialContextException();
     *    } else {
     *      throw new NotContextException("Not an XXXContext");
     *    }
     *  }
     *  return (XXXContext)answer;
     * }
     * </pre></blockquote>
     * When providing implementations for the new methods in the subclass,
     * use this newly defined method to get the initial context.
     * <blockquote><pre>
     * public Object XXXMethod1(Name name, ...)
     * throws NamingException {
     *  return getURLOrDefaultInitXXXCtx(name).XXXMethod1(name, ...);
     * }
     * </pre></blockquote>
     *
     * @param name The non-null name for which to get the context.
     * @return A URL context for <code>name</code> or the cached
     *         initial context. The result cannot be null.
     * @exception NoInitialContextException If cannot find an initial context.
     * @exception NamingException If a naming exception is encountered.
     *
     * @see javax.naming.spi.NamingManager#getURLContext
     */
    protected Context getURLOrDefaultInitCtx(Name name)
        throws NamingException {
        if (NamingManager.hasInitialContextFactoryBuilder()) {
            return getDefaultInitCtx();
        }
        if (name.size() > 0) {
            String first = name.get(0);
            String scheme = getURLScheme(first);
            if (scheme != null) {
                Context ctx = NamingManager.getURLContext(scheme, myProps);
                if (ctx != null) {
                    return ctx;
                }
            }
        }
        return getDefaultInitCtx();
    }

// Context methods
// Most Javadoc is deferred to the Context interface.
// Each method simply resolves the appropriate context (URL or default)
// and delegates the operation to it.

    public Object lookup(String name) throws NamingException {
        return getURLOrDefaultInitCtx(name).lookup(name);
    }

    public Object lookup(Name name) throws NamingException {
        return getURLOrDefaultInitCtx(name).lookup(name);
    }

    public void bind(String name, Object obj) throws NamingException {
        getURLOrDefaultInitCtx(name).bind(name, obj);
    }

    public void bind(Name name, Object obj) throws NamingException {
        getURLOrDefaultInitCtx(name).bind(name, obj);
    }

    public void rebind(String name, Object obj) throws NamingException {
        getURLOrDefaultInitCtx(name).rebind(name, obj);
    }

    public void rebind(Name name, Object obj) throws NamingException {
        getURLOrDefaultInitCtx(name).rebind(name, obj);
    }

    public void unbind(String name) throws NamingException  {
        getURLOrDefaultInitCtx(name).unbind(name);
    }

    public void unbind(Name name) throws NamingException  {
        getURLOrDefaultInitCtx(name).unbind(name);
    }

    public void rename(String oldName, String newName) throws NamingException {
        getURLOrDefaultInitCtx(oldName).rename(oldName, newName);
    }

    public void rename(Name oldName, Name newName)
        throws NamingException
    {
        getURLOrDefaultInitCtx(oldName).rename(oldName, newName);
    }

    public NamingEnumeration<NameClassPair> list(String name)
        throws NamingException
    {
        return (getURLOrDefaultInitCtx(name).list(name));
    }

    public NamingEnumeration<NameClassPair> list(Name name)
        throws NamingException
    {
        return (getURLOrDefaultInitCtx(name).list(name));
    }

    public NamingEnumeration<Binding> listBindings(String name)
            throws NamingException  {
        return getURLOrDefaultInitCtx(name).listBindings(name);
    }

    public NamingEnumeration<Binding> listBindings(Name name)
            throws NamingException  {
        return getURLOrDefaultInitCtx(name).listBindings(name);
    }

    public void destroySubcontext(String name) throws NamingException  {
        getURLOrDefaultInitCtx(name).destroySubcontext(name);
    }

    public void destroySubcontext(Name name) throws NamingException  {
        getURLOrDefaultInitCtx(name).destroySubcontext(name);
    }

    public Context createSubcontext(String name) throws NamingException  {
        return getURLOrDefaultInitCtx(name).createSubcontext(name);
    }

    public Context createSubcontext(Name name) throws NamingException  {
        return getURLOrDefaultInitCtx(name).createSubcontext(name);
    }

    public Object lookupLink(String name) throws NamingException  {
        return getURLOrDefaultInitCtx(name).lookupLink(name);
    }

    public Object lookupLink(Name name) throws NamingException {
        return getURLOrDefaultInitCtx(name).lookupLink(name);
    }

    public NameParser getNameParser(String name) throws NamingException {
        return getURLOrDefaultInitCtx(name).getNameParser(name);
    }

    public NameParser getNameParser(Name name) throws NamingException {
        return getURLOrDefaultInitCtx(name).getNameParser(name);
    }

    /**
     * Composes the name of this context with a name relative to
     * this context.
     * Since an initial context may never be named relative
     * to any context other than itself, the value of the
     * <tt>prefix</tt> parameter must be an empty name (<tt>""</tt>).
     */
    public String composeName(String name, String prefix)
            throws NamingException {
        return name;
    }

    /**
     * Composes the name of this context with a name relative to
     * this context.
     * Since an initial context may never be named relative
     * to any context other than itself, the value of the
     * <tt>prefix</tt> parameter must be an empty name.
     */
    public Name composeName(Name name, Name prefix)
        throws NamingException
    {
        return (Name)name.clone();
    }

    public Object addToEnvironment(String propName, Object propVal)
            throws NamingException {
        // Keep the local view of the environment in sync with the default context's.
        myProps.put(propName, propVal);
        return getDefaultInitCtx().addToEnvironment(propName, propVal);
    }

    public Object removeFromEnvironment(String propName)
            throws NamingException {
        myProps.remove(propName);
        return getDefaultInitCtx().removeFromEnvironment(propName);
    }

    public Hashtable<?,?> getEnvironment() throws NamingException {
        return getDefaultInitCtx().getEnvironment();
    }

    public void close() throws NamingException {
        myProps = null;
        if (defaultInitCtx != null) {
            defaultInitCtx.close();
            defaultInitCtx = null;
        }
        gotDefault = false;
    }

    public String getNameInNamespace() throws NamingException {
        return getDefaultInitCtx().getNameInNamespace();
    }
};
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.workbench.screens.guided.dtable.client.editor;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;

import com.google.gwtmockito.GwtMockitoTestRunner;
import org.drools.workbench.models.guided.dtable.shared.model.GuidedDecisionTable52;
import org.drools.workbench.screens.guided.dtable.client.type.GuidedDTableResourceType;
import org.drools.workbench.screens.guided.dtable.client.widget.table.GuidedDecisionTablePresenter;
import org.drools.workbench.screens.guided.dtable.client.widget.table.GuidedDecisionTableView;
import org.drools.workbench.screens.guided.dtable.client.widget.table.events.cdi.DecisionTableSelectedEvent;
import org.drools.workbench.screens.guided.dtable.model.GuidedDecisionTableEditorContent;
import org.guvnor.common.services.project.categories.Decision;
import org.guvnor.common.services.shared.metadata.model.Metadata;
import org.guvnor.common.services.shared.metadata.model.Overview;
import org.guvnor.common.services.shared.validation.model.ValidationMessage;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.soup.project.datamodel.imports.Imports;
import org.kie.workbench.common.widgets.client.datamodel.AsyncPackageDataModelOracle;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.uberfire.backend.vfs.ObservablePath;
import org.uberfire.backend.vfs.Path;
import org.uberfire.client.mvp.UpdatedLockStatusEvent;
import org.uberfire.client.workbench.widgets.multipage.MultiPageEditor;
import org.uberfire.client.workbench.widgets.multipage.Page;
import org.uberfire.ext.editor.commons.client.menu.MenuItems;
import org.uberfire.ext.editor.commons.client.menu.common.SaveAndRenameCommandBuilder;
import org.uberfire.mvp.Command;
import org.uberfire.mvp.PlaceRequest;
import org.uberfire.workbench.events.NotificationEvent;
import org.uberfire.workbench.model.menu.Menus;

import static org.drools.workbench.screens.guided.dtable.client.editor.BaseGuidedDecisionTableEditorPresenter.COLUMNS_TAB_INDEX;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@code BaseGuidedDecisionTableEditorPresenter}, exercised
 * through the concrete {@code GuidedDecisionTableEditorPresenter} subclass.
 * Mocks (view, modeller, dtService, placeManager, event sources, captors,
 * etc.) are inherited from {@code BaseGuidedDecisionTablePresenterTest}.
 */
@RunWith(GwtMockitoTestRunner.class)
public class BaseGuidedDecisionTableEditorPresenterTest extends BaseGuidedDecisionTablePresenterTest<BaseGuidedDecisionTableEditorPresenter> {

    private GuidedDTableResourceType resourceType = new GuidedDTableResourceType(new Decision());

    @Mock
    private SaveAndRenameCommandBuilder<GuidedDecisionTable52, Metadata> saveAndRenameCommandBuilder;

    /**
     * Builds the presenter under test. {@code getSaveAndRenameCommand()} is
     * stubbed so the save/rename wiring does not require a real builder chain.
     */
    @Override
    protected GuidedDecisionTableEditorPresenter getPresenter() {
        return new GuidedDecisionTableEditorPresenter(view,
                                                      dtServiceCaller,
                                                      notification,
                                                      decisionTableSelectedEvent,
                                                      validationPopup,
                                                      resourceType,
                                                      editMenuBuilder,
                                                      viewMenuBuilder,
                                                      insertMenuBuilder,
                                                      radarMenuBuilder,
                                                      modeller,
                                                      beanManager,
                                                      placeManager,
                                                      columnsPage,
                                                      saveAndRenameCommandBuilder,
                                                      alertsButtonMenuItemBuilder) {
            @Override
            protected Command getSaveAndRenameCommand() {
                return mock(Command.class);
            }
        };
    }

    // Construction wires the modeller into all menu builders and the view.
    @Test
    public void checkInit() {
        verify(viewMenuBuilder,
               times(1)).setModeller(eq(modeller));
        verify(insertMenuBuilder,
               times(1)).setModeller(eq(modeller));
        verify(radarMenuBuilder,
               times(1)).setModeller(eq(modeller));
        verify(view,
               times(1)).setModellerView(eq(modellerView));
    }

    // onStartup loads the document, registers it and fires a selection event
    // carrying the new decision-table presenter.
    @Test
    public void checkOnStartup() {
        final ObservablePath path = mock(ObservablePath.class);
        final PlaceRequest placeRequest = mock(PlaceRequest.class);
        final GuidedDecisionTableEditorContent content = makeDecisionTableContent();
        final GuidedDecisionTableView.Presenter dtPresenter = makeDecisionTable(path,
                                                                                path,
                                                                                placeRequest,
                                                                                content);

        presenter.onStartup(path,
                            placeRequest);

        assertEquals(path,
                     presenter.editorPath);
        assertEquals(placeRequest,
                     presenter.editorPlaceRequest);

        verify(view,
               times(1)).showLoading();
        verify(presenter,
               times(1)).loadDocument(eq(path),
                                      eq(placeRequest));
        verify(dtService,
               times(1)).loadContent(eq(path));
        verify(modeller,
               times(1)).addDecisionTable(eq(path),
                                          eq(placeRequest),
                                          eq(content),
                                          any(Boolean.class),
                                          eq(null),
                                          eq(null));
        verify(presenter,
               times(1)).registerDocument(eq(dtPresenter));
        verify(decisionTableSelectedEvent,
               times(1)).fire(dtSelectedEventCaptor.capture());
        verify(view,
               times(1)).hideBusyIndicator();

        final DecisionTableSelectedEvent dtSelectedEvent = dtSelectedEventCaptor.getValue();
        assertNotNull(dtSelectedEvent);
        assertTrue(dtSelectedEvent.getPresenter().isPresent());
        assertEquals(dtPresenter,
                     dtSelectedEvent.getPresenter().get());
    }

    @Test
    public void checkDecisionTableSelectedEventFiredWhenEditorReceivesFocusWithActiveDecisionTable() {
        final GuidedDecisionTableView.Presenter activeDtable = mock(GuidedDecisionTableView.Presenter.class);
        when(modeller.getActiveDecisionTable()).thenReturn(Optional.of(activeDtable));

        presenter.onFocus();

        // Focus re-triggers analysis and re-announces the active table.
        verify(activeDtable,
               times(1)).initialiseAnalysis();
        verify(decisionTableSelectedEvent,
               times(1)).fire(dtSelectedEventCaptor.capture());

        final DecisionTableSelectedEvent event = dtSelectedEventCaptor.getValue();
        assertNotNull(event);
        assertTrue(event.getPresenter().isPresent());
        assertEquals(activeDtable,
                     event.getPresenter().get());
    }

    @Test
    public void checkDecisionTableSelectedEventNotFiredWhenEditorReceivesFocusWithoutActiveDecisionTable() {
        when(modeller.getActiveDecisionTable()).thenReturn(Optional.empty());

        presenter.onFocus();

        verify(decisionTableSelectedEvent,
               never()).fire(any(DecisionTableSelectedEvent.class));
    }

    @Test
    public void checkMayCloseWithNoDecisionTable() {
        assertTrue(presenter.mayClose());
    }

    // A table whose current hash equals its original hash (0 here) is clean.
    @Test
    public void checkMayCloseWithCleanDecisionTable() {
        final ObservablePath path = mock(ObservablePath.class);
        final PlaceRequest placeRequest = mock(PlaceRequest.class);
        final GuidedDecisionTableEditorContent content = makeDecisionTableContent();
        final GuidedDecisionTableView.Presenter dtPresenter = makeDecisionTable(path,
                                                                                path,
                                                                                placeRequest,
                                                                                content);
        when(dtPresenter.getOriginalHashCode()).thenReturn(0);
        when(modeller.getAvailableDecisionTables()).thenReturn(new HashSet<GuidedDecisionTableView.Presenter>() {{
            add(dtPresenter);
        }});

        assertTrue(presenter.mayClose());
    }

    // A differing original hash (10) marks the table dirty; close is vetoed.
    @Test
    public void checkMayCloseWithDirtyDecisionTable() {
        final ObservablePath path = mock(ObservablePath.class);
        final PlaceRequest placeRequest = mock(PlaceRequest.class);
        final GuidedDecisionTableEditorContent content = makeDecisionTableContent();
        final GuidedDecisionTableView.Presenter dtPresenter = makeDecisionTable(path,
                                                                                path,
                                                                                placeRequest,
                                                                                content);
        when(dtPresenter.getOriginalHashCode()).thenReturn(10);
        when(modeller.getAvailableDecisionTables()).thenReturn(new HashSet<GuidedDecisionTableView.Presenter>() {{
            add(dtPresenter);
        }});

        assertFalse(presenter.mayClose());
    }

    @Test
    public void checkOnClose() {
        presenter.onClose();

        verify(modeller,
               times(1)).onClose();
    }

    // Selecting the already-active table must not re-activate the document.
    @Test
    public void checkOnDecisionTableSelectedWhenAvailableSelected() {
        final ObservablePath path = mock(ObservablePath.class);
        final PlaceRequest placeRequest = mock(PlaceRequest.class);
        final GuidedDecisionTableEditorContent content = makeDecisionTableContent();
        final GuidedDecisionTableView.Presenter dtPresenter = makeDecisionTable(path,
                                                                                path,
                                                                                placeRequest,
                                                                                content);
        final DecisionTableSelectedEvent event = new DecisionTableSelectedEvent(dtPresenter);

        when(modeller.isDecisionTableAvailable(any(GuidedDecisionTableView.Presenter.class))).thenReturn(true);
        when(presenter.getActiveDocument()).thenReturn(dtPresenter);

        presenter.onDecisionTableSelected(event);

        verify(presenter,
               never()).activateDocument(any(GuidedDecisionTableView.Presenter.class));
        assertTrue(getMenuState(presenter.getMenus(),
                                MenuItems.VALIDATE));
    }

    // Looks up a menu item by caption (case-insensitive) and reports whether
    // it is enabled. Assumes the item exists: findFirst().get() would throw
    // otherwise, which fails the calling test — acceptable in test code.
    private boolean getMenuState(final Menus menus,
                                 final MenuItems menuItem) {
        return menus.getItems().stream().filter(m -> m.getIdentifier() != null).filter(m -> m.getCaption().toLowerCase().equals(menuItem.name().toLowerCase())).findFirst().get().isEnabled();
    }

    @Test
    public void checkOnDecisionTableSelectedWhenAvailableNotSelected() {
        final ObservablePath path = mock(ObservablePath.class);
        final PlaceRequest placeRequest = mock(PlaceRequest.class);
        final GuidedDecisionTableEditorContent content = makeDecisionTableContent();
        final GuidedDecisionTableView.Presenter dtPresenter = makeDecisionTable(path,
                                                                                path,
                                                                                placeRequest,
                                                                                content);
        final DecisionTableSelectedEvent event = new DecisionTableSelectedEvent(dtPresenter);
        final MultiPageEditor pageEditor = mock(MultiPageEditor.class);

        when(modeller.isDecisionTableAvailable(any(GuidedDecisionTableView.Presenter.class))).thenReturn(true);
        when(presenter.getActiveDocument()).thenReturn(null);
        when(presenter.getKieEditorWrapperMultiPage()).thenReturn(pageEditor);

        presenter.onStartup(path,
                            placeRequest);
        presenter.onDecisionTableSelected(event);

        verify(presenter,
               times(1)).activateDocument(any(GuidedDecisionTableView.Presenter.class));
        verify(radarMenuItem,
               atLeast(1)).setEnabled(eq(true));
        assertTrue(getMenuState(presenter.getMenus(),
                                MenuItems.VALIDATE));
    }

    @Test
    public void checkOnDecisionTableSelectedWhenNotAvailable() {
        final ObservablePath path = mock(ObservablePath.class);
        final PlaceRequest placeRequest = mock(PlaceRequest.class);
        final GuidedDecisionTableEditorContent content = makeDecisionTableContent();
        final GuidedDecisionTableView.Presenter dtPresenter = makeDecisionTable(path,
                                                                                path,
                                                                                placeRequest,
                                                                                content);
        final DecisionTableSelectedEvent event = new DecisionTableSelectedEvent(dtPresenter);

        when(modeller.isDecisionTableAvailable(any(GuidedDecisionTableView.Presenter.class))).thenReturn(false);

        presenter.onDecisionTableSelected(event);

        verify(presenter,
               never()).activateDocument(any(GuidedDecisionTableView.Presenter.class));
        assertTrue(getMenuState(presenter.getMenus(),
                                MenuItems.VALIDATE));
    }

    // The NONE event deselects everything and disables VALIDATE.
    @Test
    public void checkOnDecisionTableSelectedEventNoTableSelected() {
        final DecisionTableSelectedEvent event = DecisionTableSelectedEvent.NONE;

        presenter.onDecisionTableSelected(event);

        verify(presenter,
               never()).activateDocument(any(GuidedDecisionTableView.Presenter.class));
        assertFalse(getMenuState(presenter.getMenus(),
                                 MenuItems.VALIDATE));
    }

    // select -> deselect -> select again restores the active document.
    @Test
    public void checkOnDecisionTableSelectedEventReselection() {
        final ObservablePath path = mock(ObservablePath.class);
        final PlaceRequest placeRequest = mock(PlaceRequest.class);
        final GuidedDecisionTableEditorContent content = makeDecisionTableContent();
        final GuidedDecisionTableView.Presenter dtPresenter = makeDecisionTable(path,
                                                                                path,
                                                                                placeRequest,
                                                                                content);
        final DecisionTableSelectedEvent eventSelect = new DecisionTableSelectedEvent(dtPresenter);
        doReturn(true).when(modeller).isDecisionTableAvailable(dtPresenter);

        presenter.onStartup(path,
                            placeRequest);
        presenter.onDecisionTableSelected(eventSelect);
        assertEquals(dtPresenter,
                     presenter.getActiveDocument());

        final DecisionTableSelectedEvent eventDeselect = DecisionTableSelectedEvent.NONE;
        presenter.onDecisionTableSelected(eventDeselect);
        assertNull(presenter.getActiveDocument());

        presenter.onDecisionTableSelected(eventSelect);
        assertEquals(dtPresenter,
                     presenter.getActiveDocument());
    }

    // refreshDocument reloads content and re-activates the same document;
    // counts below are cumulative with the initial onStartup interactions.
    @Test
    public void checkRefreshDocument() {
        final ObservablePath path = mock(ObservablePath.class);
        final PlaceRequest placeRequest = mock(PlaceRequest.class);
        final GuidedDecisionTableEditorContent content = makeDecisionTableContent();
        final MultiPageEditor pageEditor = mock(MultiPageEditor.class);
        final GuidedDecisionTableView.Presenter dtPresenter = makeDecisionTable(path,
                                                                                path,
                                                                                placeRequest,
                                                                                content);

        presenter.onStartup(path,
                            placeRequest);

        verify(view,
               times(1)).showLoading();
        verify(dtService,
               times(1)).loadContent(eq(path));
        verify(decisionTableSelectedEvent,
               times(1)).fire(dtSelectedEventCaptor.capture());
        verify(view,
               times(1)).hideBusyIndicator();

        final DecisionTableSelectedEvent dtSelectedEvent = dtSelectedEventCaptor.getValue();
        assertNotNull(dtSelectedEvent);
        assertTrue(dtSelectedEvent.getPresenter().isPresent());
        assertEquals(dtPresenter,
                     dtSelectedEvent.getPresenter().get());

        when(dtPresenter.getCurrentPath()).thenReturn(path);
        when(presenter.getKieEditorWrapperMultiPage()).thenReturn(pageEditor);

        presenter.refreshDocument(dtPresenter);

        verify(view,
               times(2)).showLoading();
        verify(dtService,
               times(2)).loadContent(eq(path));
        verify(modeller,
               times(1)).refreshDecisionTable(eq(dtPresenter),
                                              eq(path),
                                              eq(placeRequest),
                                              eq(content),
                                              any(Boolean.class));
        verify(presenter,
               times(1)).activateDocument(eq(dtPresenter));
        verify(view,
               times(2)).hideBusyIndicator();
    }

    @Test
    public void checkRemoveDocument() {
        final ObservablePath path = mock(ObservablePath.class);
        final PlaceRequest placeRequest = mock(PlaceRequest.class);
        final GuidedDecisionTableEditorContent content = makeDecisionTableContent();
        final GuidedDecisionTableView.Presenter dtPresenter = makeDecisionTable(path,
                                                                                path,
                                                                                placeRequest,
                                                                                content);

        presenter.onStartup(path,
                            placeRequest);
        presenter.removeDocument(dtPresenter);

        verify(modeller,
               times(1)).removeDecisionTable(eq(dtPresenter));
        verify(presenter,
               times(1)).deregisterDocument(eq(dtPresenter));
        verify(presenter,
               times(1)).openOtherDecisionTable();
        verify(dtPresenter,
               times(1)).onClose();
    }

    // With no remaining tables, only a "nothing selected" event is fired.
    @Test
    public void checkOpenOtherDecisionTableIsLastDecisionTable() {
        when(modeller.getAvailableDecisionTables()).thenReturn(Collections.emptySet());

        presenter.openOtherDecisionTable();

        verify(presenter,
               never()).activateDocument(any(GuidedDecisionTableView.Presenter.class));
        verify(placeManager,
               never()).forceClosePlace(any(String.class));
        verify(placeManager,
               never()).forceClosePlace(any(PlaceRequest.class));
        verify(decisionTableSelectedEvent,
               times(1)).fire(dtSelectedEventCaptor.capture());

        final DecisionTableSelectedEvent dtSelectedEvent = dtSelectedEventCaptor.getValue();
        assertNotNull(dtSelectedEvent);
        assertFalse(dtSelectedEvent.getPresenter().isPresent());
    }

    // With one remaining table, a deselect event is followed by a select
    // event carrying that remaining presenter.
    @Test
    public void checkOpenOtherDecisionTableIsNotLastDecisionTable() {
        final GuidedDecisionTableView.Presenter remainingDtPresenter = mock(GuidedDecisionTableView.Presenter.class);
        when(modeller.getAvailableDecisionTables()).thenReturn(new HashSet<GuidedDecisionTableView.Presenter>() {{
            add(remainingDtPresenter);
        }});
        doNothing().when(presenter).activateDocument(any(GuidedDecisionTableView.Presenter.class));

        presenter.openOtherDecisionTable();

        verify(placeManager,
               never()).forceClosePlace(any(String.class));
        verify(placeManager,
               never()).forceClosePlace(any(PlaceRequest.class));
        verify(decisionTableSelectedEvent,
               times(2)).fire(dtSelectedEventCaptor.capture());

        final List<DecisionTableSelectedEvent> dtSelectedEvents = dtSelectedEventCaptor.getAllValues();
        assertNotNull(dtSelectedEvents);
        assertEquals(2,
                     dtSelectedEvents.size());
        assertFalse(dtSelectedEvents.get(0).getPresenter().isPresent());
        assertTrue(dtSelectedEvents.get(1).getPresenter().isPresent());
        assertEquals(dtSelectedEvents.get(1).getPresenter().get(),
                     remainingDtPresenter);
    }

    // Validation errors go to the popup, not to the notification event.
    @Test
    @SuppressWarnings("unchecked")
    public void checkOnValidateWithErrors() {
        final ObservablePath path = mock(ObservablePath.class);
        final PlaceRequest placeRequest = mock(PlaceRequest.class);
        final GuidedDecisionTableEditorContent content = makeDecisionTableContent();
        final GuidedDecisionTableView.Presenter dtPresenter = makeDecisionTable(path,
                                                                                path,
                                                                                placeRequest,
                                                                                content);
        final List<ValidationMessage> validationMessages = new ArrayList<ValidationMessage>() {{
            add(new ValidationMessage());
        }};

        when(dtService.validate(any(Path.class),
                                any(GuidedDecisionTable52.class))).thenReturn(validationMessages);
        doNothing().when(presenter).showValidationPopup(any(List.class));

        presenter.onValidate(dtPresenter);

        final ArgumentCaptor<GuidedDecisionTable52> modelCaptor = ArgumentCaptor.forClass(GuidedDecisionTable52.class);
        verify(dtService,
               times(1)).validate(eq(path),
                                  modelCaptor.capture());
        assertNotNull(modelCaptor.getValue());
        assertEquals(dtPresenter.getModel(),
                     modelCaptor.getValue());
        verify(notification,
               never()).fire(any(NotificationEvent.class));
        verify(presenter,
               times(1)).showValidationPopup(eq(validationMessages));
    }

    // A clean validation fires a success notification instead.
    @Test
    public void checkOnValidateWithoutErrors() {
        final ObservablePath path = mock(ObservablePath.class);
        final PlaceRequest placeRequest = mock(PlaceRequest.class);
        final GuidedDecisionTableEditorContent content = makeDecisionTableContent();
        final GuidedDecisionTableView.Presenter dtPresenter = makeDecisionTable(path,
                                                                                path,
                                                                                placeRequest,
                                                                                content);

        when(dtService.validate(any(Path.class),
                                any(GuidedDecisionTable52.class))).thenReturn(Collections.emptyList());

        presenter.onValidate(dtPresenter);

        final ArgumentCaptor<GuidedDecisionTable52> modelCaptor = ArgumentCaptor.forClass(GuidedDecisionTable52.class);
        verify(dtService,
               times(1)).validate(eq(path),
                                  modelCaptor.capture());
        assertNotNull(modelCaptor.getValue());
        assertEquals(dtPresenter.getModel(),
                     modelCaptor.getValue());
        verify(notification,
               times(1)).fire(any(NotificationEvent.class));
    }

    // onSave delegates to saveAndUpdateGraphEntries with the table's model
    // and overview metadata plus the commit message.
    @Test
    public void checkOnSave() {
        final String commitMessage = "message";
        final ObservablePath path = mock(ObservablePath.class);
        final PlaceRequest placeRequest = mock(PlaceRequest.class);
        final GuidedDecisionTableEditorContent content = makeDecisionTableContent();
        final GuidedDecisionTableView.Presenter dtPresenter = makeDecisionTable(path,
                                                                                path,
                                                                                placeRequest,
                                                                                content);

        presenter.onSave(dtPresenter,
                         commitMessage);

        final ArgumentCaptor<GuidedDecisionTable52> modelCaptor = ArgumentCaptor.forClass(GuidedDecisionTable52.class);
        final ArgumentCaptor<Metadata> metadataCaptor = ArgumentCaptor.forClass(Metadata.class);
        verify(dtService,
               times(1)).saveAndUpdateGraphEntries(eq(path),
                                                   modelCaptor.capture(),
                                                   metadataCaptor.capture(),
                                                   eq(commitMessage));
        assertNotNull(modelCaptor.getValue());
        assertEquals(dtPresenter.getModel(),
                     modelCaptor.getValue());
        assertNotNull(metadataCaptor.getValue());
        assertEquals(dtPresenter.getOverview().getMetadata(),
                     metadataCaptor.getValue());
    }

    @Test
    public void checkOnSourceTabSelected() {
        final String source = "source";
        final ObservablePath path = mock(ObservablePath.class);
        final PlaceRequest placeRequest = mock(PlaceRequest.class);
        final GuidedDecisionTableEditorContent content = makeDecisionTableContent();
        final GuidedDecisionTableView.Presenter dtPresenter = makeDecisionTable(path,
                                                                                path,
                                                                                placeRequest,
                                                                                content);

        when(dtService.toSource(eq(path),
                                any(GuidedDecisionTable52.class))).thenReturn(source);

        presenter.onSourceTabSelected(dtPresenter);

        final ArgumentCaptor<GuidedDecisionTable52> modelCaptor = ArgumentCaptor.forClass(GuidedDecisionTable52.class);
        verify(dtService,
               times(1)).toSource(eq(path),
                                  modelCaptor.capture());
        assertNotNull(modelCaptor.getValue());
        assertEquals(dtPresenter.getModel(),
                     modelCaptor.getValue());
        verify(presenter,
               times(1)).updateSource(eq(source));
    }

    // activateDocument(dtPresenter) activates the table, enables menus and
    // the columns tab, then delegates to the 5-arg overload with
    // readOnly == !isEditable.
    @Test
    public void testActivateDocument() {
        final GuidedDecisionTableView.Presenter dtPresenter = mock(GuidedDecisionTableView.Presenter.class);
        final Overview overview = mock(Overview.class);
        final AsyncPackageDataModelOracle oracle = mock(AsyncPackageDataModelOracle.class);
        final GuidedDecisionTable52 model = mock(GuidedDecisionTable52.class);
        final Imports imports = mock(Imports.class);
        final GuidedDecisionTablePresenter.Access access = mock(GuidedDecisionTablePresenter.Access.class);
        final MultiPageEditor pageEditor = mock(MultiPageEditor.class);
        final boolean isEditable = true;

        doReturn(overview).when(dtPresenter).getOverview();
        doReturn(oracle).when(dtPresenter).getDataModelOracle();
        doReturn(model).when(dtPresenter).getModel();
        doReturn(imports).when(model).getImports();
        doReturn(access).when(dtPresenter).getAccess();
        doReturn(isEditable).when(access).isEditable();
        doReturn(pageEditor).when(presenter).getKieEditorWrapperMultiPage();
        doNothing().when(presenter).activateDocument(any(),
                                                     any(),
                                                     any(),
                                                     any(),
                                                     anyBoolean());

        presenter.activateDocument(dtPresenter);

        verify(dtPresenter).activate();
        verify(presenter).enableMenus(true);
        verify(presenter).addColumnsTab();
        verify(presenter).enableColumnsTab(dtPresenter);
        verify(presenter).activateDocument(dtPresenter,
                                           overview,
                                           oracle,
                                           imports,
                                           !isEditable);
    }

    @Test
    public void testEnableColumnsTab() {
        final GuidedDecisionTableView.Presenter dtPresenter = mock(GuidedDecisionTableView.Presenter.class);
        final boolean isGuidedDecisionTableEditable = true;
        doReturn(isGuidedDecisionTableEditable).when(presenter).isGuidedDecisionTableEditable(any());

        presenter.enableColumnsTab(dtPresenter);

        verify(presenter).enableColumnsTab(eq(true));
    }

    // Editability requires BOTH: not read-only AND has editable columns.
    @Test
    public void testIsGuidedDecisionTableEditableWhenDecisionTableDoesNotHaveEditableColumns() {
        final GuidedDecisionTableView.Presenter dtPresenter = mock(GuidedDecisionTableView.Presenter.class);
        final GuidedDecisionTablePresenter.Access access = mock(GuidedDecisionTablePresenter.Access.class);

        doReturn(access).when(dtPresenter).getAccess();
        doReturn(false).when(access).isReadOnly();
        doReturn(false).when(access).hasEditableColumns();

        final boolean isGuidedDecisionTableEditable = presenter.isGuidedDecisionTableEditable(dtPresenter);

        assertFalse(isGuidedDecisionTableEditable);
    }

    @Test
    public void testIsGuidedDecisionTableEditableWhenDecisionTableIsNotEditable() {
        final GuidedDecisionTableView.Presenter dtPresenter = mock(GuidedDecisionTableView.Presenter.class);
        final GuidedDecisionTablePresenter.Access access = mock(GuidedDecisionTablePresenter.Access.class);

        doReturn(access).when(dtPresenter).getAccess();
        doReturn(true).when(access).isReadOnly();
        doReturn(true).when(access).hasEditableColumns();

        final boolean isGuidedDecisionTableEditable = presenter.isGuidedDecisionTableEditable(dtPresenter);

        assertFalse(isGuidedDecisionTableEditable);
    }

    @Test
    public void testIsGuidedDecisionTableEditableWhenDecisionTableIsEditable() {
        final GuidedDecisionTableView.Presenter dtPresenter = mock(GuidedDecisionTableView.Presenter.class);
        final GuidedDecisionTablePresenter.Access access = mock(GuidedDecisionTablePresenter.Access.class);

        doReturn(access).when(dtPresenter).getAccess();
        doReturn(false).when(access).isReadOnly();
        doReturn(true).when(access).hasEditableColumns();

        final boolean isGuidedDecisionTableEditable = presenter.isGuidedDecisionTableEditable(dtPresenter);

        assertTrue(isGuidedDecisionTableEditable);
    }

    @Test
    public void testAddColumnsTab() {
        final MultiPageEditor pageEditor = mock(MultiPageEditor.class);
        doReturn(pageEditor).when(presenter).getKieEditorWrapperMultiPage();

        presenter.addColumnsTab();

        verify(columnsPage).init(modeller);
        verify(presenter).addEditorPage(COLUMNS_TAB_INDEX,
                                        columnsPage);
    }

    @Test
    public void testAddEditorPage() {
        final MultiPageEditor multiPage = mock(MultiPageEditor.class);
        final Page page = mock(Page.class);
        final int index = 1;

        doReturn(multiPage).when(presenter).getKieEditorWrapperMultiPage();

        presenter.addEditorPage(index,
                                page);

        verify(multiPage).addPage(index,
                                  page);
    }

    @Test
    public void testDisableColumnsPage() {
        final MultiPageEditor multiPage = mock(MultiPageEditor.class);
        doReturn(multiPage).when(presenter).getKieEditorWrapperMultiPage();

        presenter.disableColumnsPage();

        verify(multiPage).disablePage(COLUMNS_TAB_INDEX);
    }

    @Test
    public void testEnableColumnsPage() {
        final MultiPageEditor multiPage = mock(MultiPageEditor.class);
        doReturn(multiPage).when(presenter).getKieEditorWrapperMultiPage();

        presenter.enableColumnsPage();

        verify(multiPage).enablePage(COLUMNS_TAB_INDEX);
    }

    // Lock-status matrix: the columns tab is enabled only when the table is
    // either unlocked or locked by the current user AND is editable.
    @Test
    public void testOnUpdatedLockStatusEventWhenTableIsNotLockedAndIsEditable() {
        final UpdatedLockStatusEvent event = mock(UpdatedLockStatusEvent.class);
        final GuidedDecisionTableView.Presenter activeDecisionTable = mock(GuidedDecisionTableView.Presenter.class);

        doReturn(false).when(event).isLocked();
        doReturn(false).when(event).isLockedByCurrentUser();
        doReturn(true).when(presenter).isGuidedDecisionTableEditable(activeDecisionTable);
        doReturn(Optional.of(activeDecisionTable)).when(modeller).getActiveDecisionTable();
        doNothing().when(presenter).enableColumnsTab(anyBoolean());

        presenter.onUpdatedLockStatusEvent(event);

        verify(presenter).enableColumnsTab(eq(true));
    }

    @Test
    public void testOnUpdatedLockStatusEventWhenTableIsNotLockedAndIsNotEditable() {
        final UpdatedLockStatusEvent event = mock(UpdatedLockStatusEvent.class);
        final GuidedDecisionTableView.Presenter activeDecisionTable = mock(GuidedDecisionTableView.Presenter.class);

        doReturn(false).when(event).isLocked();
        doReturn(false).when(event).isLockedByCurrentUser();
        doReturn(false).when(presenter).isGuidedDecisionTableEditable(activeDecisionTable);
        doReturn(Optional.of(activeDecisionTable)).when(modeller).getActiveDecisionTable();
        doNothing().when(presenter).enableColumnsTab(anyBoolean());

        presenter.onUpdatedLockStatusEvent(event);

        verify(presenter).enableColumnsTab(eq(false));
    }

    @Test
    public void testOnUpdatedLockStatusEventWhenIsLockedByTheCurrentUser() {
        final UpdatedLockStatusEvent event = mock(UpdatedLockStatusEvent.class);
        final GuidedDecisionTableView.Presenter activeDecisionTable = mock(GuidedDecisionTableView.Presenter.class);

        doReturn(true).when(event).isLocked();
        doReturn(true).when(event).isLockedByCurrentUser();
        doReturn(true).when(presenter).isGuidedDecisionTableEditable(activeDecisionTable);
        doReturn(Optional.of(activeDecisionTable)).when(modeller).getActiveDecisionTable();
        doNothing().when(presenter).enableColumnsTab(anyBoolean());

        presenter.onUpdatedLockStatusEvent(event);

        verify(presenter).enableColumnsTab(eq(true));
    }

    @Test
    public void testOnUpdatedLockStatusEventWhenIsLockedByAnotherUser() {
        final UpdatedLockStatusEvent event = mock(UpdatedLockStatusEvent.class);
        final GuidedDecisionTableView.Presenter activeDecisionTable = mock(GuidedDecisionTableView.Presenter.class);

        doReturn(true).when(event).isLocked();
        doReturn(false).when(event).isLockedByCurrentUser();
        doReturn(true).when(presenter).isGuidedDecisionTableEditable(activeDecisionTable);
        doReturn(Optional.of(activeDecisionTable)).when(modeller).getActiveDecisionTable();
        doNothing().when(presenter).enableColumnsTab(anyBoolean());

        presenter.onUpdatedLockStatusEvent(event);

        verify(presenter).enableColumnsTab(eq(false));
    }

    @Test
    public void testOnUpdatedLockStatusEventWhenActiveDecisionTableIsNull() {
        final UpdatedLockStatusEvent event = mock(UpdatedLockStatusEvent.class);

        doReturn(Optional.empty()).when(modeller).getActiveDecisionTable();

        presenter.onUpdatedLockStatusEvent(event);

        verify(presenter,
               never()).enableColumnsTab(any());
    }
}
/*
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package software.amazon.awssdk.http.nio.netty.internal;

import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static software.amazon.awssdk.http.nio.netty.internal.Http1TunnelConnectionPool.TUNNEL_ESTABLISHED_KEY;

import io.netty.buffer.ByteBufAllocator;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelId;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.pool.ChannelPool;
import io.netty.channel.pool.ChannelPoolHandler;
import io.netty.handler.ssl.ApplicationProtocolNegotiator;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslHandler;
import io.netty.util.Attribute;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.Promise;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLParameters;
import javax.net.ssl.SSLSessionContext;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import software.amazon.awssdk.http.SdkHttpConfigurationOption;

/**
 * Unit tests for {@link Http1TunnelConnectionPool}.
 */
@RunWith(MockitoJUnitRunner.class)
public class Http1TunnelConnectionPoolTest {
    // Shared event-loop group for all tests; shut down once in teardown().
    private static final NioEventLoopGroup GROUP = new NioEventLoopGroup(1);

    private static final URI HTTP_PROXY_ADDRESS = URI.create("http://localhost:1234");
    private static final URI HTTPS_PROXY_ADDRESS = URI.create("https://localhost:5678");

    private static final URI REMOTE_ADDRESS = URI.create("https://s3.amazonaws.com:5678");

    private static final String PROXY_USER = "myuser";

    private static final String PROXY_PASSWORD = "mypassword";

    @Mock
    private ChannelPool delegatePool;

    @Mock
    private ChannelPoolHandler mockHandler;

    @Mock
    public Channel mockChannel;

    @Mock
    public ChannelPipeline mockPipeline;

    @Mock
    public Attribute mockAttr;

    // NOTE(review): mockCtx is not referenced by any visible test; presumably
    // kept for strict-stub exclusion or future use.
    @Mock
    public ChannelHandlerContext mockCtx;

    @Mock
    public ChannelId mockId;

    public NettyConfiguration configuration;

    // Common stubbing: the delegate pool hands back mockChannel, whose
    // pipeline/attribute/id accessors return the shared mocks.
    @Before
    public void methodSetup() {
        Future<Channel> channelFuture = GROUP.next().newSucceededFuture(mockChannel);
        when(delegatePool.acquire(any(Promise.class))).thenReturn(channelFuture);
        when(mockChannel.attr(eq(TUNNEL_ESTABLISHED_KEY))).thenReturn(mockAttr);
        when(mockChannel.id()).thenReturn(mockId);
        when(mockChannel.pipeline()).thenReturn(mockPipeline);
        configuration = new NettyConfiguration(SdkHttpConfigurationOption.GLOBAL_HTTP_DEFAULTS);
    }

    @AfterClass
    public static void teardown() {
        GROUP.shutdownGracefully().awaitUninterruptibly();
    }

    @Test
    public void tunnelAlreadyEstablished_doesNotAddInitHandler() {
        Http1TunnelConnectionPool tunnelPool = new Http1TunnelConnectionPool(GROUP.next(), delegatePool, null,
                                                                             HTTP_PROXY_ADDRESS, REMOTE_ADDRESS, mockHandler, configuration);

        // TUNNEL_ESTABLISHED_KEY == true means the CONNECT handshake is done.
        when(mockAttr.get()).thenReturn(true);

        tunnelPool.acquire().awaitUninterruptibly();

        verify(mockPipeline, never()).addLast(any());
    }

    @Test(timeout = 1000)
    public void tunnelNotEstablished_addsInitHandler() throws InterruptedException {
        Http1TunnelConnectionPool tunnelPool = new Http1TunnelConnectionPool(GROUP.next(), delegatePool, null,
                                                                             HTTP_PROXY_ADDRESS, REMOTE_ADDRESS, mockHandler, configuration);

        when(mockAttr.get()).thenReturn(false);

        // addLast happens on the event loop, so synchronise with a latch
        // rather than racing the verification.
        CountDownLatch latch = new CountDownLatch(1);
        when(mockPipeline.addLast(any(ChannelHandler.class))).thenAnswer(i -> {
            latch.countDown();
            return mockPipeline;
        });

        tunnelPool.acquire();

        latch.await();

        verify(mockPipeline, times(1)).addLast(any(ProxyTunnelInitHandler.class));
    }

    @Test
    public void tunnelInitFails_acquireFutureFails() {
        // Supplier immediately fails the init future to simulate a failed
        // CONNECT handshake.
        Http1TunnelConnectionPool.InitHandlerSupplier supplier = (srcPool, proxyUser, proxyPassword, remoteAddr, initFuture) -> {
            initFuture.setFailure(new IOException("boom"));
            return mock(ChannelHandler.class);
        };

        Http1TunnelConnectionPool tunnelPool = new Http1TunnelConnectionPool(GROUP.next(), delegatePool, null,
                                                                             HTTP_PROXY_ADDRESS, null, null, REMOTE_ADDRESS, mockHandler, supplier, configuration);

        Future<Channel> acquireFuture = tunnelPool.acquire();

        assertThat(acquireFuture.awaitUninterruptibly().cause()).hasMessage("boom");
    }

    @Test
    public void tunnelInitSucceeds_acquireFutureSucceeds() {
        Http1TunnelConnectionPool.InitHandlerSupplier supplier = (srcPool, proxyUser, proxyPassword, remoteAddr, initFuture) -> {
            initFuture.setSuccess(mockChannel);
            return mock(ChannelHandler.class);
        };

        Http1TunnelConnectionPool tunnelPool = new Http1TunnelConnectionPool(GROUP.next(), delegatePool, null,
                                                                             HTTP_PROXY_ADDRESS, null, null, REMOTE_ADDRESS, mockHandler, supplier, configuration);

        Future<Channel> acquireFuture = tunnelPool.acquire();

        assertThat(acquireFuture.awaitUninterruptibly().getNow()).isEqualTo(mockChannel);
    }

    @Test
    public void acquireFromDelegatePoolFails_failsFuture() {
        Http1TunnelConnectionPool tunnelPool = new Http1TunnelConnectionPool(GROUP.next(), delegatePool, null,
                                                                             HTTP_PROXY_ADDRESS, REMOTE_ADDRESS, mockHandler, configuration);

        when(delegatePool.acquire(any(Promise.class))).thenReturn(GROUP.next().newFailedFuture(new IOException("boom")));

        Future<Channel> acquireFuture = tunnelPool.acquire();
        assertThat(acquireFuture.awaitUninterruptibly().cause()).hasMessage("boom");
    }

    // HTTPS proxy + SslContext: the SslHandler must precede the tunnel-init
    // handler in the pipeline.
    @Test
    public void sslContextProvided_andProxyUsingHttps_addsSslHandler() {
        SslHandler mockSslHandler = mock(SslHandler.class);
        SSLEngine mockSslEngine = mock(SSLEngine.class);
        when(mockSslHandler.engine()).thenReturn(mockSslEngine);
        when(mockSslEngine.getSSLParameters()).thenReturn(mock(SSLParameters.class));
        TestSslContext mockSslCtx = new TestSslContext(mockSslHandler);

        Http1TunnelConnectionPool.InitHandlerSupplier supplier = (srcPool, proxyUser, proxyPassword, remoteAddr, initFuture) -> {
            initFuture.setSuccess(mockChannel);
            return mock(ChannelHandler.class);
        };

        Http1TunnelConnectionPool tunnelPool = new Http1TunnelConnectionPool(GROUP.next(), delegatePool, mockSslCtx,
                                                                             HTTPS_PROXY_ADDRESS, null, null, REMOTE_ADDRESS, mockHandler, supplier, configuration);

        tunnelPool.acquire().awaitUninterruptibly();

        ArgumentCaptor<ChannelHandler> handlersCaptor = ArgumentCaptor.forClass(ChannelHandler.class);
        verify(mockPipeline, times(2)).addLast(handlersCaptor.capture());

        assertThat(handlersCaptor.getAllValues().get(0)).isEqualTo(mockSslHandler);
    }

    @Test
    public void sslContextProvided_andProxyNotUsingHttps_doesNotAddSslHandler() {
        SslHandler mockSslHandler = mock(SslHandler.class);
        TestSslContext mockSslCtx = new TestSslContext(mockSslHandler);

        Http1TunnelConnectionPool.InitHandlerSupplier supplier = (srcPool, proxyUser, proxyPassword, remoteAddr, initFuture) -> {
            initFuture.setSuccess(mockChannel);
            return mock(ChannelHandler.class);
        };

        Http1TunnelConnectionPool tunnelPool = new Http1TunnelConnectionPool(GROUP.next(), delegatePool, mockSslCtx,
                                                                             HTTP_PROXY_ADDRESS, null, null, REMOTE_ADDRESS, mockHandler, supplier, configuration);

        tunnelPool.acquire().awaitUninterruptibly();

        ArgumentCaptor<ChannelHandler> handlersCaptor = ArgumentCaptor.forClass(ChannelHandler.class);
        verify(mockPipeline).addLast(handlersCaptor.capture());

        assertThat(handlersCaptor.getAllValues().get(0)).isNotInstanceOf(SslHandler.class);
    }

    @Test
    public void release_releasedToDelegatePool() {
        Http1TunnelConnectionPool tunnelPool = new Http1TunnelConnectionPool(GROUP.next(), delegatePool, null,
                                                                             HTTP_PROXY_ADDRESS, REMOTE_ADDRESS, mockHandler, configuration);
        tunnelPool.release(mockChannel);
        verify(delegatePool).release(eq(mockChannel), any(Promise.class));
    }

    @Test
    public void release_withGivenPromise_releasedToDelegatePool() {
        Http1TunnelConnectionPool tunnelPool = new Http1TunnelConnectionPool(GROUP.next(), delegatePool, null,
                                                                             HTTP_PROXY_ADDRESS, REMOTE_ADDRESS, mockHandler, configuration);
        Promise mockPromise = mock(Promise.class);
        tunnelPool.release(mockChannel, mockPromise);
        verify(delegatePool).release(eq(mockChannel), eq(mockPromise));
    }

    @Test
    public void close_closesDelegatePool() {
        Http1TunnelConnectionPool tunnelPool = new Http1TunnelConnectionPool(GROUP.next(), delegatePool, null,
                                                                             HTTP_PROXY_ADDRESS, REMOTE_ADDRESS, mockHandler, configuration);
        tunnelPool.close();
        verify(delegatePool).close();
    }

    // Proxy credentials passed to the pool must reach the init-handler
    // supplier unchanged.
    @Test
    public void proxyAuthProvided_addInitHandler_withAuth(){
        TestInitHandlerData data = new TestInitHandlerData();
        Http1TunnelConnectionPool.InitHandlerSupplier supplier = (srcPool, proxyUser, proxyPassword, remoteAddr, initFuture) -> {
            initFuture.setSuccess(mockChannel);
            data.proxyUser(proxyUser);
            data.proxyPassword(proxyPassword);
            return mock(ChannelHandler.class);
        };

        Http1TunnelConnectionPool tunnelPool = new Http1TunnelConnectionPool(GROUP.next(), delegatePool, null,
                                                                             HTTP_PROXY_ADDRESS, PROXY_USER, PROXY_PASSWORD, REMOTE_ADDRESS, mockHandler, supplier, configuration);
        tunnelPool.acquire().awaitUninterruptibly();
        assertThat(data.proxyUser()).isEqualTo(PROXY_USER);
        assertThat(data.proxyPassword()).isEqualTo(PROXY_PASSWORD);
    }

    // Simple mutable holder used to capture the credentials the supplier saw
    // (a lambda cannot write to locals, so the holder is written instead).
    private static class TestInitHandlerData {
        private String proxyUser;
        private String proxyPassword;

        public void proxyUser(String proxyUser) {
            this.proxyUser = proxyUser;
        }

        public String proxyUser() {
            return this.proxyUser;
        }

        public void proxyPassword(String proxyPassword) {
            this.proxyPassword = proxyPassword;
        }

        public String proxyPassword(){
            return this.proxyPassword;
        }
    }

    // Minimal SslContext stub whose only meaningful behavior is returning the
    // supplied SslHandler from newHandler(); every other method is a no-op.
    private static class TestSslContext extends SslContext {
        private final SslHandler handler;

        protected TestSslContext(SslHandler handler) {
            this.handler = handler;
        }

        @Override
        public boolean isClient() {
            return false;
        }

        @Override
        public List<String> cipherSuites() {
            return null;
        }

        @Override
        public long sessionCacheSize() {
            return 0;
        }

        @Override
        public long sessionTimeout() {
            return 0;
        }

        @Override
        public ApplicationProtocolNegotiator applicationProtocolNegotiator() {
            return null;
        }

        @Override
        public SSLEngine newEngine(ByteBufAllocator alloc) {
            return null;
        }

        @Override
        public SSLEngine newEngine(ByteBufAllocator alloc, String peerHost, int peerPort) {
            return null;
        }

        @Override
        public SSLSessionContext sessionContext() {
            return null;
        }

        @Override
        public SslHandler newHandler(ByteBufAllocator alloc, String host, int port, boolean startTls) {
            return handler;
        }
    }
}
/*******************************************************************************
 * Any modification, copies of sections of this file must be attached with this
 * license and shown clearly in the developer's project. The code can be used
 * as long as you state clearly you do not own it. Any violation might result in
 *  a take-down.
 *
 * MIT License
 *
 * Copyright (c) 2016, 2017 Anthony Law
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *******************************************************************************/
package org.ev3dev.hardware;

//-----------------------------------------------------------------------------
//~autogen autogen-header
//~autogen
//-----------------------------------------------------------------------------

import java.io.File;

import org.ev3dev.exception.EV3LibraryException;
import org.ev3dev.exception.InvalidLEDException;
import org.ev3dev.io.Sysfs;

//~autogen generic-class-description classes.led>currentClass
//~autogen

/***
 * Any device controlled by the generic LED driver. <br>
 * <br>
 * See <a href="https://www.kernel.org/doc/Documentation/leds/leds-class.txt">
 * https://www.kernel.org/doc/Documentation/leds/leds-class.txt</a>
 * for more details.
 * @author Anthony
 *
 */
public class LED extends Device{

	/**
	 * The Sysfs class's <code>max_brightness</code> property name
	 */
	public static final String SYSFS_PROPERTY_MAX_BRIGHTNESS = "max_brightness";

	/**
	 * The Sysfs class's <code>brightness</code> property name
	 */
	public static final String SYSFS_PROPERTY_BRIGHTNESS = "brightness";

	/**
	 * The Sysfs class's <code>trigger</code> property name
	 */
	public static final String SYSFS_PROPERTY_TRIGGER = "trigger";

	/**
	 * The Sysfs class's <code>delay_on</code> property name
	 */
	public static final String SYSFS_PROPERTY_DELAY_ON = "delay_on";

	/**
	 * The Sysfs class's <code>delay_off</code> property name
	 */
	public static final String SYSFS_PROPERTY_DELAY_OFF = "delay_off";

	/**
	 * Left EV3 Button
	 */
	public static final int LEFT = 0;

	/**
	 * Right EV3 Button
	 */
	public static final int RIGHT = 1;

	/**
	 * Green color.
	 */
	public static final int GREEN = 0;

	/**
	 * Red color.
	 */
	public static final int RED = 1;

	/**
	 * This Sysfs's class name (e.g. <code>/sys/class/lego-sensor</code>, and <code>lego-sensor</code> is the class name)
	 */
	public static final String CLASS_NAME = "leds";

	/**
	 * Creates a new LED instance.
	 * @param leftRightField The integer field from the <code>LED</code> class (e.g. <code>LED.LEFT</code>, <code>LED.RIGHT</code>)
	 * @param colorField The integer field from the <code>LED</code> class (e.g. <code>LED.GREEN</code>, <code>LED.RED</code>)
	 * @throws InvalidLEDException If the specified LEFT RIGHT field or color field isn't valid.
	 */
	public LED(int leftRightField, int colorField) throws InvalidLEDException{
		super(CLASS_NAME);
		// Validate against the class's own named constants instead of magic 0/1 literals,
		// and report the real constant names (LED.LEFT etc.) in the error message.
		if (leftRightField != LEFT && leftRightField != RIGHT){
			throw new InvalidLEDException("You are not specifying a LED.LEFT or LED.RIGHT field!");
		}
		if (colorField != GREEN && colorField != RED){
			throw new InvalidLEDException("You are not specifying a LED.GREEN or LED.RED field!");
		}

		String direction = leftRightField == LEFT ? "left" : "right";
		String color = colorField == GREEN ? "green" : "red";
		// Builds the EV3 brick LED sysfs node name, e.g. "ev3:left:green:ev3dev".
		this.setClassFullname("ev3:" + direction + ":" + color + ":ev3dev");
	}

	/**
	 * <b>This function is for advanced users.</b><br>
	 * <b>Use this function for basic users:</b>
	 * <pre>
	 * LED led = new LED(LED.LEFT, LED.GREEN);
	 * </pre>
	 * Creates a new LED instance directly with a <code>ledName</code>
	 * @param ledName LED Name that exists in <code>/sys/class/leds</code>
	 * @throws InvalidLEDException If the specified <code>ledName</code> does not exist
	 */
	public LED(String ledName) throws InvalidLEDException{
		super(CLASS_NAME);
		File file = new File(Sysfs.getSysfsPath() + CLASS_NAME + "/" + ledName);
		if (!file.exists()){
			throw new InvalidLEDException("The specified LED does not exist");
		}
		this.setClassName(ledName);
	}

	/**
	 * Returns the maximum allowable brightness value.
	 * @return The maximum allowable brightness value.
	 * @throws EV3LibraryException If I/O goes wrong
	 */
	public int getMaxBrightness() throws EV3LibraryException{
		String str = this.getAttribute(SYSFS_PROPERTY_MAX_BRIGHTNESS);
		return Integer.parseInt(str);
	}

	/**
	 * Gets the brightness level. Possible values are from 0 to max_brightness.
	 * @return The brightness level
	 * @throws EV3LibraryException If I/O goes wrong
	 */
	public int getBrightness() throws EV3LibraryException{
		String str = this.getAttribute(SYSFS_PROPERTY_BRIGHTNESS);
		return Integer.parseInt(str);
	}

	/**
	 * Sets the brightness level. Possible values are from 0 to max_brightness.
	 * @param brightness The brightness level
	 * @throws EV3LibraryException If I/O goes wrong
	 */
	public void setBrightness(int brightness) throws EV3LibraryException{
		this.setAttribute(SYSFS_PROPERTY_BRIGHTNESS, Integer.toString(brightness));
	}

	/**
	 * <b>This function only returns a String, a spaced-array String.</b><br>
	 * <b>Use this function to return a String Array directly:</b>
	 * <pre>
	 * getTriggers()
	 * </pre>
	 * Returns a list of available triggers.
	 * @return A spaced-array String
	 * @throws EV3LibraryException If I/O goes wrong
	 */
	public String getTriggersViaString() throws EV3LibraryException{
		return this.getAttribute(SYSFS_PROPERTY_TRIGGER);
	}

	/**
	 * Returns a list of available triggers.
	 * @return A String Array with a list of available triggers
	 * @throws EV3LibraryException If I/O goes wrong
	 */
	public String[] getTriggers() throws EV3LibraryException{
		String str = getTriggersViaString();
		return Sysfs.separateSpace(str);
	}

	/**
	 * Gets the led trigger. A trigger is a kernel based source of led events. Triggers can either be simple or complex.
	 * A simple trigger isn't configurable and is designed to slot into existing subsystems with minimal additional code.
	 * Examples are the ide-disk and nand-disk triggers.<br>
	 * <br>
	 * Complex triggers whilst available to all LEDs have LED specific parameters and work on a per LED basis. The timer
	 * trigger is an example. The timer trigger will periodically change the LED brightness between 0 and the current
	 * brightness setting. The on and off time can be specified via delay_{on,off} attributes in milliseconds. You can
	 * change the brightness value of a LED independently of the timer trigger. However, if you set the brightness value
	 * to 0 it will also disable the timer trigger.
	 * @return The LED trigger
	 * @throws EV3LibraryException If I/O goes wrong
	 */
	public String getTrigger() throws EV3LibraryException{
		return this.getAttribute(SYSFS_PROPERTY_TRIGGER);
	}

	/**
	 * Sets the led trigger. A trigger is a kernel based source of led events. Triggers can either be simple or complex.
	 * A simple trigger isn't configurable and is designed to slot into existing subsystems with minimal additional code.
	 * Examples are the ide-disk and nand-disk triggers.<br>
	 * <br>
	 * Complex triggers whilst available to all LEDs have LED specific parameters and work on a per LED basis. The timer
	 * trigger is an example. The timer trigger will periodically change the LED brightness between 0 and the current
	 * brightness setting. The on and off time can be specified via delay_{on,off} attributes in milliseconds. You can
	 * change the brightness value of a LED independently of the timer trigger. However, if you set the brightness value
	 * to 0 it will also disable the timer trigger.
	 * @param selector The LED trigger that listed using <code>getTriggers()</code>
	 * @throws EV3LibraryException If I/O goes wrong
	 */
	public void setTrigger(String selector) throws EV3LibraryException{
		this.setAttribute(SYSFS_PROPERTY_TRIGGER, selector);
	}

	/**
	 * The timer trigger will periodically change the LED brightness between 0 and the current brightness setting.
	 * The on time can be specified via delay_on attribute in milliseconds.
	 * @return The Delay_On Value in milliseconds
	 * @throws EV3LibraryException If I/O goes wrong
	 */
	public int getDelay_On() throws EV3LibraryException{
		String str = this.getAttribute(SYSFS_PROPERTY_DELAY_ON);
		return Integer.parseInt(str);
	}

	/**
	 * The timer trigger will periodically change the LED brightness between 0 and the current brightness setting.
	 * The off time can be specified via delay_off attribute in milliseconds.
	 * @return The Delay_Off Value in milliseconds
	 * @throws EV3LibraryException If I/O goes wrong
	 */
	public int getDelay_Off() throws EV3LibraryException{
		String str = this.getAttribute(SYSFS_PROPERTY_DELAY_OFF);
		return Integer.parseInt(str);
	}

	/**
	 * The timer trigger will periodically change the LED brightness between 0 and the current brightness setting.
	 * The on time can be specified via delay_on attribute in milliseconds.
	 * @param delay_on The Delay_On Value in milliseconds
	 * @throws EV3LibraryException If I/O goes wrong
	 */
	public void setDelay_On(int delay_on) throws EV3LibraryException{
		this.setAttribute(SYSFS_PROPERTY_DELAY_ON, Integer.toString(delay_on));
	}

	/**
	 * The timer trigger will periodically change the LED brightness between 0 and the current brightness setting.
	 * The off time can be specified via delay_off attribute in milliseconds.
	 * @param delay_off The Delay_Off Value in milliseconds
	 * @throws EV3LibraryException If I/O goes wrong
	 */
	public void setDelay_Off(int delay_off) throws EV3LibraryException{
		this.setAttribute(SYSFS_PROPERTY_DELAY_OFF, Integer.toString(delay_off));
	}

	// NOTE(review): LED sysfs nodes expose no "address" attribute, so this Device
	// hook intentionally returns null — confirm callers tolerate a null address.
	@Override
	public String getAddress() throws EV3LibraryException {
		return null;
	}

	// NOTE(review): LED sysfs nodes expose no "driver_name" attribute, so this
	// Device hook intentionally returns null.
	@Override
	public String getDriverName() throws EV3LibraryException {
		return null;
	}
}
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android;

import com.google.common.base.Joiner;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableList;
import com.google.common.hash.Hashing;
import com.google.devtools.build.android.Converters.DependencyAndroidDataListConverter;
import com.google.devtools.build.android.Converters.ExistingPathConverter;
import com.google.devtools.build.android.Converters.FullRevisionConverter;
import com.google.devtools.build.android.Converters.PathConverter;
import com.google.devtools.build.android.Converters.UnvalidatedAndroidDataConverter;
import com.google.devtools.build.android.Converters.VariantConfigurationTypeConverter;

import com.google.devtools.common.options.Converters.CommaSeparatedOptionListConverter;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParser;
import com.google.devtools.common.options.TriState;

import com.android.builder.core.AndroidBuilder;
import com.android.builder.core.VariantConfiguration;
import com.android.builder.model.AaptOptions;
import com.android.ide.common.internal.LoggedErrorException;
import com.android.ide.common.res2.MergingException;
import com.android.sdklib.repository.FullRevision;
import com.android.utils.StdLogger;

import java.io.IOException;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;

/**
 * Provides an entry point for the resource processing using the AOSP build tools.
 *
 * <pre>
 * Example Usage:
 *   java/com/google/build/android/AndroidResourceProcessingAction\
 *      --sdkRoot path/to/sdk\
 *      --aapt path/to/sdk/aapt\
 *      --annotationJar path/to/sdk/annotationJar\
 *      --adb path/to/sdk/adb\
 *      --zipAlign path/to/sdk/zipAlign\
 *      --androidJar path/to/sdk/androidJar\
 *      --manifest path/to/manifest\
 *      --primaryData path/to/resources:path/to/assets:path/to/manifest:path/to/R.txt
 *      --data p/t/res1:p/t/assets1:p/t/1/AndroidManifest.xml:p/t/1/R.txt,\
 *             p/t/res2:p/t/assets2:p/t/2/AndroidManifest.xml:p/t/2/R.txt
 *      --generatedSourcePath path/to/write/generated/sources
 *      --packagePath path/to/write/archive.ap_
 *      --srcJarOutput path/to/write/archive.srcjar
 * </pre>
 */
public class AndroidResourceProcessingAction {

  private static final StdLogger STD_LOGGER =
      new StdLogger(com.android.utils.StdLogger.Level.WARNING);

  private static final Logger LOGGER =
      Logger.getLogger(AndroidResourceProcessingAction.class.getName());

  /** Flag specifications for this action. */
  public static final class Options extends OptionsBase {
    @Option(name = "apiVersion",
        defaultValue = "21.0.0",
        converter = FullRevisionConverter.class,
        category = "config",
        help = "ApiVersion indicates the version passed to the AndroidBuilder. ApiVersion must be"
            + " > 19.10 when defined.")
    // TODO(bazel-team): Determine what the API version changes in AndroidBuilder.
    public FullRevision apiVersion;

    @Option(name = "aapt",
        defaultValue = "null",
        converter = ExistingPathConverter.class,
        category = "tool",
        help = "Aapt tool location for resource packaging.")
    public Path aapt;

    @Option(name = "annotationJar",
        defaultValue = "null",
        converter = ExistingPathConverter.class,
        category = "tool",
        help = "Annotation Jar for builder invocations.")
    public Path annotationJar;

    @Option(name = "adb",
        defaultValue = "null",
        converter = ExistingPathConverter.class,
        category = "tool",
        help = "Path to adb for builder functions.")
    //TODO(bazel-team): Determine if this is completely necessary for running AndroidBuilder.
    public Path adb;

    @Option(name = "zipAlign",
        defaultValue = "null",
        converter = ExistingPathConverter.class,
        category = "tool",
        help = "Path to zipAlign for building apks.")
    public Path zipAlign;

    @Option(name = "androidJar",
        defaultValue = "null",
        converter = ExistingPathConverter.class,
        category = "tool",
        help = "Path to the android jar for resource packaging and building apks.")
    public Path androidJar;

    @Option(name = "primaryData",
        defaultValue = "null",
        converter = UnvalidatedAndroidDataConverter.class,
        category = "input",
        help = "The directory containing the primary resource directory. The contents will override"
            + " the contents of any other resource directories during merging. The expected format"
            + " is resources[|resources]:assets[|assets]:manifest")
    public UnvalidatedAndroidData primaryData;

    @Option(name = "data",
        defaultValue = "",
        converter = DependencyAndroidDataListConverter.class,
        category = "input",
        help = "Additional Data dependencies. These values will be used if not defined in the "
            + "primary resources. The expected format is "
            + "resources[#resources]:assets[#assets]:manifest:r.txt:symbols.txt"
            + "[,resources[#resources]:assets[#assets]:manifest:r.txt:symbols.txt]")
    public List<DependencyAndroidData> data;

    @Option(name = "generatedSourcePath",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "output",
        help = "Path for generated sources.")
    public Path generatedSourcePath;

    @Option(name = "rOutput",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "output",
        help = "Path to where the R.txt should be written.")
    public Path rOutput;

    @Option(name = "symbolsTxtOut",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "output",
        help = "Path to where the symbolsTxt should be written.")
    public Path symbolsTxtOut;

    @Option(name = "packagePath",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "output",
        // Typo fix: was "Path to the write the archive."
        help = "Path to write the archive.")
    public Path packagePath;

    @Option(name = "proguardOutput",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "output",
        help = "Path for the proguard file.")
    public Path proguardOutput;

    @Option(name = "srcJarOutput",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "output",
        help = "Path for the generated java source jar.")
    public Path srcJarOutput;

    @Option(name = "packageType",
        defaultValue = "DEFAULT",
        converter = VariantConfigurationTypeConverter.class,
        category = "config",
        // Typo fix: was "Acceptible".
        help = "Variant configuration type for packaging the resources."
            + " Acceptable values DEFAULT, LIBRARY, TEST")
    public VariantConfiguration.Type packageType;

    @Option(name = "densities",
        defaultValue = "",
        converter = CommaSeparatedOptionListConverter.class,
        category = "config",
        // Typo fix: was "A list densities".
        help = "A list of densities to filter the resource drawables by.")
    public List<String> densities;

    @Option(name = "debug",
        defaultValue = "false",
        category = "config",
        help = "Indicates if it is a debug build.")
    public boolean debug;

    @Option(name = "resourceConfigs",
        defaultValue = "",
        converter = CommaSeparatedOptionListConverter.class,
        category = "config",
        help = "A list of resource config filters to pass to aapt.")
    public List<String> resourceConfigs;

    @Option(name = "useAaptCruncher",
        defaultValue = "auto",
        category = "config",
        help = "Use the legacy aapt cruncher, defaults to true for non-LIBRARY packageTypes. "
            + " LIBRARY packages do not benefit from the additional processing as the resources"
            + " will need to be reprocessed during the generation of the final apk. See"
            + " https://code.google.com/p/android/issues/detail?id=67525 for a discussion of the"
            + " different png crunching methods.")
    public TriState useAaptCruncher;

    @Option(name = "uncompressedExtensions",
        defaultValue = "",
        converter = CommaSeparatedOptionListConverter.class,
        category = "config",
        help = "A list of file extensions not to compress.")
    public List<String> uncompressedExtensions;

    @Option(name = "packageForR",
        defaultValue = "null",
        category = "config",
        help = "Custom java package to generate the R symbols files.")
    public String packageForR;

    @Option(name = "applicationId",
        defaultValue = "null",
        category = "config",
        help = "Custom application id (package manifest) for the packaged manifest.")
    public String applicationId;

    @Option(name = "versionName",
        defaultValue = "null",
        category = "config",
        help = "Version name to stamp into the packaged manifest.")
    public String versionName;

    @Option(name = "versionCode",
        defaultValue = "-1",
        category = "config",
        help = "Version code to stamp into the packaged manifest.")
    public int versionCode;

    @Option(name = "assetsToIgnore",
        defaultValue = "",
        converter = CommaSeparatedOptionListConverter.class,
        category = "config",
        help = "A list of assets extensions to ignore.")
    public List<String> assetsToIgnore;
  }

  private static Options options;

  /**
   * Action entry point: parses flags, merges resources/assets, applies density filtering,
   * runs aapt packaging, and writes the requested outputs (srcjar, R.txt, symbols).
   * Exits with 1 on merge errors, 2 on I/O/tool errors, 3 on unexpected errors, 0 on success.
   */
  public static void main(String[] args) {
    final Stopwatch timer = Stopwatch.createStarted();
    OptionsParser optionsParser = OptionsParser.newOptionsParser(Options.class);
    optionsParser.parseAndExitUponError(args);
    options = optionsParser.getOptions(Options.class);

    FileSystem fileSystem = FileSystems.getDefault();
    Path working = fileSystem.getPath("").toAbsolutePath();
    Path mergedAssets = working.resolve("merged_assets");
    Path mergedResources = working.resolve("merged_resources");

    final AndroidResourceProcessor resourceProcessor = new AndroidResourceProcessor(STD_LOGGER);
    final AndroidSdkTools sdkTools = new AndroidSdkTools(options.apiVersion,
        options.aapt,
        options.annotationJar,
        options.adb,
        options.zipAlign,
        options.androidJar,
        STD_LOGGER);

    try {
      // Scratch directories for expanded/deduplicated inputs; cleaned up on JVM exit.
      Path expandedOut = Files.createTempDirectory("tmp-expanded");
      expandedOut.toFile().deleteOnExit();
      Path deduplicatedOut = Files.createTempDirectory("tmp-deduplicated");
      deduplicatedOut.toFile().deleteOnExit();

      LOGGER.fine(String.format("Setup finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));

      final ImmutableList<DirectoryModifier> modifiers = ImmutableList.of(
          new PackedResourceTarExpander(expandedOut, working),
          new FileDeDuplicator(Hashing.murmur3_128(), deduplicatedOut, working));

      final AndroidBuilder builder = sdkTools.createAndroidBuilder();

      final MergedAndroidData mergedData = resourceProcessor.mergeData(
          options.primaryData,
          options.data,
          mergedResources,
          mergedAssets,
          modifiers,
          useAaptCruncher() ? builder.getAaptCruncher() : null,
          true);

      LOGGER.info(String.format("Merging finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));

      final Path filteredResources = fileSystem.getPath("resources-filtered");
      final Path densityManifest = fileSystem.getPath("manifest-filtered/AndroidManifest.xml");

      final DensityFilteredAndroidData filteredData = mergedData.filter(
          new DensitySpecificResourceFilter(options.densities, filteredResources, working),
          new DensitySpecificManifestProcessor(options.densities, densityManifest));

      LOGGER.info(
          String.format("Density filtering finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));

      resourceProcessor.processResources(
          builder,
          options.packageType,
          options.debug,
          options.packageForR,
          new FlagAaptOptions(),
          options.resourceConfigs,
          options.applicationId,
          options.versionCode,
          options.versionName,
          filteredData,
          options.data,
          working.resolve("manifest"),
          options.generatedSourcePath,
          options.packagePath,
          options.proguardOutput);
      // Typo fix: was "appt finished".
      LOGGER.fine(String.format("aapt finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));
      if (options.srcJarOutput != null) {
        resourceProcessor.createSrcJar(options.generatedSourcePath, options.srcJarOutput);
      }
      if (options.rOutput != null) {
        resourceProcessor.copyRToOutput(options.generatedSourcePath, options.rOutput);
      }
      if (options.symbolsTxtOut != null) {
        resourceProcessor.copyRToOutput(options.generatedSourcePath, options.symbolsTxtOut);
      }
      LOGGER.fine(String.format("Packaging finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));
    } catch (MergingException e) {
      LOGGER.log(java.util.logging.Level.SEVERE, "Error during merging resources", e);
      System.exit(1);
    } catch (IOException | InterruptedException | LoggedErrorException e) {
      LOGGER.log(java.util.logging.Level.SEVERE, "Error during processing resources", e);
      System.exit(2);
    } catch (Exception e) {
      LOGGER.log(java.util.logging.Level.SEVERE, "Unexpected", e);
      System.exit(3);
    }
    LOGGER.info(String.format("Resources processed in %sms", timer.elapsed(TimeUnit.MILLISECONDS)));
    // AOSP code can leave dangling threads.
    System.exit(0);
  }

  /**
   * Resolves the tri-state --useAaptCruncher flag: explicit YES/NO wins; AUTO
   * defaults to crunching for everything except LIBRARY packages.
   */
  private static boolean useAaptCruncher() {
    // If the value was set, use that.
    if (options.useAaptCruncher != TriState.AUTO) {
      return options.useAaptCruncher == TriState.YES;
    }
    // By default png cruncher shouldn't be invoked on a library -- the work is just thrown away.
    return options.packageType != VariantConfiguration.Type.LIBRARY;
  }

  /** Adapts the parsed command-line flags to the AaptOptions interface the builder expects. */
  private static final class FlagAaptOptions implements AaptOptions {
    @Override
    public boolean getUseAaptPngCruncher() {
      return options.useAaptCruncher != TriState.NO;
    }

    @Override
    public Collection<String> getNoCompress() {
      if (!options.uncompressedExtensions.isEmpty()) {
        return options.uncompressedExtensions;
      }
      return null;
    }

    @Override
    public String getIgnoreAssets() {
      if (!options.assetsToIgnore.isEmpty()) {
        return Joiner.on(":").join(options.assetsToIgnore);
      }
      return null;
    }

    @Override
    public boolean getFailOnMissingConfigEntry() {
      return false;
    }
  }
}
/*
 * Copyright 2017 StreamSets Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.streamsets.pipeline.stage.origin.mongodb;

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.CreateCollectionOptions;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.sdk.SourceRunner;
import com.streamsets.pipeline.sdk.StageRunner;
import org.bson.BsonTimestamp;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.GenericContainer;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import static org.junit.Assert.assertEquals;

/**
 * Integration tests for the MongoDB origin, run against a Dockerized MongoDB 3.0 instance
 * managed by Testcontainers. Covers configuration validation, reading from capped and
 * uncapped collections, non-ObjectId offset fields, and BSON type conversion.
 */
@Ignore
public class MongoDBSourceIT {
  private static final Logger LOG = LoggerFactory.getLogger(MongoDBSourceIT.class);

  private static final String DATABASE_NAME = "test";
  private static final String CAPPED_COLLECTION = "capped";
  private static final String COLLECTION = "uncapped";
  private static final String UUID_COLLECTION = "uuid";
  private static final String BSON_COLLECTION = "bson";
  private static final String STRING_ID_COLLECTION = "stringId";
  private static final String CAPPED_STRING_ID_COLLECTION = "cappedStringId";

  private static final int TEST_COLLECTION_SIZE = 4;
  private static final int ONE_MB = 1000 * 1000;

  // Seed documents shared by the capped and uncapped collections.
  private static final List<Document> documents = new ArrayList<>(TEST_COLLECTION_SIZE);
  private static final UUID uuidValue = UUID.randomUUID();

  private static final int MONGO_PORT = 27017;
  // Epoch seconds used for the BsonTimestamp fixtures; captured at setup time.
  private static int timestamp;

  @ClassRule
  public static GenericContainer mongoContainer = new GenericContainer("mongo:3.0").withExposedPorts(MONGO_PORT);

  private static int mongoContainerMappedPort = 0;
  private static String mongoContainerIp = null;

  /**
   * Creates all test collections and inserts the fixture documents once for the whole class.
   */
  @BeforeClass
  public static void setUpClass() throws Exception {
    for (int i = 0; i < TEST_COLLECTION_SIZE; i++) {
      documents.add(new Document("value", "document " + i));
    }
    mongoContainerMappedPort = mongoContainer.getMappedPort(MONGO_PORT);
    mongoContainerIp = mongoContainer.getContainerIpAddress();

    MongoClient mongo = new MongoClient(mongoContainerIp, mongoContainerMappedPort);
    MongoDatabase db = mongo.getDatabase(DATABASE_NAME);
    db.createCollection(CAPPED_COLLECTION, new CreateCollectionOptions().capped(true).sizeInBytes(ONE_MB));
    db.createCollection(COLLECTION);
    db.createCollection(UUID_COLLECTION);
    db.createCollection(BSON_COLLECTION);
    db.createCollection(STRING_ID_COLLECTION);
    db.createCollection(CAPPED_STRING_ID_COLLECTION, new CreateCollectionOptions().capped(true).sizeInBytes(ONE_MB));

    MongoCollection<Document> capped = db.getCollection(CAPPED_COLLECTION);
    MongoCollection<Document> uncapped = db.getCollection(COLLECTION);
    capped.insertMany(documents);
    uncapped.insertMany(documents);

    MongoCollection<Document> uuid = db.getCollection(UUID_COLLECTION);
    uuid.insertOne(new Document("value", uuidValue));

    // BSON timestamp fixtures: a top-level value, one nested in a map, one nested in a list.
    MongoCollection<Document> bson = db.getCollection(BSON_COLLECTION);
    Date now = new Date();
    timestamp = (int)(now.getTime()/1000);
    bson.insertOne(new Document("value", new BsonTimestamp(timestamp, 0)));
    Map<String, Object> mapDocument = new HashMap<>();
    mapDocument.put("timestamp", new BsonTimestamp(timestamp, 1));
    bson.insertOne(new Document("valueMap", mapDocument));
    List<Object> listDocument = new ArrayList<>();
    listDocument.add(new BsonTimestamp(timestamp, 2));
    bson.insertOne(new Document("valueList", listDocument));

    mongo.close();
  }

  /** A missing initial offset must be reported as a single config issue. */
  @Test
  public void testInvalidInitialOffset() throws StageException {
    MongoDBSource origin = new MongoDBSourceBuilder()
        .connectionString("mongodb://" + mongoContainerIp + ":" + mongoContainerMappedPort)
        .database(DATABASE_NAME)
        .collection(CAPPED_COLLECTION)
        .build();

    SourceRunner runner = new SourceRunner.Builder(MongoDBSource.class, origin)
        .addOutputLane("lane")
        .build();
    List<Stage.ConfigIssue> issues = runner.runValidateConfigs();
    assertEquals(1, issues.size());
    LOG.info(issues.get(0).toString());
  }

  /** An unresolvable hostname must be reported as a single config issue. */
  @Test
  public void testInvalidHostname() throws StageException {
    MongoDBSource origin = new MongoDBSourceBuilder()
        .connectionString("mongodb://localhostsdfsd:" + mongoContainerMappedPort)
        .database(DATABASE_NAME)
        .collection(CAPPED_COLLECTION)
        .initialOffset("2015-06-01 00:00:00")
        .build();

    SourceRunner runner = new SourceRunner.Builder(MongoDBSource.class, origin)
        .addOutputLane("lane")
        .build();
    List<Stage.ConfigIssue> issues = runner.runValidateConfigs();
    assertEquals(1, issues.size());
    LOG.info(issues.get(0).toString());
  }

  /** A connection string missing the port must be reported as a single config issue. */
  @Test
  public void testInvalidHostPort() throws StageException {
    MongoDBSource origin = new MongoDBSourceBuilder()
        .connectionString("mongodb://" + mongoContainerIp)
        .database(DATABASE_NAME)
        .collection(CAPPED_COLLECTION)
        .initialOffset("2015-06-01 00:00:00")
        .build();

    SourceRunner runner = new SourceRunner.Builder(MongoDBSource.class, origin)
        .addOutputLane("lane")
        .build();
    List<Stage.ConfigIssue> issues = runner.runValidateConfigs();
    assertEquals(1, issues.size());
    LOG.info(issues.get(0).toString());
  }

  /** A non-numeric port must be reported as a single config issue. */
  @Test
  public void testInvalidPort() throws StageException {
    MongoDBSource origin = new MongoDBSourceBuilder()
        .connectionString("mongodb://" + mongoContainerIp + ":abcd")
        .database(DATABASE_NAME)
        .collection(CAPPED_COLLECTION)
        .initialOffset("2015-06-01 00:00:00")
        .build();

    SourceRunner runner = new SourceRunner.Builder(MongoDBSource.class, origin)
        .addOutputLane("lane")
        .build();
    List<Stage.ConfigIssue> issues = runner.runValidateConfigs();
    assertEquals(1, issues.size());
    LOG.info(issues.get(0).toString());
  }

  /**
   * Reads a capped collection in batches of 2, then verifies that a document inserted after
   * the initial read is picked up via the tailable cursor offset.
   */
  @Test
  public void testReadCappedCollection() throws Exception {
    MongoDBSource origin = new MongoDBSourceBuilder()
        .connectionString("mongodb://" + mongoContainerIp + ":" + mongoContainerMappedPort)
        .database(DATABASE_NAME)
        .collection(CAPPED_COLLECTION)
        .initialOffset("2015-06-01 00:00:00")
        .build();

    SourceRunner runner = new SourceRunner.Builder(MongoDBSource.class, origin)
        .addOutputLane("lane")
        .build();
    List<Stage.ConfigIssue> issues = runner.runValidateConfigs();
    assertEquals(0, issues.size());

    runner.runInit();

    StageRunner.Output output = runner.runProduce(null, 2);
    List<Record> parsedRecords = output.getRecords().get("lane");
    assertEquals(2, parsedRecords.size());
    assertEquals("document 0", parsedRecords.get(0).get("/value").getValueAsString());
    assertEquals("document 1", parsedRecords.get(1).get("/value").getValueAsString());
    for (Record r : parsedRecords) {
      LOG.info(r.toString());
    }

    String offset = output.getNewOffset();
    output = runner.runProduce(offset, 2);
    parsedRecords = output.getRecords().get("lane");
    assertEquals(2, parsedRecords.size());
    assertEquals("document 2", parsedRecords.get(0).get("/value").getValueAsString());
    assertEquals("document 3", parsedRecords.get(1).get("/value").getValueAsString());
    for (Record r : parsedRecords) {
      LOG.info(r.toString());
    }

    insertNewDocs(CAPPED_COLLECTION);

    offset = output.getNewOffset();
    output = runner.runProduce(offset, 2);
    parsedRecords = output.getRecords().get("lane");
    assertEquals(1, parsedRecords.size());
    assertEquals("document 12345", parsedRecords.get(0).get("/value").getValueAsString());
  }

  /**
   * Reads an uncapped collection in batches, then verifies a later insert is picked up once a
   * new cursor is opened (batch size must exceed the remaining-doc count by one; see below).
   */
  @Test
  public void testReadCollection() throws Exception {
    MongoDBSource origin = new MongoDBSourceBuilder()
        .connectionString("mongodb://" + mongoContainerIp + ":" + mongoContainerMappedPort)
        .database(DATABASE_NAME)
        .collection(COLLECTION)
        .isCapped(false)
        .initialOffset("2015-06-01 00:00:00")
        .build();

    SourceRunner runner = new SourceRunner.Builder(MongoDBSource.class, origin)
        .addOutputLane("lane")
        .build();
    List<Stage.ConfigIssue> issues = runner.runValidateConfigs();
    assertEquals(0, issues.size());

    runner.runInit();

    StageRunner.Output output = runner.runProduce(null, 2);
    List<Record> parsedRecords = output.getRecords().get("lane");
    assertEquals(2, parsedRecords.size());
    assertEquals("document 0", parsedRecords.get(0).get("/value").getValueAsString());
    assertEquals("document 1", parsedRecords.get(1).get("/value").getValueAsString());
    for (Record r : parsedRecords) {
      LOG.info(r.toString());
    }

    String offset = output.getNewOffset();
    output = runner.runProduce(offset, 100);
    parsedRecords = output.getRecords().get("lane");
    assertEquals(2, parsedRecords.size());
    assertEquals("document 2", parsedRecords.get(0).get("/value").getValueAsString());
    assertEquals("document 3", parsedRecords.get(1).get("/value").getValueAsString());
    for (Record r : parsedRecords) {
      LOG.info(r.toString());
    }

    insertNewDocs(COLLECTION);

    offset = output.getNewOffset();
    // We have to set max batch size to N records + 1 (in this case 3) otherwise we'll
    // Need an extra produce call before a new cursor is opened.
    output = runner.runProduce(offset, 3);
    parsedRecords = output.getRecords().get("lane");
    assertEquals(1, parsedRecords.size());
    assertEquals("document 12345", parsedRecords.get(0).get("/value").getValueAsString());
  }

  /** A BSON UUID value must be converted to its string representation. */
  @Test
  public void testReadUUIDType() throws Exception {
    MongoDBSource origin = new MongoDBSourceBuilder()
        .connectionString("mongodb://" + mongoContainerIp + ":" + mongoContainerMappedPort)
        .database(DATABASE_NAME)
        .collection(UUID_COLLECTION)
        .isCapped(false)
        .initialOffset("2015-06-01 00:00:00")
        .build();

    SourceRunner runner = new SourceRunner.Builder(MongoDBSource.class, origin)
        .addOutputLane("lane")
        .build();
    List<Stage.ConfigIssue> issues = runner.runValidateConfigs();
    assertEquals(0, issues.size());

    runner.runInit();

    StageRunner.Output output = runner.runProduce(null, 1);
    List<Record> parsedRecords = output.getRecords().get("lane");
    assertEquals(1, parsedRecords.size());
    // UUID is converted to a string.
    assertEquals(uuidValue.toString(), parsedRecords.get(0).get("/value").getValueAsString());
  }

  /** A BsonTimestamp must be converted to an SDC map with "timestamp" and "ordinal" fields. */
  @Test
  public void testReadBsonTimestampType() throws Exception {
    MongoDBSource origin = new MongoDBSourceBuilder()
        .connectionString("mongodb://" + mongoContainerIp + ":" + mongoContainerMappedPort)
        .database(DATABASE_NAME)
        .collection(BSON_COLLECTION)
        .isCapped(false)
        .initialOffset("2015-06-01 00:00:00")
        .maxBatchWaitTime(100)
        .build();

    SourceRunner runner = new SourceRunner.Builder(MongoDBSource.class, origin)
        .addOutputLane("lane")
        .build();
    List<Stage.ConfigIssue> issues = runner.runValidateConfigs();
    assertEquals(0, issues.size());

    runner.runInit();

    final int maxBatchSize = 10;
    StageRunner.Output output = runner.runProduce(null, maxBatchSize);
    List<Record> parsedRecords = output.getRecords().get("lane");
    assertEquals(3, parsedRecords.size());
    // BSON timestamp is converted into SDC map
    assertEquals(timestamp*1000L, parsedRecords.get(0).get("/value").getValueAsMap().get("timestamp").getValueAsDate().getTime());
    assertEquals(0, parsedRecords.get(0).get("/value").getValueAsMap().get("ordinal").getValueAsInteger());
  }

  /** The offset field may be nested several documents deep ("o1.o2...._id"). */
  @Test
  public void testReadNestedOffset() throws Exception {
    final int level = 5;
    String offsetField = "";
    for (int i = 1; i <= level; i++) {
      offsetField = offsetField + "o" + i + ".";
    }
    offsetField = offsetField + "_id";

    MongoDBSource origin = new MongoDBSourceBuilder()
        .connectionString("mongodb://" + mongoContainerIp + ":" + mongoContainerMappedPort)
        .database(DATABASE_NAME)
        .collection(COLLECTION)
        .offsetField(offsetField)
        .isCapped(false)
        .initialOffset("2015-06-01 00:00:00")
        .maxBatchWaitTime(100)
        .readPreference(ReadPreferenceLabel.NEAREST)
        .build();

    SourceRunner runner = new SourceRunner.Builder(MongoDBSource.class, origin)
        .addOutputLane("lane")
        .build();
    List<Stage.ConfigIssue> issues = runner.runValidateConfigs();
    assertEquals(0, issues.size());

    runner.runInit();

    final int maxBatchSize = 10;
    // No documents have the nested offset field yet, so nothing should be produced.
    StageRunner.Output output = runner.runProduce(null, maxBatchSize);
    List<Record> parsedRecords = output.getRecords().get("lane");
    assertEquals(0, parsedRecords.size());

    insertNewDocsIdInsideDoc(COLLECTION, level, TEST_COLLECTION_SIZE);

    output = runner.runProduce(null, maxBatchSize);
    parsedRecords = output.getRecords().get("lane");
    assertEquals(TEST_COLLECTION_SIZE, parsedRecords.size());
  }

  /** String _id values may be used as offsets: 3 docs over batches of 2 yield 2, 1, then 0. */
  @Test
  public void testStringOffset() throws Exception {
    MongoDBSource origin = new MongoDBSourceBuilder()
        .connectionString("mongodb://" + mongoContainerIp + ":" + mongoContainerMappedPort)
        .database(DATABASE_NAME)
        .collection(STRING_ID_COLLECTION)
        .offsetField("_id")
        .isCapped(false)
        .setOffsetType(OffsetFieldType.STRING)
        .maxBatchWaitTime(100)
        .readPreference(ReadPreferenceLabel.NEAREST)
        .build();

    SourceRunner runner = new SourceRunner.Builder(MongoDBSource.class, origin)
        .addOutputLane("lane")
        .build();
    List<Stage.ConfigIssue> issues = runner.runValidateConfigs();
    assertEquals(0, issues.size());

    runner.runInit();

    insertDocsWithStringID(STRING_ID_COLLECTION);

    final int maxBatchSize = 2;
    StageRunner.Output output = runner.runProduce(null, maxBatchSize);
    List<Record> parsedRecords = output.getRecords().get("lane");
    assertEquals("First batch should contain 2 records",2, parsedRecords.size());

    // Only 3 documents exist, so the second batch holds the single remaining record.
    output = runner.runProduce(output.getNewOffset(), maxBatchSize);
    parsedRecords = output.getRecords().get("lane");
    assertEquals("Second batch should contain 1 record",1, parsedRecords.size());

    output = runner.runProduce(output.getNewOffset(), maxBatchSize);
    parsedRecords = output.getRecords().get("lane");
    assertEquals("Last batch should have 0 records",0, parsedRecords.size());
  }

  /** Same as {@link #testStringOffset()} but reading from the capped string-id collection. */
  @Test
  public void testStringOffsetCappedCollection() throws Exception {
    MongoDBSource origin = new MongoDBSourceBuilder()
        .connectionString("mongodb://" + mongoContainerIp + ":" + mongoContainerMappedPort)
        .database(DATABASE_NAME)
        .collection(CAPPED_STRING_ID_COLLECTION)
        .offsetField("_id")
        // NOTE(review): isCapped(false) although the collection is created capped —
        // presumably exercising offset-field tracking rather than a tailable cursor; confirm.
        .isCapped(false)
        .setOffsetType(OffsetFieldType.STRING)
        .maxBatchWaitTime(100)
        .readPreference(ReadPreferenceLabel.NEAREST)
        .build();

    SourceRunner runner = new SourceRunner.Builder(MongoDBSource.class, origin)
        .addOutputLane("lane")
        .build();
    List<Stage.ConfigIssue> issues = runner.runValidateConfigs();
    assertEquals(0, issues.size());

    runner.runInit();

    insertDocsWithStringID(CAPPED_STRING_ID_COLLECTION);

    final int maxBatchSize = 2;
    StageRunner.Output output = runner.runProduce(null, maxBatchSize);
    List<Record> parsedRecords = output.getRecords().get("lane");
    assertEquals("First batch should contain 2 records",2, parsedRecords.size());

    // Only 3 documents exist, so the second batch holds the single remaining record.
    output = runner.runProduce(output.getNewOffset(), maxBatchSize);
    parsedRecords = output.getRecords().get("lane");
    assertEquals("Second batch should contain 1 record",1, parsedRecords.size());

    output = runner.runProduce(output.getNewOffset(), maxBatchSize);
    parsedRecords = output.getRecords().get("lane");
    assertEquals("Last batch should have 0 records",0, parsedRecords.size());
  }

  /** Inserts three documents whose _id values are plain strings. */
  private void insertDocsWithStringID(String collectionname){
    MongoClient mongo = new MongoClient(mongoContainerIp, mongoContainerMappedPort);
    MongoDatabase db = mongo.getDatabase(DATABASE_NAME);
    MongoCollection<Document> collection = db.getCollection(collectionname);
    collection.insertOne(new Document("_id", "12345"));
    collection.insertOne(new Document("_id", "45679"));
    collection.insertOne(new Document("_id", "56789"));
    mongo.close();
  }

  /** Inserts a single marker document ("document 12345") into the given collection. */
  private void insertNewDocs(String collectionName) {
    MongoClient mongo = new MongoClient(mongoContainerIp, mongoContainerMappedPort);
    MongoDatabase db = mongo.getDatabase(DATABASE_NAME);
    MongoCollection<Document> collection = db.getCollection(collectionName);
    collection.insertOne(new Document("value", "document 12345"));
    mongo.close();
  }

  /**
   * Inserts {@code size} documents whose ObjectId _id is nested {@code level} documents deep
   * (o1.o2....oN._id), matching the nested offset field used in testReadNestedOffset.
   */
  private void insertNewDocsIdInsideDoc(String collectionName, int level, int size) {
    MongoClient mongo = new MongoClient(mongoContainerIp, mongoContainerMappedPort);
    MongoDatabase db = mongo.getDatabase(DATABASE_NAME);
    MongoCollection<Document> collection = db.getCollection(collectionName);
    List<Document> docs = new ArrayList<>();
    for (int i = 0; i < size; i++) {
      // Build innermost document first, then wrap it level-1 more times.
      Document document = new Document("o" + level, new Document("_id", new ObjectId()));
      for (int j = level-1; j > 0; j--) {
        document = new Document("o" + j, document);
      }
      docs.add(document);
    }
    collection.insertMany(docs);
    mongo.close();
  }
}
package org.gamecontrolplus.gui;

import java.awt.Font;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.gamecontrolplus.Configuration;
import org.gamecontrolplus.ControlButton;
import org.gamecontrolplus.ControlDevice;
import org.gamecontrolplus.ControlHat;
import org.gamecontrolplus.ControlIO;
import org.gamecontrolplus.ControlInput;
import org.gamecontrolplus.ControlSlider;

import processing.core.PApplet;
import processing.core.PConstants;
import processing.event.MouseEvent;

/**
 * Configuration window for a single control device: shows the game's required inputs
 * (descriptors) on the left and the device's physical inputs on the right, and lets the
 * user wire them together by dragging connections. Verified configurations are written
 * back through {@link Configuration#saveConfiguration}.
 */
public class LConfigUI implements PConstants, LConstants {

	private final ControlDevice device;
	final ControlIO controlIO;
	final PApplet app;
	final Configuration config;
	// Set true only once the window is fully built; draw() is a no-op before that.
	private boolean active = false;

	List<LBase> uiElements = new ArrayList<LBase>();
	List<LConnector> uiConnections = new ArrayList<LConnector>();
	// Device-input UI widgets keyed by input name.
	private Map<String, LBaseInput> devInpKeys = new HashMap<String, LBaseInput>();
	// Game-input descriptor widgets keyed by config key.
	private Map<String, LDescriptor> descriptors = new HashMap<String, LDescriptor>();
	// Accumulates verify/save status text; rebuilt on every verify.
	private StringBuffer report;
	// Errors found by the current verify pass (reset at the start of each pass).
	private int errCount = 0;

	/**
	 * Builds the UI widgets for all device inputs (right column) and game-input
	 * descriptors (left column), vertically centred, then collects their connectors.
	 */
	private void addConfigToGUI(float spaceNeeded, float spaceForDescs, float spaceForInputs){
		float px, py;
		// Create and add device inputs to UI
		px = window.papplet.width - 10 - INPUT_UI_LENGTH - PANEL_WIDTH;
		py = ELEMENT_UI_GAP + (spaceNeeded - spaceForInputs) / 2;
		for(ControlInput input : device.getInputs()){
			LBaseInput ui = LBaseInput.makeInputUI(this, input, px, py);
			if(ui != null){
				uiElements.add(ui);
				py += ui.UI_HEIGHT + ELEMENT_UI_GAP;
				devInpKeys.put(ui.name, ui);
			}
		}
		// Create and add descriptors to UI
		px = 10;
		py = ELEMENT_UI_GAP + (spaceNeeded - spaceForDescs) / 2;
		for(Configuration.InputConfig iconfig : config.getGameInputs()){
			LDescriptor ui = new LDescriptor(this, px, py, iconfig);
			uiElements.add(ui);
			descriptors.put(ui.name, ui);
			py += ui.UI_HEIGHT + ELEMENT_UI_GAP;
		}
		// Now create list of connectors
		for(LBase ui : uiElements)
			for(LConnector c : ui.connectors)
				uiConnections.add(c);
	}

	/**
	 * Make existing connections between game inputs and device inputs based on type and name.
	 */
	private void makeExistingConnections(){
		for(Configuration.InputConfig iconfig : config.getGameInputs()){
			LBaseInput di = devInpKeys.get(iconfig.deviceInputName);
			// Only reconnect when the stored device input exists, matches the expected
			// UI type, and the stored connector index is in range.
			if(di != null && iconfig.type == di.uiType && iconfig.inputConNo < di.getNbrOfConnectors()){
				LDescriptor descUI = descriptors.get(iconfig.key);
				descUI.connectors[0].conTo = di.connectors[iconfig.inputConNo];
				di.connectors[iconfig.inputConNo].conTo = descUI.connectors[0];
				di.setMultiplier(iconfig.multiplier);
				di.setTolerance(iconfig.tolerance);
			}
		}
	}

	/** Appends a line to the status report, counting it as an error when flagged. */
	private void addToReport(String line, boolean isError){
		report.append(line);
		if(isError) errCount++;
	}

	/**
	 * Verify the configuration: copies each connected descriptor's wiring back into its
	 * InputConfig and reports any descriptor left unconnected.
	 *
	 * @param chain true when called as part of a save (status text is deferred)
	 * @return true if no errors were found in this pass
	 */
	private boolean verifyConfig(boolean chain){
		report = new StringBuffer();
		// BUG FIX: errCount must be reset with the report, otherwise errors from a
		// previous verify pass persist and verification can never succeed again.
		errCount = 0;
		for(Configuration.InputConfig iconfig : config.getGameInputs()){
			LDescriptor descUI = descriptors.get(iconfig.key);
			LConnector con = descUI.connectors[0].conTo;
			if(con != null){
				iconfig.deviceInputName = con.owner.name;
				iconfig.inputConNo = con.conNo;
				iconfig.multiplier = ((LBaseInput)con.owner).getMultiplier();
				iconfig.tolerance = ((LBaseInput)con.owner).getTolerance();
			}
			else {
				addToReport("No input assigned to: " + descUI.name + "\n", true);
			}
		}
		if(errCount > 0)
			addToReport("VERIFY - " + errCount + " errors found\n", false);
		else
			addToReport("VERIFY - successful\n", false);
		if(!chain) txaStatus.setText(report.toString());
		return errCount == 0;
	}

	/** Verifies then persists the configuration; abandons the save on verify errors. */
	private boolean saveConfig(){
		if(!verifyConfig(true)){
			addToReport("SAVE - abandoned\n", false);
			txaStatus.setText(report.toString());
			return false;
		}
		Configuration.saveConfiguration(app, config);
		return true;
	}

//	private String[] makeConfigLines() {
//		String[] data = new String[config.gameInputs.length + 1];
//		data[0] = config.usage;
//		int index = 1;
//		for(Configuration.InputConfig iconfig : config.gameInputs){
//			System.out.println("    " + iconfig.toString());
//			data[index++] = iconfig.toString();
//		}
//		return data;
//	}

	/** Event handler for the Verify button. */
	public void verify_click(MButton button, MEvent event) {
		verifyConfig(false);
	}

	/** Event handler for the USE button: save, bind the device, and close the window. */
	public void use_device_click(MButton button, MEvent event) {
		if(saveConfig()){
			System.out.println("Saving config");
			device.matches(config);
			controlIO.finishedConfig(device);
			window.forceClose();
		}
	}

	/** Event handler for the cancel button: abandon configuration and close. */
	public void quit_click(MButton button, MEvent event) {
		controlIO.finishedConfig(null);
		window.forceClose();
	}

	/** Event handler for the Clear Status button. */
	public void clear_click(MButton button, MEvent event) {
		txaStatus.setText("");
	}

	/** Pre-draw hook: updates widgets and tracks which connector the mouse is over. */
	synchronized public void pre(MWinApplet appc, MWinData data) {
		current = null;
		for(LBase ui : uiElements){
			ui.update();
			ui.overWhat(appc.mouseX, appc.mouseY);
		}
		// Highlight the far end of an existing connection when hovering (not while dragging).
		if(!dragging && current != null && current.conTo != null){
			current.conTo.isOver = true;
		}
	}

	/** Mouse handler: press starts a drag from a connector, release completes a connection. */
	synchronized public void mouse(MWinApplet appc, MWinData data, MouseEvent mevent) {
		switch(mevent.getAction()){
		case MouseEvent.PRESS:
			if(current != null){
				start = current;
				dragging = true;
			}
			break;
		case MouseEvent.RELEASE:
			// A connection needs one descriptor-side and one input-side connector.
			if(current != null && start != null && current.type != start.type){
				LConnector descCon = (current.type == DESC) ? current : start;
				LDescriptor descUI = ((LDescriptor)descCon.owner);
				LConnector inputCon = (start.type == INPUT) ? start : current;
				LBaseInput inputUI = ((LBaseInput)inputCon.owner);
				// Make sure the device input is the right type for the descriptor
				int type0 = ((LDescriptor)descCon.owner).iconfig.type;
				int type1 = inputCon.owner.uiType;
				if(type0 == type1){
					// Remove any existing connection
					end = current;
					current = null;
					if(start.conTo != null) start.conTo.conTo = null;
					if(end.conTo != null) end.conTo.conTo = null;
					// Add new connection
					start.conTo = end;
					end.conTo = start;
					descUI.iconfig.deviceInputName = inputUI.name; // Not sure if needed look at makeConfigLines
					inputUI.setMultiplier(descUI.iconfig.multiplier);
					inputUI.setTolerance(descUI.iconfig.tolerance);
				}
			}
			current = start = null;
			dragging = false;
			break;
		case MouseEvent.DRAG:
			break;
		}
	}

	/** Draw handler: panel, existing connections, in-progress drag line, then widgets. */
	synchronized public void draw(MWinApplet appc, MWinData data) {
		appc.background(BACKGROUND);
		if(!active) return;
		// Draw control panel at bottom
		appc.noStroke();
		appc.fill(PANEL);
		appc.rect(appc.width - PANEL_WIDTH, 0, PANEL_WIDTH, appc.height);
		// Draw connections
		appc.strokeWeight(3.5f);
		for(LConnector c : uiConnections){
			// Each connection is drawn once, from its descriptor-side connector.
			if(c.conTo != null && c.type == LConnector.DESC){
				appc.stroke(c.isOver ? HIGHLIGHT : CONNECTION);
				appc.line(c.px, c.py, c.conTo.px, c.conTo.py);
			}
		}
		// Connection in the making
		if(dragging && start != null){
			appc.stroke(CONNECTION);
			appc.line(start.px, start.py, appc.mouseX, appc.mouseY);
		}
		// Draw descriptors and inputs
		for(LBase ui : uiElements)
			ui.draw();
	}

//	public void printDevice(int id, ControlDevice device){
//		System.out.println("========================================================================");
//		System.out.println("Device number  " + id + " is called '" + device.getName() + "' and has");
//		System.out.println("\t" + device.getNumberOfButtons() + " buttons");
//		System.out.println("\t" + device.getNumberOfSliders() + " sliders");
//		System.out.println("\t" + device.getNumberOfRumblers() + " rumblers");
//		device.printButtons();
//		device.printSliders();
//		System.out.println("------------------------------------------------------------------------\n\n");
//	}

	// Window GUI stuff
	MWindow window;
	MTabManager tabManager = new MTabManager();
	MTextField txfFilename;
	MTextArea txaStatus;

	/**
	 * Creates and shows the configuration window for the given device entry, sizing and
	 * scaling the window to fit all inputs/descriptors on the display.
	 */
	public LConfigUI(PApplet papp, LSelectEntry entry){
		float px, py, pw;
		app = papp;
		device = entry.device;
		entry.device.open();
		controlIO = entry.controlIO;
		this.config = LSelectUI.config;

		float spaceForInputs = ELEMENT_UI_GAP;
		// Scan through controls to calculate the window height needed
		for(ControlInput input : device.getInputs()){
			if(input instanceof ControlHat){
				spaceForInputs += 5 * INPUT_UI_HEIGHT + ELEMENT_UI_GAP + 2;
			}
			else if(input instanceof ControlButton){
				spaceForInputs += INPUT_UI_HEIGHT + ELEMENT_UI_GAP + 2;
			}
			else if(input instanceof ControlSlider){
				spaceForInputs += 4 * INPUT_UI_HEIGHT + ELEMENT_UI_GAP + 2;
			}
			else
				System.out.println("Unknown input " + input);
		}
		float spaceForDescs = config.nbrInputs() * (DESC_UI_HEIGHT + ELEMENT_UI_GAP + 2);
		float spaceNeeded = Math.max(spaceForInputs, spaceForDescs);
		spaceNeeded = Math.max(spaceNeeded, PANEL_HEIGHT);
		// Now calculate window scaling and height
		if(papp.displayHeight < spaceNeeded + 40)
			scale = papp.displayHeight / (spaceNeeded + 40);
		else
			scale = 1.0f;
		int winHeight = Math.round(spaceNeeded * scale);
		// Apply scaling
		input_UI_height = INPUT_UI_HEIGHT * scale;
		desc_UI_height = DESC_UI_HEIGHT * scale;
		element_UI_gap = ELEMENT_UI_GAP * scale;
		input_UI_length = INPUT_UI_LENGTH;
		desc_UI_length = DESC_UI_LENGTH;
		textfield_gap = TEXTFIELD_GAP * scale;
		indicator_d = INICATOR_D * scale;
		connector_size_r = CONNECTOR_SIZE_R * scale;
		connector_size_d = 2 * connector_size_r;
		fontSize = FONT_SIZE * scale;
		font = new Font("Dialog", Font.PLAIN, (int)fontSize);

		// CREATE THE WINDOW
		String title = "'" + device.getName() + "' [" + device.getTypeName() + " on " + device.getPortTypeName() + "]";
		window = new MWindow(papp, title, 80, 100, 1020, winHeight, false, M4P.JAVA2D);
		window.setResizable(false);
		window.addDrawHandler(this, "draw");
		window.addMouseHandler(this, "mouse");
		window.addPreHandler(this, "pre");
		// Suspend drawing until the UI is fully built.
		window.papplet.noLoop();
		tabManager = new MTabManager();
		M4P.setCursor(CROSS);

		// Create the control panel
		px = window.papplet.width - PANEL_WIDTH + 10;
		pw = PANEL_WIDTH - 20;
		py = 10;
		// NOTE(review): getUsgae() is a typo in the Configuration API itself — confirm
		// whether a correctly-spelled accessor exists before relying on this name.
		MLabel lblFilenamePrompt = new MLabel(window.papplet, px, py, pw, 20, "Config. for: " + config.getUsgae());
		lblFilenamePrompt.setTextAlign(MAlign.LEFT, null);
		lblFilenamePrompt.setLocalColorScheme(M4P.GREEN_SCHEME);
		lblFilenamePrompt.setTextBold();
		lblFilenamePrompt.setOpaque(true);
		py += 26;
		float bw = (pw - 20)/3;
		MButton btnClearStatus = new MButton(window.papplet, px, py, bw, 20);
		btnClearStatus.setLocalColorScheme(M4P.GREEN_SCHEME);
		btnClearStatus.setText("Clear Status");
		btnClearStatus.addEventHandler(this, "clear_click");
		MButton btnVerify = new MButton(window.papplet, px + (pw - bw)/2, py, bw, 20);
		btnVerify.setLocalColorScheme(M4P.GREEN_SCHEME);
		btnVerify.setText("Verify");
		btnVerify.addEventHandler(this, "verify_click");
		MButton btnSave = new MButton(window.papplet, px + pw - bw, py, bw, 20);
		btnSave.setLocalColorScheme(M4P.GREEN_SCHEME);
		btnSave.setText("USE");
		btnSave.addEventHandler(this, "use_device_click");
		py += 26;
		MLabel lblStatus = new MLabel(window.papplet, px, py, pw, 20, "VERIFY / SAVE STATUS REPORT");
		lblStatus.setLocalColorScheme(M4P.GREEN_SCHEME);
		lblStatus.setTextBold();
		lblStatus.setOpaque(true);
		py += 22;
		txaStatus = new MTextArea(window.papplet, px, py, pw, 140, M4P.SCROLLBARS_VERTICAL_ONLY);
		txaStatus.setLocalColorScheme(M4P.GREEN_SCHEME);
		txaStatus.setPromptText("Verify / save status report");
		py += txaStatus.getHeight() + 4;
		MButton btnQuit = new MButton(window.papplet, px, py, pw, 20);
		btnQuit.setLocalColorScheme(M4P.RED_SCHEME);
		btnQuit.setText("CANCEL CONFIGURATION AND EXIT");
		btnQuit.addEventHandler(this, "quit_click");

		window.papplet.textSize(fontSize);
		addConfigToGUI(spaceNeeded, spaceForDescs, spaceForInputs);
		makeExistingConnections();
		active = true;
		window.papplet.loop();
	}

	void close(){
		window.forceClose();
	}

	// Drag state: connector where the drag started, where it ended, and the one under the mouse.
	LConnector start = null;
	LConnector end = null;
	LConnector current = null;

	// Display metrics, all pre-scaled to fit the display in the constructor.
	final float scale;
	final float input_UI_height;
	final float desc_UI_height;
	final float element_UI_gap;
	final float input_UI_length;
	final float desc_UI_length;
	final float textfield_gap;
	final float indicator_d;
	final float connector_size_r;	// radius
	final float connector_size_d;	// diameter
	final float fontSize;
	final Font font;

	private boolean dragging = false;
}
/* * Copyright 2008-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.codehaus.groovy.transform; import org.codehaus.groovy.ast.ASTNode; import org.codehaus.groovy.ast.AnnotationNode; import org.codehaus.groovy.ast.ClassCodeExpressionTransformer; import org.codehaus.groovy.ast.ClassHelper; import org.codehaus.groovy.ast.ClassNode; import org.codehaus.groovy.ast.FieldNode; import org.codehaus.groovy.ast.MethodNode; import org.codehaus.groovy.ast.Parameter; import org.codehaus.groovy.ast.PropertyNode; import org.codehaus.groovy.ast.expr.ClassExpression; import org.codehaus.groovy.ast.expr.ClosureExpression; import org.codehaus.groovy.ast.expr.DeclarationExpression; import org.codehaus.groovy.ast.expr.Expression; import org.codehaus.groovy.ast.expr.PropertyExpression; import org.codehaus.groovy.ast.expr.TupleExpression; import org.codehaus.groovy.ast.expr.VariableExpression; import org.codehaus.groovy.ast.stmt.CatchStatement; import org.codehaus.groovy.classgen.VariableScopeVisitor; import org.codehaus.groovy.control.CompilePhase; import org.codehaus.groovy.control.SourceUnit; import org.codehaus.groovy.control.messages.SyntaxErrorMessage; import org.codehaus.groovy.ast.stmt.BlockStatement; import org.codehaus.groovy.ast.stmt.ExpressionStatement; import org.codehaus.groovy.ast.stmt.ForStatement; import org.codehaus.groovy.syntax.SyntaxException; import org.objectweb.asm.Opcodes; import groovy.lang.Reference; import java.util.HashSet; 
import java.util.LinkedList; import java.util.List; import java.util.Set; /** * Handles generation of code for the @Category annotation. * <p> * Transformation logic is as follows: * <ul> * <li>all non-static methods converted to static ones with an additional 'self' parameter</li> * <li>references to 'this' changed to the additional 'self' parameter</li> * </ul> * * @author Alex Tkachman */ @GroovyASTTransformation(phase = CompilePhase.CANONICALIZATION) public class CategoryASTTransformation implements ASTTransformation, Opcodes { // should not use a static variable because of possible changes to node metadata // which would be visible to other compilation units private final VariableExpression thisExpression = createThisExpression(); private static VariableExpression createThisExpression() { VariableExpression expr = new VariableExpression("$this"); expr.setClosureSharedVariable(true); return expr; } /** * Property invocations done on 'this' reference are transformed so that the invocations at runtime are * done on the additional parameter 'self' */ public void visit(ASTNode[] nodes, final SourceUnit source) { if (nodes.length != 2 || !(nodes[0] instanceof AnnotationNode) || !(nodes[1] instanceof ClassNode)) { source.getErrorCollector().addError( new SyntaxErrorMessage(new SyntaxException("@Category can only be added to a ClassNode but got: " + (nodes.length==2?nodes[1]:"nothing"), nodes[0].getLineNumber(), nodes[0].getColumnNumber()), source)); } AnnotationNode annotation = (AnnotationNode) nodes[0]; ClassNode parent = (ClassNode) nodes[1]; ClassNode targetClass = getTargetClass(source, annotation); thisExpression.setType(targetClass); final LinkedList<Set<String>> varStack = new LinkedList<Set<String>>(); if (!ensureNoInstanceFieldOrProperty(source, parent)) return; Set<String> names = new HashSet<String>(); for (FieldNode field : parent.getFields()) { names.add(field.getName()); } for (PropertyNode field : parent.getProperties()) { names.add(field.getName()); 
} varStack.add(names); final Reference parameter = new Reference(); final ClassCodeExpressionTransformer expressionTransformer = new ClassCodeExpressionTransformer() { protected SourceUnit getSourceUnit() { return source; } private void addVariablesToStack(Parameter[] params) { Set<String> names = new HashSet<String>(); names.addAll(varStack.getLast()); for (Parameter param : params) { names.add(param.getName()); } varStack.add(names); } @Override public void visitCatchStatement(CatchStatement statement) { varStack.getLast().add(statement.getVariable().getName()); super.visitCatchStatement(statement); varStack.getLast().remove(statement.getVariable().getName()); } @Override public void visitMethod(MethodNode node) { addVariablesToStack(node.getParameters()); super.visitMethod(node); varStack.removeLast(); } @Override public void visitBlockStatement(BlockStatement block) { Set<String> names = new HashSet<String>(); names.addAll(varStack.getLast()); varStack.add(names); super.visitBlockStatement(block); varStack.remove(names); } @Override public void visitClosureExpression(ClosureExpression ce) { addVariablesToStack(ce.getParameters()); super.visitClosureExpression(ce); varStack.removeLast(); } @Override public void visitDeclarationExpression(DeclarationExpression expression) { if (expression.isMultipleAssignmentDeclaration()) { TupleExpression te = expression.getTupleExpression(); List<Expression> list = te.getExpressions(); for (Expression arg : list) { VariableExpression ve = (VariableExpression) arg; varStack.getLast().add(ve.getName()); } } else { VariableExpression ve = expression.getVariableExpression(); varStack.getLast().add(ve.getName()); } super.visitDeclarationExpression(expression); } @Override public void visitForLoop(ForStatement forLoop) { Expression exp = forLoop.getCollectionExpression(); exp.visit(this); Parameter loopParam = forLoop.getVariable(); if (loopParam != null) { varStack.getLast().add(loopParam.getName()); } super.visitForLoop(forLoop); 
} @Override public void visitExpressionStatement(ExpressionStatement es) { // GROOVY-3543: visit the declaration expressions so that declaration variables get added on the varStack Expression exp = es.getExpression(); if (exp instanceof DeclarationExpression) { exp.visit(this); } super.visitExpressionStatement(es); } @Override public Expression transform(Expression exp) { if (exp instanceof VariableExpression) { VariableExpression ve = (VariableExpression) exp; if (ve.getName().equals("this")) return thisExpression; else { if (!varStack.getLast().contains(ve.getName())) { return new PropertyExpression(thisExpression, ve.getName()); } } } else if (exp instanceof PropertyExpression) { PropertyExpression pe = (PropertyExpression) exp; if (pe.getObjectExpression() instanceof VariableExpression) { VariableExpression vex = (VariableExpression) pe.getObjectExpression(); if (vex.isThisExpression()) { pe.setObjectExpression(thisExpression); return pe; } } } else if (exp instanceof ClosureExpression) { ClosureExpression ce = (ClosureExpression) exp; ce.getVariableScope().putReferencedLocalVariable((Parameter) parameter.get()); Parameter[] params = ce.getParameters(); if (params == null) { params = Parameter.EMPTY_ARRAY; } else if (params.length == 0) { params = new Parameter[]{ new Parameter(ClassHelper.OBJECT_TYPE, "it") }; } addVariablesToStack(params); ce.getCode().visit(this); varStack.removeLast(); } return super.transform(exp); } }; for (MethodNode method : parent.getMethods()) { if (!method.isStatic()) { method.setModifiers(method.getModifiers() | Opcodes.ACC_STATIC); final Parameter[] origParams = method.getParameters(); final Parameter[] newParams = new Parameter[origParams.length + 1]; Parameter p = new Parameter(targetClass, "$this"); p.setClosureSharedVariable(true); newParams[0] = p; parameter.set(p); System.arraycopy(origParams, 0, newParams, 1, origParams.length); method.setParameters(newParams); expressionTransformer.visitMethod(method); } } new 
VariableScopeVisitor(source, true).visitClass(parent); } private boolean ensureNoInstanceFieldOrProperty(final SourceUnit source, final ClassNode parent) { boolean valid = true; for (FieldNode fieldNode : parent.getFields()) { if (!fieldNode.isStatic() && fieldNode.getLineNumber()>0) { // if <0, probably an AST transform or internal code (like generated metaclass field, ...) addUnsupportedError(fieldNode, source); valid = false; } } for (PropertyNode propertyNode : parent.getProperties()) { if (!propertyNode.isStatic() && propertyNode.getLineNumber()>0) { // if <0, probably an AST transform or internal code (like generated metaclass field, ...) addUnsupportedError(propertyNode, source); valid = false; } } return valid; } private static void addUnsupportedError(ASTNode node, SourceUnit unit) { unit.getErrorCollector().addErrorAndContinue( new SyntaxErrorMessage( new SyntaxException("The @Category transformation does not support instance "+ (node instanceof FieldNode?"fields":"properties") + " but found ["+getName(node)+"]", node.getLineNumber(), node.getColumnNumber() ), unit )); } private static String getName(ASTNode node) { if (node instanceof FieldNode) return ((FieldNode) node).getName(); if (node instanceof PropertyNode) return ((PropertyNode) node).getName(); return node.getText(); } private ClassNode getTargetClass(SourceUnit source, AnnotationNode annotation) { Expression value = annotation.getMember("value"); if (value == null || !(value instanceof ClassExpression)) { //noinspection ThrowableInstanceNeverThrown source.getErrorCollector().addErrorAndContinue(new SyntaxErrorMessage( new SyntaxException("@groovy.lang.Category must define 'value' which is the class to apply this category to", annotation.getLineNumber(), annotation.getColumnNumber(), annotation.getLastLineNumber(), annotation.getLastColumnNumber()), source)); return null; } else { ClassExpression ce = (ClassExpression) value; return ce.getType(); } } }
package org.biopax.paxtools.pattern.constraint;

import org.biopax.paxtools.controller.PathAccessor;
import org.biopax.paxtools.model.BioPAXElement;
import org.biopax.paxtools.model.level3.*;
import org.biopax.paxtools.pattern.Match;

import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

/**
 * This constraint is used to collect related EntityReference of the participant physical entities.
 * The constraint lets users set some participants as taboo; those are excluded from
 * the analysis.
 *
 * Var0 - Interaction
 * Var1 - Taboo element number 1
 * ...
 * Var(numTabooElements+1) - Last taboo element
 * Var(numTabooElements+2) - The related EntityReference
 *
 * @author Ozgun Babur
 */
public class InterToPartER extends ConstraintAdapter
{
	/**
	 * Direction to go. When this parameter is used, the interaction has to be a Conversion.
	 */
	private Direction direction;

	/**
	 * Constraint used for traversing towards simpler PE.
	 */
	private static final LinkedPE linker = new LinkedPE(LinkedPE.Type.TO_MEMBER);

	/**
	 * Accessor from PE to ER.
	 */
	private static final PathAccessor pe2ER =
		new PathAccessor("SimplePhysicalEntity/entityReference");

	/**
	 * Constructor with parameters. A taboo element is the participant that we want to exclude from
	 * the analysis. User should provide the number of taboo elements, then during execution, these
	 * elements will be fetched from the current match.
	 */
	public InterToPartER(int numOfTabooElements)
	{
		super(numOfTabooElements + 2);
	}

	/**
	 * Constructor with parameters. A taboo element is the participant that we want to exclude from
	 * the analysis. User should provide the number of taboo elements, then during execution, these
	 * elements will be fetched from the current match. The direction is left, or right, or both
	 * sides of the Conversion.
	 */
	public InterToPartER(Direction direction, int numOfTabooElements)
	{
		this(numOfTabooElements);
		this.direction = direction;
	}

	/**
	 * Constructor without parameters. There are no taboo elements.
	 */
	public InterToPartER()
	{
		this(0);
	}

	/**
	 * Constructor with direction. There are no taboo elements.
	 */
	public InterToPartER(Direction direction)
	{
		this();
		this.direction = direction;
	}

	/**
	 * This is a generative constraint.
	 * @return true if the constraint can generate candidates
	 */
	@Override
	public boolean canGenerate()
	{
		return true;
	}

	/**
	 * Iterates over non-taboo participants and collects the related ERs.
	 * @param match current pattern match
	 * @param ind mapped indices
	 * @return related entity references
	 */
	@Override
	public Collection<BioPAXElement> generate(Match match, int... ind)
	{
		Interaction inter = (Interaction) match.get(ind[0]);

		// indices 1 .. size-2 hold the taboo participants for this match
		Set<Entity> taboo = new HashSet<Entity>();
		for (int i = 1; i < getVariableSize() - 1; i++)
		{
			taboo.add((Entity) match.get(ind[i]));
		}

		if (direction == null) return generate(inter, taboo);
		// a non-null direction requires the interaction to be a Conversion (see field doc)
		else return generate((Conversion) inter, direction, taboo);
	}

	/**
	 * Gets the related entity references of the given interaction.
	 * @param inter the interaction whose participants are traversed
	 * @param taboo participants to skip
	 * @return related entity references
	 */
	protected Collection<BioPAXElement> generate(Interaction inter, Set<Entity> taboo)
	{
		return relatedERs(inter.getParticipant(), taboo);
	}

	/**
	 * Gets the related entity references of the given conversion, restricted to the
	 * given direction.
	 * @param conv the conversion
	 * @param direction which side(s) of the conversion to use; must not be null
	 * @param taboo participants to skip
	 * @return related entity references
	 */
	protected Collection<BioPAXElement> generate(Conversion conv, Direction direction,
		Set<Entity> taboo)
	{
		if (direction == null) throw new IllegalArgumentException("Direction cannot be null");

		if (!(direction == Direction.BOTHSIDERS || direction == Direction.ONESIDERS))
		{
			// plain side selection: ANY, LEFT or RIGHT
			Collection<? extends Entity> parts;
			if (direction == Direction.ANY) parts = conv.getParticipant();
			else if (direction == Direction.LEFT) parts = conv.getLeft();
			else parts = conv.getRight();

			return relatedERs(parts, taboo);
		}
		else
		{
			Set<BioPAXElement> leftERs = relatedERs(conv.getLeft(), taboo);
			Set<BioPAXElement> rightERs = relatedERs(conv.getRight(), taboo);

			if (direction == Direction.ONESIDERS)
			{
				// symmetric difference: union minus intersection
				Set<BioPAXElement> common = new HashSet<BioPAXElement>(leftERs);
				common.retainAll(rightERs);
				leftERs.addAll(rightERs);
				leftERs.removeAll(common);
			}
			else // BOTHSIDERS
			{
				// get intersection
				leftERs.retainAll(rightERs);
			}

			return leftERs;
		}
	}

	/**
	 * Collects the entity references related to the non-taboo physical entities among
	 * the given participants (shared by all generate variants).
	 * @param parts candidate participants
	 * @param taboo participants to skip
	 * @return related entity references
	 */
	private Set<BioPAXElement> relatedERs(Collection<? extends Entity> parts, Set<Entity> taboo)
	{
		Set<BioPAXElement> simples = new HashSet<BioPAXElement>();

		for (Entity part : parts)
		{
			if (part instanceof PhysicalEntity && !taboo.contains(part))
			{
				simples.addAll(linker.getLinkedElements((PhysicalEntity) part));
			}
		}

		// copy into a typed set; the accessor yields BioPAXElements for this path
		Set<BioPAXElement> ers = new HashSet<BioPAXElement>();
		for (Object o : pe2ER.getValueFromBeans(simples)) ers.add((BioPAXElement) o);
		return ers;
	}

	public enum Direction
	{
		LEFT,
		RIGHT,
		ANY,
		ONESIDERS,
		BOTHSIDERS
	}
}
/*
 * Copyright 2016-present Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.vtnweb.gui;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;

import org.onlab.packet.IpAddress;
import org.onosproject.ui.RequestHandler;
import org.onosproject.ui.UiMessageHandler;
import org.onosproject.ui.table.TableModel;
import org.onosproject.ui.table.TableRequestHandler;
import org.onosproject.vtnrsc.FixedIp;
import org.onosproject.vtnrsc.FlowClassifier;
import org.onosproject.vtnrsc.FlowClassifierId;
import org.onosproject.vtnrsc.PortChain;
import org.onosproject.vtnrsc.PortPair;
import org.onosproject.vtnrsc.PortPairGroup;
import org.onosproject.vtnrsc.PortPairGroupId;
import org.onosproject.vtnrsc.PortPairId;
import org.onosproject.vtnrsc.VirtualPort;
import org.onosproject.vtnrsc.VirtualPortId;
import org.onosproject.vtnrsc.flowclassifier.FlowClassifierService;
import org.onosproject.vtnrsc.portchain.PortChainService;
import org.onosproject.vtnrsc.portpair.PortPairService;
import org.onosproject.vtnrsc.portpairgroup.PortPairGroupService;
import org.onosproject.vtnrsc.virtualport.VirtualPortService;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.ImmutableSet;

/**
 * Message handler for service function chain view related messages.
 */
public class SfcViewMessageHandler extends UiMessageHandler {

    // separator used when joining several IPs/prefixes into one table cell
    private static final String SLASH = " ; ";
    private static final String NONE = "none";
    private static final String SFCTYPE = "Service Function Chain";

    private static final String SFC_DATA_REQ = "sfcDataRequest";
    private static final String SFC_DATA_RESP = "sfcDataResponse";
    private static final String SFCS = "sfcs";

    // column identifiers of the SFC table
    private static final String ID = "id";
    private static final String STATE = "_iconid_state";
    private static final String PORTCHAINNAME = "portChainName";
    private static final String HOSTS = "hosts";
    private static final String TYPE = "type";
    private static final String SRCIP = "srcIp";
    private static final String DSTIP = "dstIp";

    private static final String[] COL_IDS = {
            ID, STATE, PORTCHAINNAME, HOSTS, TYPE, SRCIP, DSTIP
    };

    private static final String ICON_ID_ONLINE = "active";
    private static final String ICON_ID_OFFLINE = "inactive";

    @Override
    protected Collection<RequestHandler> createRequestHandlers() {
        return ImmutableSet.of(new SfcDataRequest());
    }

    // handler for sfc table requests
    private final class SfcDataRequest extends TableRequestHandler {

        private static final String NO_ROWS_MESSAGE = "No Service Function Chain found";

        private SfcDataRequest() {
            super(SFC_DATA_REQ, SFC_DATA_RESP, SFCS);
        }

        @Override
        protected String[] getColumnIds() {
            return COL_IDS;
        }

        @Override
        protected String defaultColumnId() {
            return PORTCHAINNAME;
        }

        @Override
        protected String noRowsMessage(ObjectNode payload) {
            return NO_ROWS_MESSAGE;
        }

        @Override
        protected void populateTable(TableModel tm, ObjectNode payload) {
            PortChainService pcs = get(PortChainService.class);
            Iterable<PortChain> portChains = pcs.getPortChains();
            portChains.forEach(pchain -> populateRow(tm.addRow(), pchain));
        }

        // populate one table row with the data of a service function chain
        private void populateRow(TableModel.Row row, PortChain pchain) {
            PortChainIpRange portChainIpRange = portChainIpRange(pchain);
            List<VirtualPort> vpList = sfcPorts(pchain);
            row.cell(ID, pchain.portChainId().value().toString())
                    .cell(STATE, sfcState(vpList))
                    .cell(PORTCHAINNAME, pchain.name())
                    .cell(HOSTS, sfcHosts(vpList))
                    .cell(TYPE, SFCTYPE)
                    .cell(SRCIP, portChainIpRange.srcip())
                    .cell(DSTIP, portChainIpRange.dstip());
        }

        // joins the src/dst IP prefixes of all flow classifiers of the chain;
        // either side falls back to "none" when no classifier contributes a prefix
        private PortChainIpRange portChainIpRange(PortChain pchain) {
            List<FlowClassifierId> flowClassifierList = pchain.flowClassifiers();
            FlowClassifierService fcs = get(FlowClassifierService.class);
            // StringBuilder: purely local accumulation, no synchronization needed
            StringBuilder srcipbuf = new StringBuilder();
            StringBuilder dstipbuf = new StringBuilder();
            if (flowClassifierList != null) {
                flowClassifierList.stream().forEach(fcid -> {
                    FlowClassifier fc = fcs.getFlowClassifier(fcid);
                    // guard against classifiers that were removed concurrently
                    if (fc != null) {
                        srcipbuf.append(fc.srcIpPrefix().toString()).append(SLASH);
                        dstipbuf.append(fc.dstIpPrefix().toString()).append(SLASH);
                    }
                });
            }
            String srcip = NONE;
            String dstip = NONE;
            if (srcipbuf.length() > 0) {
                srcip = srcipbuf.substring(0, srcipbuf.length() - SLASH.length());
            }
            if (dstipbuf.length() > 0) {
                dstip = dstipbuf.substring(0, dstipbuf.length() - SLASH.length());
            }
            return new PortChainIpRange(srcip, dstip);
        }

        // resolves the VirtualPorts of a service function chain by walking
        // port pair groups -> port pairs -> ingress virtual ports
        private List<VirtualPort> sfcPorts(PortChain pchain) {
            List<PortPairGroupId> portPairGroupList = pchain.portPairGroups();
            PortPairGroupService ppgs = get(PortPairGroupService.class);
            PortPairService pps = get(PortPairService.class);
            VirtualPortService vps = get(VirtualPortService.class);
            List<VirtualPort> vpList = new ArrayList<VirtualPort>();
            if (portPairGroupList != null) {
                portPairGroupList.stream().forEach(ppgid -> {
                    PortPairGroup ppg = ppgs.getPortPairGroup(ppgid);
                    List<PortPairId> portPairList = ppg.portPairs();
                    if (portPairList != null) {
                        portPairList.stream().forEach(ppid -> {
                            PortPair pp = pps.getPortPair(ppid);
                            if (pp != null) {
                                VirtualPort vp = vps.getPort(VirtualPortId.portId(pp.ingress()));
                                // skip unresolved ports so later state/host rendering cannot NPE
                                if (vp != null) {
                                    vpList.add(vp);
                                }
                            }
                        });
                    }
                });
            }
            return vpList;
        }

        // the chain is offline as soon as any of its virtual ports is DOWN
        private String sfcState(List<VirtualPort> vpList) {
            for (VirtualPort vp : vpList) {
                if (vp.state().equals(VirtualPort.State.DOWN)) {
                    return ICON_ID_OFFLINE;
                }
            }
            return ICON_ID_ONLINE;
        }

        // joins the first fixed IP of each virtual port into the hosts cell
        private String sfcHosts(List<VirtualPort> vpList) {
            StringBuilder hostsbuf = new StringBuilder();
            for (VirtualPort vp : vpList) {
                Iterator<FixedIp> fixedIps = vp.fixedIps().iterator();
                if (fixedIps.hasNext()) {
                    FixedIp fixedIp = fixedIps.next();
                    IpAddress ip = fixedIp.ip();
                    hostsbuf.append(ip.toString()).append(SLASH);
                }
            }
            if (hostsbuf.length() > 0) {
                return hostsbuf.substring(0, hostsbuf.length() - SLASH.length());
            }
            return hostsbuf.toString();
        }

        // immutable pair of joined source and destination ip prefix strings
        private final class PortChainIpRange {
            private final String srcip;
            private final String dstip;

            private PortChainIpRange(String srcip, String dstip) {
                this.srcip = srcip;
                this.dstip = dstip;
            }

            public String srcip() {
                return srcip;
            }

            public String dstip() {
                return dstip;
            }
        }
    }
}
/* First created by JCasGen Sat Mar 07 22:05:56 CET 2015 */
package ch.epfl.bbp.uima.types;

import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.JCasRegistry;
import org.apache.uima.cas.impl.CASImpl;
import org.apache.uima.cas.impl.FSGenerator;
import org.apache.uima.cas.FeatureStructure;
import org.apache.uima.cas.impl.TypeImpl;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.impl.FeatureImpl;
import org.apache.uima.cas.Feature;
import org.apache.uima.jcas.tcas.Annotation_Type;

/**
 * _Type companion of {@code DocumentElement}: low-level (address-based) accessors
 * for its CAS features (ElementId, isBold, height, width, x, y, pageId, label,
 * medianFontsize).
 * <p>
 * NOTE(review): this is JCasGen-GENERATED code — do not hand-edit; regenerate
 * from the type system descriptor instead.
 * <p>
 * Updated by JCasGen Sat Mar 07 22:05:56 CET 2015
 * @generated */
public class DocumentElement_Type extends Annotation_Type {
  /** @generated
   * @return the generator for this type */
  @Override
  protected FSGenerator getFSGenerator() {return fsGenerator;}
  /** @generated */
  private final FSGenerator fsGenerator =
    new FSGenerator() {
      public FeatureStructure createFS(int addr, CASImpl cas) {
  			 if (DocumentElement_Type.this.useExistingInstance) {
  			   // Return eq fs instance if already created
  		     FeatureStructure fs = DocumentElement_Type.this.jcas.getJfsFromCaddr(addr);
  		     if (null == fs) {
  		       fs = new DocumentElement(addr, DocumentElement_Type.this);
  			   DocumentElement_Type.this.jcas.putJfsFromCaddr(addr, fs);
  			   return fs;
  		     }
  		     return fs;
        } else return new DocumentElement(addr, DocumentElement_Type.this);
  	  }
    };
  /** @generated */
  @SuppressWarnings ("hiding")
  public final static int typeIndexID = DocumentElement.typeIndexID;
  /** @generated
   * @modifiable */
  @SuppressWarnings ("hiding")
  public final static boolean featOkTst = JCasRegistry.getFeatOkTst("ch.epfl.bbp.uima.types.DocumentElement");

  /** @generated */
  final Feature casFeat_ElementId;
  /** @generated */
  final int     casFeatCode_ElementId;
  /** @generated
   * @param addr low level Feature Structure reference
   * @return the feature value */
  public int getElementId(int addr) {
        if (featOkTst && casFeat_ElementId == null)
      jcas.throwFeatMissing("ElementId", "ch.epfl.bbp.uima.types.DocumentElement");
    return ll_cas.ll_getIntValue(addr, casFeatCode_ElementId);
  }
  /** @generated
   * @param addr low level Feature Structure reference
   * @param v value to set */
  public void setElementId(int addr, int v) {
        if (featOkTst && casFeat_ElementId == null)
      jcas.throwFeatMissing("ElementId", "ch.epfl.bbp.uima.types.DocumentElement");
    ll_cas.ll_setIntValue(addr, casFeatCode_ElementId, v);}

  /** @generated */
  final Feature casFeat_isBold;
  /** @generated */
  final int     casFeatCode_isBold;
  /** @generated
   * @param addr low level Feature Structure reference
   * @return the feature value */
  public boolean getIsBold(int addr) {
        if (featOkTst && casFeat_isBold == null)
      jcas.throwFeatMissing("isBold", "ch.epfl.bbp.uima.types.DocumentElement");
    return ll_cas.ll_getBooleanValue(addr, casFeatCode_isBold);
  }
  /** @generated
   * @param addr low level Feature Structure reference
   * @param v value to set */
  public void setIsBold(int addr, boolean v) {
        if (featOkTst && casFeat_isBold == null)
      jcas.throwFeatMissing("isBold", "ch.epfl.bbp.uima.types.DocumentElement");
    ll_cas.ll_setBooleanValue(addr, casFeatCode_isBold, v);}

  /** @generated */
  final Feature casFeat_height;
  /** @generated */
  final int     casFeatCode_height;
  /** @generated
   * @param addr low level Feature Structure reference
   * @return the feature value */
  public float getHeight(int addr) {
        if (featOkTst && casFeat_height == null)
      jcas.throwFeatMissing("height", "ch.epfl.bbp.uima.types.DocumentElement");
    return ll_cas.ll_getFloatValue(addr, casFeatCode_height);
  }
  /** @generated
   * @param addr low level Feature Structure reference
   * @param v value to set */
  public void setHeight(int addr, float v) {
        if (featOkTst && casFeat_height == null)
      jcas.throwFeatMissing("height", "ch.epfl.bbp.uima.types.DocumentElement");
    ll_cas.ll_setFloatValue(addr, casFeatCode_height, v);}

  /** @generated */
  final Feature casFeat_width;
  /** @generated */
  final int     casFeatCode_width;
  /** @generated
   * @param addr low level Feature Structure reference
   * @return the feature value */
  public float getWidth(int addr) {
        if (featOkTst && casFeat_width == null)
      jcas.throwFeatMissing("width", "ch.epfl.bbp.uima.types.DocumentElement");
    return ll_cas.ll_getFloatValue(addr, casFeatCode_width);
  }
  /** @generated
   * @param addr low level Feature Structure reference
   * @param v value to set */
  public void setWidth(int addr, float v) {
        if (featOkTst && casFeat_width == null)
      jcas.throwFeatMissing("width", "ch.epfl.bbp.uima.types.DocumentElement");
    ll_cas.ll_setFloatValue(addr, casFeatCode_width, v);}

  /** @generated */
  final Feature casFeat_x;
  /** @generated */
  final int     casFeatCode_x;
  /** @generated
   * @param addr low level Feature Structure reference
   * @return the feature value */
  public float getX(int addr) {
        if (featOkTst && casFeat_x == null)
      jcas.throwFeatMissing("x", "ch.epfl.bbp.uima.types.DocumentElement");
    return ll_cas.ll_getFloatValue(addr, casFeatCode_x);
  }
  /** @generated
   * @param addr low level Feature Structure reference
   * @param v value to set */
  public void setX(int addr, float v) {
        if (featOkTst && casFeat_x == null)
      jcas.throwFeatMissing("x", "ch.epfl.bbp.uima.types.DocumentElement");
    ll_cas.ll_setFloatValue(addr, casFeatCode_x, v);}

  /** @generated */
  final Feature casFeat_y;
  /** @generated */
  final int     casFeatCode_y;
  /** @generated
   * @param addr low level Feature Structure reference
   * @return the feature value */
  public float getY(int addr) {
        if (featOkTst && casFeat_y == null)
      jcas.throwFeatMissing("y", "ch.epfl.bbp.uima.types.DocumentElement");
    return ll_cas.ll_getFloatValue(addr, casFeatCode_y);
  }
  /** @generated
   * @param addr low level Feature Structure reference
   * @param v value to set */
  public void setY(int addr, float v) {
        if (featOkTst && casFeat_y == null)
      jcas.throwFeatMissing("y", "ch.epfl.bbp.uima.types.DocumentElement");
    ll_cas.ll_setFloatValue(addr, casFeatCode_y, v);}

  /** @generated */
  final Feature casFeat_pageId;
  /** @generated */
  final int     casFeatCode_pageId;
  /** @generated
   * @param addr low level Feature Structure reference
   * @return the feature value */
  public int getPageId(int addr) {
        if (featOkTst && casFeat_pageId == null)
      jcas.throwFeatMissing("pageId", "ch.epfl.bbp.uima.types.DocumentElement");
    return ll_cas.ll_getIntValue(addr, casFeatCode_pageId);
  }
  /** @generated
   * @param addr low level Feature Structure reference
   * @param v value to set */
  public void setPageId(int addr, int v) {
        if (featOkTst && casFeat_pageId == null)
      jcas.throwFeatMissing("pageId", "ch.epfl.bbp.uima.types.DocumentElement");
    ll_cas.ll_setIntValue(addr, casFeatCode_pageId, v);}

  /** @generated */
  final Feature casFeat_label;
  /** @generated */
  final int     casFeatCode_label;
  /** @generated
   * @param addr low level Feature Structure reference
   * @return the feature value */
  public String getLabel(int addr) {
        if (featOkTst && casFeat_label == null)
      jcas.throwFeatMissing("label", "ch.epfl.bbp.uima.types.DocumentElement");
    return ll_cas.ll_getStringValue(addr, casFeatCode_label);
  }
  /** @generated
   * @param addr low level Feature Structure reference
   * @param v value to set */
  public void setLabel(int addr, String v) {
        if (featOkTst && casFeat_label == null)
      jcas.throwFeatMissing("label", "ch.epfl.bbp.uima.types.DocumentElement");
    ll_cas.ll_setStringValue(addr, casFeatCode_label, v);}

  /** @generated */
  final Feature casFeat_medianFontsize;
  /** @generated */
  final int     casFeatCode_medianFontsize;
  /** @generated
   * @param addr low level Feature Structure reference
   * @return the feature value */
  public double getMedianFontsize(int addr) {
        if (featOkTst && casFeat_medianFontsize == null)
      jcas.throwFeatMissing("medianFontsize", "ch.epfl.bbp.uima.types.DocumentElement");
    return ll_cas.ll_getDoubleValue(addr, casFeatCode_medianFontsize);
  }
  /** @generated
   * @param addr low level Feature Structure reference
   * @param v value to set */
  public void setMedianFontsize(int addr, double v) {
        if (featOkTst && casFeat_medianFontsize == null)
      jcas.throwFeatMissing("medianFontsize", "ch.epfl.bbp.uima.types.DocumentElement");
    ll_cas.ll_setDoubleValue(addr, casFeatCode_medianFontsize, v);}

  /** initialize variables to correspond with Cas Type and Features
   * (each casFeat_* / casFeatCode_* pair is resolved from the type system here)
	 * @generated
	 * @param jcas JCas
	 * @param casType Type
	 */
  public DocumentElement_Type(JCas jcas, Type casType) {
    super(jcas, casType);
    casImpl.getFSClassRegistry().addGeneratorForType((TypeImpl)this.casType, getFSGenerator());

    casFeat_ElementId = jcas.getRequiredFeatureDE(casType, "ElementId", "uima.cas.Integer", featOkTst);
    casFeatCode_ElementId  = (null == casFeat_ElementId) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_ElementId).getCode();

    casFeat_isBold = jcas.getRequiredFeatureDE(casType, "isBold", "uima.cas.Boolean", featOkTst);
    casFeatCode_isBold  = (null == casFeat_isBold) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_isBold).getCode();

    casFeat_height = jcas.getRequiredFeatureDE(casType, "height", "uima.cas.Float", featOkTst);
    casFeatCode_height  = (null == casFeat_height) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_height).getCode();

    casFeat_width = jcas.getRequiredFeatureDE(casType, "width", "uima.cas.Float", featOkTst);
    casFeatCode_width  = (null == casFeat_width) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_width).getCode();

    casFeat_x = jcas.getRequiredFeatureDE(casType, "x", "uima.cas.Float", featOkTst);
    casFeatCode_x  = (null == casFeat_x) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_x).getCode();

    casFeat_y = jcas.getRequiredFeatureDE(casType, "y", "uima.cas.Float", featOkTst);
    casFeatCode_y  = (null == casFeat_y) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_y).getCode();

    casFeat_pageId = jcas.getRequiredFeatureDE(casType, "pageId", "uima.cas.Integer", featOkTst);
    casFeatCode_pageId  = (null == casFeat_pageId) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_pageId).getCode();

    casFeat_label = jcas.getRequiredFeatureDE(casType, "label", "uima.cas.String", featOkTst);
    casFeatCode_label  = (null == casFeat_label) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_label).getCode();

    casFeat_medianFontsize = jcas.getRequiredFeatureDE(casType, "medianFontsize", "uima.cas.Double", featOkTst);
    casFeatCode_medianFontsize  = (null == casFeat_medianFontsize) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_medianFontsize).getCode();

  }
}
/*
 * Copyright 2000-2011 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.introduceField;

import com.intellij.codeInsight.TestFrameworks;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.psi.*;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.ui.TypeSelectorManager;
import com.intellij.ui.NonFocusableCheckBox;
import com.intellij.ui.StateRestoringCheckBox;
import com.intellij.util.Processor;
import consulo.logging.Logger;
import org.jetbrains.annotations.TestOnly;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;

/**
 * Shared center panel for the "Introduce Field" refactoring dialogs.
 * <p>
 * Hosts the common checkboxes ("declare final", "replace all occurrences",
 * "delete variable declaration") and defers the initializer-place radio
 * group, visibility selection, and overall panel layout to subclasses.
 * The checkboxes interact: replacing all occurrences controls whether the
 * local variable may be deleted and whether the field can be {@code final}
 * (a write access to any occurrence forbids {@code final}).
 * <p>
 * User: anna
 * Date: 3/16/11
 */
public abstract class IntroduceFieldCentralPanel {
  protected static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.introduceField.IntroduceFieldDialog");

  // Key under which the last "declare final" choice is persisted across IDE sessions.
  private static final String INTRODUCE_FIELD_FINAL_CHECKBOX = "introduce.final.checkbox";
  // Sticky default for the "declare final" checkbox; defaults to true on first use.
  public static boolean ourLastCbFinalState = PropertiesComponent.getInstance().getBoolean(INTRODUCE_FIELD_FINAL_CHECKBOX, true);

  protected final PsiClass myParentClass;
  protected final PsiExpression myInitializerExpression;
  // Non-null only when the refactoring was invoked on a local variable.
  protected final PsiLocalVariable myLocalVariable;
  protected final boolean myIsCurrentMethodConstructor;
  protected final boolean myIsInvokedOnDeclaration;
  protected final boolean myWillBeDeclaredStatic;
  protected final int myOccurrencesCount;
  protected final boolean myAllowInitInMethod;
  protected final boolean myAllowInitInMethodIfAll;
  protected final TypeSelectorManager myTypeSelectorManager;

  private JCheckBox myCbReplaceAll;
  private StateRestoringCheckBox myCbDeleteVariable;
  private StateRestoringCheckBox myCbFinal;
  // True if at least one occurrence is written to; such a field cannot be final
  // when all occurrences are replaced (see allowFinal()).
  private boolean myHasWriteAccess;

  public IntroduceFieldCentralPanel(PsiClass parentClass,
                                    PsiExpression initializerExpression,
                                    PsiLocalVariable localVariable,
                                    boolean isCurrentMethodConstructor, boolean isInvokedOnDeclaration, boolean willBeDeclaredStatic,
                                    PsiExpression[] occurrences, boolean allowInitInMethod, boolean allowInitInMethodIfAll,
                                    TypeSelectorManager typeSelectorManager) {
    myParentClass = parentClass;
    myInitializerExpression = initializerExpression;
    myLocalVariable = localVariable;
    myIsCurrentMethodConstructor = isCurrentMethodConstructor;
    myIsInvokedOnDeclaration = isInvokedOnDeclaration;
    myWillBeDeclaredStatic = willBeDeclaredStatic;
    myOccurrencesCount = occurrences.length;
    // Scan occurrences once up front: any write access disables "declare final"
    // when replacing all occurrences.
    myHasWriteAccess = false;
    for (PsiExpression occurrence : occurrences) {
      if (PsiUtil.isAccessedForWriting(occurrence)) {
        myHasWriteAccess = true;
        break;
      }
    }
    myAllowInitInMethod = allowInitInMethod;
    myAllowInitInMethodIfAll = allowInitInMethodIfAll;
    myTypeSelectorManager = typeSelectorManager;
  }

  /**
   * Recursively walks {@code initializerPart} looking for unqualified references to
   * locals, parameters, or uninitialized fields declared outside {@code initializer}.
   * On the first such reference, delegates to
   * {@link #updateInitializationPlaceModel(boolean)} so subclasses can restrict the
   * available initialization places (the reference would not be visible at, e.g.,
   * field-declaration time). Returns {@code false} to stop the traversal early once
   * the model has been restricted.
   */
  protected boolean setEnabledInitializationPlaces(PsiElement initializerPart, PsiElement initializer) {
    if (initializerPart instanceof PsiReferenceExpression) {
      PsiReferenceExpression refExpr = (PsiReferenceExpression)initializerPart;
      if (refExpr.getQualifierExpression() == null) {
        PsiElement refElement = refExpr.resolve();
        // Unresolved references are treated the same as escaping locals/parameters:
        // the initializer cannot be moved freely.
        if (refElement == null ||
            (refElement instanceof PsiLocalVariable ||
             refElement instanceof PsiParameter ||
             (refElement instanceof PsiField && !((PsiField)refElement).hasInitializer())) &&
            !PsiTreeUtil.isAncestor(initializer, refElement, true)) {
          return updateInitializationPlaceModel(initializedInSetUp(refElement));
        }
      }
    }
    PsiElement[] children = initializerPart.getChildren();
    for (PsiElement child : children) {
      if (!setEnabledInitializationPlaces(child, initializer)) return false;
    }
    return true;
  }

  /**
   * Returns {@code true} if {@code refElement} is a field that is written to inside the
   * test framework's {@code setUp()} method of its containing class — in that case the
   * "initialize in setUp()" place remains a valid choice. Only consulted when the
   * subclass offers a setUp choice at all ({@link #hasSetUpChoice()}).
   */
  private boolean initializedInSetUp(PsiElement refElement) {
    if (refElement instanceof PsiField && hasSetUpChoice()) {
      final PsiMethod setUpMethod = TestFrameworks.getInstance().findSetUpMethod(((PsiField)refElement).getContainingClass());
      if (setUpMethod != null) {
        // Processor returns false (aborting the search) on the first write access;
        // an aborted search therefore means "assigned in setUp()".
        final Processor<PsiReference> initializerSearcher = new Processor<PsiReference>() {
          @Override
          public boolean process(PsiReference reference) {
            final PsiElement referenceElement = reference.getElement();
            if (referenceElement instanceof PsiExpression) {
              return !PsiUtil.isAccessedForWriting((PsiExpression)referenceElement);
            }
            return true;
          }
        };
        if (!ReferencesSearch.search(refElement, new LocalSearchScope(setUpMethod)).forEach(initializerSearcher)) {
          return true;
        }
      }
    }
    return false;
  }

  public abstract BaseExpressionToFieldHandler.InitializationPlace getInitializerPlace();

  protected abstract void initializeInitializerPlace(PsiExpression initializerExpression,
                                                     BaseExpressionToFieldHandler.InitializationPlace ourLastInitializerPlace);

  protected abstract JComponent createInitializerPlacePanel(ItemListener itemListener, ItemListener finalUpdater);

  public abstract void setInitializeInFieldDeclaration();

  public abstract void setVisibility(String visibility);

  public abstract String getFieldVisibility();

  /** Restores the persisted "declare final" state, but only if the checkbox is currently enabled. */
  protected void initializeControls(PsiExpression initializerExpression,
                                    BaseExpressionToFieldHandler.InitializationPlace ourLastInitializerPlace) {
    myCbFinal.setSelected(myCbFinal.isEnabled() && ourLastCbFinalState);
  }

  /** Invocation on the declaration always replaces all; a single occurrence never shows the checkbox. */
  public boolean isReplaceAllOccurrences() {
    if (myIsInvokedOnDeclaration) return true;
    if (myOccurrencesCount <= 1) return false;
    return myCbReplaceAll.isSelected();
  }

  public boolean isDeleteVariable() {
    if (myIsInvokedOnDeclaration) return true;
    if (myCbDeleteVariable == null) return false;
    return myCbDeleteVariable.isSelected();
  }

  public boolean isDeclareFinal() {
    return myCbFinal.isSelected();
  }

  /**
   * Builds the full center panel: the subclass-provided initializer-place panel
   * plus the shared checkbox panel, wired so that toggling "replace all"
   * refreshes the type selector, initializer selection, and "final" state.
   */
  protected JComponent createCenterPanel() {
    ItemListener itemListener = new ItemListener() {
      public void itemStateChanged(ItemEvent e) {
        if (myCbReplaceAll != null && myAllowInitInMethod) {
          updateInitializerSelection();
        }
        if (shouldUpdateTypeSelector()) {
          updateTypeSelector();
        }
      }
    };
    ItemListener finalUpdater = new ItemListener() {
      public void itemStateChanged(ItemEvent e) {
        updateCbFinal();
      }
    };
    final JComponent initializerPlacePanel = createInitializerPlacePanel(itemListener, finalUpdater);
    final JPanel checkboxes = appendCheckboxes(itemListener);
    JPanel panel = composeWholePanel(initializerPlacePanel, checkboxes);
    updateTypeSelector();
    updateCbFinal();
    return panel;
  }

  protected abstract JPanel composeWholePanel(JComponent initializerPlacePanel, JPanel checkboxPanel);

  /** Hook for subclasses; called when "replace all" changes and init-in-method is allowed. */
  protected void updateInitializerSelection() {
  }

  protected boolean shouldUpdateTypeSelector() {
    return true;
  }

  /**
   * Creates the checkbox panel: "declare final", optionally "replace all occurrences"
   * (via {@link #appendOccurrences}), and — when invoked on a local variable —
   * "delete variable declaration", whose selectability tracks "replace all".
   */
  protected JPanel appendCheckboxes(ItemListener itemListener) {
    GridBagConstraints gbConstraints = new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1,1,0,0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(0,0,0,0), 0,0);
    JPanel panel = new JPanel(new GridBagLayout());
    myCbFinal = new StateRestoringCheckBox();
    myCbFinal.setFocusable(false);
    myCbFinal.setText(RefactoringBundle.message("declare.final"));
    myCbFinal.addItemListener(itemListener);
    gbConstraints.gridy++;
    panel.add(myCbFinal, gbConstraints);
    appendOccurrences(itemListener, gbConstraints, panel);
    if (myLocalVariable != null) {
      gbConstraints.gridy++;
      if (myCbReplaceAll != null) {
        // Indent "delete variable" under the "replace all" checkbox it depends on.
        gbConstraints.insets = new Insets(0, 8, 0, 0);
      }
      myCbDeleteVariable = new StateRestoringCheckBox();
      myCbDeleteVariable.setText(RefactoringBundle.message("delete.variable.declaration"));
      panel.add(myCbDeleteVariable, gbConstraints);
      if (myIsInvokedOnDeclaration) {
        // Declaration invocation forces deletion; lock the checkbox in the selected state.
        myCbDeleteVariable.setEnabled(false);
        myCbDeleteVariable.setSelected(true);
      }
      else if (myCbReplaceAll != null) {
        updateCbDeleteVariable();
        myCbReplaceAll.addItemListener(
          new ItemListener() {
            public void itemStateChanged(ItemEvent e) {
              updateCbDeleteVariable();
            }
          }
        );
      }
    }
    return panel;
  }

  /** Adds the "replace all occurrences (N)" checkbox when there is more than one occurrence. */
  public void appendOccurrences(ItemListener itemListener, GridBagConstraints gbConstraints, JPanel panel) {
    if (myOccurrencesCount > 1) {
      myCbReplaceAll = new NonFocusableCheckBox();
      myCbReplaceAll.setText(RefactoringBundle.message("replace.all.occurrences.of.expression.0.occurrences", myOccurrencesCount));
      gbConstraints.gridy++;
      panel.add(myCbReplaceAll, gbConstraints);
      myCbReplaceAll.addItemListener(itemListener);
      if (myIsInvokedOnDeclaration) {
        myCbReplaceAll.setEnabled(false);
        myCbReplaceAll.setSelected(true);
      }
    }
  }

  private void updateTypeSelector() {
    if (myCbReplaceAll != null) {
      myTypeSelectorManager.setAllOccurrences(myCbReplaceAll.isSelected());
    }
    else {
      myTypeSelectorManager.setAllOccurrences(false);
    }
  }

  /** "Delete variable declaration" is only meaningful when all occurrences are replaced. */
  private void updateCbDeleteVariable() {
    if (!myCbReplaceAll.isSelected()) {
      myCbDeleteVariable.makeUnselectable(false);
    }
    else {
      myCbDeleteVariable.makeSelectable();
    }
  }

  protected void updateCbFinal() {
    if (!allowFinal()) {
      myCbFinal.makeUnselectable(false);
    }
    else {
      myCbFinal.makeSelectable();
    }
  }

  /** A field replacing a written-to occurrence cannot be final. */
  protected boolean allowFinal() {
    if (myHasWriteAccess && isReplaceAllOccurrences()) return false;
    return true;
  }

  public void addOccurrenceListener(ItemListener itemListener) {
    if (myCbReplaceAll != null) {
      myCbReplaceAll.addItemListener(itemListener);
    }
  }

  public void setReplaceAllOccurrences(boolean replaceAllOccurrences) {
    if (myCbReplaceAll != null) {
      myCbReplaceAll.setSelected(replaceAllOccurrences);
    }
  }

  @TestOnly
  public void setCreateFinal(boolean createFinal) {
    myCbFinal.setSelected(createFinal);
    myCbFinal.setEnabled(true);
  }

  protected void enableFinal(boolean enable){
    myCbFinal.setEnabled(enable);
  }

  /** Persists the "declare final" choice for the next dialog invocation (only if user-editable). */
  public void saveFinalState() {
    if (myCbFinal != null && myCbFinal.isEnabled()) {
      ourLastCbFinalState = myCbFinal.isSelected();
      PropertiesComponent.getInstance().setValue(INTRODUCE_FIELD_FINAL_CHECKBOX, String.valueOf(ourLastCbFinalState));
    }
  }

  /**
   * Restricts the subclass's initialization-place model when the initializer references
   * elements unavailable at some places; returns {@code false} to abort further scanning.
   */
  protected abstract boolean updateInitializationPlaceModel(boolean initializedInsetup);

  /** Whether the subclass's model offers "initialize in setUp()" as a place. */
  protected abstract boolean hasSetUpChoice();
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.tests.integration.ra;

import javax.jms.Connection;
import javax.jms.JMSContext;
import javax.jms.JMSException;
import javax.jms.JMSSecurityException;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.Queue;
import javax.jms.QueueConnection;
import javax.jms.QueueSession;
import javax.jms.Session;
import javax.jms.TextMessage;
import javax.jms.TopicConnection;
import javax.jms.XAConnection;
import javax.jms.XAQueueConnection;
import javax.jms.XASession;
import javax.resource.spi.ManagedConnection;
import javax.transaction.xa.XAResource;
import javax.transaction.xa.Xid;
import java.util.HashSet;
import java.util.Set;

import org.apache.activemq.artemis.api.jms.ActiveMQJMSClient;
import org.apache.activemq.artemis.core.remoting.impl.invm.InVMConnectorFactory;
import org.apache.activemq.artemis.core.security.Role;
import org.apache.activemq.artemis.core.transaction.impl.XidImpl;
import org.apache.activemq.artemis.jms.client.ActiveMQConnectionFactory;
import org.apache.activemq.artemis.ra.ActiveMQRAConnectionFactory;
import org.apache.activemq.artemis.ra.ActiveMQRAConnectionFactoryImpl;
import org.apache.activemq.artemis.ra.ActiveMQRAConnectionManager;
import org.apache.activemq.artemis.ra.ActiveMQRAManagedConnection;
import org.apache.activemq.artemis.ra.ActiveMQRAManagedConnectionFactory;
import org.apache.activemq.artemis.ra.ActiveMQRASession;
import org.apache.activemq.artemis.ra.ActiveMQResourceAdapter;
import org.apache.activemq.artemis.service.extensions.xa.ActiveMQXAResourceWrapper;
import org.apache.activemq.artemis.service.extensions.xa.ActiveMQXAResourceWrapperImpl;
import org.apache.activemq.artemis.spi.core.security.ActiveMQSecurityManagerImpl;
import org.apache.activemq.artemis.utils.UUIDGenerator;
import org.apache.activemq.artemis.utils.VersionLoader;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Integration tests for outbound JCA connections created through the
 * ActiveMQ Artemis resource adapter: XA send/receive, security
 * (good/bad credentials), JCA session restrictions, XAResource wrapping,
 * and sharing of the underlying {@code ActiveMQConnectionFactory}
 * between managed connections. Security is enabled on the embedded
 * broker ({@link #useSecurity()} returns {@code true}).
 */
public class OutgoingConnectionTest extends ActiveMQRATestBase {

   private ActiveMQResourceAdapter resourceAdapter;
   private ActiveMQRAConnectionFactory qraConnectionFactory;
   private ActiveMQRAManagedConnectionFactory mcf;

   @Override
   public boolean useSecurity() {
      return true;
   }

   ActiveMQRAConnectionManager qraConnectionManager = new ActiveMQRAConnectionManager();

   /**
    * Configures broker users/roles ("testuser" and default "guest"), grants full
    * permissions on the MDB queue, then starts a resource adapter bound to
    * "java://jmsXA" over in-VM transport and builds the RA connection factory
    * used by most tests.
    */
   @Override
   @Before
   public void setUp() throws Exception {
      super.setUp();
      ActiveMQSecurityManagerImpl securityManager = (ActiveMQSecurityManagerImpl) server.getSecurityManager();
      securityManager.getConfiguration().addUser("testuser", "testpassword");
      securityManager.getConfiguration().addUser("guest", "guest");
      securityManager.getConfiguration().setDefaultUser("guest");
      securityManager.getConfiguration().addRole("testuser", "arole");
      securityManager.getConfiguration().addRole("guest", "arole");
      Role role = new Role("arole", true, true, true, true, true, true, true);
      Set<Role> roles = new HashSet<Role>();
      roles.add(role);
      server.getSecurityRepository().addMatch(MDBQUEUEPREFIXED, roles);
      resourceAdapter = new ActiveMQResourceAdapter();
      resourceAdapter.setEntries("[\"java://jmsXA\"]");
      resourceAdapter.setConnectorClassName(InVMConnectorFactory.class.getName());
      MyBootstrapContext ctx = new MyBootstrapContext();
      resourceAdapter.start(ctx);
      mcf = new ActiveMQRAManagedConnectionFactory();
      mcf.setResourceAdapter(resourceAdapter);
      qraConnectionFactory = new ActiveMQRAConnectionFactoryImpl(mcf, qraConnectionManager);
   }

   @Override
   @After
   public void tearDown() throws Exception {
      if (resourceAdapter != null) {
         resourceAdapter.stop();
      }
      qraConnectionManager.stop();
      super.tearDown();
   }

   /**
    * Sends a message inside an XA transaction and verifies it is invisible to a
    * consumer before commit and delivered after commit.
    */
   @Test
   public void testSimpleMessageSendAndReceiveXA() throws Exception {
      Xid xid = new XidImpl("xa1".getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
      XAQueueConnection queueConnection = qraConnectionFactory.createXAQueueConnection();
      XASession s = queueConnection.createXASession();
      XAResource resource = s.getXAResource();
      resource.start(xid, XAResource.TMNOFLAGS);
      Queue q = ActiveMQJMSClient.createQueue(MDBQUEUE);
      MessageProducer mp = s.createProducer(q);
      MessageConsumer consumer = s.createConsumer(q);
      Message message = s.createTextMessage("test");
      mp.send(message);
      queueConnection.start();
      // Before the commit the send is not yet visible to the consumer.
      TextMessage textMessage = (TextMessage) consumer.receiveNoWait();
      assertNull(textMessage);
      resource.end(xid, XAResource.TMSUCCESS);
      resource.commit(xid, true);
      // Receive in a second XA transaction; the committed message must now arrive.
      resource.start(xid, XAResource.TMNOFLAGS);
      textMessage = (TextMessage) consumer.receiveNoWait();
      resource.end(xid, XAResource.TMSUCCESS);
      resource.commit(xid, true);
      assertNotNull(textMessage);
      assertEquals(textMessage.getText(), "test");
      // When I wrote this call, this method was doing an infinite loop.
      // this is just to avoid such thing again
      textMessage.getJMSDeliveryTime();
   }

   /**
    * Verifies that unknown credentials are rejected with JMSSecurityException for
    * every connection flavor (plain, XA, topic, queue) while valid credentials succeed.
    */
   @Test
   public void testInexistentUserOnCreateConnection() throws Exception {
      resourceAdapter = newResourceAdapter();
      MyBootstrapContext ctx = new MyBootstrapContext();
      resourceAdapter.start(ctx);
      ActiveMQRAManagedConnectionFactory mcf = new ActiveMQRAManagedConnectionFactory();
      mcf.setResourceAdapter(resourceAdapter);
      ActiveMQRAConnectionFactory qraConnectionFactory = new ActiveMQRAConnectionFactoryImpl(mcf, qraConnectionManager);
      Connection conn = null;

      try {
         conn = qraConnectionFactory.createConnection("IDont", "Exist");
         fail("Exception was expected");
      }
      catch (JMSSecurityException expected) {
      }

      conn = qraConnectionFactory.createConnection("testuser", "testpassword");
      conn.close();

      try {
         XAConnection xaconn = qraConnectionFactory.createXAConnection("IDont", "Exist");
         fail("Exception was expected");
      }
      catch (JMSSecurityException expected) {
      }

      XAConnection xaconn = qraConnectionFactory.createXAConnection("testuser", "testpassword");
      xaconn.close();

      try {
         TopicConnection topicconn = qraConnectionFactory.createTopicConnection("IDont", "Exist");
         fail("Exception was expected");
      }
      catch (JMSSecurityException expected) {
      }

      TopicConnection topicconn = qraConnectionFactory.createTopicConnection("testuser", "testpassword");
      topicconn.close();

      try {
         QueueConnection queueconn = qraConnectionFactory.createQueueConnection("IDont", "Exist");
         fail("Exception was expected");
      }
      catch (JMSSecurityException expected) {
      }

      QueueConnection queueconn = qraConnectionFactory.createQueueConnection("testuser", "testpassword");
      queueconn.close();

      mcf.stop();
   }

   /**
    * A JCA-managed connection allows only one session; creating a second one must fail.
    */
   @Test
   public void testMultipleSessionsThrowsException() throws Exception {
      resourceAdapter = newResourceAdapter();
      MyBootstrapContext ctx = new MyBootstrapContext();
      resourceAdapter.start(ctx);
      ActiveMQRAManagedConnectionFactory mcf = new ActiveMQRAManagedConnectionFactory();
      mcf.setResourceAdapter(resourceAdapter);
      ActiveMQRAConnectionFactory qraConnectionFactory = new ActiveMQRAConnectionFactoryImpl(mcf, qraConnectionManager);
      QueueConnection queueConnection = qraConnectionFactory.createQueueConnection();
      Session s = queueConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
      try {
         Session s2 = queueConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
         fail("should throw javax,jms.IllegalStateException: Only allowed one session per connection. See the J2EE spec, e.g. J2EE1.4 Section 6.6");
      }
      catch (JMSException e) {
      }
   }

   /**
    * Connections can be created both anonymously (default user) and with explicit
    * valid credentials.
    */
   @Test
   public void testConnectionCredentials() throws Exception {
      resourceAdapter = newResourceAdapter();
      MyBootstrapContext ctx = new MyBootstrapContext();
      resourceAdapter.start(ctx);
      ActiveMQRAManagedConnectionFactory mcf = new ActiveMQRAManagedConnectionFactory();
      mcf.setResourceAdapter(resourceAdapter);
      ActiveMQRAConnectionFactory qraConnectionFactory = new ActiveMQRAConnectionFactoryImpl(mcf, qraConnectionManager);
      QueueConnection queueConnection = qraConnectionFactory.createQueueConnection();
      QueueSession session = queueConnection.createQueueSession(false, Session.AUTO_ACKNOWLEDGE);

      queueConnection = qraConnectionFactory.createQueueConnection("testuser", "testpassword");
      session = queueConnection.createQueueSession(false, Session.AUTO_ACKNOWLEDGE);
   }

   /**
    * A wrong password must fail session creation even after a previous managed
    * connection has been destroyed (no stale pooled connection is reused).
    */
   @Test
   public void testConnectionCredentialsFail() throws Exception {
      resourceAdapter = newResourceAdapter();
      MyBootstrapContext ctx = new MyBootstrapContext();
      resourceAdapter.start(ctx);
      ActiveMQRAManagedConnectionFactory mcf = new ActiveMQRAManagedConnectionFactory();
      mcf.setResourceAdapter(resourceAdapter);
      ActiveMQRAConnectionFactory qraConnectionFactory = new ActiveMQRAConnectionFactoryImpl(mcf, qraConnectionManager);
      QueueConnection queueConnection = qraConnectionFactory.createQueueConnection();
      QueueSession session = queueConnection.createQueueSession(false, Session.AUTO_ACKNOWLEDGE);

      ManagedConnection mc = ((ActiveMQRASession) session).getManagedConnection();
      queueConnection.close();
      mc.destroy();

      try {
         queueConnection = qraConnectionFactory.createQueueConnection("testuser", "testwrongpassword");
         queueConnection.createQueueSession(false, Session.AUTO_ACKNOWLEDGE).close();
         fail("should throw esxception");
      }
      catch (JMSException e) {
         //pass
      }
   }

   /**
    * When authentication fails, no XA recovery resource must have been registered
    * on the managed connection factory.
    */
   @Test
   public void testConnectionCredentialsFailRecovery() throws Exception {
      resourceAdapter = newResourceAdapter();
      MyBootstrapContext ctx = new MyBootstrapContext();
      resourceAdapter.start(ctx);
      ActiveMQRAManagedConnectionFactory mcf = new ActiveMQRAManagedConnectionFactory();
      mcf.setResourceAdapter(resourceAdapter);
      ActiveMQRAConnectionFactory qraConnectionFactory = new ActiveMQRAConnectionFactoryImpl(mcf, qraConnectionManager);
      try {
         QueueConnection queueConnection = qraConnectionFactory.createQueueConnection("testuser", "testwrongpassword");
         queueConnection.createQueueSession(false, Session.AUTO_ACKNOWLEDGE).close();
         fail("should throw esxception");
      }
      catch (JMSException e) {
         //make sure the recovery is null
         assertNull(mcf.getResourceRecovery());
      }
   }

   /**
    * A successful connection registers an XA recovery resource on the managed
    * connection factory.
    */
   @Test
   public void testConnectionCredentialsOKRecovery() throws Exception {
      resourceAdapter = newResourceAdapter();
      MyBootstrapContext ctx = new MyBootstrapContext();
      resourceAdapter.start(ctx);
      ActiveMQRAManagedConnectionFactory mcf = new ActiveMQRAManagedConnectionFactory();
      mcf.setResourceAdapter(resourceAdapter);
      ActiveMQRAConnectionFactory qraConnectionFactory = new ActiveMQRAConnectionFactoryImpl(mcf, qraConnectionManager);
      QueueConnection queueConnection = qraConnectionFactory.createQueueConnection();
      QueueSession session = queueConnection.createQueueSession(false, Session.AUTO_ACKNOWLEDGE);
      assertNotNull(mcf.getResourceRecovery());
   }

   /** JMSContext created through the RA must honor the requested session mode. */
   @Test
   public void testJMSContext() throws Exception {
      resourceAdapter = newResourceAdapter();
      MyBootstrapContext ctx = new MyBootstrapContext();
      resourceAdapter.start(ctx);
      ActiveMQRAManagedConnectionFactory mcf = new ActiveMQRAManagedConnectionFactory();
      mcf.setResourceAdapter(resourceAdapter);
      ActiveMQRAConnectionFactory qraConnectionFactory = new ActiveMQRAConnectionFactoryImpl(mcf, qraConnectionManager);

      JMSContext jmsctx = qraConnectionFactory.createContext(JMSContext.DUPS_OK_ACKNOWLEDGE);
      assertEquals(JMSContext.DUPS_OK_ACKNOWLEDGE, jmsctx.getSessionMode());
   }

   /**
    * The XAResource handed out by an RA session must be wrapped in
    * {@link ActiveMQXAResourceWrapper} carrying JNDI name, product name and version
    * for transaction-manager diagnostics.
    */
   @Test
   public void testOutgoingXAResourceWrapper() throws Exception {
      XAQueueConnection queueConnection = qraConnectionFactory.createXAQueueConnection();
      XASession s = queueConnection.createXASession();

      XAResource resource = s.getXAResource();
      assertTrue(resource instanceof ActiveMQXAResourceWrapper);

      ActiveMQXAResourceWrapperImpl xaResourceWrapper = (ActiveMQXAResourceWrapperImpl) resource;
      assertTrue(xaResourceWrapper.getJndiName().equals("java://jmsXA NodeId:" + server.getNodeID()));
      assertTrue(xaResourceWrapper.getProductVersion().equals(VersionLoader.getVersion().getFullVersion()));
      assertTrue(xaResourceWrapper.getProductName().equals(ActiveMQResourceAdapter.PRODUCT_NAME));
   }

   /**
    * Two managed connections from the same MCF must share the identical underlying
    * {@code ActiveMQConnectionFactory} instance (reference equality, not equals()).
    */
   @Test
   public void testSharedActiveMQConnectionFactory() throws Exception {
      Session s = null;
      Session s2 = null;
      ActiveMQRAManagedConnection mc = null;
      ActiveMQRAManagedConnection mc2 = null;
      try {
         resourceAdapter = new ActiveMQResourceAdapter();

         resourceAdapter.setConnectorClassName(InVMConnectorFactory.class.getName());
         MyBootstrapContext ctx = new MyBootstrapContext();
         resourceAdapter.start(ctx);
         ActiveMQRAConnectionManager qraConnectionManager = new ActiveMQRAConnectionManager();
         ActiveMQRAManagedConnectionFactory mcf = new ActiveMQRAManagedConnectionFactory();
         mcf.setResourceAdapter(resourceAdapter);
         ActiveMQRAConnectionFactory qraConnectionFactory = new ActiveMQRAConnectionFactoryImpl(mcf, qraConnectionManager);
         QueueConnection queueConnection = qraConnectionFactory.createQueueConnection();
         s = queueConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
         mc = (ActiveMQRAManagedConnection) ((ActiveMQRASession) s).getManagedConnection();
         ActiveMQConnectionFactory cf1 = mc.getConnectionFactory();

         QueueConnection queueConnection2 = qraConnectionFactory.createQueueConnection();
         s2 = queueConnection2.createSession(false, Session.AUTO_ACKNOWLEDGE);
         mc2 = (ActiveMQRAManagedConnection) ((ActiveMQRASession) s2).getManagedConnection();
         ActiveMQConnectionFactory cf2 = mc2.getConnectionFactory();

         // we're not testing equality so don't use equals(); we're testing if they are actually the *same* object
         assertTrue(cf1 == cf2);
      }
      finally {
         if (s != null) {
            s.close();
         }
         if (mc != null) {
            mc.destroy();
         }
         if (s2 != null) {
            s2.close();
         }
         if (mc2 != null) {
            mc2.destroy();
         }
      }
   }

   /**
    * Destroying one managed connection must not break a sibling session that shares
    * the same underlying connection factory — the second session can still send.
    */
   @Test
   public void testSharedActiveMQConnectionFactoryWithClose() throws Exception {
      Session s = null;
      Session s2 = null;
      ActiveMQRAManagedConnection mc = null;
      ActiveMQRAManagedConnection mc2 = null;
      try {
         server.getConfiguration().setSecurityEnabled(false);
         resourceAdapter = new ActiveMQResourceAdapter();

         resourceAdapter.setConnectorClassName(InVMConnectorFactory.class.getName());
         MyBootstrapContext ctx = new MyBootstrapContext();
         resourceAdapter.start(ctx);
         ActiveMQRAConnectionManager qraConnectionManager = new ActiveMQRAConnectionManager();
         ActiveMQRAManagedConnectionFactory mcf = new ActiveMQRAManagedConnectionFactory();
         mcf.setResourceAdapter(resourceAdapter);
         ActiveMQRAConnectionFactory qraConnectionFactory = new ActiveMQRAConnectionFactoryImpl(mcf, qraConnectionManager);
         QueueConnection queueConnection = qraConnectionFactory.createQueueConnection();
         s = queueConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
         mc = (ActiveMQRAManagedConnection) ((ActiveMQRASession) s).getManagedConnection();

         QueueConnection queueConnection2 = qraConnectionFactory.createQueueConnection();
         s2 = queueConnection2.createSession(false, Session.AUTO_ACKNOWLEDGE);
         mc2 = (ActiveMQRAManagedConnection) ((ActiveMQRASession) s2).getManagedConnection();

         mc.destroy();

         MessageProducer producer = s2.createProducer(ActiveMQJMSClient.createQueue(MDBQUEUE));
         producer.send(s2.createTextMessage("x"));
      }
      finally {
         if (s != null) {
            s.close();
         }
         if (mc != null) {
            mc.destroy();
         }
         if (s2 != null) {
            s2.close();
         }
         if (mc2 != null) {
            mc2.destroy();
         }
      }
   }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.serialization; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; /** * An interface that implements data serialization for internal Geode product classes that have a * fixed id. The fixed id is used to represent the class, on the wire, at serialization time and * used in locating the constructor code for the class at deserialization. * <p> * Implementors MUST have a public zero-arg constructor. * * <p> * Note that this class is for internal use only. * * <p> * To add a new DataSerializableFixedID do this following: * <ol> * <li>Define a constant with an id that is free and put it either * in<code>DataSerializableFixedID</code> * or in the DSFID registration code for your module (for instance, in Services in the geode- * membership module). Make sure to update the "unused" comments to no longer mention your new id. * If * implementing a class used only for tests in geode-core and downstream modules then there is no * need to consume a fixed id and you should use {@link #NO_FIXED_ID}. In this case you can skip * steps 3 and 4. * For modules below geode-core you may not use {@link #NO_FIXED_ID}. 
* <li>Define a method in the class that implements <code>DataSerializableFixedID</code> named * {@link #getDSFID} that returns the constant from step 1. * <li>Add registration of your class in your module using DSFIDFactory's registerDSFID() method. * <li>Implement {@link #toData} and {@link #fromData} just like you would on a * DataSerializable class. Make sure you follow the javadocs for these methods to add support * for rolling upgrades. * <li>Implement the SerializationVersions API for backward-compatibility when necessary. This * lets you implement multiple versions of toData and fromData based on the Geode version of * the destination or source, respectively. * </ol> * */ public interface DataSerializableFixedID extends SerializationVersions, BasicSerializable { // NOTE, codes < -65536 will take 4 bytes to serialize // NOTE, codes < -128 will take 2 bytes to serialize short DISTRIBUTED_PING_MESSAGE = -162; short REGION_REDUNDANCY_STATUS = -161; short RESTORE_REDUNDANCY_RESULTS = -160; short CREATE_REGION_MESSAGE_LUCENE = -159; short FINAL_CHECK_PASSED_MESSAGE = -158; short NETWORK_PARTITION_MESSAGE = -157; short SUSPECT_MEMBERS_MESSAGE = -156; short HEARTBEAT_RESPONSE = -155; short HEARTBEAT_REQUEST = -154; short REMOVE_MEMBER_REQUEST = -153; short LEAVE_REQUEST_MESSAGE = -152; short VIEW_ACK_MESSAGE = -151; short INSTALL_VIEW_MESSAGE = -150; short NETVIEW = -148; short GET_VIEW_REQ = -147; short GET_VIEW_RESP = -146; short FIND_COORDINATOR_REQ = -145; short FIND_COORDINATOR_RESP = -144; short JOIN_RESPONSE = -143; short JOIN_REQUEST = -142; short SNAPPY_COMPRESSED_CACHED_DESERIALIZABLE = -140; short GATEWAY_EVENT_IMPL = -136; short GATEWAY_SENDER_EVENT_CALLBACK_ARGUMENT = -135; short GATEWAY_SENDER_EVENT_IMPL = -134; short CLIENT_TOMBSTONE_MESSAGE = -133; short R_CLEAR_MSG_REPLY = -132; short R_CLEAR_MSG = -131; short WAIT_FOR_VIEW_INSTALLATION = -130; short DISPATCHED_AND_CURRENT_EVENTS = -129; byte DLOCK_QUERY_MESSAGE = -128; byte DLOCK_QUERY_REPLY = 
-127; byte CLIENT_HEALTH_STATS = -126; byte PR_MANAGE_BACKUP_BUCKET_MESSAGE = -125; byte PR_MANAGE_BACKUP_BUCKET_REPLY_MESSAGE = -124; byte SIZED_BASED_LOAD_PROBE = -123; byte CLIENT_PING_MESSAGE_IMPL = -122; byte REMOTE_PUTALL_REPLY_MESSAGE = -121; byte VERSION_TAG = -120; byte REMOTE_PUTALL_MESSAGE = -119; byte ADD_CACHESERVER_PROFILE_UPDATE = -118; byte SERVER_INTEREST_REGISTRATION_MESSAGE = -117; byte FILTER_PROFILE_UPDATE = -116; byte JTA_AFTER_COMPLETION_MESSAGE = -115; byte JTA_BEFORE_COMPLETION_MESSAGE = -114; byte INVALIDATE_PARTITIONED_REGION_MESSAGE = -113; byte TX_REMOTE_COMMIT_MESSAGE = -112; byte TX_REMOTE_ROLLBACK_MESSAGE = -111; byte PR_PUTALL_REPLY_MESSAGE = -110; byte PR_PUTALL_MESSAGE = -109; byte RESOURCE_PROFILE_MESSAGE = -108; byte RESOURCE_MANAGER_PROFILE = -107; byte PR_CREATE_BUCKET_MESSAGE = -106; byte PR_CREATE_BUCKET_REPLY_MESSAGE = -105; // -104 through -101 unused byte PARTITION_REGION_CONFIG = -100; byte PR_FETCH_KEYS_REPLY_MESSAGE = -99; byte PR_DUMP_B2N_REGION_MSG = -98; byte PR_DUMP_B2N_REPLY_MESSAGE = -97; byte PR_INVALIDATE_MESSAGE = -96; byte PR_INVALIDATE_REPLY_MESSAGE = -95; // -94 and -93 unused byte PROFILES_REPLY_MESSAGE = -92; byte CACHE_SERVER_PROFILE = -91; byte CONTROLLER_PROFILE = -90; byte CREATE_REGION_MESSAGE = -89; byte DESTROY_PARTITIONED_REGION_MESSAGE = -88; byte COMMIT_PROCESS_QUERY_MESSAGE = -87; byte COMMIT_PROCESS_QUERY_REPLY_MESSAGE = -86; byte DESTROY_REGION_WITH_CONTEXT_MESSAGE = -85; byte PUT_ALL_MESSAGE = -84; byte CLEAR_REGION_MESSAGE = -83; byte INVALIDATE_REGION_MESSAGE = -82; byte STATE_MARKER_MESSAGE = -80; byte STATE_STABILIZATION_MESSAGE = -79; byte STATE_STABILIZED_MESSAGE = -78; byte CLIENT_MARKER_MESSAGE_IMPL = -77; byte TX_LOCK_UPDATE_PARTICIPANTS_MESSAGE = -76; byte TX_ORIGINATOR_RECOVERY_MESSAGE = -75; byte TX_ORIGINATOR_RECOVERY_REPLY_MESSAGE = -74; byte QUEUE_REMOVAL_MESSAGE = -73; byte DLOCK_RECOVER_GRANTOR_MESSAGE = -72; byte DLOCK_RECOVER_GRANTOR_REPLY_MESSAGE = -71; byte 
NON_GRANTOR_DESTROYED_REPLY_MESSAGE = -70; byte TOMBSTONE_MESSAGE = -69; byte IDS_REGISTRATION_MESSAGE = -68; byte TX_LOCK_UPDATE_PARTICIPANTS_REPLY_MESSAGE = -67; byte STREAMING_REPLY_MESSAGE = -66; byte PREFER_BYTES_CACHED_DESERIALIZABLE = -65; byte VM_CACHED_DESERIALIZABLE = -64; byte GATEWAY_EVENT_IMPL_66 = -63; byte SUSPEND_LOCKING_TOKEN = -62; byte OBJECT_TYPE_IMPL = -61; byte STRUCT_TYPE_IMPL = -60; byte COLLECTION_TYPE_IMPL = -59; byte TX_LOCK_BATCH = -58; byte STORE_ALL_CACHED_DESERIALIZABLE = -57; // -56 unused byte MAP_TYPE_IMPL = -55; byte LOCATOR_LIST_REQUEST = -54; byte CLIENT_CONNECTION_REQUEST = -53; byte QUEUE_CONNECTION_REQUEST = -52; byte LOCATOR_LIST_RESPONSE = -51; byte CLIENT_CONNECTION_RESPONSE = -50; byte QUEUE_CONNECTION_RESPONSE = -49; byte CLIENT_REPLACEMENT_REQUEST = -48; byte INTEREST_EVENT_MESSAGE = -47; byte INTEREST_EVENT_REPLY_MESSAGE = -46; byte CLIENT_DENYLIST_MESSAGE = -45; byte REMOVE_CLIENT_FROM_DENYLIST_MESSAGE = -44; byte GET_ALL_SERVERS_REQUEST = -43; byte GET_ALL_SERVRES_RESPONSE = -42; byte FIND_REMOTE_TX_REPLY = -41; byte FIND_REMOTE_TX_MESSAGE = -40; byte R_REMOTE_COMMIT_REPLY_MESSAGE = -39; byte R_FETCH_KEYS_REPLY = -38; byte R_FETCH_KEYS_MESSAGE = -37; byte R_SIZE_MESSAGE = -36; byte R_SIZE_REPLY_MESSAGE = -35; byte R_FETCH_ENTRY_REPLY_MESSAGE = -34; byte R_FETCH_ENTRY_MESSAGE = -33; byte R_DESTROY_MESSAGE = -32; byte R_INVALIDATE_MESSAGE = -31; byte R_INVALIDATE_REPLY_MESSAGE = -30; byte R_PUT_MESSAGE = -29; byte R_PUT_REPLY_MESSAGE = -28; byte R_CONTAINS_MESSAGE = -27; byte R_CONTAINS_REPLY_MESSAGE = -26; byte R_GET_MESSAGE = -24; byte R_GET_REPLY_MESSAGE = -25; byte DURABLE_CLIENT_INFO_RESPONSE = -23; byte DURABLE_CLIENT_INFO_REQUEST = -22; byte CLIENT_INTEREST_MESSAGE = -21; byte LATEST_LAST_ACCESS_TIME_MESSAGE = -20; byte REMOVE_CACHESERVER_PROFILE_UPDATE = -19; byte QUEUE_SYNCHRONIZATION_MESSAGE = -18; byte QUEUE_SYNCHRONIZATION_REPLY_MESSAGE = -17; // IDs -16 through -10 unused byte PR_REMOVE_ALL_MESSAGE = -9; 
byte REMOVE_ALL_MESSAGE = -8; byte PR_REMOVE_ALL_REPLY_MESSAGE = -7; byte REMOTE_REMOVE_ALL_MESSAGE = -6; byte REMOTE_REMOVE_ALL_REPLY_MESSAGE = -5; byte DISTTX_COMMIT_MESSAGE = -4; byte DISTTX_PRE_COMMIT_MESSAGE = -3; byte DISTTX_COMMIT_REPLY_MESSAGE = -2; byte DISTTX_PRE_COMMIT_REPLY_MESSAGE = -1; byte ILLEGAL = 0; // 1 through 2 unused byte PUTALL_VERSIONS_LIST = 3; byte INITIAL_IMAGE_VERSIONED_OBJECT_LIST = 4; byte FIND_VERSION_TAG = 5; byte VERSION_TAG_REPLY = 6; byte VERSIONED_OBJECT_LIST = 7; byte ENUM_ID = 8; byte ENUM_INFO = 9; byte REGION_STATE_MESSAGE = 10; byte CLIENT_INSTANTIATOR_MESSAGE = 11; byte REGISTRATION_MESSAGE = 12; byte REGISTRATION_CONTEXT_MESSAGE = 13; /** More Query Result Classes */ byte END_OF_BUCKET = 14; byte RESULTS_BAG = 15; byte STRUCT_BAG = 16; byte BUCKET_PROFILE = 17; byte PARTITION_PROFILE = 18; byte ROLE_EVENT = 19; byte CLIENT_REGION_EVENT = 20; // 21 unused byte FIND_DURABLE_QUEUE = 22; byte FIND_DURABLE_QUEUE_REPLY = 23; byte CACHE_SERVER_LOAD_MESSAGE = 24; byte OBJECT_PART_LIST = 25; byte REGION = 26; /****** Query Result Classes *******/ byte RESULTS_COLLECTION_WRAPPER = 27; byte RESULTS_SET = 28; byte SORTED_RESULT_SET = 29; byte SORTED_STRUCT_SET = 30; byte UNDEFINED = 31; byte STRUCT_IMPL = 32; byte STRUCT_SET = 33; byte CLEAR_REGION_MESSAGE_WITH_CONTEXT = 34; byte CLIENT_UPDATE_MESSAGE = 35; byte EVENT_ID = 36; byte INTEREST_RESULT_POLICY = 37; byte CLIENT_PROXY_MEMBERSHIPID = 38; byte PR_BUCKET_BACKUP_MESSAGE = 39; byte SERVER_BUCKET_PROFILE = 40; byte PR_BUCKET_PROFILE_UPDATE_MESSAGE = 41; byte PR_BUCKET_SIZE_MESSAGE = 42; byte PR_CONTAINS_KEY_VALUE_MESSAGE = 43; byte PR_DUMP_ALL_PR_CONFIG_MESSAGE = 44; byte PR_DUMP_BUCKETS_MESSAGE = 45; byte PR_FETCH_ENTRIES_MESSAGE = 46; byte PR_FETCH_ENTRY_MESSAGE = 47; byte PR_FETCH_KEYS_MESSAGE = 48; byte PR_FLUSH_MESSAGE = 49; byte PR_IDENTITY_REQUEST_MESSAGE = 50; byte PR_IDENTITY_UPDATE_MESSAGE = 51; byte PR_INDEX_CREATION_MSG = 52; byte PR_MANAGE_BUCKET_MESSAGE = 53; byte 
PR_PRIMARY_REQUEST_MESSAGE = 54; byte PR_PRIMARY_REQUEST_REPLY_MESSAGE = 55; byte PR_SANITY_CHECK_MESSAGE = 56; byte PR_PUT_REPLY_MESSAGE = 57; byte PR_QUERY_MESSAGE = 58; byte PR_REMOVE_INDEXES_MESSAGE = 59; byte PR_REMOVE_INDEXES_REPLY_MESSAGE = 60; byte PR_SIZE_MESSAGE = 61; byte PR_SIZE_REPLY_MESSAGE = 62; byte PR_BUCKET_SIZE_REPLY_MESSAGE = 63; byte PR_CONTAINS_KEY_VALUE_REPLY_MESSAGE = 64; byte PR_FETCH_ENTRIES_REPLY_MESSAGE = 65; byte PR_FETCH_ENTRY_REPLY_MESSAGE = 66; byte PR_IDENTITY_REPLY_MESSAGE = 67; byte PR_INDEX_CREATION_REPLY_MSG = 68; byte PR_MANAGE_BUCKET_REPLY_MESSAGE = 69; // 70 unused byte UPDATE_MESSAGE = 71; byte REPLY_MESSAGE = 72; byte PR_DESTROY = 73; byte CREATE_REGION_REPLY_MESSAGE = 74; byte QUERY_MESSAGE = 75; byte RESPONSE_MESSAGE = 76; byte NET_SEARCH_REQUEST_MESSAGE = 77; byte NET_SEARCH_REPLY_MESSAGE = 78; byte NET_LOAD_REQUEST_MESSAGE = 79; byte NET_LOAD_REPLY_MESSAGE = 80; byte NET_WRITE_REQUEST_MESSAGE = 81; byte NET_WRITE_REPLY_MESSAGE = 82; // DLockRequestProcessor byte DLOCK_REQUEST_MESSAGE = 83; byte DLOCK_RESPONSE_MESSAGE = 84; byte DLOCK_RELEASE_MESSAGE = 85; byte ADMIN_CACHE_EVENT_MESSAGE = 86; byte CQ_ENTRY_EVENT = 87; // InitialImageOperation byte REQUEST_IMAGE_MESSAGE = 88; byte IMAGE_REPLY_MESSAGE = 89; byte IMAGE_ENTRY = 90; byte CLOSE_CACHE_MESSAGE = 91; byte DISTRIBUTED_MEMBER = 92; byte UPDATE_WITH_CONTEXT_MESSAGE = 93; byte GRANTOR_REQUEST_MESSAGE = 94; byte GRANTOR_INFO_REPLY_MESSAGE = 95; byte STARTUP_MESSAGE = 96; byte STARTUP_RESPONSE_MESSAGE = 97; byte SHUTDOWN_MESSAGE = 98; byte DESTROY_REGION_MESSAGE = 99; byte PR_PUT_MESSAGE = 100; byte INVALIDATE_MESSAGE = 101; byte DESTROY_MESSAGE = 102; byte DA_PROFILE = 103; // DistributionAdvisor profile byte CACHE_PROFILE = 104; // CacheDistributionAdvisor profile byte ENTRY_EVENT = 105; byte UPDATE_ATTRIBUTES_MESSAGE = 106; byte PROFILE_REPLY_MESSAGE = 107; byte REGION_EVENT = 108; byte TRANSACTION_ID = 109; byte TX_COMMIT_MESSAGE = 110; byte HA_PROFILE = 111; byte 
ELDER_INIT_MESSAGE = 112; byte ELDER_INIT_REPLY_MESSAGE = 113; byte DEPOSE_GRANTOR_MESSAGE = 114; byte HA_EVENT_WRAPPER = 115; byte DLOCK_RELEASE_REPLY = 116; byte DLOCK_REMOTE_TOKEN = 117; byte COMMIT_PROCESS_FOR_TXID_MESSAGE = 118; byte FILTER_PROFILE = 119; byte PR_GET_MESSAGE = 120; byte TRANSACTION_LOCK_ID = 121; byte COMMIT_PROCESS_FOR_LOCKID_MESSAGE = 122; byte NON_GRANTOR_DESTROYED_MESSAGE = 123; byte END_OF_STREAM_TOKEN = 124; byte PR_GET_REPLY_MESSAGE = 125; byte PR_NODE = 126; byte DESTROY_WITH_CONTEXT_MESSAGE = 127; // NOTE, CODES > 127 will take two bytes to serialize short PR_FETCH_PARTITION_DETAILS_MESSAGE = 128; short PR_FETCH_PARTITION_DETAILS_REPLY = 129; short PR_DEPOSE_PRIMARY_BUCKET_MESSAGE = 130; short PR_DEPOSE_PRIMARY_BUCKET_REPLY = 131; short PR_BECOME_PRIMARY_BUCKET_MESSAGE = 132; short PR_BECOME_PRIMARY_BUCKET_REPLY = 133; short PR_REMOVE_BUCKET_MESSAGE = 134; short PR_REMOVE_BUCKET_REPLY = 135; short PR_MOVE_BUCKET_MESSAGE = 136; short PR_MOVE_BUCKET_REPLY = 137; // Geode-5401, message changed from remove transaction to expire transactions. 
short EXPIRE_CLIENT_TRANSACTIONS = 138; short REGION_VERSION_VECTOR = 139; short INVALIDATE_WITH_CONTEXT_MESSAGE = 140; short TOKEN_INVALID = 141; short TOKEN_LOCAL_INVALID = 142; short TOKEN_DESTROYED = 143; short TOKEN_REMOVED = 144; short TOKEN_REMOVED2 = 145; short STARTUP_RESPONSE_WITHVERSION_MESSAGE = 146; short SHUTDOWN_ALL_GATEWAYHUBS_REQUEST = 147; short TOKEN_TOMBSTONE = 149; short PR_DESTROY_REPLY_MESSAGE = 150; short R_DESTROY_REPLY_MESSAGE = 151; short CLI_FUNCTION_RESULT = 152; short JMX_MANAGER_PROFILE = 153; short JMX_MANAGER_PROFILE_MESSAGE = 154; short R_FETCH_VERSION_MESSAGE = 155; short R_FETCH_VERSION_REPLY = 156; short PR_TOMBSTONE_MESSAGE = 157; short UPDATE_ENTRY_VERSION_MESSAGE = 158; short PR_UPDATE_ENTRY_VERSION_MESSAGE = 159; short REDIS_KEY = 160; // 161 through 164 unused short PR_FETCH_BULK_ENTRIES_MESSAGE = 165; short PR_FETCH_BULK_ENTRIES_REPLY_MESSAGE = 166; short NWAY_MERGE_RESULTS = 167; short CUMULATIVE_RESULTS = 168; short DISTTX_ROLLBACK_MESSAGE = 169; short DISTTX_ROLLBACK_REPLY_MESSAGE = 170; // 171 through 999 unused short ADD_HEALTH_LISTENER_REQUEST = 1000; short ADD_HEALTH_LISTENER_RESPONSE = 1001; short ADD_STAT_LISTENER_REQUEST = 1002; short ADD_STAT_LISTENER_RESPONSE = 1003; short ADMIN_CONSOLE_DISCONNECT_MESSAGE = 1004; short ADMIN_CONSOLE_MESSAGE = 1005; short ADMIN_FAILURE_RESPONSE = 1006; short ALERT_LEVEL_CHANGE_MESSAGE = 1007; short ALERT_LISTENER_MESSAGE = 1008; short APP_CACHE_SNAPSHOT_MESSAGE = 1009; short BRIDGE_SERVER_REQUEST = 1010; short BRIDGE_SERVER_RESPONSE = 1011; short CACHE_CONFIG_REQUEST = 1012; short CACHE_CONFIG_RESPONSE = 1013; short CACHE_INFO_REQUEST = 1014; short CACHE_INFO_RESPONSE = 1015; short CANCELLATION_MESSAGE = 1016; short CANCEL_STAT_LISTENER_REQUEST = 1017; short CANCEL_STAT_LISTENER_RESPONSE = 1018; short DESTROY_ENTRY_MESSAGE = 1019; short ADMIN_DESTROY_REGION_MESSAGE = 1020; short FETCH_DIST_LOCK_INFO_REQUEST = 1021; short FETCH_DIST_LOCK_INFO_RESPONSE = 1022; short 
FETCH_HEALTH_DIAGNOSIS_REQUEST = 1023; short FETCH_HEALTH_DIAGNOSIS_RESPONSE = 1024; short FETCH_HOST_REQUEST = 1025; short FETCH_HOST_RESPONSE = 1026; short FETCH_RESOURCE_ATTRIBUTES_REQUEST = 1027; short FETCH_RESOURCE_ATTRIBUTES_RESPONSE = 1028; short FETCH_STATS_REQUEST = 1029; short FETCH_STATS_RESPONSE = 1030; short FETCH_SYS_CFG_REQUEST = 1031; short FETCH_SYS_CFG_RESPONSE = 1032; short FLUSH_APP_CACHE_SNAPSHOT_MESSAGE = 1033; short HEALTH_LISTENER_MESSAGE = 1034; short LICENSE_INFO_REQUEST = 1035; short LICENSE_INFO_RESPONSE = 1036; short OBJECT_DETAILS_REQUEST = 1037; short OBJECT_DETAILS_RESPONSE = 1038; short OBJECT_NAMES_REQUEST = 1039; short OBJECT_NAMES_RESPONSE = 1040; short REGION_ATTRIBUTES_REQUEST = 1041; short REGION_ATTRIBUTES_RESPONSE = 1042; short REGION_REQUEST = 1043; short REGION_RESPONSE = 1044; short REGION_SIZE_REQUEST = 1045; short REGION_SIZE_RESPONSE = 1046; short REGION_STATISTICS_REQUEST = 1047; short REGION_STATISTICS_RESPONSE = 1048; short REMOVE_HEALTH_LISTENER_REQUEST = 1049; short REMOVE_HEALTH_LISTENER_RESPONSE = 1050; short RESET_HEALTH_STATUS_REQUEST = 1051; short RESET_HEALTH_STATUS_RESPONSE = 1052; short ROOT_REGION_REQUEST = 1053; short ROOT_REGION_RESPONSE = 1054; short SNAPSHOT_RESULT_MESSAGE = 1055; short STAT_LISTENER_MESSAGE = 1056; short STORE_SYS_CFG_REQUEST = 1057; short STORE_SYS_CFG_RESPONSE = 1058; short SUB_REGION_REQUEST = 1059; short SUB_REGION_RESPONSE = 1060; short TAIL_LOG_REQUEST = 1061; short TAIL_LOG_RESPONSE = 1062; short VERSION_INFO_REQUEST = 1063; short VERSION_INFO_RESPONSE = 1064; short STAT_ALERTS_MGR_ASSIGN_MESSAGE = 1065; short UPDATE_ALERTS_DEFN_MESSAGE = 1066; short REFRESH_MEMBER_SNAP_REQUEST = 1067; short REFRESH_MEMBER_SNAP_RESPONSE = 1068; short REGION_SUB_SIZE_REQUEST = 1069; short REGION_SUB_SIZE_RESPONSE = 1070; short CHANGE_REFRESH_INT_MESSAGE = 1071; short ALERTS_NOTIF_MESSAGE = 1072; short STAT_ALERT_DEFN_NUM_THRESHOLD = 1073; short STAT_ALERT_DEFN_GAUGE_THRESHOLD = 1074; short 
STAT_ALERT_NOTIFICATION = 1075; short FILTER_INFO_MESSAGE = 1076; short REQUEST_FILTERINFO_MESSAGE = 1077; short REQUEST_RVV_MESSAGE = 1078; short RVV_REPLY_MESSAGE = 1079; short CLIENT_MEMBERSHIP_MESSAGE = 1080; // 1,081...1,199 reserved for more admin msgs short PR_FUNCTION_STREAMING_MESSAGE = 1201; short MEMBER_FUNCTION_STREAMING_MESSAGE = 1202; short DR_FUNCTION_STREAMING_MESSAGE = 1203; short FUNCTION_STREAMING_REPLY_MESSAGE = 1204; short FUNCTION_STREAMING_ORDERED_REPLY_MESSAGE = 1205; short REQUEST_SYNC_MESSAGE = 1206; // 1,209..1,999 unused short HIGH_PRIORITY_ACKED_MESSAGE = 2000; short SERIAL_ACKED_MESSAGE = 2001; short CLIENT_DATASERIALIZER_MESSAGE = 2002; // 2003..2098 unused short BUCKET_COUNT_LOAD_PROBE = 2099; short PERSISTENT_MEMBERSHIP_VIEW_REQUEST = 2100; short PERSISTENT_MEMBERSHIP_VIEW_REPLY = 2101; short PERSISTENT_STATE_QUERY_REQUEST = 2102; short PERSISTENT_STATE_QUERY_REPLY = 2103; short PREPARE_NEW_PERSISTENT_MEMBER_REQUEST = 2104; short MISSING_PERSISTENT_IDS_REQUEST = 2105; short MISSING_PERSISTENT_IDS_RESPONSE = 2106; short REVOKE_PERSISTENT_ID_REQUEST = 2107; short REVOKE_PERSISTENT_ID_RESPONSE = 2108; short REMOVE_PERSISTENT_MEMBER_REQUEST = 2109; short PERSISTENT_MEMBERSHIP_FLUSH_REQUEST = 2110; short SHUTDOWN_ALL_REQUEST = 2111; short SHUTDOWN_ALL_RESPONSE = 2112; short END_BUCKET_CREATION_MESSAGE = 2113; short FINISH_BACKUP_REQUEST = 2114; short FINISH_BACKUP_RESPONSE = 2115; short PREPARE_BACKUP_REQUEST = 2116; short BACKUP_RESPONSE = 2117; short COMPACT_REQUEST = 2118; short COMPACT_RESPONSE = 2119; short FLOW_CONTROL_PERMIT_MESSAGE = 2120; short OBJECT_PART_LIST66 = 2121; short LINKED_RESULTSET = 2122; short LINKED_STRUCTSET = 2123; short PR_ALL_BUCKET_PROFILES_UPDATE_MESSAGE = 2124; short SERIALIZED_OBJECT_PART_LIST = 2125; short FLUSH_TO_DISK_REQUEST = 2126; short FLUSH_TO_DISK_RESPONSE = 2127; short CHECK_TYPE_REGISTRY_STATE = 2128; short PREPARE_REVOKE_PERSISTENT_ID_REQUEST = 2129; // 2130 unused short PERSISTENT_VERSION_TAG 
= 2131; short PERSISTENT_RVV = 2132; short DISK_STORE_ID = 2133; short SNAPSHOT_PACKET = 2134; short SNAPSHOT_RECORD = 2135; short FLOW_CONTROL_ACK = 2136; short FLOW_CONTROL_ABORT = 2137; short REMOTE_LOCATOR_RESPONSE = 2138; short LOCATOR_JOIN_MESSAGE = 2139; // 2140 through 2141 unused short REMOTE_LOCATOR_PING_REQUEST = 2142; short REMOTE_LOCATOR_PING_RESPONSE = 2143; short GATEWAY_SENDER_PROFILE = 2144; short REMOTE_LOCATOR_JOIN_REQUEST = 2145; short REMOTE_LOCATOR_JOIN_RESPONSE = 2146; short REMOTE_LOCATOR_REQUEST = 2147; short BATCH_DESTROY_MESSAGE = 2148; short MANAGER_STARTUP_MESSAGE = 2149; short JMX_MANAGER_LOCATOR_REQUEST = 2150; short JMX_MANAGER_LOCATOR_RESPONSE = 2151; short MGMT_COMPACT_REQUEST = 2152; short MGMT_COMPACT_RESPONSE = 2153; short MGMT_FEDERATION_COMPONENT = 2154; short LOCATOR_STATUS_REQUEST = 2155; short LOCATOR_STATUS_RESPONSE = 2156; short RELEASE_CLEAR_LOCK_MESSAGE = 2157; short NULL_TOKEN = 2158; short CONFIGURATION_RESPONSE = 2160; short PARALLEL_QUEUE_REMOVAL_MESSAGE = 2161; short PR_QUERY_TRACE_INFO = 2162; short INDEX_CREATION_DATA = 2163; short SERVER_PING_MESSAGE = 2164; short PR_DESTROY_ON_DATA_STORE_MESSAGE = 2165; short DIST_TX_OP = 2166; short DIST_TX_PRE_COMMIT_RESPONSE = 2167; short DIST_TX_THIN_ENTRY_STATE = 2168; short LUCENE_CHUNK_KEY = 2169; short LUCENE_FILE = 2170; short LUCENE_FUNCTION_CONTEXT = 2171; short LUCENE_STRING_QUERY_PROVIDER = 2172; short LUCENE_TOP_ENTRIES_COLLECTOR_MANAGER = 2173; short LUCENE_ENTRY_SCORE = 2174; short LUCENE_TOP_ENTRIES = 2175; short LUCENE_TOP_ENTRIES_COLLECTOR = 2176; short WAIT_UNTIL_FLUSHED_FUNCTION_CONTEXT = 2177; short DESTROY_LUCENE_INDEX_MESSAGE = 2178; short LUCENE_PAGE_RESULTS = 2179; short LUCENE_RESULT_STRUCT = 2180; short GATEWAY_SENDER_QUEUE_ENTRY_SYNCHRONIZATION_MESSAGE = 2181; short GATEWAY_SENDER_QUEUE_ENTRY_SYNCHRONIZATION_ENTRY = 2182; short ABORT_BACKUP_REQUEST = 2183; short MEMBER_IDENTIFIER = 2184; short HOST_AND_PORT = 2185; short REDIS_SET_ID = 2186; short 
REDIS_STRING_ID = 2187; short REDIS_HASH_ID = 2188; short REDIS_NULL_DATA_ID = 2189; short REDIS_SET_OPTIONS_ID = 2190; short REDIS_MEMBER_INFO_ID = 2191; short REDIS_SORTED_SET_ID = 2192; short REDIS_SORTED_SET_OPTIONS_ID = 2193; // NOTE, codes > 65535 will take 4 bytes to serialize /** * This special code is a way for an implementor if this interface to say that it does not have a * fixed id. In that case its class name is serialized. Currently only test classes just return * this code and it is only available for use in geode-core and its downstream modules. */ int NO_FIXED_ID = Integer.MAX_VALUE; //////////////// END CODES //////////// /** * Returns the DataSerializer fixed id for the class that implements this method. */ int getDSFID(); /** * Writes the state of this object as primitive data to the given <code>DataOutput</code>.<br> * <br> * Note: For rolling upgrades, if there is a change in the object format from previous version, * add a new toDataPre_GFE_X_X_X_X() method and add an entry for the current {@link * KnownVersion} in the getSerializationVersions array of the * implementing class. e.g. if msg format changed in version 80, create toDataPre_GFE_8_0_0_0, add * Version.GFE_80 to the getSerializationVersions array and copy previous toData contents to this * newly created toDataPre_GFE_X_X_X_X() method. * * @throws IOException A problem occurs while writing to <code>out</code> */ void toData(DataOutput out, SerializationContext context) throws IOException; /** * Reads the state of this object as primitive data from the given <code>DataInput</code>. <br> * <br> * Note: For rolling upgrades, if there is a change in the object format from previous version, * add a new fromDataPre_GFE_X_X_X_X() method and add an entry for the current {@link * KnownVersion} in the getSerializationVersions array of the * implementing class. e.g. 
if msg format changed in version 80, create fromDataPre_GFE_8_0_0_0, * add Version.GFE_80 to the getSerializationVersions array and copy previous fromData contents to * this newly created fromDataPre_GFE_X_X_X_X() method. * * @throws IOException A problem occurs while reading from <code>in</code> * @throws ClassNotFoundException A class could not be loaded while reading from <code>in</code> */ void fromData(DataInput in, DeserializationContext context) throws IOException, ClassNotFoundException; }
/*
 * Copyright 2018 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.dmn.client.widgets.grid.columns;

import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Predicate;

import com.ait.lienzo.client.core.types.Point2D;
import org.jboss.errai.ui.client.local.spi.TranslationService;
import org.kie.workbench.common.dmn.api.definition.HasName;
import org.kie.workbench.common.dmn.api.definition.HasTypeRef;
import org.kie.workbench.common.dmn.api.definition.model.DMNModelInstrumentedBase;
import org.kie.workbench.common.dmn.api.definition.model.Expression;
import org.kie.workbench.common.dmn.api.property.dmn.Name;
import org.kie.workbench.common.dmn.api.property.dmn.QName;
import org.kie.workbench.common.dmn.client.editors.expressions.types.context.InformationItemCell;
import org.kie.workbench.common.dmn.client.editors.expressions.util.NameUtils;
import org.kie.workbench.common.dmn.client.editors.types.HasValueAndTypeRef;
import org.kie.workbench.common.dmn.client.editors.types.ValueAndDataTypePopoverView;
import org.kie.workbench.common.dmn.client.resources.i18n.DMNEditorConstants;
import org.kie.workbench.common.dmn.client.widgets.grid.BaseExpressionGrid;
import org.kie.workbench.common.dmn.client.widgets.grid.controls.container.CellEditorControlsView;
import org.kie.workbench.common.dmn.client.widgets.grid.model.BaseUIModelMapper;
import org.kie.workbench.common.dmn.client.widgets.grid.model.DMNSimpleGridColumn;
import org.uberfire.ext.wires.core.grids.client.model.GridCell;
import org.uberfire.ext.wires.core.grids.client.model.GridCellValue;
import org.uberfire.ext.wires.core.grids.client.model.GridData;
import org.uberfire.ext.wires.core.grids.client.model.impl.BaseGridCellValue;
import org.uberfire.ext.wires.core.grids.client.widget.context.GridBodyCellEditContext;
import org.uberfire.ext.wires.core.grids.client.widget.context.GridBodyCellRenderContext;

import static org.kie.workbench.common.dmn.api.definition.model.common.HasTypeRefHelper.getNotNullHasTypeRefs;

/**
 * Grid column whose cells expose a "name and data type" popover editor.
 * <p>
 * Editing a cell does not go through the usual {@code callback}-based cell-value
 * update; instead the column delegates all model mutation to the injected
 * {@code clearValueConsumer} / {@code setValueConsumer} / {@code setTypeRefConsumer}
 * callbacks, which operate directly on the underlying DMN model objects.
 * Subclasses supply the popover title via {@link #getPopoverTitle()}.
 *
 * @param <G> the concrete expression grid widget type hosting this column
 */
public abstract class EditableNameAndDataTypeColumn<G extends BaseExpressionGrid<? extends Expression, ? extends GridData, ? extends BaseUIModelMapper>> extends DMNSimpleGridColumn<G, InformationItemCell.HasNameCell> {

    // Decides, per uiRowIndex, whether the cell on that row may be edited.
    protected final Predicate<Integer> isEditable;
    // Invoked when the user submits an empty/blank name (clears the model value).
    protected final Consumer<HasName> clearValueConsumer;
    // Invoked when the user submits a non-blank name.
    protected final BiConsumer<HasName, Name> setValueConsumer;
    // Invoked when the user changes the type reference.
    protected final BiConsumer<HasTypeRef, QName> setTypeRefConsumer;
    // i18n service used for the popover's "name" label.
    protected final TranslationService translationService;
    // Controls container that positions and shows the popover editor.
    protected final CellEditorControlsView.Presenter cellEditorControls;
    // The popover editor presenter bound to the edited cell's value/typeRef.
    protected final ValueAndDataTypePopoverView.Presenter editor;

    /**
     * Convenience constructor for a column with a single header row.
     * Delegates to the {@code List}-based constructor.
     *
     * @param headerMetaData     single header row metadata
     * @param width              initial column width in pixels
     * @param gridWidget         owning grid widget
     * @param isEditable         per-row editability predicate
     * @param clearValueConsumer model callback for clearing a name
     * @param setValueConsumer   model callback for setting a name
     * @param setTypeRefConsumer model callback for setting a type reference
     * @param translationService i18n service
     * @param cellEditorControls popover positioning/display controls
     * @param editor             popover editor presenter
     */
    public EditableNameAndDataTypeColumn(final HeaderMetaData headerMetaData,
                                         final double width,
                                         final G gridWidget,
                                         final Predicate<Integer> isEditable,
                                         final Consumer<HasName> clearValueConsumer,
                                         final BiConsumer<HasName, Name> setValueConsumer,
                                         final BiConsumer<HasTypeRef, QName> setTypeRefConsumer,
                                         final TranslationService translationService,
                                         final CellEditorControlsView.Presenter cellEditorControls,
                                         final ValueAndDataTypePopoverView.Presenter editor) {
        this(Collections.singletonList(headerMetaData),
             width,
             gridWidget,
             isEditable,
             clearValueConsumer,
             setValueConsumer,
             setTypeRefConsumer,
             translationService,
             cellEditorControls,
             editor);
    }

    /**
     * Full constructor. The column is created non-movable but resizable, and
     * renders through a {@code NameAndDataTypeColumnRenderer}.
     *
     * @param headerMetaData     header rows metadata
     * @param width              initial column width in pixels
     * @param gridWidget         owning grid widget
     * @param isEditable         per-row editability predicate
     * @param clearValueConsumer model callback for clearing a name
     * @param setValueConsumer   model callback for setting a name
     * @param setTypeRefConsumer model callback for setting a type reference
     * @param translationService i18n service
     * @param cellEditorControls popover positioning/display controls
     * @param editor             popover editor presenter
     */
    public EditableNameAndDataTypeColumn(final List<HeaderMetaData> headerMetaData,
                                         final double width,
                                         final G gridWidget,
                                         final Predicate<Integer> isEditable,
                                         final Consumer<HasName> clearValueConsumer,
                                         final BiConsumer<HasName, Name> setValueConsumer,
                                         final BiConsumer<HasTypeRef, QName> setTypeRefConsumer,
                                         final TranslationService translationService,
                                         final CellEditorControlsView.Presenter cellEditorControls,
                                         final ValueAndDataTypePopoverView.Presenter editor) {
        super(headerMetaData,
              new NameAndDataTypeColumnRenderer(),
              width,
              gridWidget);
        this.isEditable = isEditable;
        this.clearValueConsumer = clearValueConsumer;
        this.setValueConsumer = setValueConsumer;
        this.setTypeRefConsumer = setTypeRefConsumer;
        this.translationService = translationService;
        this.cellEditorControls = cellEditorControls;
        this.editor = editor;
        setMovable(false);
        setResizable(true);
    }

    /**
     * @return the title shown on the popover editor for this column.
     */
    protected abstract String getPopoverTitle();

    /**
     * Opens the name/data-type popover editor for the given cell.
     * <p>
     * Note: the {@code callback} parameter is never invoked here; model updates
     * are routed through the injected consumers instead, so the grid cell value
     * is refreshed by whatever those consumers trigger elsewhere.
     *
     * @param cell     the cell being edited; its value is expected to hold an
     *                 {@code InformationItemCell.HasNameAndDataTypeCell}
     * @param context  render context supplying row/column indices and geometry;
     *                 assumed to actually be a {@link GridBodyCellEditContext}
     *                 (see unchecked cast below) — TODO confirm with callers
     * @param callback unused (see note above)
     */
    @Override
    public void edit(final GridCell<InformationItemCell.HasNameCell> cell,
                     final GridBodyCellRenderContext context,
                     final Consumer<GridCellValue<InformationItemCell.HasNameCell>> callback) {
        // Bail out early for rows that are not editable.
        final int rowIndex = context.getRowIndex();
        if (!isEditable.test(rowIndex)) {
            return;
        }

        final int uiRowIndex = context.getRowIndex();
        final int uiColumnIndex = context.getColumnIndex();
        final double cellWidth = context.getCellWidth();
        final double cellHeight = context.getCellHeight();
        final double absoluteCellX = context.getAbsoluteCellX();
        final double absoluteCellY = context.getAbsoluteCellY();

        // The cell value wraps the model binding that exposes both name and typeRef.
        final InformationItemCell.HasNameAndDataTypeCell binding = (InformationItemCell.HasNameAndDataTypeCell) cell.getValue().getValue();

        // Adapt the binding to the popover's HasValueAndTypeRef contract.
        // All setters are no-ops when the new value equals the current one,
        // avoiding redundant model commands.
        editor.bind(new HasValueAndTypeRef<Name>() {
                        @Override
                        public QName getTypeRef() {
                            return binding.getTypeRef();
                        }

                        @Override
                        public void setTypeRef(final QName typeRef) {
                            if (Objects.equals(typeRef, getTypeRef())) {
                                return;
                            }
                            setTypeRefConsumer.accept(binding, typeRef);
                        }

                        @Override
                        public Name getValue() {
                            return binding.getName();
                        }

                        @Override
                        public void setValue(final Name name) {
                            if (Objects.equals(name, getValue())) {
                                return;
                            }
                            // Blank names clear the value rather than storing an empty Name.
                            if (name == null || name.getValue() == null || name.getValue().trim().isEmpty()) {
                                clearValueConsumer.accept(binding);
                            } else {
                                setValueConsumer.accept(binding, name);
                            }
                        }

                        @Override
                        public String getPopoverTitle() {
                            // Delegate to the column subclass for the title.
                            return EditableNameAndDataTypeColumn.this.getPopoverTitle();
                        }

                        @Override
                        public Name toModelValue(final String componentValue) {
                            return new Name(componentValue);
                        }

                        @Override
                        public String toWidgetValue(final Name modelValue) {
                            return modelValue.getValue();
                        }

                        @Override
                        public String getValueLabel() {
                            return translationService.getTranslation(DMNEditorConstants.NameAndDataTypePopover_NameLabel);
                        }

                        @Override
                        public String normaliseValue(final String componentValue) {
                            return NameUtils.normaliseName(componentValue);
                        }

                        @Override
                        public DMNModelInstrumentedBase asDMNModelInstrumentedBase() {
                            return binding.asDMNModelInstrumentedBase();
                        }

                        @Override
                        public List<HasTypeRef> getHasTypeRefs() {
                            return getNotNullHasTypeRefs(binding);
                        }
                    },
                    uiRowIndex,
                    uiColumnIndex);

        // Default popover anchor: centre of the cell; overridden by the exact
        // relative (e.g. mouse) location when the edit context provides one.
        final double[] dxy = {absoluteCellX + cellWidth / 2, absoluteCellY + cellHeight / 2};
        final Optional<Point2D> rx = ((GridBodyCellEditContext) context).getRelativeLocation();
        rx.ifPresent(r -> {
            dxy[0] = r.getX();
            dxy[1] = r.getY();
        });

        cellEditorControls.show(editor, (int) (dxy[0]), (int) (dxy[1]));
    }

    /**
     * @return a cell value wrapping an empty name, used when no explicit value exists.
     */
    @Override
    protected GridCellValue<InformationItemCell.HasNameCell> makeDefaultCellValue() {
        return new BaseGridCellValue<>(InformationItemCell.HasNameCell.wrap(""));
    }

    /**
     * Sets the column width and propagates the change to peer columns so
     * related columns stay in sync.
     *
     * @param width new width in pixels
     */
    @Override
    public void setWidth(final double width) {
        super.setWidth(width);
        updateWidthOfPeers();
    }
}
/*
 * Copyright 2011 Marc Grue.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.qi4j.sample.dcicargo.sample_a.context.shipping.booking;

import java.util.Date;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.qi4j.api.unitofwork.UnitOfWork;
import org.qi4j.sample.dcicargo.sample_a.bootstrap.test.TestApplication;
import org.qi4j.sample.dcicargo.sample_a.context.support.FoundNoRoutesException;
import org.qi4j.sample.dcicargo.sample_a.data.entity.CargosEntity;
import org.qi4j.sample.dcicargo.sample_a.data.shipping.cargo.Cargo;
import org.qi4j.sample.dcicargo.sample_a.data.shipping.cargo.Cargos;
import org.qi4j.sample.dcicargo.sample_a.data.shipping.cargo.TrackingId;
import org.qi4j.sample.dcicargo.sample_a.data.shipping.delivery.Delivery;
import org.qi4j.sample.dcicargo.sample_a.data.shipping.delivery.RoutingStatus;
import org.qi4j.sample.dcicargo.sample_a.data.shipping.delivery.TransportStatus;
import org.qi4j.sample.dcicargo.sample_a.data.shipping.handling.HandlingEventType;
import org.qi4j.sample.dcicargo.sample_a.data.shipping.itinerary.Itinerary;
import org.qi4j.sample.dcicargo.sample_a.data.shipping.location.Location;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.*;

/**
 * Test of Book New Cargo use case.
 *
 * This is a test suite where all steps and deviations in the use case are tested.
 * Some data will carry over from one test to another (all tests run within the same UnitOfWork),
 * so test/statement order is significant — do not reorder methods casually.
 *
 * Test method names describe the test purpose. The prefix refers to the step in the use case.
 *
 * NOTE(review): fixtures {@code module}, {@code CNHKG}, {@code SESTO} and the
 * {@code day(int)} helper come from the {@link TestApplication} base class, and
 * {@code RouteException} is presumably declared in this package — confirm there.
 */
public class BookNewCargoTest
    extends TestApplication
{
    // Captured at class-load time; used below to verify that delivery
    // snapshot timestamps are created strictly after test start.
    private static final Date TODAY = new Date();

    /**
     * Delegates to the base-class fixture setup before each test.
     */
    @Before
    public void prepareTest()
        throws Exception
    {
        super.prepareTest();
    }

    /**
     * Deviation 2a: booking must be rejected when origin equals destination.
     */
    @Test( expected = RouteException.class )
    public void deviation2a_OriginAndDestinationSame()
        throws Exception
    {
        UnitOfWork uow = module.currentUnitOfWork();
        Location HONGKONG = uow.get( Location.class, CNHKG.code().get() );
        Cargos CARGOS = uow.get( Cargos.class, CargosEntity.CARGOS_ID );
        new BookNewCargo( CARGOS, HONGKONG, HONGKONG, day( 17 ) ).book();
    }

    /**
     * Deviation 2b-1: an arrival deadline in the past must be rejected.
     */
    @Test( expected = RouteException.class )
    public void deviation_2b_1_DeadlineInThePastNotAccepted()
        throws Exception
    {
        UnitOfWork uow = module.currentUnitOfWork();
        Location HONGKONG = uow.get( Location.class, CNHKG.code().get() );
        Location STOCKHOLM = uow.get( Location.class, SESTO.code().get() );
        Cargos CARGOS = uow.get( Cargos.class, CargosEntity.CARGOS_ID );
        new BookNewCargo( CARGOS, HONGKONG, STOCKHOLM, day( -1 ) ).book();
    }

    /**
     * Deviation 2b-2: a same-day deadline is still too early and must be rejected.
     */
    @Test( expected = RouteException.class )
    public void deviation_2b_2_DeadlineTodayIsTooEarly()
        throws Exception
    {
        UnitOfWork uow = module.currentUnitOfWork();
        Location HONGKONG = uow.get( Location.class, CNHKG.code().get() );
        Location STOCKHOLM = uow.get( Location.class, SESTO.code().get() );
        Cargos CARGOS = uow.get( Cargos.class, CargosEntity.CARGOS_ID );
        new BookNewCargo( CARGOS, HONGKONG, STOCKHOLM, day( 0 ) ).book();
    }

    /**
     * Deviation 2b-3: a deadline of tomorrow is the earliest accepted value
     * (no exception expected).
     */
    @Test
    public void deviation_2b_3_DeadlineTomorrowIsOkay()
        throws Exception
    {
        UnitOfWork uow = module.currentUnitOfWork();
        Location HONGKONG = uow.get( Location.class, CNHKG.code().get() );
        Location STOCKHOLM = uow.get( Location.class, SESTO.code().get() );
        Cargos CARGOS = uow.get( Cargos.class, CargosEntity.CARGOS_ID );
        new BookNewCargo( CARGOS, HONGKONG, STOCKHOLM, day( 1 ) ).book();
    }

    /**
     * Step 2: booking with valid input creates a cargo whose route
     * specification and initial (un-routed) delivery snapshot reflect the input.
     */
    @Test
    public void step_2_CreateNewCargo()
        throws Exception
    {
        UnitOfWork uow = module.currentUnitOfWork();
        Location HONGKONG = uow.get( Location.class, CNHKG.code().get() );
        Location STOCKHOLM = uow.get( Location.class, SESTO.code().get() );
        Cargos CARGOS = uow.get( Cargos.class, CargosEntity.CARGOS_ID );

        // Create cargo with valid input from customer
        TrackingId trackingId = new BookNewCargo( CARGOS, HONGKONG, STOCKHOLM, day( 17 ) ).book();

        // Retrieve created cargo from store
        Cargo cargo = uow.get( Cargo.class, trackingId.id().get() );

        // Test cargo data
        assertThat( cargo.trackingId().get(), is( equalTo( trackingId ) ) );
        assertThat( cargo.origin().get(), is( equalTo( HONGKONG ) ) );

        // Test route specification
        assertThat( cargo.routeSpecification().get().destination().get(), is( equalTo( STOCKHOLM ) ) );
        // day(17) here is calculated a few milliseconds after initial day(17), so it will be later...
        assertThat( cargo.routeSpecification().get().arrivalDeadline().get(), equalTo( day( 17 ) ));

        // (Itinerary is not assigned yet)

        // Test derived delivery snapshot
        Delivery delivery = cargo.delivery().get();
        assertThat( delivery.timestamp().get().after( TODAY ), is( equalTo( true ) ) ); // TODAY is set first
        assertThat( delivery.routingStatus().get(), is( equalTo( RoutingStatus.NOT_ROUTED ) ) );
        assertThat( delivery.transportStatus().get(), is( equalTo( TransportStatus.NOT_RECEIVED ) ) );
        // First expected handling event is RECEIVE at the origin, with no voyage yet.
        assertThat( delivery.nextExpectedHandlingEvent().get().handlingEventType().get(), is( equalTo( HandlingEventType.RECEIVE ) ) );
        assertThat( delivery.nextExpectedHandlingEvent().get().location().get(), is( equalTo( HONGKONG ) ) );
        assertThat( delivery.nextExpectedHandlingEvent().get().voyage().get(), is( equalTo( null ) ) );
        // Nothing has been handled or tracked yet.
        assertThat( delivery.lastHandlingEvent().get(), is( equalTo( null ) ) );
        assertThat( delivery.lastKnownLocation().get(), is( equalTo( null ) ) );
        assertThat( delivery.currentVoyage().get(), is( equalTo( null ) ) );
        assertThat( delivery.eta().get(), is( equalTo( null ) ) ); // Is set when itinerary is assigned
        assertThat( delivery.isMisdirected().get(), is( equalTo( false ) ) );
        assertThat( delivery.isUnloadedAtDestination().get(), is( equalTo( false ) ) );
    }

    /**
     * Deviation 3a: an unrealistically tight deadline yields no route candidates,
     * surfacing as {@link FoundNoRoutesException}.
     */
    @Test( expected = FoundNoRoutesException.class )
    public void deviation_3a_NoRoutesCanBeThatFast()
        throws Exception
    {
        UnitOfWork uow = module.currentUnitOfWork();
        Location HONGKONG = uow.get( Location.class, CNHKG.code().get() );
        Location STOCKHOLM = uow.get( Location.class, SESTO.code().get() );
        Cargos CARGOS = uow.get( Cargos.class, CargosEntity.CARGOS_ID );
        TrackingId trackingId = new BookNewCargo( CARGOS, HONGKONG, STOCKHOLM, day( 1 ) ).book();
        Cargo cargo = uow.get( Cargo.class, trackingId.id().get() );

        // No routes will be found
        new BookNewCargo( cargo ).routeCandidates();
    }

    /**
     * Step 3: every calculated route candidate must start at the origin,
     * end at the destination, and arrive before the deadline.
     */
    @Test
    public void step_3_CalculatePossibleRoutes()
        throws Exception
    {
        UnitOfWork uow = module.currentUnitOfWork();
        Location HONGKONG = uow.get( Location.class, CNHKG.code().get() );
        Location STOCKHOLM = uow.get( Location.class, SESTO.code().get() );
        Cargos CARGOS = uow.get( Cargos.class, CargosEntity.CARGOS_ID );

        // Create valid cargo
        TrackingId trackingId = new BookNewCargo( CARGOS, HONGKONG, STOCKHOLM, day( 30 ) ).book();
        Cargo cargo = uow.get( Cargo.class, trackingId.id().get() );

        // Step 3 - Find possible routes
        List<Itinerary> routeCandidates = new BookNewCargo( cargo ).routeCandidates();

        // Check possible routes
        for (Itinerary itinerary : routeCandidates)
        {
            assertThat( "First load location equals origin location.",
                        itinerary.firstLeg().loadLocation().get(),
                        is( equalTo( cargo.routeSpecification().get().origin().get() ) ) );
            assertThat( "Last unload location equals destination location.",
                        itinerary.lastLeg().unloadLocation().get(),
                        is( equalTo( cargo.routeSpecification().get().destination().get() ) ) );
            assertThat( "Cargo will be delivered in time.",
                        itinerary.finalArrivalDate().before( cargo.routeSpecification().get().arrivalDeadline().get() ),
                        is( equalTo( true ) ) );
        }
    }

    /**
     * Step 5: assigning a route candidate to the cargo marks it ROUTED and
     * derives an ETA that respects the deadline.
     */
    @Test
    public void step_5_AssignCargoToRoute()
        throws Exception
    {
        UnitOfWork uow = module.currentUnitOfWork();
        Location HONGKONG = uow.get( Location.class, CNHKG.code().get() );
        Location STOCKHOLM = uow.get( Location.class, SESTO.code().get() );
        Cargos CARGOS = uow.get( Cargos.class, CargosEntity.CARGOS_ID );

        // Create valid cargo
        Date deadline = day( 30 );
        TrackingId trackingId = new BookNewCargo( CARGOS, HONGKONG, STOCKHOLM, deadline ).book();
        Cargo cargo = uow.get( Cargo.class, trackingId.id().get() );
        List<Itinerary> routeCandidates = new BookNewCargo( cargo ).routeCandidates();

        // Get first route found
        // Would normally be found with an Itinerary id from customer selection
        Itinerary itinerary = routeCandidates.get( 0 );

        // Use case step 5 - System assigns cargo to route
        new BookNewCargo( cargo, itinerary ).assignCargoToRoute();

        assertThat( "Itinerary has been assigned to cargo.", itinerary, is( equalTo( cargo.itinerary().get() ) ) );

        // BuildDeliverySnapshot will check if itinerary is valid. No need to check it here.

        // Check values set in new delivery snapshot
        Delivery delivery = cargo.delivery().get();
        assertThat( delivery.routingStatus().get(), is( equalTo( RoutingStatus.ROUTED ) ) );

        // ETA (= Unload time of last Leg) is before Deadline (set in previous test)
        assertTrue( delivery.eta().get().before( deadline ) );
    }
}
/* * #%L * ACS AEM Commons Bundle * %% * Copyright (C) 2018 Adobe * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package com.adobe.acs.commons.mcp.impl.processes; import com.adobe.acs.commons.data.CompositeVariant; import com.adobe.acs.commons.data.Spreadsheet; import com.adobe.acs.commons.fam.ActionManager; import com.adobe.acs.commons.mcp.ProcessDefinition; import com.adobe.acs.commons.mcp.ProcessInstance; import com.adobe.acs.commons.mcp.form.CheckboxComponent; import com.adobe.acs.commons.mcp.form.FileUploadComponent; import com.adobe.acs.commons.mcp.form.FormField; import com.adobe.acs.commons.mcp.form.RadioComponent; import com.adobe.acs.commons.mcp.model.GenericReport; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.EnumMap; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import javax.jcr.RepositoryException; import com.day.crx.JcrConstants; import org.apache.commons.lang3.StringUtils; import org.apache.sling.api.request.RequestParameter; import org.apache.sling.api.resource.LoginException; import org.apache.sling.api.resource.ModifiableValueMap; import org.apache.sling.api.resource.PersistenceException; import org.apache.sling.api.resource.Resource; import org.apache.sling.api.resource.ResourceResolver; import org.apache.sling.api.resource.ResourceUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static 
com.adobe.acs.commons.data.Spreadsheet.ROW_NUMBER; import static javax.jcr.Property.JCR_PRIMARY_TYPE; /** * Read node and metadata from a spreadsheet and update underlying node storage * with provided data. */ public class DataImporter extends ProcessDefinition { private static final Logger LOG = LoggerFactory.getLogger(DataImporter.class); private static final String PATH = "path"; private static final String SLASH = "/"; public enum MergeMode { CREATE_AND_OVERWRITE_PROPERTIES(true, true, true), CREATE_AND_MERGE_PROPERTIES(true, true, false), CREATE_ONLY_SKIP_EXISTING(true, false, false), OVERWRITE_EXISTING_ONLY(false, true, true), MERGE_EXISTING_ONLY(false, true, false), DO_NOTHING(false, false, false); boolean create = false; boolean update = false; boolean overwriteProps = false; // Note that this is moot if update is false MergeMode(boolean c, boolean u, boolean o) { create = c; update = u; overwriteProps = o; } } @FormField( name = "Excel File", description = "Provide the .xlsx file that defines the nodes being imported", component = FileUploadComponent.class, options = {"mimeTypes=application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", "required"} ) private transient RequestParameter importFile; @FormField( name = "Existing action", description = "What to do if an asset exists", component = RadioComponent.EnumerationSelector.class, options = {"default=create_and_overwrite_properties", "vertical"} ) private MergeMode mergeMode = MergeMode.CREATE_AND_OVERWRITE_PROPERTIES; @FormField( name = "Structure node type", description = "Type assigned to new nodes (ignored if spreadsheet has a jcr:primaryType column) -- for ordered folders use sling:OrderedFolder", options = {"default=sling:Folder"} ) private String defaultNodeType = "sling:Folder"; @FormField( name = "Include jcr:content nodes", description = "If checked, jcr:content nodes are created/updated under nodes", component = CheckboxComponent.class ) private boolean includeJcrContent = false; 
@FormField( name = "jcr:content node type", description = "Type assigned to new jcr:content child nodes (ignored if spreadsheet has a jcr:content/jcr:primaryType column)", options = {"default=nt:unstructured"} ) private String defaultJcrContentType = "nt:unstructured"; @FormField( name = "Convert header names", description = "If checked, property names in the header are converted to lower-case and non-compatible characters are converted to underscores", component = CheckboxComponent.class ) private boolean enableHeaderNameConversion = false; @FormField( name = "Dry run", description = "If checked, no import happens. Useful for data validation", component = CheckboxComponent.class, options = "checked" ) boolean dryRunMode = true; @FormField( name = "Detailed report", description = "If checked, information about every asset is recorded", component = CheckboxComponent.class, options = "checked" ) private boolean detailedReport = true; @FormField( name = "Import in sorted order", description = "If checked, nodes will be imported in the order determined by their paths", component = CheckboxComponent.class, options = "checked" ) private boolean presortData = true; public static final String TOTAL = "Total"; private EnumMap<ReportColumns, Object> createdNodes = trackActivity(TOTAL, "Create", 0); private EnumMap<ReportColumns, Object> updatedNodes = trackActivity(TOTAL, "Updated", 0); private EnumMap<ReportColumns, Object> skippedNodes = trackActivity(TOTAL, "Skipped", 0); private EnumMap<ReportColumns, Object> noChangeNodes = trackActivity(TOTAL, "No Change", 0); @SuppressWarnings("squid:S00115") public static enum ReportColumns { item, action, count } Spreadsheet data; private List<EnumMap<ReportColumns, Object>> reportRows; protected synchronized EnumMap<ReportColumns, Object> trackActivity(String item, String action, Integer count) { if (reportRows == null) { reportRows = Collections.synchronizedList(new ArrayList<>()); } EnumMap<ReportColumns, Object> reportRow = new 
EnumMap<>(ReportColumns.class); reportRow.put(ReportColumns.item, item); reportRow.put(ReportColumns.action, action); reportRow.put(ReportColumns.count, count); reportRows.add(reportRow); return reportRow; } @SuppressWarnings("squid:S2445") protected void incrementCount(EnumMap<ReportColumns, Object> row, int amt) { synchronized (row) { row.put(ReportColumns.count, (int) row.getOrDefault(ReportColumns.count, 0) + amt); } } @Override public void init() throws RepositoryException { // Nothing to do here } @Override public void buildProcess(ProcessInstance instance, ResourceResolver rr) throws LoginException, RepositoryException { if (data == null && importFile != null) { try { data = new Spreadsheet(enableHeaderNameConversion, importFile, PATH).buildSpreadsheet(); if (presortData) { Collections.sort(data.getDataRowsAsCompositeVariants(), (a, b) -> b.get(PATH).toString().compareTo(a.get(PATH).toString())); } instance.getInfo().setDescription("Import " + data.getFileName() + " (" + data.getRowCount() + " rows)"); } catch (IOException ex) { LOG.error("Unable to process import", ex); instance.getInfo().setDescription("Import " + data.getFileName() + " (failed)"); throw new RepositoryException("Unable to parse input file", ex); } } instance.defineCriticalAction("Import Data", rr, this::importData); } private transient GenericReport report = new GenericReport(); @Override public synchronized void storeReport(ProcessInstance instance, ResourceResolver rr) throws RepositoryException, PersistenceException { report.setRows(reportRows, ReportColumns.class); report.persist(rr, instance.getPath() + "/jcr:content/report"); } private void importData(ActionManager manager) { data.getDataRowsAsCompositeVariants().forEach((row) -> { manager.deferredWithResolver(rr -> { String path = row.get(PATH).toString(); Resource r = rr.getResource(path); if (r == null) { handleMissingNode(path, rr, row); } else if (mergeMode.update) { updateMetadata(path, rr, row); } else { 
incrementCount(skippedNodes, 1); if (detailedReport) { trackActivity(path, "Skipped", null); } } }); }); } public void handleMissingNode(String path, ResourceResolver rr, Map<String, CompositeVariant> row) throws PersistenceException { if (mergeMode.create) { if (!dryRunMode) { createMissingNode(path, rr, row); } incrementCount(createdNodes, 1); if (detailedReport) { trackActivity(path, "Created", null); } } else { incrementCount(skippedNodes, 1); if (detailedReport) { trackActivity(path, "Skipped missing", null); } } } /** * Create missing node at the given path with the properties from the passed row. * If properties are pre-appended with "jcr:content/", create jcr:content node. * * @param path Path of node. * @param rr ResourceResolver. * @param row Row from XLSX file. * @throws PersistenceException PersistenceException */ private void createMissingNode(String path, ResourceResolver rr, Map<String, CompositeVariant> row) throws PersistenceException { LOG.debug("Start of createMissingNode for node {}", path); String parentPath = StringUtils.substringBeforeLast(path, SLASH); Map<String, Object> resourceProperties = new HashMap<>(); resourceProperties.put(JcrConstants.JCR_PRIMARYTYPE, defaultNodeType); Resource parent = ResourceUtil.getOrCreateResource(rr, parentPath, resourceProperties, defaultNodeType, true); String nodeName = StringUtils.substringAfterLast(path, SLASH); if (!row.containsKey(JCR_PRIMARY_TYPE) && !row.containsKey(JcrConstants.JCR_PRIMARYTYPE)) { row.put(JcrConstants.JCR_PRIMARYTYPE, new CompositeVariant(defaultNodeType)); } Map<String, Object> nodeProps = createPropertyMap(row); rr.refresh(); Resource main = rr.create(parent, nodeName, nodeProps); if (includeJcrContent) { if (!row.containsKey(JcrConstants.JCR_CONTENT + SLASH + JcrConstants.JCR_PRIMARYTYPE)) { row.put(JcrConstants.JCR_CONTENT + SLASH + JcrConstants.JCR_PRIMARYTYPE, new CompositeVariant(defaultJcrContentType)); } Map<String, Object> jcrContentProps = 
createJcrContentPropertyMap(row); if (!jcrContentProps.isEmpty()) { rr.create(main, JcrConstants.JCR_CONTENT, jcrContentProps); } } LOG.debug("End of createMissingNode for node {}", path); } /** * Get the ModifiableValueMap of the resource and update with the properties from the row. * If jcr:content/jcr:primaryType is provided, get the jcr:content resource and update. * * @param path Path of node. * @param rr ResourceResolver * @param nodeInfo Map of properties from the row. * @throws PersistenceException PersistenceException */ private void updateMetadata(String path, ResourceResolver rr, Map<String, CompositeVariant> nodeInfo) throws PersistenceException { LOG.debug("Start of updateMetaData"); Resource node = rr.getResource(path); ModifiableValueMap resourceProperties = node.adaptTo(ModifiableValueMap.class); populateMetadataFromRow(resourceProperties, createPropertyMap(nodeInfo)); if (includeJcrContent) { Map<String, Object> jcrContentProps = createJcrContentPropertyMap(nodeInfo); Resource jcrContent = node.getChild(JcrConstants.JCR_CONTENT); if (jcrContent == null) { if (!jcrContentProps.containsKey(JcrConstants.JCR_CONTENT + SLASH + JcrConstants.JCR_PRIMARYTYPE)) { jcrContentProps.put(JcrConstants.JCR_PRIMARYTYPE, defaultJcrContentType); } rr.create(node, JcrConstants.JCR_CONTENT, jcrContentProps); } else { ModifiableValueMap contentResourceProperties = jcrContent.adaptTo(ModifiableValueMap.class); populateMetadataFromRow(contentResourceProperties, jcrContentProps); } } if (rr.hasChanges()) { incrementCount(updatedNodes, 1); if (detailedReport) { trackActivity(path, "Updated Properties", null); } if (!dryRunMode) { rr.commit(); } rr.refresh(); } else { if (detailedReport) { trackActivity(path, "No Change", null); } incrementCount(noChangeNodes, 1); } LOG.debug("End of updateMetadata"); } /** * Update the resourceProperties with the properties from the row. * * @param resourceProperties ModifiableValueMap of resource. 
* @param nodeInfo Map of properties from the row. */ private void populateMetadataFromRow(ModifiableValueMap resourceProperties, Map<String, Object> nodeInfo) { LOG.debug("Start of populateMetadataFromRow"); for (Map.Entry entry : nodeInfo.entrySet()) { String key = (String)entry.getKey(); if (key != null && (mergeMode.overwriteProps || !resourceProperties.containsKey(key))) { Object value = entry.getValue(); if (value != null) { resourceProperties.put(key, value); } } } LOG.debug("End of populateMetadataFromRow"); } /** * Create map of properties for node. * * @param row Row of data from XLSX. * @return Map of property names and values. */ private Map<String, Object> createPropertyMap(Map<String, CompositeVariant> row) { return row.entrySet().stream() .filter(e -> !e.getKey().equals(ROW_NUMBER) && !e.getKey().equals(PATH) && e.getValue() != null && !e.getKey().contains(SLASH)) .collect( Collectors.toMap( e -> e.getKey(), e -> e.getValue().toPropertyValue() ) ); } /** * Create map of properties for jcr:content node. * * @param row Row of data from XLSX. * @return Map of property names and values. */ private Map<String, Object> createJcrContentPropertyMap(Map<String, CompositeVariant> row) { return row.entrySet().stream() .filter(e -> e.getKey().startsWith(JcrConstants.JCR_CONTENT)) .collect( Collectors.toMap( e -> e.getKey().replace(JcrConstants.JCR_CONTENT + SLASH, ""), e -> e.getValue().toPropertyValue() ) ); } }
package org.metaborg.spoofax.core.stratego; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.PrintStream; import java.io.Reader; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.util.Map; import org.apache.commons.vfs2.AllFileSelector; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileType; import org.metaborg.core.resource.IResourceService; import org.metaborg.util.log.Level; import org.metaborg.util.log.LoggerUtils; import org.spoofax.interpreter.core.InterpreterException; import org.spoofax.interpreter.library.IOAgent; import org.spoofax.interpreter.library.PrintStreamWriter; import com.google.common.collect.Maps; public class ResourceAgent extends IOAgent { private static class ResourceHandle { public final FileObject resource; public Reader reader; public Writer writer; public InputStream inputStream; public OutputStream outputStream; ResourceHandle(FileObject resource) { this.resource = resource; } } private final IResourceService resourceService; private final FileObject tempDir; private final Map<Integer, ResourceHandle> openFiles = Maps.newHashMap(); private final OutputStream stdout; private final Writer stdoutWriter; private final OutputStream stderr; private final Writer stderrWriter; private FileObject workingDir; private FileObject definitionDir; private boolean acceptDirChanges = false; public static OutputStream defaultStdout(String... excludePatterns) { return LoggerUtils.stream(LoggerUtils.logger("stdout"), Level.Info, excludePatterns); } public static OutputStream defaultStderr(String... 
excludePatterns) { return LoggerUtils.stream(LoggerUtils.logger("stderr"), Level.Info, excludePatterns); } public ResourceAgent(IResourceService resourceService) { this(resourceService, resourceService.resolve(System.getProperty("user.dir"))); } public ResourceAgent(IResourceService resourceService, FileObject initialDir) { this(resourceService, initialDir, defaultStdout()); } public ResourceAgent(IResourceService resourceService, FileObject initialDir, OutputStream stdout) { this(resourceService, initialDir, stdout, defaultStderr()); } public ResourceAgent(IResourceService resourceService, FileObject initialDir, OutputStream stdout, OutputStream stderr) { super(); this.acceptDirChanges = true; // Start accepting dir changes after IOAgent constructor call. this.resourceService = resourceService; this.tempDir = resourceService.resolve(System.getProperty("java.io.tmpdir")); this.workingDir = initialDir; this.definitionDir = initialDir; this.stdout = stdout; this.stdoutWriter = new PrintStreamWriter(new PrintStream(stdout)); this.stderr = stderr; this.stderrWriter = new PrintStreamWriter(new PrintStream(stderr)); } @Override public String getWorkingDir() { return workingDir.getName().getURI(); } public FileObject getWorkingDirResource() { return workingDir; } @Override public String getDefinitionDir() { return definitionDir.getName().getURI(); } public FileObject getDefinitionDirResource() { return definitionDir; } @Override public String getTempDir() { return tempDir.getName().getURI(); } public FileObject getTempDirResource() { return tempDir; } @Override public void setWorkingDir(String newWorkingDir) throws IOException { if(!acceptDirChanges) return; workingDir = resourceService.resolve(workingDir, newWorkingDir); } public void setAbsoluteWorkingDir(FileObject dir) { workingDir = dir; } @Override public void setDefinitionDir(String newDefinitionDir) { if(!acceptDirChanges) return; definitionDir = resourceService.resolve(definitionDir, newDefinitionDir); } public 
void setAbsoluteDefinitionDir(FileObject dir) { definitionDir = dir; } @Override public Writer getWriter(int fd) { if(fd == CONST_STDOUT) { return stdoutWriter; } else if(fd == CONST_STDERR) { return stderrWriter; } else { final ResourceHandle handle = openFiles.get(fd); if(handle.writer == null) { assert handle.outputStream == null; try { handle.writer = new BufferedWriter(new OutputStreamWriter(internalGetOutputStream(fd), FILE_ENCODING)); } catch(UnsupportedEncodingException e) { throw new RuntimeException(e); } } return handle.writer; } } @Override public OutputStream internalGetOutputStream(int fd) { if(fd == CONST_STDOUT) { return stdout; } else if(fd == CONST_STDERR) { return stderr; } else { final ResourceHandle handle = openFiles.get(fd); if(handle.outputStream == null) { assert handle.writer == null; try { handle.outputStream = handle.resource.getContent().getOutputStream(); } catch(FileSystemException e) { throw new RuntimeException("Could not get output stream for resource", e); } } return handle.outputStream; } } @Override public void writeChar(int fd, int c) throws IOException { if(fd == CONST_STDOUT || fd == CONST_STDERR) { getWriter(fd).append((char) c); } else { getWriter(fd).append((char) c); } } @Override public boolean closeRandomAccessFile(int fd) throws InterpreterException { if(fd == CONST_STDOUT || fd == CONST_STDERR || fd == CONST_STDIN) { return true; } final ResourceHandle handle = openFiles.remove(fd); if(handle == null) return true; // already closed: be forgiving try { if(handle.writer != null) handle.writer.close(); if(handle.outputStream != null) handle.outputStream.close(); handle.resource.getContent().close(); } catch(IOException e) { throw new RuntimeException("Could not close resource", e); } return true; } @Override public void closeAllFiles() { for(ResourceHandle handle : openFiles.values()) { try { if(handle.writer != null) handle.writer.close(); if(handle.outputStream != null) handle.outputStream.close(); 
handle.resource.getContent().close(); } catch(IOException e) { throw new RuntimeException("Could not close resource", e); } } openFiles.clear(); } @Override public int openRandomAccessFile(String fn, String mode) throws IOException { boolean appendMode = mode.indexOf('a') >= 0; boolean writeMode = appendMode || mode.indexOf('w') >= 0; boolean clearFile = false; final FileObject resource = resourceService.resolve(workingDir, fn); if(writeMode) { if(!resource.exists()) { resource.createFile(); } else if(!appendMode) { clearFile = true; } } if(clearFile) { resource.delete(); resource.createFile(); } openFiles.put(fileCounter, new ResourceHandle(resource)); return fileCounter++; } @Override public InputStream internalGetInputStream(int fd) { if(fd == CONST_STDIN) { return stdin; } final ResourceHandle handle = openFiles.get(fd); if(handle.inputStream == null) { try { handle.inputStream = handle.resource.getContent().getInputStream(); } catch(FileSystemException e) { throw new RuntimeException("Could not get input stream for resource", e); } } return handle.inputStream; } @Override public Reader getReader(int fd) { if(fd == CONST_STDIN) { return stdinReader; } final ResourceHandle handle = openFiles.get(fd); try { if(handle.reader == null) handle.reader = new BufferedReader(new InputStreamReader(internalGetInputStream(fd), FILE_ENCODING)); } catch(UnsupportedEncodingException e) { throw new RuntimeException("Could not get reader for resource", e); } return handle.reader; } @Override public String readString(int fd) throws IOException { char[] buffer = new char[2048]; final StringBuilder result = new StringBuilder(); final Reader reader = getReader(fd); for(int read = 0; read != -1; read = reader.read(buffer)) { result.append(buffer, 0, read); } return result.toString(); } @Override public String[] readdir(String fn) { try { final FileObject resource = resourceService.resolve(workingDir, fn); if(!resource.exists() || resource.getType() == FileType.FILE) { return new 
String[0]; } final FileName name = resource.getName(); final FileObject[] children = resource.getChildren(); final String[] strings = new String[children.length]; for(int i = 0; i < children.length; ++i) { final FileName absName = children[i].getName(); strings[i] = name.getRelativeName(absName); } return strings; } catch(FileSystemException e) { throw new RuntimeException("Could not list contents of directory " + fn, e); } } @Override public void printError(String error) { try { getWriter(CONST_STDERR).write(error + "\n"); } catch(IOException e) { // Like System.err.println, we swallow exceptions } } @Override public InputStream openInputStream(String fn, boolean isDefinitionFile) throws FileNotFoundException { final FileObject dir = isDefinitionFile ? definitionDir : workingDir; try { final FileObject file = resourceService.resolve(dir, fn); return file.getContent().getInputStream(); } catch(FileSystemException e) { throw new RuntimeException("Could not get input stream for resource", e); } } @Override public OutputStream openFileOutputStream(String fn) throws FileNotFoundException { try { return resourceService.resolve(workingDir, fn).getContent().getOutputStream(); } catch(FileSystemException e) { throw new RuntimeException("Could not get output stream for resource", e); } } @Override public File openFile(String fn) { final FileObject resource = resourceService.resolve(workingDir, fn); File localResource = resourceService.localPath(resource); if(localResource == null) { final File localWorkingDir = resourceService.localPath(workingDir); if(localWorkingDir == null) { // Local working directory does not reside on the local file system, just return a File. return new File(fn); } // Could not get a local File using the FileObject interface, fall back to composing Files. 
return new File(getAbsolutePath(localWorkingDir.getPath(), fn)); } return localResource; } @Override public String createTempFile(String prefix) throws IOException { // GTODO: should use FileObject interface final File tempFile = File.createTempFile(prefix, null); tempFile.deleteOnExit(); return tempFile.getPath(); } @Override public String createTempDir(String prefix) throws IOException { // GTODO: should use FileObject interface File result; do { result = File.createTempFile(prefix, null); result.delete(); } while(!result.mkdir()); result.deleteOnExit(); return result.getPath(); } @Override public boolean mkdir(String dn) { try { final FileObject resource = resourceService.resolve(workingDir, dn); final boolean created = !resource.exists(); resource.createFolder(); return created; } catch(FileSystemException e) { throw new RuntimeException("Could not create directories", e); } } @Override @Deprecated public boolean mkDirs(String dn) { return mkdir(dn); } @Override public boolean rmdir(String dn) { try { final FileObject resource = resourceService.resolve(workingDir, dn); return resource.delete(new AllFileSelector()) > 0; } catch(FileSystemException e) { throw new RuntimeException("Could not delete directory " + dn, e); } } @Override public boolean exists(String fn) { try { final FileObject resource = resourceService.resolve(workingDir, fn); return resource.exists(); } catch(FileSystemException e) { throw new RuntimeException("Could not check if file " + fn + " exists", e); } } @Override public boolean readable(String fn) { try { final FileObject resource = resourceService.resolve(workingDir, fn); return resource.isReadable(); } catch(FileSystemException e) { throw new RuntimeException("Could not check if file " + fn + " is readable", e); } } @Override public boolean writable(String fn) { try { final FileObject resource = resourceService.resolve(workingDir, fn); return resource.isWriteable(); } catch(FileSystemException e) { throw new RuntimeException("Could not 
check if file " + fn + " is writeable", e); } } @Override public boolean isDirectory(String fn) { try { final FileObject resource = resourceService.resolve(workingDir, fn); final FileType type = resource.getType(); return type == FileType.FOLDER || type == FileType.FILE_OR_FOLDER; } catch(FileSystemException e) { throw new RuntimeException("Could not check if file " + fn + " is a directory", e); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs.impl;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.commons.compress.utils.IOUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.BBPartHandle;
import org.apache.hadoop.fs.BBUploadHandle;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FSDataOutputStreamBuilder;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.InternalOperations;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.PartHandle;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathHandle;
import org.apache.hadoop.fs.UploadHandle;
import org.apache.hadoop.fs.permission.FsPermission;

import static org.apache.hadoop.fs.Path.mergePaths;
import static org.apache.hadoop.io.IOUtils.cleanupWithLogger;

/**
 * A MultipartUploader that uses the basic FileSystem commands.
 * This is done in three stages:
 * <ul>
 *   <li>Init - create a temp {@code _multipart} directory.</li>
 *   <li>PutPart - copying the individual parts of the file to the temp
 *   directory.</li>
 *   <li>Complete - use {@link FileSystem#concat} to merge the files;
 *   and then delete the temp directory.</li>
 * </ul>
 */
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class FileSystemMultipartUploader extends AbstractMultipartUploader {

  private static final Logger LOG = LoggerFactory.getLogger(
      FileSystemMultipartUploader.class);

  private final FileSystem fs;

  // Builder retained so putPart can read its buffer size at write time.
  private final FileSystemMultipartUploaderBuilder builder;

  private final FsPermission permission;

  private final long blockSize;

  private final Options.ChecksumOpt checksumOpt;

  /**
   * Build an uploader over the given filesystem. The base path, block size,
   * checksum options and permission all come from the builder.
   */
  public FileSystemMultipartUploader(
      final FileSystemMultipartUploaderBuilder builder,
      FileSystem fs) {
    super(builder.getPath());
    this.builder = builder;
    this.fs = fs;
    blockSize = builder.getBlockSize();
    checksumOpt = builder.getChecksumOpt();
    permission = builder.getPermission();
  }

  /**
   * Start an upload: create a unique temp collector directory next to the
   * destination and return its UTF-8-encoded path as the upload handle.
   */
  @Override
  public CompletableFuture<UploadHandle> startUpload(Path filePath)
      throws IOException {
    checkPath(filePath);
    return FutureIOSupport.eval(() -> {
      Path collectorPath = createCollectorPath(filePath);
      fs.mkdirs(collectorPath, FsPermission.getDirDefault());

      // The upload handle is simply the collector directory's path bytes.
      ByteBuffer byteBuffer = ByteBuffer.wrap(
          collectorPath.toString().getBytes(Charsets.UTF_8));
      return BBUploadHandle.from(byteBuffer);
    });
  }

  /**
   * Validate the arguments then asynchronously write one part into the
   * collector directory identified by the upload handle.
   */
  @Override
  public CompletableFuture<PartHandle> putPart(UploadHandle uploadId,
      int partNumber, Path filePath,
      InputStream inputStream,
      long lengthInBytes)
      throws IOException {
    checkPutArguments(filePath, inputStream, partNumber, uploadId,
        lengthInBytes);
    return FutureIOSupport.eval(() -> innerPutPart(filePath,
        inputStream, partNumber, uploadId, lengthInBytes));
  }

  /**
   * Copy a part's stream to {@code <collector>/<partNumber>.part}, applying
   * the configured checksum/permission/block-size options, and return the
   * part file's path bytes as the part handle. The input stream is always
   * closed, even on failure.
   */
  private PartHandle innerPutPart(Path filePath,
      InputStream inputStream,
      int partNumber,
      UploadHandle uploadId,
      long lengthInBytes)
      throws IOException {
    byte[] uploadIdByteArray = uploadId.toByteArray();
    checkUploadId(uploadIdByteArray);
    // Decode the collector directory path from the upload handle.
    Path collectorPath = new Path(new String(uploadIdByteArray, 0,
        uploadIdByteArray.length, Charsets.UTF_8));
    Path partPath =
        mergePaths(collectorPath, mergePaths(new Path(Path.SEPARATOR),
            new Path(partNumber + ".part")));
    final FSDataOutputStreamBuilder fileBuilder = fs.createFile(partPath);
    if (checksumOpt != null) {
      fileBuilder.checksumOpt(checksumOpt);
    }
    if (permission != null) {
      fileBuilder.permission(permission);
    }
    try (FSDataOutputStream fsDataOutputStream =
             fileBuilder.blockSize(blockSize).build()) {
      IOUtils.copy(inputStream, fsDataOutputStream,
          this.builder.getBufferSize());
    } finally {
      cleanupWithLogger(LOG, inputStream);
    }
    return BBPartHandle.from(ByteBuffer.wrap(
        partPath.toString().getBytes(Charsets.UTF_8)));
  }

  /**
   * Build the temp collector directory path for a destination file:
   * a sibling directory named after the file's base name (text before the
   * first '.') plus "_multipart_<uuid>" for uniqueness.
   */
  private Path createCollectorPath(Path filePath) {
    String uuid = UUID.randomUUID().toString();
    return mergePaths(filePath.getParent(),
        mergePaths(new Path(filePath.getName().split("\\.")[0]),
            mergePaths(new Path("_multipart_" + uuid),
                new Path(Path.SEPARATOR))));
  }

  /** Resolve the filesystem's path handle for the (existing) file. */
  private PathHandle getPathHandle(Path filePath) throws IOException {
    FileStatus status = fs.getFileStatus(filePath);
    return fs.getPathHandle(status);
  }

  /** Sum of the lengths of all part files; used to detect an empty upload. */
  private long totalPartsLen(List<Path> partHandles) throws IOException {
    long totalLen = 0;
    for (Path p : partHandles) {
      totalLen += fs.getFileStatus(p).getLen();
    }
    return totalLen;
  }

  @Override
  public CompletableFuture<PathHandle> complete(
      UploadHandle uploadId,
      Path filePath,
      Map<Integer, PartHandle> handleMap) throws IOException {
    checkPath(filePath);
    return FutureIOSupport.eval(() ->
        innerComplete(uploadId, filePath, handleMap));
  }

  /**
   * The upload complete operation.
   * @param multipartUploadId the ID of the upload
   * @param filePath path
   * @param handleMap map of handles
   * @return the path handle
   * @throws IOException failure
   */
  private PathHandle innerComplete(
      UploadHandle multipartUploadId, Path filePath,
      Map<Integer, PartHandle> handleMap) throws IOException {

    checkPath(filePath);

    checkUploadId(multipartUploadId.toByteArray());

    checkPartHandles(handleMap);
    List<Map.Entry<Integer, PartHandle>> handles =
        new ArrayList<>(handleMap.entrySet());
    // Parts must be concatenated in ascending part-number order.
    handles.sort(Comparator.comparingInt(Map.Entry::getKey));

    // Decode each part handle back into the part file's path.
    List<Path> partHandles = handles
        .stream()
        .map(pair -> {
          byte[] byteArray = pair.getValue().toByteArray();
          return new Path(new String(byteArray, 0, byteArray.length,
              Charsets.UTF_8));
        })
        .collect(Collectors.toList());

    int count = partHandles.size();
    // built up to identify duplicates -if the size of this set is
    // below that of the number of parts, then there's a duplicate entry.
    Set<Path> values = new HashSet<>(count);
    values.addAll(partHandles);
    Preconditions.checkArgument(values.size() == count,
        "Duplicate PartHandles");
    byte[] uploadIdByteArray = multipartUploadId.toByteArray();
    Path collectorPath = new Path(new String(uploadIdByteArray, 0,
        uploadIdByteArray.length, Charsets.UTF_8));

    boolean emptyFile = totalPartsLen(partHandles) == 0;
    if (emptyFile) {
      // Zero bytes uploaded: just create the empty destination file.
      fs.create(filePath).close();
    } else {
      // concat() requires an existing target, so create an empty file inside
      // the collector, concat all parts into it, then rename over the
      // destination (OVERWRITE) before cleaning up.
      Path filePathInsideCollector = mergePaths(collectorPath,
          new Path(Path.SEPARATOR + filePath.getName()));
      fs.create(filePathInsideCollector).close();
      fs.concat(filePathInsideCollector,
          partHandles.toArray(new Path[handles.size()]));
      new InternalOperations()
          .rename(fs, filePathInsideCollector, filePath,
              Options.Rename.OVERWRITE);
    }
    fs.delete(collectorPath, true);
    return getPathHandle(filePath);
  }

  /**
   * Abort an upload by deleting its collector directory. Fails with
   * FileNotFoundException if the upload id does not map to an existing
   * directory.
   */
  @Override
  public CompletableFuture<Void> abort(UploadHandle uploadId,
      Path filePath)
      throws IOException {
    checkPath(filePath);
    byte[] uploadIdByteArray = uploadId.toByteArray();
    checkUploadId(uploadIdByteArray);
    Path collectorPath = new Path(new String(uploadIdByteArray, 0,
        uploadIdByteArray.length, Charsets.UTF_8));

    return FutureIOSupport.eval(() -> {
      // force a check for a file existing; raises FNFE if not found
      fs.getFileStatus(collectorPath);
      fs.delete(collectorPath, true);
      return null;
    });
  }
}
package controllers; import java.util.*; import play.mvc.Controller; import play.mvc.Result; import play.mvc.With; import play.data.Form; import play.data.DynamicForm; import play.db.ebean.Model.Finder; import play.libs.Json; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.avaje.ebean.Ebean; import models.*; import auth0.*; public class ProductController extends Controller { private static Exception error; private static Map<String, Object> profile; public static Result create() { if (!authorize()) { ObjectNode result = Json.newObject(); result.put("code", "unauthorized"); result.put("msg", "" + error); return ok(result); } Product prod = bindProduct(); prod.setUserId(parseLong(session("user.id"))); prod.setCreatedDate(new Date()); prod.setLastModifiedDate(prod.getCreatedDate()); Ebean.save(prod); ObjectNode result = Json.newObject(); result.put("code", "success"); result.put("msg", "Successfully Created " + prod); return ok(result); } public static Result delete(Long id) { if (!authorize()) { ObjectNode result = Json.newObject(); result.put("code", "unauthorized"); result.put("msg", "" + error); return ok(result); } Product prod = retrieveProduct(id); Ebean.delete(prod); ObjectNode result = Json.newObject(); result.put("code", "success"); result.put("msg", ""); return ok(result); } public static boolean authorize() { try { JWTFilter jf = new JWTFilter(); jf.doFilter(); profile = jf.getProfile(); String user_id = (String) profile.get("user_id"); String email = (String) profile.get("email"); String name = (String) profile.get("name"); String fName = ""; String lName = ""; try { String[] tt = name.split(" ", 2); fName = tt[0]; lName = tt[1]; } catch (Exception ex) { } User user = null; try { user = retrieveUser(user_id); } catch (IndexOutOfBoundsException ex) { user = new User(fName, lName, email, user_id); Ebean.save(user); } user = retrieveUser(user_id); session("user.name", user.getFirstName() + " " 
+ user.getLastName()); session("user.id", user.getId() + ""); return true; } catch (Exception ex) { error = ex; return false; } } public static User retrieveUser(String user_id) { return Ebean.find(User.class) .where() .eq("userId", user_id) .findList() .get(0); } public static Product retrieveProduct(long id) { Product prod = Ebean.find(Product.class) .where() .eq("id", id) .eq("userId", parseLong(session("user.id"))) .findList() .get(0); return prod; } private static Product bindProduct() { Product prod = new Product(); return bindProduct(prod); } private static Product bindProduct(Product prod) { DynamicForm requestData = Form.form().bindFromRequest(); String name = requestData.get("name"); String description = requestData.get("description"); prod.setName(name); prod.setDescription(description); return prod; } public static Result update(Long id) { if (!authorize()) { ObjectNode result = Json.newObject(); result.put("code", "unauthorized"); result.put("msg", "" + error); return ok(result); } Product prod = retrieveProduct(id); bindProduct(prod); prod.setLastModifiedDate(new Date()); Ebean.update(prod); ObjectNode result = Json.newObject(); result.put("code", "success"); result.put("msg", ""); return ok(result); } public static Result list() { if (!authorize()) { ObjectNode result = Json.newObject(); result.put("code", "unauthorized"); result.put("msg", "" + error); return ok(result); } long page = requestLong("page"); long size = requestLong("size"); size = Math.min(size, 50l); size = size <= 0l ? 
20l : size; long offset = (page - 1l) * size; List<Product> prods = Ebean.find(Product.class) .where() .eq("userId", parseLong(session("user.id"))) .setFirstRow((int) offset) .setMaxRows((int) size) .findList(); String u_name = session("user.name"); return ok(Json.toJson(prods)); } static long requestLong(String key) { try { DynamicForm requestData = Form.form().bindFromRequest(); long l = Long.parseLong(requestData.get(key)); // System.out.println("###[[parse : " + l + " => " + session("user.id")); return l; } catch (NullPointerException | NumberFormatException ex) { return 0; } } static long parseLong(String key) { try { DynamicForm requestData = Form.form().bindFromRequest(); long l = Long.parseLong(key); // System.out.println("###[[parse : " + l + " => " + session("user.id")); return l; } catch (NullPointerException | NumberFormatException ex) { return 0; } } public static Result show(Long id) { if (!authorize()) { ObjectNode result = Json.newObject(); result.put("code", "unauthorized"); result.put("msg", "" + error); return ok(result); } Product prod = retrieveProduct(id); String u_name = session("user.name"); return ok(Json.toJson(prod)); } }
/* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.spring; import com.hazelcast.config.AbstractXmlConfigHelper; import com.hazelcast.config.AliasedDiscoveryConfig; import com.hazelcast.config.AutoDetectionConfig; import com.hazelcast.config.ClassFilter; import com.hazelcast.config.DiscoveryConfig; import com.hazelcast.config.DiscoveryStrategyConfig; import com.hazelcast.config.EvictionConfig; import com.hazelcast.config.EvictionPolicy; import com.hazelcast.config.GlobalSerializerConfig; import com.hazelcast.config.IndexConfig; import com.hazelcast.config.IndexType; import com.hazelcast.config.InvalidConfigurationException; import com.hazelcast.config.JavaSerializationFilterConfig; import com.hazelcast.config.MapConfig; import com.hazelcast.config.MaxSizePolicy; import com.hazelcast.config.NativeMemoryConfig; import com.hazelcast.config.NearCachePreloaderConfig; import com.hazelcast.config.PersistentMemoryConfig; import com.hazelcast.config.PersistentMemoryDirectoryConfig; import com.hazelcast.config.PersistentMemoryMode; import com.hazelcast.config.SerializationConfig; import com.hazelcast.config.SerializerConfig; import com.hazelcast.config.SocketInterceptorConfig; import com.hazelcast.internal.config.DomConfigHelper; import com.hazelcast.internal.util.StringUtil; import com.hazelcast.memory.MemorySize; import com.hazelcast.memory.MemoryUnit; import com.hazelcast.query.impl.IndexUtils; import 
com.hazelcast.spring.context.SpringManagedContext; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanReference; import org.springframework.beans.factory.config.RuntimeBeanReference; import org.springframework.beans.factory.support.AbstractBeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.support.ManagedList; import org.springframework.beans.factory.support.ManagedMap; import org.springframework.beans.factory.support.ManagedSet; import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser; import org.springframework.beans.factory.xml.ParserContext; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import static com.hazelcast.internal.config.ConfigValidator.COMMONLY_SUPPORTED_EVICTION_POLICIES; import static com.hazelcast.internal.config.ConfigValidator.checkEvictionConfig; import static com.hazelcast.internal.config.ConfigValidator.checkMapEvictionConfig; import static com.hazelcast.internal.config.ConfigValidator.checkNearCacheEvictionConfig; import static com.hazelcast.internal.config.DomConfigHelper.childElements; import static com.hazelcast.internal.config.DomConfigHelper.cleanNodeName; import static com.hazelcast.internal.config.DomConfigHelper.getBooleanValue; import static com.hazelcast.internal.config.DomConfigHelper.getIntegerValue; import static com.hazelcast.internal.util.StringUtil.upperCaseInternal; import static java.lang.Integer.parseInt; import static java.util.Arrays.asList; import static org.springframework.util.Assert.isTrue; /** * Base class of all Hazelcast BeanDefinitionParser implementations. 
 * <p>
 * <ul>
 * <li>{@link HazelcastClientBeanDefinitionParser}</li>
 * <li>{@link HazelcastConfigBeanDefinitionParser}</li>
 * <li>{@link HazelcastInstanceDefinitionParser}</li>
 * <li>{@link HazelcastTypeBeanDefinitionParser}</li>
 * </ul>
 */
@SuppressWarnings({"WeakerAccess", "checkstyle:methodcount"})
public abstract class AbstractHazelcastBeanDefinitionParser extends AbstractBeanDefinitionParser {

    /**
     * Base Helper class for Spring Xml Builder
     */
    public abstract static class SpringXmlBuilderHelper extends AbstractXmlConfigHelper {

        // Builder of the top-level Hazelcast config bean; child bean builders
        // created by createBeanBuilder() inherit its scope and lazy-init.
        protected BeanDefinitionBuilder configBuilder;

        // Applies the standard Spring bean XML attributes (lazy-init, scope,
        // depends-on) from the node to the given builder.
        protected void handleCommonBeanAttributes(Node node, BeanDefinitionBuilder builder, ParserContext parserContext) {
            NamedNodeMap attributes = node.getAttributes();
            if (attributes != null) {
                Node lazyInitAttr = attributes.getNamedItem("lazy-init");
                if (lazyInitAttr != null) {
                    builder.setLazyInit(Boolean.valueOf(getTextContent(lazyInitAttr)));
                } else {
                    builder.setLazyInit(parserContext.isDefaultLazyInit());
                }
                if (parserContext.isNested()) {
                    // nested beans inherit the scope of the containing bean
                    BeanDefinition containingBeanDefinition = parserContext.getContainingBeanDefinition();
                    if (containingBeanDefinition != null) {
                        builder.setScope(containingBeanDefinition.getScope());
                    }
                } else {
                    Node scopeNode = attributes.getNamedItem("scope");
                    if (scopeNode != null) {
                        builder.setScope(getTextContent(scopeNode));
                    }
                }
                Node dependsOnNode = attributes.getNamedItem("depends-on");
                if (dependsOnNode != null) {
                    // "depends-on" accepts a comma- or semicolon-separated list
                    String[] dependsOn = getTextContent(dependsOnNode).split("[,;]");
                    for (String dep : dependsOn) {
                        builder.addDependsOn(dep.trim());
                    }
                }
            }
        }

        protected String getTextContent(Node node) {
            return DomConfigHelper.getTextContent(node, domLevel3);
        }

        protected String getAttribute(Node node, String attName) {
            return DomConfigHelper.getAttribute(node, attName, domLevel3);
        }

        // Creates a bean builder for the given class, propagating the scope
        // and lazy-init settings of the top-level config bean.
        protected BeanDefinitionBuilder createBeanBuilder(Class clazz) {
            BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(clazz);
            builder.setScope(configBuilder.getBeanDefinition().getScope());
            builder.setLazyInit(configBuilder.getBeanDefinition().isLazyInit());
            return builder;
        }

        // Same as above but for a class referenced by name.
        protected BeanDefinitionBuilder createBeanBuilder(String className) {
            BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(className);
            builder.setScope(configBuilder.getBeanDefinition().getScope());
            builder.setLazyInit(configBuilder.getBeanDefinition().isLazyInit());
            return builder;
        }

        // Builds a bean from the node's attributes/children and wires it into
        // the parent builder under the given property name.
        protected BeanDefinitionBuilder createAndFillBeanBuilder(Node node, Class clazz, String propertyName,
                                                                 BeanDefinitionBuilder parent,
                                                                 String... exceptPropertyNames) {
            BeanDefinitionBuilder builder = createBeanBuilder(clazz);
            AbstractBeanDefinition beanDefinition = builder.getBeanDefinition();
            fillValues(node, builder, exceptPropertyNames);
            parent.addPropertyValue(propertyName, beanDefinition);
            return builder;
        }

        // Builds a named bean and registers it in the managed map keyed by
        // the attribute named propertyName.
        @SuppressWarnings("SameParameterValue")
        protected BeanDefinitionBuilder createAndFillListedBean(Node node, Class clazz, String propertyName,
                                                                ManagedMap<String, AbstractBeanDefinition> managedMap,
                                                                String... excludeNames) {
            BeanDefinitionBuilder builder = createBeanBuilder(clazz);
            AbstractBeanDefinition beanDefinition = builder.getBeanDefinition();
            // "name"
            String name = getAttribute(node, propertyName);
            builder.addPropertyValue("name", name);
            fillValues(node, builder, excludeNames);
            managedMap.put(name, beanDefinition);
            return builder;
        }

        // Copies XML attributes and child-element text into bean properties,
        // skipping the excluded names.
        protected void fillValues(Node node, BeanDefinitionBuilder builder, String... excludeNames) {
            Collection<String> epn = excludeNames != null && excludeNames.length > 0
                    ? new HashSet<>(asList(excludeNames)) : null;
            fillAttributeValues(node, builder, epn);
            for (Node n : childElements(node)) {
                String name = xmlToJavaName(cleanNodeName(n));
                if (epn != null && epn.contains(name)) {
                    continue;
                }
                String value = getTextContent(n);
                builder.addPropertyValue(name, value);
            }
        }

        protected void fillAttributeValues(Node node, BeanDefinitionBuilder builder, String... excludeNames) {
            Collection<String> epn = excludeNames != null && excludeNames.length > 0
                    ? new HashSet<>(asList(excludeNames)) : null;
            fillAttributeValues(node, builder, epn);
        }

        // Copies XML attributes into same-named (camelCased) bean properties.
        protected void fillAttributeValues(Node node, BeanDefinitionBuilder builder, Collection<String> epn) {
            NamedNodeMap attributes = node.getAttributes();
            if (attributes != null) {
                for (int a = 0; a < attributes.getLength(); a++) {
                    Node attribute = attributes.item(a);
                    String name = xmlToJavaName(attribute.getNodeName());
                    if (epn != null && epn.contains(name)) {
                        continue;
                    }
                    String value = attribute.getNodeValue();
                    builder.addPropertyValue(name, value);
                }
            }
        }

        // Copies all XML attributes as raw properties onto an aliased
        // discovery config (aws/gcp/...) and wires it as "<name>Config".
        protected void fillAttributesForAliasedDiscoveryStrategy(AliasedDiscoveryConfig config, Node node,
                                                                 BeanDefinitionBuilder builder, String name) {
            NamedNodeMap attributes = node.getAttributes();
            if (attributes != null) {
                for (int i = 0; i < attributes.getLength(); i++) {
                    Node attribute = attributes.item(i);
                    config.setProperty(attribute.getNodeName(), attribute.getNodeValue());
                }
            }
            String propertyName = String.format("%sConfig", name);
            builder.addPropertyValue(propertyName, config);
        }

        // Parses child listener elements into listener-config bean
        // definitions; "implementation" becomes a bean reference.
        protected ManagedList parseListeners(Node node, Class listenerConfigClass) {
            ManagedList<BeanDefinition> listeners = new ManagedList<>();
            String implementationAttr = "implementation";
            for (Node listenerNode : childElements(node)) {
                BeanDefinitionBuilder listenerConfBuilder = createBeanBuilder(listenerConfigClass);
                fillAttributeValues(listenerNode, listenerConfBuilder, implementationAttr);
                Node implementationNode = listenerNode.getAttributes().getNamedItem(implementationAttr);
                if (implementationNode != null) {
                    listenerConfBuilder.addPropertyReference(implementationAttr, getTextContent(implementationNode));
                }
                listeners.add(listenerConfBuilder.getBeanDefinition());
            }
            return listeners;
        }

        protected ManagedList parseProxyFactories(Node node, Class proxyFactoryConfigClass) {
            ManagedList<BeanDefinition> list = new ManagedList<>();
            for (Node instanceNode : childElements(node)) {
                BeanDefinitionBuilder confBuilder = createBeanBuilder(proxyFactoryConfigClass);
                fillAttributeValues(instanceNode, confBuilder);
                list.add(confBuilder.getBeanDefinition());
            }
            return list;
        }

        // Collects <data-serializable-factory> entries (by factory-id) into
        // bean-reference and class-name maps on the serialization config.
        protected void handleDataSerializableFactories(Node node, BeanDefinitionBuilder serializationConfigBuilder) {
            ManagedMap<Integer, BeanReference> factories = new ManagedMap<>();
            ManagedMap<Integer, String> classNames = new ManagedMap<>();
            for (Node child : childElements(node)) {
                String name = cleanNodeName(child);
                if ("data-serializable-factory".equals(name)) {
                    NamedNodeMap attributes = child.getAttributes();
                    Node implRef = attributes.getNamedItem("implementation");
                    Node classNode = attributes.getNamedItem("class-name");
                    Node fidNode = attributes.getNamedItem("factory-id");
                    if (implRef != null) {
                        factories.put(parseInt(getTextContent(fidNode)),
                                new RuntimeBeanReference(getTextContent(implRef)));
                    }
                    if (classNode != null) {
                        classNames.put(parseInt(getTextContent(fidNode)), getTextContent(classNode));
                    }
                }
            }
            serializationConfigBuilder.addPropertyValue("dataSerializableFactoryClasses", classNames);
            serializationConfigBuilder.addPropertyValue("dataSerializableFactories", factories);
        }

        // Parses <serializer> and <global-serializer> children into
        // SerializerConfig / GlobalSerializerConfig bean definitions.
        protected void handleSerializers(Node node, BeanDefinitionBuilder serializationConfigBuilder) {
            BeanDefinitionBuilder globalSerializerConfigBuilder = null;
            String implementation = "implementation";
            String className = "class-name";
            String typeClassName = "type-class";
            String overrideJavaSerializationName = "override-java-serialization";
            ManagedList<BeanDefinition> serializers = new ManagedList<>();
            for (Node child : childElements(node)) {
                String name = cleanNodeName(child);
                if ("global-serializer".equals(name)) {
                    globalSerializerConfigBuilder = createGSConfigBuilder(GlobalSerializerConfig.class, child,
                            implementation, className, overrideJavaSerializationName);
                }
                if ("serializer".equals(name)) {
                    BeanDefinitionBuilder serializerConfigBuilder = createBeanBuilder(SerializerConfig.class);
                    fillAttributeValues(child, serializerConfigBuilder);
                    NamedNodeMap attributes = child.getAttributes();
                    Node implRef = attributes.getNamedItem(implementation);
                    Node classNode = attributes.getNamedItem(className);
                    Node typeClass = attributes.getNamedItem(typeClassName);
                    if (typeClass != null) {
                        serializerConfigBuilder.addPropertyValue("typeClassName", getTextContent(typeClass));
                    }
                    if (implRef != null) {
                        serializerConfigBuilder.addPropertyReference(xmlToJavaName(implementation),
                                getTextContent(implRef));
                    }
                    if (classNode != null) {
                        serializerConfigBuilder.addPropertyValue(xmlToJavaName(className), getTextContent(classNode));
                    }
                    serializers.add(serializerConfigBuilder.getBeanDefinition());
                }
            }
            if (globalSerializerConfigBuilder != null) {
                serializationConfigBuilder.addPropertyValue("globalSerializerConfig",
                        globalSerializerConfigBuilder.getBeanDefinition());
            }
            serializationConfigBuilder.addPropertyValue("serializerConfigs", serializers);
        }

        // Builds the GlobalSerializerConfig bean from a <global-serializer>
        // element's implementation/class-name/override attributes.
        private BeanDefinitionBuilder createGSConfigBuilder(Class<GlobalSerializerConfig> globalSerializerConfigClass,
                                                            Node child, String implementation, String className,
                                                            String overrideJavaSerializationName) {
            BeanDefinitionBuilder globalSerializerConfigBuilder = createBeanBuilder(globalSerializerConfigClass);
            NamedNodeMap attributes = child.getAttributes();
            Node implRef = attributes.getNamedItem(implementation);
            Node classNode = attributes.getNamedItem(className);
            Node overrideJavaSerializationNode = attributes.getNamedItem(overrideJavaSerializationName);
            if (implRef != null) {
                globalSerializerConfigBuilder.addPropertyReference(xmlToJavaName(implementation),
                        getTextContent(implRef));
            }
            if (classNode != null) {
                globalSerializerConfigBuilder.addPropertyValue(xmlToJavaName(className), getTextContent(classNode));
            }
            if (overrideJavaSerializationNode != null) {
                boolean value = getBooleanValue(getTextContent(overrideJavaSerializationNode));
                globalSerializerConfigBuilder.addPropertyValue(xmlToJavaName(overrideJavaSerializationName), value);
            }
            return globalSerializerConfigBuilder;
        }

        // Same pattern as handleDataSerializableFactories, for
        // <portable-factory> entries.
        protected void handlePortableFactories(Node node, BeanDefinitionBuilder serializationConfigBuilder) {
            ManagedMap<Integer, BeanReference> factories = new ManagedMap<>();
            ManagedMap<Integer, String> classNames = new ManagedMap<>();
            for (Node child : childElements(node)) {
                String name = cleanNodeName(child);
                if ("portable-factory".equals(name)) {
                    NamedNodeMap attributes = child.getAttributes();
                    Node implRef = attributes.getNamedItem("implementation");
                    Node classNode = attributes.getNamedItem("class-name");
                    Node fidNode = attributes.getNamedItem("factory-id");
                    if (implRef != null) {
                        factories.put(parseInt(getTextContent(fidNode)),
                                new RuntimeBeanReference(getTextContent(implRef)));
                    }
                    if (classNode != null) {
                        classNames.put(parseInt(getTextContent(fidNode)), getTextContent(classNode));
                    }
                }
            }
            serializationConfigBuilder.addPropertyValue("portableFactoryClasses", classNames);
            serializationConfigBuilder.addPropertyValue("portableFactories", factories);
        }

        // Parses the whole <serialization> element and wires the resulting
        // SerializationConfig into the top-level config bean.
        protected void handleSerialization(Node node) {
            BeanDefinitionBuilder serializationConfigBuilder = createBeanBuilder(SerializationConfig.class);
            AbstractBeanDefinition beanDefinition = serializationConfigBuilder.getBeanDefinition();
            fillAttributeValues(node, serializationConfigBuilder);
            for (Node child : childElements(node)) {
                String nodeName = cleanNodeName(child);
                if ("data-serializable-factories".equals(nodeName)) {
                    handleDataSerializableFactories(child, serializationConfigBuilder);
                } else if ("portable-factories".equals(nodeName)) {
                    handlePortableFactories(child, serializationConfigBuilder);
                } else if ("serializers".equals(nodeName)) {
                    handleSerializers(child, serializationConfigBuilder);
                } else if ("java-serialization-filter".equals(nodeName)) {
                    handleJavaSerializationFilter(child, serializationConfigBuilder);
                }
            }
            configBuilder.addPropertyValue("serializationConfig", beanDefinition);
        }

        // Parses <socket-interceptor>: "implementation" becomes a bean
        // reference, nested <properties> become config properties.
        protected void handleSocketInterceptorConfig(Node node, BeanDefinitionBuilder networkConfigBuilder) {
            BeanDefinitionBuilder socketInterceptorConfigBuilder = createBeanBuilder(SocketInterceptorConfig.class);
            String implAttribute = "implementation";
            fillAttributeValues(node, socketInterceptorConfigBuilder, implAttribute);
            Node implNode = node.getAttributes().getNamedItem(implAttribute);
            String implementation = implNode != null ? getTextContent(implNode) : null;
            if (implementation != null) {
                socketInterceptorConfigBuilder.addPropertyReference(xmlToJavaName(implAttribute), implementation);
            }
            for (Node child : childElements(node)) {
                String name = cleanNodeName(child);
                if ("properties".equals(name)) {
                    handleProperties(child, socketInterceptorConfigBuilder);
                }
            }
            networkConfigBuilder.addPropertyValue("socketInterceptorConfig",
                    socketInterceptorConfigBuilder.getBeanDefinition());
        }

        // Maps the <cluster> element's name/password attributes onto the
        // top-level config bean.
        void handleClusterAttributes(Node node) {
            NamedNodeMap attributes = node.getAttributes();
            if (attributes != null) {
                for (int a = 0; a < attributes.getLength(); a++) {
                    Node att = attributes.item(a);
                    String name = att.getNodeName();
                    String value = att.getNodeValue();
                    if ("name".equals(name)) {
                        configBuilder.addPropertyValue("clusterName", value);
                    } else if ("password".equals(name)) {
                        configBuilder.addPropertyValue("clusterPassword", value);
                    }
                }
            }
        }

        protected void handleProperties(Node node, BeanDefinitionBuilder beanDefinitionBuilder) {
            ManagedMap properties = parseProperties(node);
            beanDefinitionBuilder.addPropertyValue("properties", properties);
        }

        // Parses <property name="...">value</property> children into a
        // name->value map; non-"property" children are ignored.
        protected ManagedMap parseProperties(Node node) {
            ManagedMap<String, String> properties = new ManagedMap<>();
            for (Node n : childElements(node)) {
                String name = cleanNodeName(n);
                String propertyName;
                if (!"property".equals(name)) {
                    continue;
                }
                propertyName = getTextContent(n.getAttributes().getNamedItem("name")).trim();
                String value = getTextContent(n);
                properties.put(propertyName, value);
            }
            return properties;
        }

        protected void handleSpringAware() {
            BeanDefinitionBuilder managedContextBeanBuilder = createBeanBuilder(SpringManagedContext.class);
            configBuilder.addPropertyValue("managedContext", managedContextBeanBuilder.getBeanDefinition());
        }

        // Builds an EvictionConfig bean from the node's attributes, using
        // IMap- or generic defaults, and validating the combination.
        @SuppressWarnings({"checkstyle:npathcomplexity", "checkstyle:cyclomaticcomplexity"})
        protected BeanDefinition getEvictionConfig(Node node, boolean isNearCache, boolean isIMap) {
            Node size = node.getAttributes().getNamedItem("size");
            Node maxSizePolicy = node.getAttributes().getNamedItem("max-size-policy");
            Node evictionPolicy = node.getAttributes().getNamedItem("eviction-policy");
            Node comparatorClassName = node.getAttributes().getNamedItem("comparator-class-name");
            Node comparatorBean = node.getAttributes().getNamedItem("comparator-bean");
            if (comparatorClassName != null && comparatorBean != null) {
                throw new InvalidConfigurationException("Only one of the `comparator-class-name` and `comparator-bean`"
                        + " attributes can be configured inside eviction configuration!");
            }
            BeanDefinitionBuilder evictionConfigBuilder = createBeanBuilder(EvictionConfig.class);
            Integer maxSize = isIMap ? MapConfig.DEFAULT_MAX_SIZE : EvictionConfig.DEFAULT_MAX_ENTRY_COUNT;
            MaxSizePolicy maxSizePolicyValue = isIMap
                    ? MapConfig.DEFAULT_MAX_SIZE_POLICY : EvictionConfig.DEFAULT_MAX_SIZE_POLICY;
            EvictionPolicy evictionPolicyValue = isIMap
                    ? MapConfig.DEFAULT_EVICTION_POLICY : EvictionConfig.DEFAULT_EVICTION_POLICY;
            String comparatorClassNameValue = null;
            String comparatorBeanValue = null;
            if (size != null) {
                maxSize = parseInt(getTextContent(size));
                if (isIMap && maxSize == 0) {
                    // 0 means "use the default" for IMap
                    maxSize = MapConfig.DEFAULT_MAX_SIZE;
                }
            }
            if (maxSizePolicy != null) {
                maxSizePolicyValue = MaxSizePolicy.valueOf(
                        upperCaseInternal(getTextContent(maxSizePolicy)));
            }
            if (evictionPolicy != null) {
                evictionPolicyValue = EvictionPolicy.valueOf(
                        upperCaseInternal(getTextContent(evictionPolicy)));
            }
            if (comparatorClassName != null) {
                comparatorClassNameValue = getTextContent(comparatorClassName);
            }
            if (comparatorBean != null) {
                comparatorBeanValue = getTextContent(comparatorBean);
            }
            try {
                doEvictionConfigChecks(maxSizePolicyValue, evictionPolicyValue, comparatorClassNameValue,
                        comparatorBeanValue, isIMap, isNearCache);
            } catch (IllegalArgumentException e) {
                // surface validation failures as configuration errors
                throw new InvalidConfigurationException(e.getMessage());
            }
            evictionConfigBuilder.addPropertyValue("size", maxSize);
            evictionConfigBuilder.addPropertyValue("maxSizePolicy", maxSizePolicyValue);
            evictionConfigBuilder.addPropertyValue("evictionPolicy", evictionPolicyValue);
            if (comparatorClassNameValue != null) {
                evictionConfigBuilder.addPropertyValue("comparatorClassName", comparatorClassNameValue);
            }
            if (comparatorBean != null) {
                evictionConfigBuilder.addPropertyReference("comparator", comparatorBeanValue);
            }
            return evictionConfigBuilder.getBeanDefinition();
        }

        // Builds a NearCachePreloaderConfig bean from the node's attributes,
        // falling back to the documented defaults.
        protected BeanDefinition getPreloaderConfig(Node node) {
            Node enabled = node.getAttributes().getNamedItem("enabled");
            Node directory = node.getAttributes().getNamedItem("directory");
            Node storeInitialDelaySeconds = node.getAttributes().getNamedItem("store-initial-delay-seconds");
            Node storeIntervalSeconds = node.getAttributes().getNamedItem("store-interval-seconds");
            BeanDefinitionBuilder nearCachePreloaderConfigBuilder = createBeanBuilder(NearCachePreloaderConfig.class);
            Boolean enabledValue = Boolean.FALSE;
            String directoryValue = "";
            Integer storeInitialDelaySecondsValue = NearCachePreloaderConfig.DEFAULT_STORE_INITIAL_DELAY_SECONDS;
            Integer storeIntervalSecondsValue = NearCachePreloaderConfig.DEFAULT_STORE_INTERVAL_SECONDS;
            if (enabled != null) {
                enabledValue = Boolean.parseBoolean(getTextContent(enabled));
            }
            if (directory != null) {
                directoryValue = getTextContent(directory);
            }
            if (storeInitialDelaySeconds != null) {
                storeInitialDelaySecondsValue = parseInt(getTextContent(storeInitialDelaySeconds));
            }
            if (storeIntervalSeconds != null) {
                storeIntervalSecondsValue = parseInt(getTextContent(storeIntervalSeconds));
            }
            nearCachePreloaderConfigBuilder.addPropertyValue("enabled", enabledValue);
            nearCachePreloaderConfigBuilder.addPropertyValue("directory", directoryValue);
            nearCachePreloaderConfigBuilder.addPropertyValue("storeInitialDelaySeconds", storeInitialDelaySecondsValue);
            nearCachePreloaderConfigBuilder.addPropertyValue("storeIntervalSeconds", storeIntervalSecondsValue);
            return nearCachePreloaderConfigBuilder.getBeanDefinition();
        }

        // Parses <native-memory>, including <size>, nested
        // <persistent-memory> and the legacy persistent-memory-directory
        // attribute (which is appended as an extra directory config).
        protected void handleNativeMemory(Node node) {
            BeanDefinitionBuilder nativeMemoryConfigBuilder = createBeanBuilder(NativeMemoryConfig.class);
            AbstractBeanDefinition beanDefinition = nativeMemoryConfigBuilder.getBeanDefinition();
            fillAttributeValues(node, nativeMemoryConfigBuilder, "persistentMemoryDirectory");
            ManagedList<BeanDefinition> directories = new ManagedList<>();
            BeanDefinitionBuilder pmemConfigBuilder = createBeanBuilder(PersistentMemoryConfig.class);
            for (Node child : childElements(node)) {
                String nodeName = cleanNodeName(child);
                if ("size".equals(nodeName)) {
                    handleMemorySizeConfig(child, nativeMemoryConfigBuilder);
                } else if ("persistent-memory".equals(nodeName)) {
                    handlePersistentMemoryConfig(child, pmemConfigBuilder, directories);
                }
            }
            Node attrPmemDirectory = node.getAttributes().getNamedItem("persistent-memory-directory");
            if (attrPmemDirectory != null) {
                BeanDefinitionBuilder pmemDirConfigBuilder = createBeanBuilder(PersistentMemoryDirectoryConfig.class);
                pmemDirConfigBuilder.addConstructorArgValue(getTextContent(attrPmemDirectory));
                directories.add(pmemDirConfigBuilder.getBeanDefinition());
            }
            if (!directories.isEmpty()) {
                pmemConfigBuilder.addPropertyValue("directoryConfigs", directories);
            }
            nativeMemoryConfigBuilder.addPropertyValue("persistentMemoryConfig", pmemConfigBuilder.getBeanDefinition());
            configBuilder.addPropertyValue("nativeMemoryConfig", beanDefinition);
        }

        // Parses a <persistent-memory> element: enabled/mode attributes plus
        // nested <directories>/<directory numa-node="..."> entries.
        private void handlePersistentMemoryConfig(Node pmemNode, BeanDefinitionBuilder pmemConfigBuilder,
                                                  ManagedList<BeanDefinition> directoriesList) {
            Node enabledNode = pmemNode.getAttributes().getNamedItem("enabled");
            if (enabledNode != null) {
                boolean enabled = getBooleanValue(getTextContent(enabledNode));
                pmemConfigBuilder.addPropertyValue("enabled", enabled);
            }
            Node mode = pmemNode.getAttributes().getNamedItem("mode");
            if (mode != null) {
                String modeValue = getTextContent(mode);
                try {
                    pmemConfigBuilder.addPropertyValue("mode", PersistentMemoryMode.valueOf(modeValue));
                } catch (Exception ex) {
                    throw new InvalidConfigurationException("Invalid 'mode' for 'persistent-memory': " + modeValue);
                }
            }
            for (Node dirsNode : childElements(pmemNode)) {
                String dirsNodeName = cleanNodeName(dirsNode);
                if ("directories".equals(dirsNodeName)) {
                    for (Node dirNode : childElements(dirsNode)) {
                        String dirNodeName = cleanNodeName(dirNode);
                        if ("directory".equals(dirNodeName)) {
                            BeanDefinitionBuilder pmemDirConfigBuilder =
                                    createBeanBuilder(PersistentMemoryDirectoryConfig.class);
                            NamedNodeMap attributes = dirNode.getAttributes();
                            Node numaNode = attributes.getNamedItem("numa-node");
                            pmemDirConfigBuilder.addConstructorArgValue(getTextContent(dirNode));
                            String numaNodeStr = getTextContent(numaNode);
                            if (!StringUtil.isNullOrEmptyAfterTrim(numaNodeStr)) {
                                // optional second constructor arg: the NUMA node id
                                pmemDirConfigBuilder.addConstructorArgValue(getIntegerValue("numa-node", numaNodeStr));
                            }
                            directoriesList.add(pmemDirConfigBuilder.getBeanDefinition());
                        }
                    }
                }
            }
        }

        // Parses <size value="..." unit="..."/> into a MemorySize bean.
        private void handleMemorySizeConfig(Node node, BeanDefinitionBuilder nativeMemoryConfigBuilder) {
            BeanDefinitionBuilder memorySizeConfigBuilder = createBeanBuilder(MemorySize.class);
            NamedNodeMap attributes = node.getAttributes();
            Node value = attributes.getNamedItem("value");
            Node unit = attributes.getNamedItem("unit");
            memorySizeConfigBuilder.addConstructorArgValue(getTextContent(value));
            memorySizeConfigBuilder.addConstructorArgValue(MemoryUnit.valueOf(getTextContent(unit)));
            nativeMemoryConfigBuilder.addPropertyValue("size", memorySizeConfigBuilder.getBeanDefinition());
        }

        // Parses <discovery-strategies> and wires the resulting
        // DiscoveryConfig into the join config.
        protected void handleDiscoveryStrategies(Node node, BeanDefinitionBuilder joinConfigBuilder) {
            BeanDefinitionBuilder discoveryConfigBuilder = createBeanBuilder(DiscoveryConfig.class);
            ManagedList<BeanDefinition> discoveryStrategyConfigs = new ManagedList<>();
            for (Node child : childElements(node)) {
                String name = cleanNodeName(child);
                if ("discovery-strategy".equals(name)) {
                    handleDiscoveryStrategy(child, discoveryStrategyConfigs);
                } else if ("node-filter".equals(name)) {
                    handleDiscoveryNodeFilter(child, discoveryConfigBuilder);
                } else if ("discovery-service-provider".equals(name)) {
                    handleDiscoveryServiceProvider(child, discoveryConfigBuilder);
                }
            }
            discoveryConfigBuilder.addPropertyValue("discoveryStrategyConfigs", discoveryStrategyConfigs);
            joinConfigBuilder.addPropertyValue("discoveryConfig", discoveryConfigBuilder.getBeanDefinition());
        }

        protected void handleAutoDetection(Node node, BeanDefinitionBuilder joinConfigBuilder) {
            BeanDefinitionBuilder autoDetectionConfigBuilder = createBeanBuilder(AutoDetectionConfig.class);
            autoDetectionConfigBuilder.addPropertyValue("enabled", getBooleanValue(getAttribute(node, "enabled")));
            joinConfigBuilder.addPropertyValue("autoDetectionConfig", autoDetectionConfigBuilder.getBeanDefinition());
        }

        // Parses an <index> element (name, type, nested attribute list) into
        // an IndexConfig bean and appends it to the list.
        protected void handleIndex(ManagedList<BeanDefinition> indexes, Node indexNode) {
            BeanDefinitionBuilder indexConfBuilder = createBeanBuilder(IndexConfig.class);
            NamedNodeMap attributes = indexNode.getAttributes();
            // Resolve name.
            String name = getTextContent(attributes.getNamedItem("name"));
            indexConfBuilder.addPropertyValue("name", name.isEmpty() ? null : name);
            // Resolve type.
            String typeStr = getTextContent(attributes.getNamedItem("type"));
            IndexType type = IndexUtils.getIndexTypeFromXmlName(typeStr);
            indexConfBuilder.addPropertyValue("type", type);
            // Resolve columns.
            List<String> columns = new ArrayList<>();
            for (Node columnsNode : childElements(indexNode)) {
                if ("attributes".equals(cleanNodeName(columnsNode))) {
                    for (Node columnNode : childElements(columnsNode)) {
                        if ("attribute".equals(cleanNodeName(columnNode))) {
                            columns.add(getTextContent(columnNode));
                        }
                    }
                }
            }
            indexConfBuilder.addPropertyValue("attributes", columns);
            indexes.add(indexConfBuilder.getBeanDefinition());
        }

        // "implementation" is mandatory for a discovery-service-provider and
        // is wired as a bean reference.
        private void handleDiscoveryServiceProvider(Node node, BeanDefinitionBuilder discoveryConfigBuilder) {
            NamedNodeMap attributes = node.getAttributes();
            Node implNode = attributes.getNamedItem("implementation");
            String implementation = getTextContent(implNode).trim();
            isTrue(!implementation.isEmpty(),
                    "'implementation' attribute is required to create DiscoveryServiceProvider!");
            discoveryConfigBuilder.addPropertyReference("discoveryServiceProvider", implementation);
        }

        // A node filter needs either a class name or an implementation bean.
        private void handleDiscoveryNodeFilter(Node node, BeanDefinitionBuilder discoveryConfigBuilder) {
            NamedNodeMap attributes = node.getAttributes();
            Node classNameNode = attributes.getNamedItem("class-name");
            String className = getTextContent(classNameNode).trim();
            Node implNode = attributes.getNamedItem("implementation");
            String implementation = getTextContent(implNode).trim();
            isTrue(!className.isEmpty() || !implementation.isEmpty(),
                    "One of 'class-name' or 'implementation' attributes is required to create NodeFilter!");
            if (!className.isEmpty()) {
                discoveryConfigBuilder.addPropertyValue("nodeFilterClass", className);
            }
            if (!implementation.isEmpty()) {
discoveryConfigBuilder.addPropertyReference("nodeFilter", implementation); } } private void handleDiscoveryStrategy(Node node, ManagedList<BeanDefinition> discoveryStrategyConfigs) { BeanDefinitionBuilder discoveryStrategyConfigBuilder = createBeanBuilder(DiscoveryStrategyConfig.class); NamedNodeMap attributes = node.getAttributes(); Node classNameNode = attributes.getNamedItem("class-name"); String className = getTextContent(classNameNode).trim(); Node implNode = attributes.getNamedItem("discovery-strategy-factory"); String implementation = getTextContent(implNode).trim(); isTrue(!className.isEmpty() || !implementation.isEmpty(), "One of 'class-name' or 'implementation' attributes is required to create DiscoveryStrategyConfig!"); if (!implementation.isEmpty()) { discoveryStrategyConfigBuilder.addConstructorArgReference(implementation); } else { discoveryStrategyConfigBuilder.addConstructorArgValue(className); } for (Node child : childElements(node)) { String name = cleanNodeName(child); if ("properties".equals(name)) { ManagedMap properties = parseProperties(child); if (!properties.isEmpty()) { discoveryStrategyConfigBuilder.addConstructorArgValue(properties); } } } discoveryStrategyConfigs.add(discoveryStrategyConfigBuilder.getBeanDefinition()); } protected void handleJavaSerializationFilter(final Node node, BeanDefinitionBuilder serializationConfigBuilder) { BeanDefinitionBuilder filterConfigBuilder = createBeanBuilder(JavaSerializationFilterConfig.class); for (Node child : childElements(node)) { String name = cleanNodeName(child); if ("blacklist".equals(name)) { filterConfigBuilder.addPropertyValue("blacklist", createFilterListBean(child)); } else if ("whitelist".equals(name)) { filterConfigBuilder.addPropertyValue("whitelist", createFilterListBean(child)); } } Node defaultsDisabledAttr = node.getAttributes().getNamedItem("defaults-disabled"); boolean defaultsDisabled = getBooleanValue(getTextContent(defaultsDisabledAttr)); 
filterConfigBuilder.addPropertyValue("defaultsDisabled", defaultsDisabled); serializationConfigBuilder.addPropertyValue("javaSerializationFilterConfig", filterConfigBuilder.getBeanDefinition()); } private AbstractBeanDefinition createFilterListBean(Node node) { BeanDefinitionBuilder filterListBuilder = createBeanBuilder(ClassFilter.class); ManagedSet<String> classes = new ManagedSet<>(); ManagedSet<String> packages = new ManagedSet<>(); ManagedSet<String> prefixes = new ManagedSet<>(); for (Node child : childElements(node)) { String name = cleanNodeName(child); if ("class".equals(name)) { classes.add(getTextContent(child)); } else if ("package".equals(name)) { packages.add(getTextContent(child)); } else if ("prefix".equals(name)) { prefixes.add(getTextContent(child)); } } filterListBuilder.addPropertyValue("classes", classes); filterListBuilder.addPropertyValue("packages", packages); filterListBuilder.addPropertyValue("prefixes", prefixes); return filterListBuilder.getBeanDefinition(); } } private static void doEvictionConfigChecks(MaxSizePolicy maxSizePolicyValue, EvictionPolicy evictionPolicyValue, String comparatorClassNameValue, String comparatorBeanValue, boolean isIMap, boolean isNearCache) { if (isIMap) { checkMapEvictionConfig(maxSizePolicyValue, evictionPolicyValue, comparatorClassNameValue, comparatorBeanValue); return; } if (isNearCache) { checkNearCacheEvictionConfig(evictionPolicyValue, comparatorClassNameValue, comparatorBeanValue); return; } checkEvictionConfig(evictionPolicyValue, comparatorClassNameValue, comparatorBeanValue, COMMONLY_SUPPORTED_EVICTION_POLICIES); } }
/*
 * Copyright 2003-2014 Dave Griffith, Bas Leijdekkers
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.siyeh.ig.psiutils;

import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleSettingsFacade;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.*;
import com.intellij.psi.util.InheritanceUtil;
import com.siyeh.HardcodedMethodConstants;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

/**
 * Utilities for adding (static) imports to a Java file and for detecting the
 * name conflicts (exact imports, on-demand imports, java.lang, same-package
 * classes, conflicting references) that would make an import unsafe.
 */
public final class ImportUtils {

    private ImportUtils() {}

    /**
     * Adds a single-class import for {@code aClass} to the file containing
     * {@code context}, unless it is unnecessary (same package, already
     * imported, covered by an on-demand import) or would conflict.
     */
    public static void addImportIfNeeded(@NotNull PsiClass aClass, @NotNull PsiElement context) {
        final PsiFile file = context.getContainingFile();
        if (!(file instanceof PsiJavaFile)) {
            return;
        }
        final PsiJavaFile javaFile = (PsiJavaFile)file;
        final PsiClass outerClass = aClass.getContainingClass();
        if (outerClass == null) {
            // Top-level class defined in this very file: no import needed.
            if (PsiTreeUtil.isAncestor(javaFile, aClass, true)) {
                return;
            }
        } else if (PsiTreeUtil.isAncestor(outerClass, context, true)) {
            // Inner class referenced from inside its owner's body: visible without import.
            final PsiElement brace = outerClass.getLBrace();
            if (brace != null && brace.getTextOffset() < context.getTextOffset()) {
                return;
            }
        }
        final String qualifiedName = aClass.getQualifiedName();
        if (qualifiedName == null) {
            return;
        }
        final PsiImportList importList = javaFile.getImportList();
        if (importList == null) {
            return;
        }
        final String containingPackageName = javaFile.getPackageName();
        @NonNls final String packageName = ClassUtil.extractPackageName(qualifiedName);
        if (CommonClassNames.DEFAULT_PACKAGE.equals(packageName)) {
            return;
        }
        if (containingPackageName.equals(packageName) ||
            importList.findSingleClassImportStatement(qualifiedName) != null) {
            return;
        }
        if (importList.findOnDemandImportStatement(packageName) != null &&
            !hasOnDemandImportConflict(qualifiedName, javaFile)) {
            return;
        }
        if (hasExactImportConflict(qualifiedName, javaFile)) {
            return;
        }
        final Project project = importList.getProject();
        final JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(project);
        final PsiElementFactory elementFactory = psiFacade.getElementFactory();
        final PsiImportStatement importStatement = elementFactory.createImportStatement(aClass);
        importList.add(importStatement);
    }

    /**
     * Returns true when {@code memberName} of class {@code fqName} could be
     * referenced unqualified at {@code context} after a static import, i.e. no
     * accessible field/method/inner class of the surrounding class and no
     * existing import would shadow or conflict with it.
     */
    private static boolean nameCanBeStaticallyImported(@NotNull String fqName,
                                                       @NotNull String memberName,
                                                       @NotNull PsiElement context) {
        final PsiClass containingClass = PsiTreeUtil.getParentOfType(context, PsiClass.class);
        if (containingClass == null) {
            return false;
        }
        if (InheritanceUtil.isInheritor(containingClass, fqName)) {
            // Members of a superclass are visible without any import.
            return true;
        }
        final PsiField field = containingClass.findFieldByName(memberName, true);
        if (field != null && PsiUtil.isAccessible(field, context, null)) {
            return false;
        }
        final PsiMethod[] methods = containingClass.findMethodsByName(memberName, true);
        for (PsiMethod method : methods) {
            if (PsiUtil.isAccessible(method, context, null)) {
                return false;
            }
        }
        final PsiClass innerClass = containingClass.findInnerClassByName(memberName, true);
        if (innerClass != null && PsiUtil.isAccessible(innerClass, context, null)) {
            return false;
        }
        return !hasOnDemandImportConflict(fqName + '.' + memberName, context, true) &&
               !hasExactImportStaticConflict(fqName, memberName, context);
    }

    /**
     * Returns true when the class {@code fqName} could be referenced by its
     * short name at {@code context} without ambiguity.
     */
    public static boolean nameCanBeImported(@NotNull String fqName, @NotNull PsiElement context) {
        final PsiClass containingClass = PsiTreeUtil.getParentOfType(context, PsiClass.class);
        if (containingClass != null) {
            if (fqName.equals(containingClass.getQualifiedName())) {
                return true;
            }
            final String shortName = ClassUtil.extractClassName(fqName);
            final PsiClass[] innerClasses = containingClass.getAllInnerClasses();
            for (PsiClass innerClass : innerClasses) {
                if (innerClass.hasModifierProperty(PsiModifier.PRIVATE)) {
                    continue;
                }
                if (innerClass.hasModifierProperty(PsiModifier.PACKAGE_LOCAL)) {
                    // Package-local inner classes only conflict from the same package.
                    if (!ClassUtils.inSamePackage(innerClass, containingClass)) {
                        continue;
                    }
                }
                final String className = innerClass.getName();
                if (shortName.equals(className)) {
                    return false;
                }
            }
            PsiField field = containingClass.findFieldByName(shortName, false);
            if (field != null) {
                return false;
            }
            field = containingClass.findFieldByName(shortName, true);
            if (field != null && PsiUtil.isAccessible(field, containingClass, null)) {
                return false;
            }
        }
        final PsiJavaFile file = PsiTreeUtil.getParentOfType(context, PsiJavaFile.class);
        if (file == null) {
            return false;
        }
        if (hasExactImportConflict(fqName, file)) {
            return false;
        }
        if (hasOnDemandImportConflict(fqName, file, true)) {
            return false;
        }
        if (containsConflictingReference(file, fqName)) {
            return false;
        }
        if (containsConflictingClass(fqName, file)) {
            return false;
        }
        return !containsConflictingClassName(fqName, file);
    }

    /** True when a top-level class of {@code file} already uses the short name of {@code fqName}. */
    private static boolean containsConflictingClassName(String fqName, PsiJavaFile file) {
        final int lastDotIndex = fqName.lastIndexOf((int)'.');
        final String shortName = fqName.substring(lastDotIndex + 1);
        final PsiClass[] classes = file.getClasses();
        for (PsiClass aClass : classes) {
            if (shortName.equals(aClass.getName())) {
                return true;
            }
        }
        return false;
    }

    /**
     * True when the file already has a single-class import of a DIFFERENT class
     * with the same short name as {@code fqName}.
     */
    public static boolean hasExactImportConflict(String fqName, PsiJavaFile file) {
        final PsiImportList imports = file.getImportList();
        if (imports == null) {
            return false;
        }
        final PsiImportStatement[] importStatements = imports.getImportStatements();
        final int lastDotIndex = fqName.lastIndexOf((int)'.');
        final String shortName = fqName.substring(lastDotIndex + 1);
        final String dottedShortName = '.' + shortName;
        for (final PsiImportStatement importStatement : importStatements) {
            if (importStatement.isOnDemand()) {
                continue;
            }
            final String importName = importStatement.getQualifiedName();
            if (importName == null) {
                // NOTE(review): bails out of the whole scan on one unresolvable
                // import instead of skipping it — presumably intentional
                // (incomplete code), but a `continue` may be meant; confirm.
                return false;
            }
            if (!importName.equals(fqName) && importName.endsWith(dottedShortName)) {
                return true;
            }
        }
        return false;
    }

    /**
     * True when a single static import of {@code memberName} from a class other
     * than {@code qualifierClass} already exists in the file.
     */
    private static boolean hasExactImportStaticConflict(String qualifierClass, String memberName, PsiElement context) {
        final PsiFile file = context.getContainingFile();
        if (!(file instanceof PsiJavaFile)) {
            return false;
        }
        final PsiJavaFile javaFile = (PsiJavaFile)file;
        final PsiImportList importList = javaFile.getImportList();
        if (importList == null) {
            return false;
        }
        final PsiImportStaticStatement[] importStaticStatements = importList.getImportStaticStatements();
        for (PsiImportStaticStatement importStaticStatement : importStaticStatements) {
            if (importStaticStatement.isOnDemand()) {
                continue;
            }
            final String name = importStaticStatement.getReferenceName();
            if (!memberName.equals(name)) {
                continue;
            }
            final PsiJavaCodeReferenceElement importReference = importStaticStatement.getImportReference();
            if (importReference == null) {
                continue;
            }
            final PsiElement qualifier = importReference.getQualifier();
            if (qualifier == null) {
                continue;
            }
            final String qualifierText = qualifier.getText();
            if (!qualifierClass.equals(qualifierText)) {
                return true;
            }
        }
        return false;
    }

    public static boolean hasOnDemandImportConflict(@NotNull String fqName, @NotNull PsiElement context) {
        return hasOnDemandImportConflict(fqName, context, false);
    }

    /**
     * @param strict if strict is true this method checks if the conflicting
     *               class which is imported is actually used in the file. If it
     *               isn't, the on-demand import can be overridden with an exact
     *               import for the fqName without breaking stuff.
     */
    private static boolean hasOnDemandImportConflict(@NotNull String fqName,
                                                     @NotNull PsiElement context,
                                                     boolean strict) {
        final PsiFile containingFile = context.getContainingFile();
        if (!(containingFile instanceof PsiJavaFile)) {
            return false;
        }
        final PsiJavaFile javaFile = (PsiJavaFile)containingFile;
        final PsiImportList imports = javaFile.getImportList();
        if (imports == null) {
            return false;
        }
        final PsiImportStatementBase[] importStatements = imports.getAllImportStatements();
        final String shortName = ClassUtil.extractClassName(fqName);
        final String packageName = ClassUtil.extractPackageName(fqName);
        for (final PsiImportStatementBase importStatement : importStatements) {
            if (!importStatement.isOnDemand()) {
                continue;
            }
            final PsiJavaCodeReferenceElement importReference = importStatement.getImportReference();
            if (importReference == null) {
                continue;
            }
            final String packageText = importReference.getText();
            if (packageText.equals(packageName)) {
                // The import covers fqName's own package: no conflict from it.
                continue;
            }
            final PsiElement element = importReference.resolve();
            if (element instanceof PsiPackage) {
                final PsiPackage aPackage = (PsiPackage)element;
                if (!strict) {
                    if (aPackage.findClassByShortName(shortName, containingFile.getResolveScope()).length > 0) {
                        return true;
                    }
                } else {
                    final PsiClass[] classes =
                            aPackage.findClassByShortName(shortName, containingFile.getResolveScope());
                    for (final PsiClass aClass : classes) {
                        final String qualifiedClassName = aClass.getQualifiedName();
                        if (qualifiedClassName == null || fqName.equals(qualifiedClassName)) {
                            continue;
                        }
                        return containsConflictingReference(containingFile, qualifiedClassName);
                    }
                }
            } else if (element instanceof PsiClass) {
                final PsiClass aClass = (PsiClass)element;
                final PsiClass innerClass = aClass.findInnerClassByName(shortName, true);
                if (importStatement instanceof PsiImportStatement) {
                    // Regular on-demand import of a class: inner classes conflict.
                    if (innerClass != null && PsiUtil.isAccessible(innerClass, containingFile, null)) {
                        final String qualifiedName = innerClass.getQualifiedName();
                        if (!fqName.equals(qualifiedName) &&
                            (!strict || containsConflictingReference(containingFile, qualifiedName))) {
                            return true;
                        }
                    }
                } else {
                    // Static on-demand import: static inner classes, fields and methods conflict.
                    if (innerClass != null &&
                        PsiUtil.isAccessible(innerClass, containingFile, null) &&
                        innerClass.hasModifierProperty(PsiModifier.STATIC)) {
                        final String qualifiedName = innerClass.getQualifiedName();
                        if (!fqName.equals(qualifiedName) &&
                            (!strict || memberReferenced(innerClass, javaFile))) {
                            return true;
                        }
                    }
                    final PsiField field = aClass.findFieldByName(shortName, true);
                    if (field != null &&
                        PsiUtil.isAccessible(field, containingFile, null) &&
                        field.hasModifierProperty(PsiModifier.STATIC)) {
                        final PsiClass containingClass = field.getContainingClass();
                        if (containingClass == null) {
                            continue;
                        }
                        final String qualifiedName = containingClass.getQualifiedName() + '.' + field.getName();
                        if (!fqName.equals(qualifiedName) &&
                            (!strict || memberReferenced(field, javaFile))) {
                            return true;
                        }
                    }
                    final PsiMethod[] methods = aClass.findMethodsByName(shortName, true);
                    for (PsiMethod method : methods) {
                        if (!PsiUtil.isAccessible(method, containingFile, null) ||
                            !method.hasModifierProperty(PsiModifier.STATIC)) {
                            continue;
                        }
                        final PsiClass containingClass = method.getContainingClass();
                        if (containingClass == null) {
                            continue;
                        }
                        final String qualifiedName = containingClass.getQualifiedName() + '.' + method.getName();
                        if (!fqName.equals(qualifiedName) &&
                            (!strict || memberReferenced(method, javaFile))) {
                            return true;
                        }
                    }
                }
            }
        }
        return hasJavaLangImportConflict(fqName, javaFile) || hasDefaultImportConflict(fqName, javaFile);
    }

    /** True when the file's own package contains a class with the same short name. */
    private static boolean hasDefaultImportConflict(String fqName, PsiJavaFile file) {
        final String shortName = ClassUtil.extractClassName(fqName);
        final String packageName = ClassUtil.extractPackageName(fqName);
        final String filePackageName = file.getPackageName();
        if (filePackageName.equals(packageName)) {
            return false;
        }
        final Project project = file.getProject();
        final JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(project);
        final PsiPackage filePackage = psiFacade.findPackage(filePackageName);
        if (filePackage == null) {
            return false;
        }
        return filePackage.containsClassNamed(shortName);
    }

    /** True when java.lang contains a class with the same short name (implicit import). */
    private static boolean hasJavaLangImportConflict(String fqName, PsiJavaFile file) {
        final String shortName = ClassUtil.extractClassName(fqName);
        final String packageName = ClassUtil.extractPackageName(fqName);
        if (HardcodedMethodConstants.JAVA_LANG.equals(packageName)) {
            return false;
        }
        final Project project = file.getProject();
        final JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(project);
        final PsiPackage javaLangPackage = psiFacade.findPackage(HardcodedMethodConstants.JAVA_LANG);
        if (javaLangPackage == null) {
            return false;
        }
        return javaLangPackage.containsClassNamed(shortName);
    }

    private static boolean containsConflictingClass(String fqName, PsiJavaFile file) {
        final PsiClass[] classes = file.getClasses();
        for (PsiClass aClass : classes) {
            if (containsConflictingInnerClass(fqName, aClass)) {
                return true;
            }
        }
        return false;
    }

    /**
     * ImportUtils currently checks all inner classes, even those that are
     * contained in inner classes themselves, because it doesn't know the
     * location of the original fully qualified reference. It should really only
     * check if the containing class of the fully qualified reference has any
     * conflicting inner classes.
     */
    private static boolean containsConflictingInnerClass(String fqName, PsiClass aClass) {
        final String shortName = ClassUtil.extractClassName(fqName);
        if (shortName.equals(aClass.getName()) && !fqName.equals(aClass.getQualifiedName())) {
            return true;
        }
        final PsiClass[] classes = aClass.getInnerClasses();
        for (PsiClass innerClass : classes) {
            if (containsConflictingInnerClass(fqName, innerClass)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Adds a static import for {@code qualifierClass.memberName} at
     * {@code context} when safe, collapsing to an on-demand static import once
     * the code-style threshold is reached.
     *
     * @return true when the member is (now) statically importable at the context
     */
    public static boolean addStaticImport(@NotNull String qualifierClass,
                                          @NonNls @NotNull String memberName,
                                          @NotNull PsiElement context) {
        if (!nameCanBeStaticallyImported(qualifierClass, memberName, context)) {
            return false;
        }
        final PsiClass containingClass = PsiTreeUtil.getParentOfType(context, PsiClass.class);
        if (InheritanceUtil.isInheritor(containingClass, qualifierClass)) {
            // Inherited members need no import at all.
            return true;
        }
        final PsiFile psiFile = context.getContainingFile();
        if (!(psiFile instanceof PsiJavaFile)) {
            return false;
        }
        final PsiJavaFile javaFile = (PsiJavaFile)psiFile;
        final PsiImportList importList = javaFile.getImportList();
        if (importList == null) {
            return false;
        }
        final PsiImportStatementBase existingImportStatement = importList.findSingleImportStatement(memberName);
        if (existingImportStatement != null) {
            if (existingImportStatement instanceof PsiImportStaticStatement) {
                final PsiImportStaticStatement importStaticStatement =
                        (PsiImportStaticStatement)existingImportStatement;
                if (!memberName.equals(importStaticStatement.getReferenceName())) {
                    return false;
                }
                final PsiClass targetClass = importStaticStatement.resolveTargetClass();
                return targetClass != null && qualifierClass.equals(targetClass.getQualifiedName());
            }
            return false;
        }
        final PsiImportStaticStatement onDemandImportStatement =
                findOnDemandImportStaticStatement(importList, qualifierClass);
        if (onDemandImportStatement != null &&
            !hasOnDemandImportConflict(qualifierClass + '.' + memberName, javaFile)) {
            // Already covered by an existing "import static Qualifier.*;".
            return true;
        }
        final Project project = context.getProject();
        final GlobalSearchScope scope = context.getResolveScope();
        final JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(project);
        final PsiClass aClass = psiFacade.findClass(qualifierClass, scope);
        if (aClass == null) {
            return false;
        }
        final String qualifiedName = aClass.getQualifiedName();
        if (qualifiedName == null) {
            return false;
        }
        final List<PsiImportStaticStatement> imports = getMatchingImports(importList, qualifiedName);
        final int onDemandCount = JavaCodeStyleSettingsFacade.getInstance(project).getNamesCountToUseImportOnDemand();
        final PsiElementFactory elementFactory = psiFacade.getElementFactory();
        if (imports.size() + 1 < onDemandCount) {
            importList.add(elementFactory.createImportStaticStatement(aClass, memberName));
        } else {
            // Threshold reached: replace the individual imports with "import static Qualifier.*;".
            for (PsiImportStaticStatement importStatement : imports) {
                importStatement.delete();
            }
            importList.add(elementFactory.createImportStaticStatement(aClass, "*"));
        }
        return true;
    }

    /** Finds an existing {@code import static qualifierClass.*;} statement, or null. */
    @Nullable
    private static PsiImportStaticStatement findOnDemandImportStaticStatement(PsiImportList importList,
                                                                              String qualifierClass) {
        final PsiImportStaticStatement[] importStaticStatements = importList.getImportStaticStatements();
        for (PsiImportStaticStatement importStaticStatement : importStaticStatements) {
            if (!importStaticStatement.isOnDemand()) {
                continue;
            }
            final PsiJavaCodeReferenceElement importReference = importStaticStatement.getImportReference();
            if (importReference == null) {
                continue;
            }
            final String text = importReference.getText();
            if (qualifierClass.equals(text)) {
                return importStaticStatement;
            }
        }
        return null;
    }

    /** Collects the static imports whose target class is {@code className}. */
    private static List<PsiImportStaticStatement> getMatchingImports(@NotNull PsiImportList importList,
                                                                     @NotNull String className) {
        // FIX: was a raw "new ArrayList()" — use the diamond to keep the list type-safe.
        final List<PsiImportStaticStatement> imports = new ArrayList<>();
        for (PsiImportStaticStatement staticStatement : importList.getImportStaticStatements()) {
            final PsiClass psiClass = staticStatement.resolveTargetClass();
            if (psiClass == null) {
                continue;
            }
            if (!className.equals(psiClass.getQualifiedName())) {
                continue;
            }
            imports.add(staticStatement);
        }
        return imports;
    }

    /**
     * True when {@code member} is already reachable at {@code context} through
     * a (single or on-demand) static import, and not through inheritance.
     */
    public static boolean isStaticallyImported(@NotNull PsiMember member, @NotNull PsiElement context) {
        final PsiClass memberClass = member.getContainingClass();
        if (memberClass == null) {
            return false;
        }
        final PsiClass containingClass = PsiTreeUtil.getParentOfType(context, PsiClass.class);
        if (InheritanceUtil.isInheritorOrSelf(containingClass, memberClass, true)) {
            return false;
        }
        final PsiFile psiFile = context.getContainingFile();
        if (!(psiFile instanceof PsiJavaFile)) {
            return false;
        }
        final PsiJavaFile javaFile = (PsiJavaFile)psiFile;
        final PsiImportList importList = javaFile.getImportList();
        if (importList == null) {
            return false;
        }
        final String memberName = member.getName();
        if (memberName == null) {
            return false;
        }
        final PsiImportStatementBase existingImportStatement = importList.findSingleImportStatement(memberName);
        if (existingImportStatement instanceof PsiImportStaticStatement) {
            final PsiClass importClass =
                    ((PsiImportStaticStatement)existingImportStatement).resolveTargetClass();
            if (InheritanceUtil.isInheritorOrSelf(importClass, memberClass, true)) {
                return true;
            }
        }
        final String memberClassName = memberClass.getQualifiedName();
        if (memberClassName == null) {
            return false;
        }
        final PsiImportStaticStatement onDemandImportStatement =
                findOnDemandImportStaticStatement(importList, memberClassName);
        if (onDemandImportStatement != null) {
            if (!hasOnDemandImportConflict(memberClassName + '.' + memberName, javaFile)) {
                return true;
            }
        }
        return false;
    }

    private static boolean memberReferenced(PsiMember member, PsiElement context) {
        final MemberReferenceVisitor visitor = new MemberReferenceVisitor(member);
        context.accept(visitor);
        return visitor.isReferenceFound();
    }

    private static boolean membersReferenced(PsiMember[] members, PsiElement context) {
        final MemberReferenceVisitor visitor = new MemberReferenceVisitor(members);
        context.accept(visitor);
        return visitor.isReferenceFound();
    }

    /** Visitor that detects an unqualified reference to any of the given members. */
    private static class MemberReferenceVisitor extends JavaRecursiveElementVisitor {

        private final PsiMember[] members;
        private boolean referenceFound = false;

        public MemberReferenceVisitor(PsiMember member) {
            members = new PsiMember[]{member};
        }

        public MemberReferenceVisitor(PsiMember[] members) {
            this.members = members;
        }

        @Override
        public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
            if (referenceFound) {
                return;
            }
            super.visitReferenceElement(reference);
            if (reference.isQualified()) {
                // Qualified references don't depend on imports.
                return;
            }
            final PsiElement target = reference.resolve();
            for (PsiMember member : members) {
                if (member.equals(target)) {
                    referenceFound = true;
                    return;
                }
            }
        }

        public boolean isReferenceFound() {
            return referenceFound;
        }
    }

    /**
     * @return true, if the element contains a reference to a different class
     * than fullyQualifiedName but which has the same class name
     */
    public static boolean containsConflictingReference(PsiFile element, String fullyQualifiedName) {
        // Results are memoized per-file and invalidated on any PSI modification.
        final Map<String, Boolean> cachedValue =
                CachedValuesManager.getCachedValue(element, new CachedValueProvider<Map<String, Boolean>>() {
                    @Nullable
                    @Override
                    public Result<Map<String, Boolean>> compute() {
                        return new Result<Map<String, Boolean>>(
                                Collections.synchronizedMap(new HashMap<String, Boolean>()),
                                PsiModificationTracker.MODIFICATION_COUNT);
                    }
                });
        Boolean conflictingRef = cachedValue.get(fullyQualifiedName);
        if (conflictingRef != null) {
            return conflictingRef.booleanValue();
        }
        final ConflictingClassReferenceVisitor visitor =
                new ConflictingClassReferenceVisitor(fullyQualifiedName);
        element.accept(visitor);
        conflictingRef = visitor.isConflictingReferenceFound();
        cachedValue.put(fullyQualifiedName, conflictingRef);
        return conflictingRef.booleanValue();
    }

    /** Visitor that finds an unqualified reference to a same-named but different class. */
    private static class ConflictingClassReferenceVisitor extends JavaRecursiveElementVisitor {

        private final String name;
        private final String fullyQualifiedName;
        private boolean referenceFound = false;

        private ConflictingClassReferenceVisitor(String fullyQualifiedName) {
            name = ClassUtil.extractClassName(fullyQualifiedName);
            this.fullyQualifiedName = fullyQualifiedName;
        }

        @Override
        public void visitElement(PsiElement element) {
            if (referenceFound) return;
            super.visitElement(element);
        }

        @Override
        public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
            if (referenceFound) {
                return;
            }
            super.visitReferenceElement(reference);
            if (reference.getQualifier() != null) return;
            final PsiElement element = reference.resolve();
            if (!(element instanceof PsiClass) || element instanceof PsiTypeParameter) {
                return;
            }
            final PsiClass aClass = (PsiClass)element;
            final String testClassName = aClass.getName();
            final String testClassQualifiedName = aClass.getQualifiedName();
            if (testClassQualifiedName == null || testClassName == null ||
                testClassQualifiedName.equals(fullyQualifiedName) || !testClassName.equals(name)) {
                return;
            }
            referenceFound = true;
        }

        public boolean isConflictingReferenceFound() {
            return referenceFound;
        }
    }
}
/* * Copyright 2011 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.drools.guvnor.client.widgets.tables; import com.google.gwt.cell.client.ButtonCell; import com.google.gwt.cell.client.FieldUpdater; import com.google.gwt.core.client.GWT; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.uibinder.client.UiBinder; import com.google.gwt.uibinder.client.UiField; import com.google.gwt.uibinder.client.UiHandler; import com.google.gwt.user.cellview.client.CellTable; import com.google.gwt.user.cellview.client.Column; import com.google.gwt.user.cellview.client.TextColumn; import com.google.gwt.user.cellview.client.TextHeader; import com.google.gwt.user.client.Command; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.ui.Image; import com.google.gwt.user.client.ui.Widget; import com.google.gwt.view.client.AsyncDataProvider; import com.google.gwt.view.client.MultiSelectionModel; import com.google.gwt.view.client.ProvidesKey; import org.drools.guvnor.client.asseteditor.MultiViewRow; import org.drools.guvnor.client.common.GenericCallback; import org.drools.guvnor.client.explorer.AssetEditorPlace; import org.drools.guvnor.client.explorer.ClientFactory; import org.drools.guvnor.client.messages.ConstantsCore; import org.drools.guvnor.client.rpc.AbstractAssetPageRow; import org.drools.guvnor.client.rpc.Path; import org.drools.guvnor.client.rpc.PathImpl; import java.util.ArrayList; import java.util.HashSet; import 
java.util.List;
import java.util.Set;

/**
 * Widget that shows rows of paged data where columns "uuid", "name" and
 * "format" are common. A "checkbox" and "open" button column are added by
 * default. Additional columns can be inserted inbetween these columns by
 * overriding <code>addAncillaryColumns()</code>. A "RSS Feed" button can also
 * be included if required.
 * <p/>
 * Based upon work by Geoffrey de Smet.
 */
public abstract class AbstractAssetPagedTable<T extends AbstractAssetPageRow>
        extends AbstractPagedTable<T> {

    // UI
    // Raw-typed on purpose: UiBinder templates cannot be parameterised with
    // the generic owner type.
    @SuppressWarnings("rawtypes")
    interface AssetPagedTableBinder
            extends
            UiBinder<Widget, AbstractAssetPagedTable> {
    }

    protected static final ConstantsCore constants = GWT.create( ConstantsCore.class );

    private static AssetPagedTableBinder uiBinder = GWT.create( AssetPagedTableBinder.class );

    // Bound by UiBinder to the template's feed icon; hidden when no feed URL.
    @UiField()
    protected Image feedImage;

    // Commands executed when the table is unloaded (used to detach RSS feed
    // listeners registered elsewhere).
    protected Set<Command> unloadListenerSet = new HashSet<Command>();

    protected MultiSelectionModel<T> selectionModel;

    // URL of the optional RSS feed; null or "" means "no feed".
    protected String feedURL;

    private final ClientFactory clientFactory;

    /**
     * Convenience constructor for a table without an RSS feed.
     *
     * @param pageSize      number of rows per page
     * @param clientFactory factory used to navigate to asset editors
     */
    public AbstractAssetPagedTable(int pageSize,
                                   ClientFactory clientFactory) {
        this( pageSize,
              null,
              clientFactory );
    }

    /**
     * @param pageSize      number of rows per page
     * @param feedURL       RSS feed URL; the feed icon is hidden when this is
     *                      null or empty
     * @param clientFactory factory used to navigate to asset editors
     */
    public AbstractAssetPagedTable(int pageSize,
                                   String feedURL,
                                   ClientFactory clientFactory) {
        super( pageSize );
        this.feedURL = feedURL;
        if ( this.feedURL == null || "".equals( feedURL ) ) {
            this.feedImage.setVisible( false );
        }
        this.clientFactory = clientFactory;
    }

    /**
     * Register an UnloadListener used to remove "RSS Feed Listeners" when the
     * table is unloaded
     *
     * @param unloadListener
     */
    public void addUnloadListener(Command unloadListener) {
        unloadListenerSet.add( unloadListener );
    }

    /**
     * Return an array of selected Paths. API is maintained for backwards
     * compatibility of legacy code with AssetItemGrid's implementation
     *
     * @return the selected paths, or null when nothing is selected
     *         (null — not an empty array — is what legacy callers expect)
     */
    public Path[] getSelectedRowUUIDs() {
        Set<T> selectedRows = selectionModel.getSelectedSet();

        // Compatibility with existing API
        if ( selectedRows.size() == 0 ) {
            return null;
        }

        // Create the array of Paths
        Path[] uuids = new PathImpl[selectedRows.size()];
        int rowCount = 0;
        for (T row : selectedRows) {
            uuids[rowCount++] = row.getPath();
        }
        return uuids;
    }

    /**
     * Archive all currently selected assets, after user confirmation.
     * Bound by UiBinder to the "archiveSelectedButton" in the template.
     *
     * @param e the click event
     */
    @UiHandler("archiveSelectedButton")
    public void archiveSelectedAssets(ClickEvent e) {
        // getSelectedRowUUIDs() returns null (not empty) when nothing is selected
        if (getSelectedRowUUIDs() == null) {
            Window.alert(constants.PleaseSelectAnItemToArchive());
            return;
        }
        if (!Window.confirm(constants.AreYouSureYouWantToArchiveTheseItems())) {
            return;
        }
        assetService.archiveAssets(getSelectedRowUUIDs(), true, new GenericCallback<Void>() {
            public void onSuccess(Void arg0) {
                Window.alert(constants.ArchivedAssets());
                refresh();
            }
        });
    }

    /**
     * Open selected item(s) to a single tab
     *
     * @param e
     */
    @UiHandler("openSelectedToSingleTabButton")
    public void openSelectedToSingleTab(ClickEvent e) {
        Set<T> selectedSet = selectionModel.getSelectedSet();
        List<MultiViewRow> multiViewRowList = new ArrayList<MultiViewRow>( selectedSet.size() );
        for (T selected : selectedSet) {
            multiViewRowList.add( new MultiViewRow( selected.getPath().getUUID(),
                    selected.getName(),
                    selected.getFormat() ) );
        }
        // NOTE(review): multi-asset navigation is currently disabled; the rows
        // are still collected above, presumably for when this is re-enabled.
        // clientFactory.getDeprecatedPlaceController().goTo( new MultiAssetPlace( multiViewRowList ) );
    }

    /**
     * Refresh table programmatically. Clears the selection and forces the
     * data provider to reload the current visible range.
     */
    public void refresh() {
        selectionModel.clear();
        cellTable.setVisibleRangeAndClearData( cellTable.getVisibleRange(),
                                               true );
    }

    /**
     * Set up table and common columns. Additional columns can be appended
     * between the "checkbox" and "open" columns by overriding
     * <code>addAncillaryColumns()</code>
     */
    @Override
    protected void doCellTable() {

        // Rows are keyed by Path so selection survives data refreshes
        ProvidesKey<T> providesKey = new ProvidesKey<T>() {
            public Object getKey(T row) {
                return row.getPath();
            }
        };

        cellTable = new CellTable<T>( providesKey );
        selectionModel = new MultiSelectionModel<T>( providesKey );
        cellTable.setSelectionModel( selectionModel );
        SelectionColumn.createAndAddSelectionColumn( cellTable );

        ColumnPicker<T> columnPicker = new ColumnPicker<T>( cellTable );
        SortableHeaderGroup<T> sortableHeaderGroup = new SortableHeaderGroup<T>( cellTable );

        // UUID column; hidden by default (last argument is "visible" = false)
        final TextColumn<T> uuidNumberColumn = new TextColumn<T>() {
            public String getValue(T row) {
                return row.getPath().getUUID();
            }
        };
        columnPicker.addColumn( uuidNumberColumn,
                new SortableHeader<T, String>( sortableHeaderGroup,
                        constants.uuid(),
                        uuidNumberColumn ),
                false );

        // Add any additional columns
        addAncillaryColumns( columnPicker,
                sortableHeaderGroup );

        // Add "Open" button column
        Column<T, String> openColumn = new Column<T, String>( new ButtonCell() ) {
            public String getValue(T row) {
                return constants.Open();
            }
        };
        openColumn.setFieldUpdater( new FieldUpdater<T, String>() {
            public void update(int index,
                               T row,
                               String value) {
                // Navigate to the asset editor for the clicked row
                clientFactory.getPlaceManager().goTo( new AssetEditorPlace( row.getPath().getUUID() ) );
            }
        } );
        columnPicker.addColumn( openColumn,
                new TextHeader( constants.Open() ),
                true );

        cellTable.setWidth( "100%" );
        columnPickerButton = columnPicker.createToggleButton();
    }

    /**
     * Disconnect all listening consumers
     */
    @Override
    protected void onUnload() {
        super.onUnload();
        for (Command unloadListener : unloadListenerSet) {
            unloadListener.execute();
        }
    }

    /**
     * Link a data provider to the table
     *
     * @param dataProvider
     */
    public void setDataProvider(AsyncDataProvider<T> dataProvider) {
        this.dataProvider = dataProvider;
        this.dataProvider.addDataDisplay( cellTable );
    }

    /**
     * Construct a widget representing the table
     */
    @Override
    protected Widget makeWidget() {
        return uiBinder.createAndBindUi( this );
    }

    /**
     * Open the RSS feed in a new browser window. Bound by UiBinder to the
     * feed icon; a no-op when the feed is hidden or unconfigured.
     *
     * @param e the click event
     */
    @UiHandler("feedImage")
    void openFeed(ClickEvent e) {
        if ( !feedImage.isVisible() || feedURL == null || "".equals( feedURL ) ) {
            return;
        }
        Window.open( feedURL,
                     "_blank",
                     null );
    }

    /**
     * Open selected item(s) to separate tabs
     *
     * @param e
     */
    @UiHandler("openSelectedButton")
    void openSelected(ClickEvent e) {
        Set<T> selectedSet = selectionModel.getSelectedSet();
        for (T selected : selectedSet) {
            clientFactory.getPlaceManager().goTo( new AssetEditorPlace( selected.getPath().getUUID() ) );
        }
    }

    /**
     * Refresh table in response to ClickEvent
     *
     * @param e
     */
    @UiHandler("refreshButton")
    void refresh(ClickEvent e) {
        refresh();
    }

}
/** * The MIT License * Copyright (c) 2015 Estonian Information System Authority (RIA), Population Register Centre (VRK) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package ee.ria.xroad.proxy.clientproxy;

import ee.ria.xroad.common.CodedException;
import ee.ria.xroad.common.SystemProperties;
import ee.ria.xroad.common.conf.globalconf.GlobalConf;
import ee.ria.xroad.common.conf.serverconf.ServerConf;
import ee.ria.xroad.common.identifier.CentralServiceId;
import ee.ria.xroad.common.identifier.SecurityServerId;
import ee.ria.xroad.common.identifier.ServiceId;
import ee.ria.xroad.common.message.SoapFault;
import ee.ria.xroad.common.message.SoapMessageImpl;
import ee.ria.xroad.common.util.MimeUtils;
import ee.ria.xroad.proxy.conf.KeyConf;
import ee.ria.xroad.proxy.testsuite.TestGlobalConf;
import ee.ria.xroad.proxy.testsuite.TestKeyConf;
import ee.ria.xroad.proxy.testsuite.TestServerConf;

import com.github.tomakehurst.wiremock.WireMockServer;
import com.github.tomakehurst.wiremock.client.WireMock;
import org.apache.http.HttpHeaders;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.ProvideSystemProperty;
import org.junit.rules.ExpectedException;
import org.mockito.ArgumentCaptor;
import org.xml.sax.InputSource;

import javax.net.ServerSocketFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.wsdl.Definition;
import javax.wsdl.factory.WSDLFactory;

import java.io.IOException;
import java.net.ServerSocket;
import java.util.Arrays;
import java.util.List;

import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
import static com.github.tomakehurst.wiremock.client.WireMock.equalTo;
import static com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor;
import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;
import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo;
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options;
import static ee.ria.xroad.common.ErrorCodes.X_INVALID_REQUEST;
import static ee.ria.xroad.common.util.MimeTypes.MULTIPART_RELATED;
import static ee.ria.xroad.common.util.MimeTypes.TEXT_XML;
import static ee.ria.xroad.common.util.MimeTypes.TEXT_XML_UTF8;
import static ee.ria.xroad.proxy.util.MetaserviceTestUtil.CodedExceptionMatcher.faultCodeEquals;
import static ee.ria.xroad.proxy.util.MetaserviceTestUtil.StubServletOutputStream;
import static ee.ria.xroad.proxy.util.MetaserviceTestUtil.parseOperationNamesFromWSDLDefinition;
import static java.util.Collections.singletonList;
import static javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.isIn;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit test for {@link WsdlRequestProcessor}.
 * <p/>
 * Uses a WireMock server (standing in for the client proxy) on a port chosen
 * at class-load time, mocked servlet request/response objects, and JUnit
 * system-property rules to point the processor at the mock server.
 */
public class WsdlRequestProcessorTest {

    private static final String EXPECTED_XR_INSTANCE = "EE";

    // A free ephemeral port picked once for the whole test class; the
    // processor is pointed here via the PROXY_CLIENT_HTTP_PORT rule below.
    private static final int WSDL_SERVER_PORT;

    static {
        // Bind to port 0 to let the OS pick a free port, then release it.
        // NOTE(review): small race window between closing the probe socket and
        // WireMock binding the same port — acceptable for tests.
        try (ServerSocket s = ServerSocketFactory.getDefault().createServerSocket(0)) {
            s.setReuseAddress(true);
            WSDL_SERVER_PORT = s.getLocalPort();
        } catch (IOException e) {
            throw new IllegalStateException("Unable to select port");
        }
    }

    private static final String EXPECTED_WSDL_QUERY_PATH = "/";

    private WireMockServer mockServer;

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    @Rule
    // by default, the request processor contacts a client proxy
    public final ProvideSystemProperty targetServerProperty =
            new ProvideSystemProperty(SystemProperties.PROXY_CLIENT_HTTP_PORT,
                    Integer.toString(WSDL_SERVER_PORT));

    @Rule
    public final ProvideSystemProperty keepAlive =
            new ProvideSystemProperty("http.keepAlive", "false");

    private HttpServletRequest mockRequest;
    private HttpServletResponse mockResponse;
    private StubServletOutputStream mockServletOutputStream;

    /**
     * Init data for tests
     */
    @Before
    public void init() throws IOException {

        GlobalConf.reload(new TestGlobalConf());
        KeyConf.reload(new TestKeyConf());

        mockRequest = mock(HttpServletRequest.class);
        mockResponse = mock(HttpServletResponse.class);
        mockServletOutputStream = new StubServletOutputStream();
        when(mockResponse.getOutputStream()).thenReturn(mockServletOutputStream);

        this.mockServer = new WireMockServer(options().port(WSDL_SERVER_PORT));
    }

    @After
    public void tearDown() {
        this.mockServer.stop();
    }

    // createConnection() should POST the SOAP message bytes to the client
    // proxy with a text/xml UTF-8 content type.
    @Test
    public void shouldCreateConnection() throws Exception {

        // setup
        SoapMessageImpl mockSoapMessage = mock(SoapMessageImpl.class);

        mockServer.stubFor(WireMock.any(urlPathEqualTo(EXPECTED_WSDL_QUERY_PATH))
                .willReturn(aResponse()));
        mockServer.start();

        final String expectedMessage = "expectedMessage135122";
        when(mockSoapMessage.getBytes()).thenReturn(expectedMessage.getBytes());

        WsdlRequestProcessor processorToTest = new WsdlRequestProcessor(mockRequest, mockResponse);

        // execution
        processorToTest.createConnection(mockSoapMessage);

        // verification
        mockServer.verify(postRequestedFor(urlEqualTo(EXPECTED_WSDL_QUERY_PATH))
                .withHeader(HttpHeaders.CONTENT_TYPE, equalTo(TEXT_XML_UTF8))
                .withRequestBody(equalTo(expectedMessage)));
    }

    // A non-2xx proxy response must surface as an exception carrying the
    // HTTP status code and reason phrase.
    @Test
    public void shouldThrowIfProxyResponseNotOk() throws Exception {

        // setup
        SoapMessageImpl mockSoapMessage = mock(SoapMessageImpl.class);

        final String expectedErrorMessage = "Some error happened!";
        final int expectedErrorCode = SC_INTERNAL_SERVER_ERROR;

        mockServer.stubFor(WireMock.any(urlPathEqualTo(EXPECTED_WSDL_QUERY_PATH))
                .willReturn(WireMock.aResponse().withStatus(expectedErrorCode)
                        .withStatusMessage(expectedErrorMessage)));
        mockServer.start();

        when(mockSoapMessage.getBytes()).thenReturn("justsomething".getBytes());

        WsdlRequestProcessor processorToTest = new WsdlRequestProcessor(mockRequest, mockResponse);

        thrown.expectMessage(is("Received HTTP error: "
                + expectedErrorCode + " - " + expectedErrorMessage));

        // execution
        processorToTest.createConnection(mockSoapMessage);

        // expecting an exception..
    }

    // A full set of request parameters should yield a fully-populated ServiceId.
    @Test
    public void shouldGetServiceId() throws Exception {

        // setup
        final ServiceId expectedServiceId = ServiceId.create(EXPECTED_XR_INSTANCE,
                "someMember", "serviceCode3322", "subsystem3", "serviceCode3", "version1.1");

        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_INSTANCE_IDENTIFIER)))
                .thenReturn(expectedServiceId.getXRoadInstance());
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_MEMBER_CLASS)))
                .thenReturn(expectedServiceId.getMemberClass());
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_MEMBER_CODE)))
                .thenReturn(expectedServiceId.getMemberCode());
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_SERVICE_CODE)))
                .thenReturn(expectedServiceId.getServiceCode());
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_SUBSYSTEM_CODE)))
                .thenReturn(expectedServiceId.getSubsystemCode());
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_VERSION)))
                .thenReturn(expectedServiceId.getServiceVersion());

        WsdlRequestProcessor processorToTest = new WsdlRequestProcessor(mockRequest, mockResponse);

        // execution
        ServiceId serviceId = processorToTest.getServiceId();

        // verification
        assertThat("Service id does not match", serviceId, is(expectedServiceId));
    }

    // Without member class/code parameters, the processor should fall back to
    // a CentralServiceId.
    @Test
    public void shouldGetCentralServiceIdWhenNoMemberClassOrCode() throws Exception {

        // setup
        final CentralServiceId expectedCentralServiceId =
                CentralServiceId.create(EXPECTED_XR_INSTANCE, "serviceCode3322");

        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_INSTANCE_IDENTIFIER)))
                .thenReturn(expectedCentralServiceId.getXRoadInstance());
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_SERVICE_CODE)))
                .thenReturn(expectedCentralServiceId.getServiceCode());

        WsdlRequestProcessor processorToTest = new WsdlRequestProcessor(mockRequest, mockResponse);

        // execution
        ServiceId serviceId = processorToTest.getServiceId();

        // verification
        assertThat("Service id does not match", serviceId, is(expectedCentralServiceId));
    }

    // Empty instance identifier -> X_INVALID_REQUEST.
    @Test
    public void shouldNotGetServiceIdWhenMissingXroadInstance() throws Exception {

        // setup
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_INSTANCE_IDENTIFIER)))
                .thenReturn("");

        WsdlRequestProcessor processorToTest = new WsdlRequestProcessor(mockRequest, mockResponse);

        thrown.expect(CodedException.class);
        thrown.expect(faultCodeEquals(X_INVALID_REQUEST));
        thrown.expectMessage(containsString("Must specify instance identifier"));

        // execution
        processorToTest.getServiceId();

        // expecting an exception..
    }

    // Blank service code -> X_INVALID_REQUEST.
    @Test
    public void shouldNotGetServiceIdWhenMissingServiceCode() throws Exception {

        // setup
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_INSTANCE_IDENTIFIER)))
                .thenReturn(EXPECTED_XR_INSTANCE);
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_SERVICE_CODE)))
                .thenReturn(" ");

        WsdlRequestProcessor processorToTest = new WsdlRequestProcessor(mockRequest, mockResponse);

        thrown.expect(CodedException.class);
        thrown.expect(faultCodeEquals(X_INVALID_REQUEST));
        thrown.expectMessage(containsString("Must specify service code"));

        // execution
        processorToTest.getServiceId();

        // expecting an exception..
    }

    // Member class given without member code -> X_INVALID_REQUEST.
    @Test
    public void shouldNotGetServiceIdWhenMissingMemberCode() throws Exception {

        // setup
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_INSTANCE_IDENTIFIER)))
                .thenReturn(EXPECTED_XR_INSTANCE);
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_MEMBER_CLASS)))
                .thenReturn("government");
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_SERVICE_CODE)))
                .thenReturn("someServiceCode");

        WsdlRequestProcessor processorToTest = new WsdlRequestProcessor(mockRequest, mockResponse);

        thrown.expect(CodedException.class);
        thrown.expect(faultCodeEquals(X_INVALID_REQUEST));
        thrown.expectMessage(containsString("InvalidRequest: 'memberCode' must not be blank"));

        // execution
        processorToTest.getServiceId();

        // expecting an exception..
    }

    // End-to-end happy path: process() should return text/xml whose WSDL
    // contains the operations present in the canned multipart response.
    @Test
    public void shouldGetWsdl() throws Exception {

        // setup
        mockServer.stubFor(WireMock.any(urlPathEqualTo(EXPECTED_WSDL_QUERY_PATH))
                .willReturn(aResponse()
                        //file under resources/__files
                        .withBodyFile("wsdl_response.xml")
                        // this needs to match the SOAP message boundary in the file
                        .withHeader(HttpHeaders.CONTENT_TYPE,
                                MULTIPART_RELATED + "; type=\"text/xml\"; charset=UTF-8;"
                                        + " boundary=xroadZTLLyIMMYnAYliBumWCqHJYAhutxNf")));
        mockServer.start();

        final SecurityServerId providedIdentifier = SecurityServerId.create(EXPECTED_XR_INSTANCE,
                "memberClassGov", "memberCode11", "serverCode_");
        ServerConf.reload(new TestServerConf() {
            @Override
            public SecurityServerId getIdentifier() {
                return providedIdentifier;
            }
        });

        final ServiceId expectedServiceId = ServiceId.create(EXPECTED_XR_INSTANCE,
                "someMember", "serviceCode3322", "subsystem3", "serviceCode3", "version1.1");

        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_INSTANCE_IDENTIFIER)))
                .thenReturn(expectedServiceId.getXRoadInstance());
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_MEMBER_CLASS)))
                .thenReturn(expectedServiceId.getMemberClass());
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_MEMBER_CODE)))
                .thenReturn(expectedServiceId.getMemberCode());
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_SERVICE_CODE)))
                .thenReturn(expectedServiceId.getServiceCode());
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_SUBSYSTEM_CODE)))
                .thenReturn(expectedServiceId.getSubsystemCode());
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_VERSION)))
                .thenReturn(expectedServiceId.getServiceVersion());

        final List<String> expectedWSDLServiceNames =
                Arrays.asList("getRandom", "helloService");

        WsdlRequestProcessor processorToTest = new WsdlRequestProcessor(mockRequest, mockResponse);

        // execution
        processorToTest.process();

        // verification
        assertContentTypeIsIn(singletonList(TEXT_XML));

        Definition definition = WSDLFactory.newInstance().newWSDLReader()
                .readWSDL(null, new InputSource(mockServletOutputStream.getAsInputStream()));

        List<String> operationNames = parseOperationNamesFromWSDLDefinition(definition);

        assertThat("Expected to find certain operations",
                operationNames,
                containsInAnyOrder(expectedWSDLServiceNames.toArray()));
    }

    // A SOAP fault from the proxy should be re-thrown as a CodedException
    // carrying the fault's code and message.
    @Test
    public void shouldThrowWhenReceivingSoapFault() throws Exception {

        // setup
        final String expectedMessage = "That was an invalid request, buddy!";
        final String expectedErrorCode = X_INVALID_REQUEST;

        final CodedException generatedEx = new CodedException(expectedErrorCode, expectedMessage);

        mockServer.stubFor(WireMock.any(urlPathEqualTo(EXPECTED_WSDL_QUERY_PATH))
                .willReturn(aResponse()
                        .withBody(SoapFault.createFaultXml(generatedEx).getBytes(MimeUtils.UTF8))
                        .withHeader(HttpHeaders.CONTENT_TYPE, TEXT_XML_UTF8)));
        mockServer.start();

        final SecurityServerId providedIdentifier = SecurityServerId.create(EXPECTED_XR_INSTANCE,
                "memberClassGov", "memberCode11", "serverCode_");
        ServerConf.reload(new TestServerConf() {
            @Override
            public SecurityServerId getIdentifier() {
                return providedIdentifier;
            }
        });

        final CentralServiceId expectedCentralServiceId =
                CentralServiceId.create(EXPECTED_XR_INSTANCE, "serviceCode3322");

        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_INSTANCE_IDENTIFIER)))
                .thenReturn(expectedCentralServiceId.getXRoadInstance());
        when(mockRequest.getParameter(eq(WsdlRequestProcessor.PARAM_SERVICE_CODE)))
                .thenReturn(expectedCentralServiceId.getServiceCode());

        WsdlRequestProcessor processorToTest = new WsdlRequestProcessor(mockRequest, mockResponse);

        thrown.expect(CodedException.class);
        thrown.expect(faultCodeEquals(expectedErrorCode));
        thrown.expectMessage(allOf(containsString(expectedErrorCode),
                containsString(expectedMessage)));

        // execution
        processorToTest.process();

        // expecting an exception..
    }

    // Assert that the (single) content type written to the mocked response is
    // one of the allowed values.
    private void assertContentTypeIsIn(List<String> allowedContentTypes) {
        ArgumentCaptor<String> contentTypeCaptor = ArgumentCaptor.forClass(String.class);
        verify(mockResponse).setContentType(contentTypeCaptor.capture());
        assertThat("Wrong content type",
                contentTypeCaptor.getValue(),
                isIn(allowedContentTypes));
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.execution.junit2.configuration;

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;

import javax.swing.text.BadLocationException;
import javax.swing.text.PlainDocument;

import com.intellij.execution.JavaExecutionUtil;
import com.intellij.execution.junit.JUnitConfiguration;
import com.intellij.execution.junit.JUnitUtil;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.IndexNotReadyException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiClass;

// Author: dyoma

/**
 * UI model for a JUnit run configuration: maps the "test kind" selector
 * (package / class / method / pattern / ...) and its associated text documents
 * to and from {@link JUnitConfiguration.Data}.
 */
public class JUnitConfigurationModel {
  public static final int ALL_IN_PACKAGE = 0;
  public static final int CLASS = 1;
  public static final int METHOD = 2;
  public static final int PATTERN = 3;
  public static final int DIR = 4;
  public static final int CATEGORY = 5;
  public static final int UNIQUE_ID = 6;
  public static final int BY_SOURCE_POSITION = 7;
  public static final int BY_SOURCE_CHANGES = 8;

  // Test-object identifiers indexed by the type constants above.
  private static final List<String> ourTestObjects;

  static {
    ourTestObjects = Arrays.asList(JUnitConfiguration.TEST_PACKAGE,
                                   JUnitConfiguration.TEST_CLASS,
                                   JUnitConfiguration.TEST_METHOD,
                                   JUnitConfiguration.TEST_PATTERN,
                                   JUnitConfiguration.TEST_DIRECTORY,
                                   JUnitConfiguration.TEST_CATEGORY,
                                   JUnitConfiguration.TEST_UNIQUE_ID,
                                   JUnitConfiguration.BY_SOURCE_POSITION,
                                   JUnitConfiguration.BY_SOURCE_CHANGES);
  }

  private JUnitConfigurable myListener;
  // Currently selected test kind; -1 until setType()/reset() is called.
  private int myType = -1;
  // Text documents for the editable kinds (indices ALL_IN_PACKAGE..CATEGORY);
  // entries are either javax.swing PlainDocument or IntelliJ editor Document.
  private final Object[] myJUnitDocuments = new Object[6];
  private final Project myProject;

  public JUnitConfigurationModel(final Project project) {
    myProject = project;
  }

  /**
   * Selects a test kind. Out-of-range values are coerced to {@link #CLASS}.
   *
   * @return false when the type was already selected (no event fired)
   */
  public boolean setType(int type) {
    if (type == myType) {
      return false;
    }
    if (type < 0 || type >= ourTestObjects.size()) {
      type = CLASS;
    }
    myType = type;
    fireTypeChanged(type);
    return true;
  }

  private void fireTypeChanged(final int newType) {
    // Listener is expected to be set (via setListener) before any type change.
    myListener.onTypeChanged(newType);
  }

  public void setListener(final JUnitConfigurable listener) {
    myListener = listener;
  }

  public Object getJUnitDocument(final int i) {
    return myJUnitDocuments[i];
  }

  public void setJUnitDocument(final int i, Object doc) {
    myJUnitDocuments[i] = doc;
  }

  /**
   * Writes the model state into {@code configuration}, regenerating its name
   * when the configuration still uses a generated one.
   */
  public void apply(final Module module, final JUnitConfiguration configuration) {
    final boolean shouldUpdateName = configuration.isGeneratedName();
    applyTo(configuration.getPersistentData(), module);
    if (shouldUpdateName && !JavaExecutionUtil.isNewName(configuration.getName())) {
      configuration.setGeneratedName();
    }
  }

  private void applyTo(final JUnitConfiguration.Data data, final Module module) {
    final String testObject = getTestObject();
    final String className = getJUnitTextValue(CLASS);
    data.TEST_OBJECT = testObject;
    // FIX: compare test-object identifiers with equals() instead of ==/!=.
    // The previous reference comparison only worked because getTestObject()
    // returns the interned constants; equals() keeps the behavior and is
    // robust against non-interned values.
    if (!JUnitConfiguration.TEST_PACKAGE.equals(testObject)
        && !JUnitConfiguration.TEST_PATTERN.equals(testObject)
        && !JUnitConfiguration.TEST_DIRECTORY.equals(testObject)
        && !JUnitConfiguration.TEST_CATEGORY.equals(testObject)
        && !JUnitConfiguration.BY_SOURCE_CHANGES.equals(testObject)) {
      // Class-based kinds: resolve the PSI class when possible so the
      // configuration tracks renames; otherwise fall back to the raw name.
      try {
        data.METHOD_NAME = getJUnitTextValue(METHOD);
        final PsiClass testClass = !myProject.isDefault() && !StringUtil.isEmptyOrSpaces(className)
                                   ? JUnitUtil.findPsiClass(className, module, myProject)
                                   : null;
        if (testClass != null && testClass.isValid()) {
          data.setMainClass(testClass);
        }
        else {
          data.MAIN_CLASS_NAME = className;
        }
      }
      catch (ProcessCanceledException | IndexNotReadyException e) {
        // Indexing not ready / cancelled: keep the textual class name.
        data.MAIN_CLASS_NAME = className;
      }
    }
    else if (!JUnitConfiguration.BY_SOURCE_CHANGES.equals(testObject)) {
      if (JUnitConfiguration.TEST_PACKAGE.equals(testObject)) {
        data.PACKAGE_NAME = getJUnitTextValue(ALL_IN_PACKAGE);
      }
      else if (JUnitConfiguration.TEST_DIRECTORY.equals(testObject)) {
        data.setDirName(getJUnitTextValue(DIR));
      }
      else if (JUnitConfiguration.TEST_CATEGORY.equals(testObject)) {
        data.setCategoryName(getJUnitTextValue(CATEGORY));
      }
      else {
        // Pattern kind: "||"-separated list, empty segments dropped,
        // insertion order preserved.
        final LinkedHashSet<String> set = new LinkedHashSet<>();
        final String[] patterns = getJUnitTextValue(PATTERN).split("\\|\\|");
        for (String pattern : patterns) {
          if (pattern.length() > 0) {
            set.add(pattern);
          }
        }
        data.setPatterns(set);
      }
      data.MAIN_CLASS_NAME = "";
      data.METHOD_NAME = "";
    }
  }

  // Throws IndexOutOfBoundsException if called before a type is selected.
  private String getTestObject() {
    return ourTestObjects.get(myType);
  }

  private String getJUnitTextValue(final int index) {
    return getDocumentText(index, myJUnitDocuments);
  }

  // Reads the full text of either a Swing PlainDocument or an editor Document.
  private static String getDocumentText(final int index, final Object[] documents) {
    final Object document = documents[index];
    if (document instanceof PlainDocument) {
      try {
        return ((PlainDocument)document).getText(0, ((PlainDocument)document).getLength());
      }
      catch (BadLocationException e) {
        throw new RuntimeException(e);
      }
    }
    return ((Document)document).getText();
  }

  /**
   * Loads the model state from {@code configuration}'s persistent data.
   */
  public void reset(final JUnitConfiguration configuration) {
    final JUnitConfiguration.Data data = configuration.getPersistentData();
    setTestType(data.TEST_OBJECT);
    setJUnitTextValue(ALL_IN_PACKAGE, data.getPackageName());
    // Inner-class separators ('$') are shown as '.' in the UI.
    setJUnitTextValue(CLASS, data.getMainClassName() != null
                             ? data.getMainClassName().replaceAll("\\$", "\\.")
                             : "");
    setJUnitTextValue(METHOD, data.getMethodNameWithSignature());
    setJUnitTextValue(PATTERN, data.getPatternPresentation());
    setJUnitTextValue(DIR, data.getDirName());
    setJUnitTextValue(CATEGORY, data.getCategory());
  }

  private void setJUnitTextValue(final int index, final String text) {
    setDocumentText(index, text, myJUnitDocuments);
  }

  // Replaces the full text of either document flavor; editor Documents must be
  // modified inside a write command action.
  private void setDocumentText(final int index, final String text, final Object[] documents) {
    final Object document = documents[index];
    if (document instanceof PlainDocument) {
      try {
        ((PlainDocument)document).remove(0, ((PlainDocument)document).getLength());
        ((PlainDocument)document).insertString(0, text, null);
      }
      catch (BadLocationException e) {
        throw new RuntimeException(e);
      }
    }
    else {
      WriteCommandAction.runWriteCommandAction(myProject,
          () -> ((Document)document).replaceString(0, ((Document)document).getTextLength(), text));
    }
  }

  // indexOf returns -1 for unknown identifiers; setType coerces that to CLASS.
  private void setTestType(final String testObject) {
    setType(ourTestObjects.indexOf(testObject));
  }
}
package com.outbrain.ob1k.concurrent.lazy; import com.google.common.base.Function; import com.google.common.base.Supplier; import com.outbrain.ob1k.concurrent.*; import com.outbrain.ob1k.concurrent.handlers.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; /** * a future that contains a producer. each time the future is consumed the producer is activated * to deliver a value or an error to the consumer. * <p/> * the final result is never stored so the future is actually stateless as opposed to the eager future * that eventually holds the final result. * <p/> * the lazy future represent a computation that eventually creates a new value. that value can be consumed many times * by calling consume and supplying a consumer. * * @author asy ronen */ public final class LazyComposableFuture<T> implements ComposableFuture<T> { private final Producer<T> producer; private final Executor executor; private LazyComposableFuture(final Producer<T> producer) { this(producer, null); } private LazyComposableFuture(final Producer<T> producer, final Executor executor) { this.producer = producer; this.executor = executor; } public static <T> LazyComposableFuture<T> fromValue(final T value) { return new LazyComposableFuture<>(new Producer<T>() { @Override public void produce(final Consumer<T> consumer) { consumer.consume(Try.fromValue(value)); } }); } public static <T> LazyComposableFuture<T> fromError(final Throwable error) { return new LazyComposableFuture<>(new Producer<T>() { @Override public void produce(final Consumer<T> consumer) { consumer.consume(Try.<T>fromError(error)); } }); } public static <T> ComposableFuture<T> build(final Producer<T> producer) { return new LazyComposableFuture<>(producer); } public static <T> LazyComposableFuture<T> apply(final Supplier<T> 
supplier) { return new LazyComposableFuture<>(new Producer<T>() { @Override public void produce(final Consumer<T> consumer) { try { consumer.consume(Try.fromValue(supplier.get())); } catch (final Exception e) { consumer.consume(Try.<T>fromError(e)); } } }); } public static <T> LazyComposableFuture<T> submit(final Executor executor, final Callable<T> task, final boolean delegateHandler) { return new LazyComposableFuture<>(new Producer<T>() { @Override public void produce(final Consumer<T> consumer) { executor.execute(new Runnable() { @Override public void run() { try { consumer.consume(new Try.Success<>(task.call())); } catch (final Exception e) { consumer.consume(Try.<T>fromError(e)); } } }); } }, delegateHandler ? executor : null); } public static <T> LazyComposableFuture<T> schedule(final Scheduler scheduler, final Callable<T> task, final long delay, final TimeUnit timeUnit) { return new LazyComposableFuture<>(new Producer<T>() { @Override public void produce(final Consumer<T> consumer) { scheduler.schedule(new Runnable() { @Override public void run() { try { consumer.consume(new Try.Success<>(task.call())); } catch (final Exception e) { consumer.consume(Try.<T>fromError(e)); } } }, delay, timeUnit); } }); } public static <T> LazyComposableFuture<T> collectFirst(final List<ComposableFuture<T>> futures) { return new LazyComposableFuture<>(new Producer<T>() { @Override public void produce(final Consumer<T> consumer) { final AtomicBoolean done = new AtomicBoolean(); for (final ComposableFuture<T> future : futures) { future.consume(new Consumer<T>() { @Override public void consume(final Try<T> result) { if (done.compareAndSet(false, true)) { consumer.consume(result); } } }); } } }); } public static <T> LazyComposableFuture<List<T>> collectAll(final List<ComposableFuture<T>> futures) { return new LazyComposableFuture<>(new Producer<List<T>>() { @Override public void produce(final Consumer<List<T>> consumer) { final AtomicInteger counter = new 
// ---------------------------------------------------------------------------
// Interior of LazyComposableFuture<T> (class header is above this chunk).
// NOTE(review): the first statement below continues a declaration begun above
// ("final AtomicInteger counter = new ..."); it is left byte-identical.
// ---------------------------------------------------------------------------
AtomicInteger(futures.size());
        final AtomicBoolean errorTrigger = new AtomicBoolean(false);
        // Results are keyed by each future's original position so the final list preserves order.
        final ConcurrentMap<Integer, Try<T>> results = new ConcurrentHashMap<>(futures.size());
        int index = 0;
        for (final ComposableFuture<T> future : futures) {
          final int i = index++;
          future.consume(new Consumer<T>() {
            @Override
            public void consume(final Try<T> result) {
              results.put(i, result);
              if (result.isSuccess()) {
                // The future whose success drops the counter to zero delivers the combined list.
                final int count = counter.decrementAndGet();
                if (count == 0) {
                  consumer.consume(Try.fromValue(createResultList(results)));
                }
              } else {
                // First error wins; zeroing the counter prevents late successes from also firing.
                if (errorTrigger.compareAndSet(false, true)) {
                  counter.set(0);
                  consumer.consume(Try.<List<T>>fromError(result.getError()));
                }
              }
            }
          });
        }
      }
    });
  }

  /**
   * Assembles the collected results into a list ordered by the futures' original indices.
   * Entries missing from the map are filled with null placeholders.
   */
  private static <T> List<T> createResultList(final ConcurrentMap<Integer, Try<T>> results) {
    final List<T> list = new ArrayList<>(results.size());
    for (int i = 0; i < results.size(); i++) {
      final Try<T> tryValue = results.get(i);
      list.add(tryValue != null ? tryValue.getValue() : null);
    }
    return list;
  }

  /**
   * Triggers the lazy producer. When an executor is configured the production is
   * dispatched onto it; otherwise it runs on the calling thread.
   */
  @Override
  public void consume(final Consumer<T> consumer) {
    if (executor != null) {
      executor.execute(new Runnable() {
        @Override
        public void run() {
          producer.produce(consumer);
        }
      });
    } else {
      producer.produce(consumer);
    }
  }

  /**
   * Like {@link #consume(Consumer)} but blocks the calling thread until the
   * consumer has been invoked with a result.
   *
   * @throws InterruptedException if interrupted while waiting for the result
   */
  public void consumeSync(final Consumer<T> consumer) throws InterruptedException {
    final CountDownLatch latch = new CountDownLatch(1);
    this.consume(new Consumer<T>() {
      @Override
      public void consume(final Try<T> result) {
        consumer.consume(result);
        latch.countDown();
      }
    });
    latch.await();
  }

  /**
   * Maps a successful result through {@code handler}, producing a new lazy future.
   * Errors bypass the handler and are forwarded unchanged.
   */
  @Override
  public <U> ComposableFuture<U> continueOnSuccess(final SuccessHandler<? super T, ? extends U> handler) {
    final LazyComposableFuture<T> outer = this;
    return new LazyComposableFuture<>(new Producer<U>() {
      @Override
      public void produce(final Consumer<U> consumer) {
        outer.consume(new Consumer<T>() {
          @Override
          public void consume(final Try<T> result) {
            if (result.isSuccess()) {
              try {
                consumer.consume(Try.fromValue(handler.handle(result.getValue())));
              } catch (final ExecutionException e) {
                // Unwrap the real cause when available so callers see the original failure.
                final Throwable error = e.getCause() != null ? e.getCause() : e;
                consumer.consume(Try.<U>fromError(error));
              } catch (final Exception e) {
                consumer.consume(Try.<U>fromError(e));
              }
            } else {
              consumer.consume(Try.<U>fromError(result.getError()));
            }
          }
        });
      }
    });
  }

  /**
   * Chains a successful result into a handler that itself returns a future
   * (flat-map). A null future from the handler is treated as an immediate null value.
   */
  @Override
  public <U> ComposableFuture<U> continueOnSuccess(final FutureSuccessHandler<? super T, U> handler) {
    final LazyComposableFuture<T> outer = this;
    return new LazyComposableFuture<>(new Producer<U>() {
      @Override
      public void produce(final Consumer<U> consumer) {
        outer.consume(new Consumer<T>() {
          @Override
          public void consume(final Try<T> result) {
            if (result.isSuccess()) {
              try {
                final ComposableFuture<U> next = handler.handle(result.getValue());
                if (next == null) {
                  consumer.consume(Try.<U>fromValue(null));
                } else {
                  next.consume(new Consumer<U>() {
                    @Override
                    public void consume(final Try<U> nextResult) {
                      consumer.consume(nextResult);
                    }
                  });
                }
              } catch (final Exception e) {
                consumer.consume(Try.<U>fromError(e));
              }
            } else {
              consumer.consume(Try.<U>fromError(result.getError()));
            }
          }
        });
      }
    });
  }

  /**
   * Recovers from a failed result by mapping the error through {@code handler};
   * successes are forwarded unchanged.
   */
  @Override
  public ComposableFuture<T> continueOnError(final ErrorHandler<? extends T> handler) {
    final LazyComposableFuture<T> outer = this;
    return new LazyComposableFuture<>(new Producer<T>() {
      @Override
      public void produce(final Consumer<T> consumer) {
        outer.consume(new Consumer<T>() {
          @Override
          public void consume(final Try<T> result) {
            if (result.isSuccess()) {
              consumer.consume(result);
            } else {
              try {
                consumer.consume(Try.fromValue(handler.handle(result.getError())));
              } catch (final ExecutionException e) {
                // NOTE(review): unlike continueOnSuccess, this neither unwraps
                // e.getCause() nor catches other exceptions thrown by the handler,
                // so a RuntimeException would escape the pipeline — confirm intent.
                consumer.consume(Try.<T>fromError(e));
              }
            }
          }
        });
      }
    });
  }

  /**
   * Recovers from a failed result by chaining into a handler that returns a
   * future (flat-map on the error path). A null future becomes an immediate null value.
   */
  @Override
  public ComposableFuture<T> continueOnError(final FutureErrorHandler<T> handler) {
    final LazyComposableFuture<T> outer = this;
    return new LazyComposableFuture<>(new Producer<T>() {
      @Override
      public void produce(final Consumer<T> consumer) {
        outer.consume(new Consumer<T>() {
          @Override
          public void consume(final Try<T> result) {
            if (result.isSuccess()) {
              consumer.consume(result);
            } else {
              try {
                final ComposableFuture<T> next = handler.handle(result.getError());
                if (next == null) {
                  consumer.consume(Try.<T>fromValue(null));
                } else {
                  next.consume(new Consumer<T>() {
                    @Override
                    public void consume(final Try<T> nextResult) {
                      consumer.consume(nextResult);
                    }
                  });
                }
              } catch (final Exception e) {
                consumer.consume(Try.<T>fromError(e));
              }
            }
          }
        });
      }
    });
  }

  /**
   * Races this future against a scheduler-driven deadline; whichever produces a
   * result first wins (via collectFirst). The deadline yields a TimeoutException
   * tagged with {@code taskDescription}.
   */
  @Override
  public LazyComposableFuture<T> withTimeout(final Scheduler scheduler, final long timeout, final TimeUnit unit, final String taskDescription) {
    final LazyComposableFuture<T> deadline = new LazyComposableFuture<>(new Producer<T>() {
      @Override
      public void produce(final Consumer<T> consumer) {
        scheduler.schedule(new Runnable() {
          @Override
          public void run() {
            consumer.consume(Try.<T>fromError(new TimeoutException("Timeout occurred on task ('" + taskDescription + "' " + timeout + " " + unit + ")")));
          }
        }, timeout, unit);
      }
    });

    return collectFirst(Arrays.<ComposableFuture<T>>asList(this, deadline));
  }

  /** Timeout overload with a generic task description. */
  @Override
  public LazyComposableFuture<T> withTimeout(final Scheduler scheduler, final long timeout, final TimeUnit unit) {
    return withTimeout(scheduler, timeout, unit, "unspecified context");
  }

  /** Timeout overload using the globally configured scheduler. */
  @Override
  public LazyComposableFuture<T> withTimeout(final long timeout, final TimeUnit unit, final String taskDescription) {
    return withTimeout(ComposableFutures.getScheduler(), timeout, unit, taskDescription);
  }

  /** Timeout overload using the global scheduler and a generic description. */
  @Override
  public LazyComposableFuture<T> withTimeout(final long timeout, final TimeUnit unit) {
    return withTimeout(ComposableFutures.getScheduler(), timeout, unit);
  }

  /**
   * Maps the raw Try (success or error) through {@code handler}; the handler
   * sees both outcomes, unlike continueOnSuccess/continueOnError.
   */
  @Override
  public <R> ComposableFuture<R> continueWith(final ResultHandler<T, R> handler) {
    final LazyComposableFuture<T> outer = this;
    return new LazyComposableFuture<>(new Producer<R>() {
      @Override
      public void produce(final Consumer<R> consumer) {
        outer.consume(new Consumer<T>() {
          @Override
          public void consume(final Try<T> result) {
            try {
              consumer.consume(Try.fromValue(handler.handle(result)));
            } catch (final ExecutionException e) {
              // Unwrap the real cause when available, mirroring continueOnSuccess.
              final Throwable error = e.getCause() != null ? e.getCause() : e;
              consumer.consume(Try.<R>fromError(error));
            } catch (final Exception e) {
              consumer.consume(Try.<R>fromError(e));
            }
          }
        });
      }
    });
  }

  /**
   * Flat-map variant of {@link #continueWith(ResultHandler)}: the handler sees
   * the raw Try and returns a future. A null future becomes an immediate null value.
   */
  @Override
  public <R> ComposableFuture<R> continueWith(final FutureResultHandler<T, R> handler) {
    final LazyComposableFuture<T> outer = this;
    return new LazyComposableFuture<>(new Producer<R>() {
      @Override
      public void produce(final Consumer<R> consumer) {
        outer.consume(new Consumer<T>() {
          @Override
          public void consume(final Try<T> result) {
            try {
              final ComposableFuture<R> next = handler.handle(result);
              if (next == null) {
                consumer.consume(Try.<R>fromValue(null));
              } else {
                next.consume(new Consumer<R>() {
                  @Override
                  public void consume(final Try<R> nextResult) {
                    consumer.consume(nextResult);
                  }
                });
              }
            } catch (final Exception e) {
              consumer.consume(Try.<R>fromError(e));
            }
          }
        });
      }
    });
  }

  /** Functional-style map, implemented on top of continueOnSuccess. */
  @Override
  public <R> ComposableFuture<R> transform(final Function<? super T, ? extends R> function) {
    return continueOnSuccess(new SuccessHandler<T, R>() {
      @Override
      public R handle(final T result) {
        return function.apply(result);
      }
    });
  }

  /** Converts this lazy future into an eager one backed by the same producer. */
  @Override
  public ComposableFuture<T> materialize() {
    return ComposableFutures.buildEager(producer);
  }

  /**
   * Hedged-request helper: consumes this future once, and if no result has
   * arrived within the given delay, consumes it a second time; the first
   * result to arrive (from either attempt) wins.
   */
  public LazyComposableFuture<T> doubleDispatch(final Scheduler scheduler, final long timeout, final TimeUnit unit) {
    final LazyComposableFuture<T> outer = this;
    return new LazyComposableFuture<>(new Producer<T>() {
      @Override
      public void produce(final Consumer<T> consumer) {
        final AtomicBoolean done = new AtomicBoolean();
        outer.consume(new Consumer<T>() {
          @Override
          public void consume(final Try<T> firstRes) {
            if (done.compareAndSet(false, true)) {
              consumer.consume(firstRes);
            }
          }
        });

        scheduler.schedule(new Runnable() {
          @Override
          public void run() {
            if (!done.get()) {
              outer.consume(new Consumer<T>() {
                @Override
                public void consume(final Try<T> secondRes) {
                  if (done.compareAndSet(false, true)) {
                    consumer.consume(secondRes);
                  }
                }
              });
            }
          }
        }, timeout, unit);
      }
    });
  }

  /**
   * Blocks until the result is available and returns it, or rethrows the
   * failure wrapped in an ExecutionException.
   */
  @Override
  public T get() throws ExecutionException, InterruptedException {
    final CountDownLatch latch = new CountDownLatch(1);
    final AtomicReference<Try<T>> box = new AtomicReference<>();

    this.consume(new Consumer<T>() {
      @Override
      public void consume(final Try<T> result) {
        box.set(result);
        latch.countDown();
      }
    });

    latch.await();
    final Try<T> res = box.get();
    if (res == null) {
      // Defensive: the latch only opens after box.set, so this should be unreachable.
      throw new ExecutionException(new NullPointerException("no result error."));
    } else if (res.isSuccess()) {
      return res.getValue();
    } else {
      throw new ExecutionException(res.getError());
    }
  }

  /**
   * Bounded-wait variant of {@link #get()}.
   *
   * @throws TimeoutException if no result arrives within the given timeout
   */
  @Override
  public T get(final long timeout, final TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
    final CountDownLatch latch = new CountDownLatch(1);
    final AtomicReference<Try<T>> box = new AtomicReference<>();

    this.consume(new Consumer<T>() {
      @Override
      public void consume(final Try<T> result) {
        box.set(result);
        latch.countDown();
      }
    });

    if (latch.await(timeout, unit)) {
      final Try<T> res = box.get();
      if (res.isSuccess()) {
        return res.getValue();
      } else {
        throw new ExecutionException(res.getError());
      }
    } else {
      throw new TimeoutException("Timeout occurred while waiting for value (" + timeout + unit + ")");
    }
  }
}
/*
 * @(#)LinkedList.java	1.46 03/01/23
 *
 * Copyright 2003 Sun Microsystems, Inc. All rights reserved.
 * SUN PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */

package java.util;

/**
 * Linked list implementation of the <tt>List</tt> interface. Implements all
 * optional list operations, and permits all elements (including
 * <tt>null</tt>). Also provides uniformly named methods to <tt>get</tt>,
 * <tt>remove</tt> and <tt>insert</tt> an element at the beginning and end of
 * the list, allowing the list to be used as a stack, queue, or deque.
 *
 * <p>All operations perform as expected for a doubly-linked list; indexed
 * operations traverse from whichever end of the list is closer to the index.
 *
 * <p><b>This implementation is not synchronized.</b> If multiple threads
 * access a list concurrently and at least one modifies it structurally, it
 * must be synchronized externally, e.g. via
 * {@code Collections.synchronizedList(new LinkedList(...))} at creation time.
 *
 * <p>The iterators returned by <tt>iterator</tt> and <tt>listIterator</tt>
 * are <i>fail-fast</i>: structural modification after iterator creation, by
 * any means other than the iterator's own <tt>remove</tt>/<tt>add</tt>,
 * causes a <tt>ConcurrentModificationException</tt> on a best-effort basis.
 * Fail-fast behavior should be used only to detect bugs, never relied upon
 * for program correctness.
 *
 * @author  Josh Bloch
 * @version 1.46, 01/23/03
 * @see     List
 * @see     ArrayList
 * @see     Vector
 * @see     Collections#synchronizedList(List)
 * @since   1.2
 */
public class LinkedList extends AbstractSequentialList implements List {
    // Sentinel entry of a circular doubly-linked list: header.next is the
    // first real element, header.previous the last. An empty list has
    // header.next == header.previous == header.
    private transient Entry header = new Entry(null, null, null);
    private transient int size = 0;

    /**
     * Constructs an empty list.
     */
    public LinkedList() {
        header.next = header.previous = header;
    }

    /**
     * Constructs a list containing the elements of the specified collection,
     * in the order returned by the collection's iterator.
     *
     * @param c the collection whose elements are to be placed into this list.
     * @throws NullPointerException if the specified collection is null.
     */
    public LinkedList(Collection c) {
        this();
        addAll(c);
    }

    /**
     * Returns the first element in this list.
     *
     * @return the first element in this list.
     * @throws NoSuchElementException if this list is empty.
     */
    public Object getFirst() {
        if (size==0)
            throw new NoSuchElementException();

        return header.next.element;
    }

    /**
     * Returns the last element in this list.
     *
     * @return the last element in this list.
     * @throws NoSuchElementException if this list is empty.
     */
    public Object getLast()  {
        if (size==0)
            throw new NoSuchElementException();

        return header.previous.element;
    }

    /**
     * Removes and returns the first element from this list.
     *
     * @return the first element from this list.
     * @throws NoSuchElementException if this list is empty.
     */
    public Object removeFirst() {
        // When empty, header.next == header, so remove(header) below throws.
        Object first = header.next.element;
        remove(header.next);
        return first;
    }

    /**
     * Removes and returns the last element from this list.
     *
     * @return the last element from this list.
     * @throws NoSuchElementException if this list is empty.
     */
    public Object removeLast() {
        Object last = header.previous.element;
        remove(header.previous);
        return last;
    }

    /**
     * Inserts the given element at the beginning of this list.
     *
     * @param o the element to be inserted at the beginning of this list.
     */
    public void addFirst(Object o) {
        addBefore(o, header.next);
    }

    /**
     * Appends the given element to the end of this list. (Identical in
     * function to the <tt>add</tt> method; included only for consistency.)
     *
     * @param o the element to be inserted at the end of this list.
     */
    public void addLast(Object o) {
        addBefore(o, header);
    }

    /**
     * Returns <tt>true</tt> if this list contains the specified element,
     * i.e. at least one element <tt>e</tt> such that
     * <tt>(o==null ? e==null : o.equals(e))</tt>.
     *
     * @param o element whose presence in this list is to be tested.
     * @return <tt>true</tt> if this list contains the specified element.
     */
    public boolean contains(Object o) {
        return indexOf(o) != -1;
    }

    /**
     * Returns the number of elements in this list.
     *
     * @return the number of elements in this list.
     */
    public int size() {
        return size;
    }

    /**
     * Appends the specified element to the end of this list.
     *
     * @param o element to be appended to this list.
     * @return <tt>true</tt> (as per the general contract of
     * <tt>Collection.add</tt>).
     */
    public boolean add(Object o) {
        addBefore(o, header);
        return true;
    }

    /**
     * Removes the first occurrence of the specified element in this list,
     * if present; otherwise the list is unchanged.
     *
     * @param o element to be removed from this list, if present.
     * @return <tt>true</tt> if the list contained the specified element.
     */
    public boolean remove(Object o) {
        // Null needs a separate loop since o.equals(...) cannot be called on it.
        if (o==null) {
            for (Entry e = header.next; e != header; e = e.next) {
                if (e.element==null) {
                    remove(e);
                    return true;
                }
            }
        } else {
            for (Entry e = header.next; e != header; e = e.next) {
                if (o.equals(e.element)) {
                    remove(e);
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Appends all of the elements in the specified collection to the end of
     * this list, in iterator order. Behavior is undefined if the specified
     * collection is modified during the operation (including when the
     * collection is this nonempty list).
     *
     * @param c the elements to be inserted into this list.
     * @return <tt>true</tt> if this list changed as a result of the call.
     * @throws NullPointerException if the specified collection is null.
     */
    public boolean addAll(Collection c) {
        return addAll(size, c);
    }

    /**
     * Inserts all of the elements in the specified collection into this
     * list at the specified position, shifting subsequent elements right.
     *
     * @param index index at which to insert first element
     *              from the specified collection.
     * @param c elements to be inserted into this list.
     * @return <tt>true</tt> if this list changed as a result of the call.
     * @throws IndexOutOfBoundsException if the specified index is out of
     *            range (<tt>index &lt; 0 || index &gt; size()</tt>).
     * @throws NullPointerException if the specified collection is null.
     */
    public boolean addAll(int index, Collection c) {
        // Snapshot the collection so concurrent mutation of c cannot corrupt links.
        Object[] a = c.toArray();
        int numNew = a.length;
        if (numNew==0)
            return false;
        modCount++;

        Entry successor = (index==size ? header : entry(index));
        Entry predecessor = successor.previous;
        // Splice each new entry between predecessor and successor.
        for (int i=0; i<numNew; i++) {
            Entry e = new Entry(a[i], successor, predecessor);
            predecessor.next = e;
            predecessor = e;
        }
        successor.previous = predecessor;

        size += numNew;
        return true;
    }

    /**
     * Removes all of the elements from this list.
     */
    public void clear() {
        modCount++;
        // Dropping all links lets the old entries be garbage collected.
        header.next = header.previous = header;
        size = 0;
    }

    // Positional Access Operations

    /**
     * Returns the element at the specified position in this list.
     *
     * @param index index of element to return.
     * @return the element at the specified position in this list.
     *
     * @throws IndexOutOfBoundsException if the specified index is is out of
     * range (<tt>index &lt; 0 || index &gt;= size()</tt>).
     */
    public Object get(int index) {
        return entry(index).element;
    }

    /**
     * Replaces the element at the specified position in this list with the
     * specified element.
     *
     * @param index index of element to replace.
     * @param element element to be stored at the specified position.
     * @return the element previously at the specified position.
     * @throws IndexOutOfBoundsException if the specified index is out of
     *		  range (<tt>index &lt; 0 || index &gt;= size()</tt>).
     */
    public Object set(int index, Object element) {
        Entry e = entry(index);
        Object oldVal = e.element;
        e.element = element;
        return oldVal;
    }

    /**
     * Inserts the specified element at the specified position in this list,
     * shifting subsequent elements right.
     *
     * @param index index at which the specified element is to be inserted.
     * @param element element to be inserted.
     *
     * @throws IndexOutOfBoundsException if the specified index is out of
     *		  range (<tt>index &lt; 0 || index &gt; size()</tt>).
     */
    public void add(int index, Object element) {
        addBefore(element, (index==size ? header : entry(index)));
    }

    /**
     * Removes the element at the specified position in this list, shifting
     * subsequent elements left, and returns the removed element.
     *
     * @param index the index of the element to removed.
     * @return the element previously at the specified position.
     *
     * @throws IndexOutOfBoundsException if the specified index is out of
     * 		  range (<tt>index &lt; 0 || index &gt;= size()</tt>).
     */
    public Object remove(int index) {
        Entry e = entry(index);
        remove(e);
        return e.element;
    }

    /**
     * Return the indexed entry.
     */
    private Entry entry(int index) {
        if (index < 0 || index >= size)
            throw new IndexOutOfBoundsException("Index: "+index+
                                                ", Size: "+size);
        Entry e = header;
        // Walk from whichever end is closer to the requested index.
        if (index < (size >> 1)) {
            for (int i = 0; i <= index; i++)
                e = e.next;
        } else {
            for (int i = size; i > index; i--)
                e = e.previous;
        }
        return e;
    }

    // Search Operations

    /**
     * Returns the index in this list of the first occurrence of the
     * specified element, or -1 if the List does not contain this
     * element. More formally, returns the lowest index i such that
     * <tt>(o==null ? get(i)==null : o.equals(get(i)))</tt>, or -1 if
     * there is no such index.
     *
     * @param o element to search for.
     * @return the index in this list of the first occurrence of the
     * 	       specified element, or -1 if the list does not contain this
     * 	       element.
     */
    public int indexOf(Object o) {
        int index = 0;
        if (o==null) {
            for (Entry e = header.next; e != header; e = e.next) {
                if (e.element==null)
                    return index;
                index++;
            }
        } else {
            for (Entry e = header.next; e != header; e = e.next) {
                if (o.equals(e.element))
                    return index;
                index++;
            }
        }
        return -1;
    }

    /**
     * Returns the index in this list of the last occurrence of the
     * specified element, or -1 if the list does not contain this
     * element. More formally, returns the highest index i such that
     * <tt>(o==null ? get(i)==null : o.equals(get(i)))</tt>, or -1 if
     * there is no such index.
     *
     * @param o element to search for.
     * @return the index in this list of the last occurrence of the
     * 	       specified element, or -1 if the list does not contain this
     * 	       element.
     */
    public int lastIndexOf(Object o) {
        // Walk backwards from the tail so the highest matching index is found first.
        int index = size;
        if (o==null) {
            for (Entry e = header.previous; e != header; e = e.previous) {
                index--;
                if (e.element==null)
                    return index;
            }
        } else {
            for (Entry e = header.previous; e != header; e = e.previous) {
                index--;
                if (o.equals(e.element))
                    return index;
            }
        }
        return -1;
    }

    /**
     * Returns a list-iterator of the elements in this list (in proper
     * sequence), starting at the specified position in the list.
     * Obeys the general contract of <tt>List.listIterator(int)</tt>.<p>
     *
     * The list-iterator is <i>fail-fast</i>: structural modification by any
     * means other than the list-iterator's own <tt>remove</tt> or
     * <tt>add</tt> causes a <tt>ConcurrentModificationException</tt>.
     *
     * @param index index of first element to be returned from the
     *		    list-iterator (by a call to <tt>next</tt>).
     * @return a ListIterator of the elements in this list (in proper
     * 	       sequence), starting at the specified position in the list.
     * @throws    IndexOutOfBoundsException if index is out of range
     *		  (<tt>index &lt; 0 || index &gt; size()</tt>).
     * @see List#listIterator(int)
     */
    public ListIterator listIterator(int index) {
        return new ListItr(index);
    }

    private class ListItr implements ListIterator {
        // header doubles as the "no element to set/remove" sentinel for lastReturned.
        private Entry lastReturned = header;
        private Entry next;
        private int nextIndex;
        private int expectedModCount = modCount;

        ListItr(int index) {
            if (index < 0 || index > size)
                throw new IndexOutOfBoundsException("Index: "+index+
                                                    ", Size: "+size);
            // Position the cursor by walking from the nearer end.
            if (index < (size >> 1)) {
                next = header.next;
                for (nextIndex=0; nextIndex<index; nextIndex++)
                    next = next.next;
            } else {
                next = header;
                for (nextIndex=size; nextIndex>index; nextIndex--)
                    next = next.previous;
            }
        }

        public boolean hasNext() {
            return nextIndex != size;
        }

        public Object next() {
            checkForComodification();
            if (nextIndex == size)
                throw new NoSuchElementException();

            lastReturned = next;
            next = next.next;
            nextIndex++;
            return lastReturned.element;
        }

        public boolean hasPrevious() {
            return nextIndex != 0;
        }

        public Object previous() {
            if (nextIndex == 0)
                throw new NoSuchElementException();

            // NOTE: comodification is checked after moving, unlike next();
            // this ordering matches the original JDK source.
            lastReturned = next = next.previous;
            nextIndex--;
            checkForComodification();
            return lastReturned.element;
        }

        public int nextIndex() {
            return nextIndex;
        }

        public int previousIndex() {
            return nextIndex-1;
        }

        public void remove() {
            checkForComodification();
            try {
                LinkedList.this.remove(lastReturned);
            } catch (NoSuchElementException e) {
                // remove(header) means there was no prior next()/previous() call.
                throw new IllegalStateException();
            }
            // Keep the cursor consistent: if the removed entry was ahead of the
            // cursor (after a previous()), step past it; otherwise shift the index.
            if (next==lastReturned)
                next = lastReturned.next;
            else
                nextIndex--;
            lastReturned = header;
            expectedModCount++;
        }

        public void set(Object o) {
            if (lastReturned == header)
                throw new IllegalStateException();
            checkForComodification();
            lastReturned.element = o;
        }

        public void add(Object o) {
            checkForComodification();
            lastReturned = header;
            addBefore(o, next);
            nextIndex++;
            expectedModCount++;
        }

        final void checkForComodification() {
            if (modCount != expectedModCount)
                throw new ConcurrentModificationException();
        }
    }

    // Node of the doubly-linked list; holds the element and both neighbors.
    private static class Entry {
        Object element;
        Entry next;
        Entry previous;

        Entry(Object element, Entry next, Entry previous) {
            this.element = element;
            this.next = next;
            this.previous = previous;
        }
    }

    // Splices a new entry immediately before e and returns it.
    private Entry addBefore(Object o, Entry e) {
        Entry newEntry = new Entry(o, e, e.previous);
        newEntry.previous.next = newEntry;
        newEntry.next.previous = newEntry;
        size++;
        modCount++;
        return newEntry;
    }

    // Unlinks e from the list; the sentinel itself may never be removed.
    private void remove(Entry e) {
        if (e == header)
            throw new NoSuchElementException();

        e.previous.next = e.next;
        e.next.previous = e.previous;
        size--;
        modCount++;
    }

    /**
     * Returns an array containing all of the elements in this list
     * in the correct order.
     *
     * @return an array containing all of the elements in this list
     * 	       in the correct order.
     */
    public Object[] toArray() {
        Object[] result = new Object[size];
        int i = 0;
        for (Entry e = header.next; e != header; e = e.next)
            result[i++] = e.element;
        return result;
    }
}
package org.broadinstitute.hellbender.tools.exome.conversion.allelicbalancecaller;

import org.apache.commons.math3.util.Pair;
import org.apache.spark.api.java.JavaSparkContext;
import org.broadinstitute.hellbender.engine.spark.SparkContextFactory;
import org.broadinstitute.hellbender.tools.exome.ACNVModeledSegment;
import org.broadinstitute.hellbender.tools.exome.SegmentUtils;
import org.broadinstitute.hellbender.utils.test.SparkTestUtils;
import org.broadinstitute.hellbender.utils.test.BaseTest;
import org.broadinstitute.hellbender.utils.variant.HomoSapiensConstants;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import java.io.File;
import java.util.Arrays;
import java.util.List;

/**
 * Unit tests for {@code CNLOHCaller}: end-to-end call smoke test (on Spark) plus
 * data-provider-driven checks of the static MAF, copy-ratio and responsibility helpers.
 */
public class AllelicSplitCallerUnitTest extends BaseTest {

    private static final String TEST_DIR = "src/test/resources/org/broadinstitute/hellbender/tools/exome/conversion/allelicbalancecaller/";
    // ACNV modeled-segment fixture used by the call and responsibility tests.
    private static final File ACNV_SEG_FILE = new File(TEST_DIR, "cell_line-sim-final.seg");

    @Test
    public void testMakeCalls() {

        // This mostly just checks that the calling does not crash and does produce results.
        final CNLOHCaller cnlohCaller = new CNLOHCaller();
        final JavaSparkContext ctx = SparkContextFactory.getTestSparkContext();
        final List<ACNVModeledSegment> segs = SegmentUtils.readACNVModeledSegmentFile(ACNV_SEG_FILE);
        // Segments themselves must survive Kryo round-tripping for Spark usage.
        SparkTestUtils.roundTripInKryo(segs.get(0), ACNVModeledSegment.class, ctx.getConf());

        // Make sure the CNLOH Caller is serializable before making calls.
        SparkTestUtils.roundTripInKryo(cnlohCaller, CNLOHCaller.class, ctx.getConf());
        final List<AllelicCalls> calls = cnlohCaller.makeCalls(segs, 2, ctx);
        Assert.assertNotNull(calls);
        Assert.assertTrue(calls.size() > 0);
        Assert.assertTrue(calls.stream().allMatch(c -> c.getBalancedCall() != null));
        Assert.assertTrue(calls.stream().allMatch(c -> c.getCnlohCall() != null));
        Assert.assertTrue(calls.stream().allMatch(c -> c.getAcnvSegment() != null));

        // Make sure the CNLOH Caller is serializable after making calls.
        SparkTestUtils.roundTripInKryo(cnlohCaller, CNLOHCaller.class, ctx.getConf());
        SparkTestUtils.roundTripInKryo(calls.get(0), AllelicCalls.class, ctx.getConf());
    }

    /** calculateMaf must be symmetric in (m, n); verified by calling both orderings. */
    @Test(dataProvider = "mafValues")
    public void testCalculateMaf(double rho, int m, int n, double gt) {
        Assert.assertEquals(CNLOHCaller.calculateMaf(rho, m, n, HomoSapiensConstants.DEFAULT_PLOIDY), gt, 1e-4);
        Assert.assertEquals(CNLOHCaller.calculateMaf(rho, n, m, HomoSapiensConstants.DEFAULT_PLOIDY), gt, 1e-4);
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testCalculateMafNaN() {
        CNLOHCaller.calculateMaf(Double.NaN, 1, 1, HomoSapiensConstants.DEFAULT_PLOIDY);
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testCalculateMafInf() {
        CNLOHCaller.calculateMaf(Double.NEGATIVE_INFINITY, 1, 1, HomoSapiensConstants.DEFAULT_PLOIDY);
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testCalculateMafNegative1() {
        CNLOHCaller.calculateMaf(0.5, -5, 1, HomoSapiensConstants.DEFAULT_PLOIDY);
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testCalculateMafNegative2() {
        CNLOHCaller.calculateMaf(0.5, 2, -1, HomoSapiensConstants.DEFAULT_PLOIDY);
    }

    // Rows are: rho (CCF * purity), m, n, expected MAF.
    @DataProvider(name="mafValues")
    public Object[][] mafValues() {
        return new Object[][] {
                { 1.0, 0, 0, CNLOHCaller.MIN_L },
                { 0, 0, 5, 0.5},
                { .5, 0, 2, 0.25},
                { .5, 0, 1, 0.33333333333},
                { .5, 0, 5, 0.1429},
                { 1, 0, 5, CNLOHCaller.MIN_L},
                { .5, 0, 0, 0.5}
        };
    }

    /** calculateFmaf must also be symmetric in (m, n). */
    @Test(dataProvider = "fmafValues")
    public void testBasicFMaf(final double rho, final int m, final int n, final double credibleMode,
                              final double credibleLow, final double credibleHigh, final double gt) {
        final double guess = CNLOHCaller.calculateFmaf(rho, m, n, credibleMode, credibleLow, credibleHigh, HomoSapiensConstants.DEFAULT_PLOIDY);
        final double guessCNSwitched = CNLOHCaller.calculateFmaf(rho, n, m, credibleMode, credibleLow, credibleHigh, HomoSapiensConstants.DEFAULT_PLOIDY);
        Assert.assertEquals(guess, gt, 1e-9);
        Assert.assertEquals(guessCNSwitched, gt, 1e-9);
    }

    @DataProvider(name="fmafValues")
    public Object[][] fmafValues() {
        return new Object[][] {
                // rho, m, n, mode, low, high

                // truth = Minimum value
                { 1.0, 0, 0, 0.4, 0.39, 0.41, CNLOHCaller.MIN_L },

                // Matlab: 2.1846e-20
                { .5, 0, 0, 0.25, 0.2, 0.3, 2.1846e-20 },

                // Matlab: 81.1065489973630
                { .5, 0, 0, 0.499, 0.49, 0.4999, 81.1065489973630 },

                // Matlab: 0.00134286628603481
                { .3, 0, 2, 0.333, 0.33, 0.34, 0.00134286628603481}
        };
    }

    /**
     * calculateCopyRatio must be symmetric in (m, n) and inversely proportional
     * to lambda; both properties are checked at several lambda scalings.
     */
    @Test(dataProvider = "crValues")
    public void testCalculateCr(double rho, int m, int n, double lambda, double gt) {
        Assert.assertEquals(CNLOHCaller.calculateCopyRatio(rho, m, n, lambda, HomoSapiensConstants.DEFAULT_PLOIDY), gt, 1e-6);
        Assert.assertEquals(CNLOHCaller.calculateCopyRatio(rho, n, m, lambda, HomoSapiensConstants.DEFAULT_PLOIDY), gt, 1e-6);
        Assert.assertEquals(CNLOHCaller.calculateCopyRatio(rho, m, n, lambda*2, HomoSapiensConstants.DEFAULT_PLOIDY), gt/2, 1e-6);
        Assert.assertEquals(CNLOHCaller.calculateCopyRatio(rho, n, m, lambda/2, HomoSapiensConstants.DEFAULT_PLOIDY), gt*2, 1e-6);
        Assert.assertEquals(CNLOHCaller.calculateCopyRatio(rho, m, n, lambda/2, HomoSapiensConstants.DEFAULT_PLOIDY), gt*2, 1e-6);
        Assert.assertEquals(CNLOHCaller.calculateCopyRatio(rho, n, m, lambda*2, HomoSapiensConstants.DEFAULT_PLOIDY), gt/2, 1e-6);
        Assert.assertEquals(CNLOHCaller.calculateCopyRatio(rho, m, n, lambda*4, HomoSapiensConstants.DEFAULT_PLOIDY), gt/4, 1e-6);
        Assert.assertEquals(CNLOHCaller.calculateCopyRatio(rho, n, m, lambda/4, HomoSapiensConstants.DEFAULT_PLOIDY), gt*4, 1e-6);
        Assert.assertEquals(CNLOHCaller.calculateCopyRatio(rho, m, n, lambda/4, HomoSapiensConstants.DEFAULT_PLOIDY), gt*4, 1e-6);
        Assert.assertEquals(CNLOHCaller.calculateCopyRatio(rho, n, m, lambda*4, HomoSapiensConstants.DEFAULT_PLOIDY), gt/4, 1e-6);
    }

    // calculateCopyRatio(final double rho, final int m, final int n, final double lambda), ground truth
    @DataProvider(name="crValues")
    public Object[][] crValues() {
        return new Object[][] {
                { 1.0, 0, 0, 2, 0},
                { 0, 0, 5, 2, 1}, // rho == 0 --> CR = 1
                { .5, 0, 2, 2, 1},
                { .5, 0, 1, 2, 0.75},
                { .5, 0, 5, 2, 1.75},
                { 1, 0, 5, 2, 2.5},
                { 1, 0, 0, 2, 0}, //hom del at CCF & purity of 1
                { 0.5, 0, 0, 2, 0.5}, //hom del at CCF * purity of 0.5
        };
    }

    @Test(dataProvider = "3dArray")
    public void testSumOnlyFirstDimension(double[][][] array3d, double[][] gt) {
        Assert.assertTrue(Arrays.deepEquals(CNLOHCaller.sumOverFirstDimension(array3d), gt));
    }

    @DataProvider(name="3dArray")
    public Object[][] threeDArrayValues() {
        return new Object[][]{
                { new double[][][]{{{100, 200, 300}, {10, 20, 30}}, {{400, 500, 600}, {40, 50, 60}}},
                        new double[][] {{500, 700, 900}, {50, 70, 90}} }
        };
    }

    @Test(dataProvider = "2dArray")
    public void testMax2DIndex(double [][] array, Pair<Integer, Integer> gt) {
        Assert.assertEquals(CNLOHCaller.max2dIndices(array), gt);
    }

    @DataProvider(name="2dArray")
    public Object[][] twoDArrayValues() {
        return new Object[][]{
                { new double[][] {{500, 700, 900}, {50, 70, 90}}, new Pair<>(0,2) }
        };
    }

    @Test(dataProvider = "crMafSegDists")
    public void testCalcE_zsk_vsm_wsn(final double mafMode, final double mafLo, final double mafHi,
                                      final double crMode, final double crLow, final double crHigh,
                                      final double lambda, final double gt) {
        final List<ACNVModeledSegment> segments = SegmentUtils.readACNVModeledSegmentFile(ACNV_SEG_FILE);
        final AllelicBalanceCallerModelState state = AllelicBalanceCallerModelState.createInitialCNLOHCallerModelState(0.2, segments, HomoSapiensConstants.DEFAULT_PLOIDY, CNLOHCaller.NUM_RHOS);

        final CNLOHCaller cnlohCaller = new CNLOHCaller();
        final double[][][] responsibilities = cnlohCaller.calculateResponsibilities(state.getEffectivePhis(), state.getEffectivePis(), state.getRhos(),
                mafMode, mafLo, mafHi, crMode, crLow, crHigh, lambda, state.getmVals(), state.getnVals());

        // Will be slightly less than 1.0, but should be pretty close. rho == 0, M == N == 1
        Assert.assertEquals(responsibilities[0][1][1], gt, 1e-4);
    }

    @DataProvider(name="crMafSegDists")
    public Object[][] crMafSegDists() {
        return new Object[][]{
                // maf mode, maf lo, maf hi, cr mode, cr low, cr high, lambda, gt
                { 0.499, 0.48, 0.4999, 1.0, 0.9, 1.1, 2.0, 1.0 },
                { 0.1, 0.09, 0.014, 1.0, 0.9, 1.1, 2.0, 0.0 }
        };
    }

    @Test(dataProvider = "doubleGaussian")
    public void testDoubleGaussian(final double val, final double low, final double mode, final double high, final double gt) {
        Assert.assertEquals(CNLOHCaller.calculateDoubleGaussian(val, mode, low, high), gt, 1e-10);
    }

    // Ground-truth densities of the asymmetric ("double") Gaussian; see Matlab values.
    @DataProvider(name="doubleGaussian")
    public Object[][] doubleGaussian() {
        return new Object[][] {
                // val, mode, low, high, gt
                {0, .3, .5, .6, 2.39018153097550e-05},
                {.1, .3, .5, .6, 0.00180038248042408},
                {.2, .3, .5, .6, 0.0519041698800480},
                {.3, .3, .5, .6, 0.572721254467825},
                {.4, .3, .5, .6, 2.41873300755702},
                {.5, .3, .5, .6, 7.81926869586808},
                {.6, .3, .5, .6, 1.14544250893565},
                {.7, .3, .5, .6, 0.00360076496084816},
                {.8, .3, .5, .6, 2.42901708557338e-07},
                {.9, .3, .5, .6, 3.51625759909017e-13},
                {1.0, .3, .5, .6, 1.09230800445324e-20},
        };
    }
}
package com.xianle.traffic_sh;

import java.io.File;
import java.io.InputStream;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Hashtable;
import java.util.List;

import android.app.AlertDialog;
import android.app.ListActivity;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;

/**
 * Entry activity of the app: unpacks the bundled bus-line data files to the SD card
 * (or downloads them), then lets the user browse the resulting directory tree and
 * open a single line file in the {@link Traffic} activity.
 */
public class MainActivity extends ListActivity {

    /** Rows currently shown in the list (files and sub-directories of {@link #currentDirectory}). */
    private ArrayList<RowModel> directoryEntries = new ArrayList<RowModel>();

    /** Status view handed to the downloader so it can report progress. */
    TextView tv;

    private DataDownloader downloader = null;

    /** Directory whose contents are currently listed; null until {@link #getFileList()} runs. */
    private File currentDirectory;

    private ProgressDialog mDialog;

    // Because the zip lib doesn't support Chinese file names, the data files are
    // stored under English names; this table maps them to localized display names.
    final Hashtable<String, String> mFileName = new Hashtable<String, String>();

    /** Populates {@link #mFileName} with the English-to-localized-name mapping. */
    private void initFileNameMap() {
        mFileName.put("shanghai", getResources().getString(R.string.shanghai));
        mFileName.put("beijing", getResources().getString(R.string.beijing));
        mFileName.put("guangzhou", getResources().getString(R.string.guangzhou));
        mFileName.put("shenzhen", getResources().getString(R.string.shenzhen));
        mFileName.put("chengdu", getResources().getString(R.string.chengdu));
        mFileName.put("fuzhou", getResources().getString(R.string.fuzhou));
        mFileName.put("hefei", getResources().getString(R.string.hefei));
        mFileName.put("wuhan", getResources().getString(R.string.wuhan));
        mFileName.put("zhixiashi", getResources().getString(R.string.zhixiashi));
    }

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        initFileNameMap();
        unzip();
    }

    // FIX: these were previously named OnPause()/OnResume() (capital 'O'), so they
    // never overrode the Activity lifecycle callbacks and were never invoked.
    @Override
    protected void onPause() {
        super.onPause();
        if (downloader != null) {
            synchronized (downloader) {
                // Detach the status view while in the background.
                downloader.setStatusField(null);
            }
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        if (downloader != null) {
            synchronized (downloader) {
                downloader.setStatusField(tv);
            }
        }
    }

    /**
     * BACK navigates one directory up, or finishes the activity at the data root
     * ("busline"). All other keys are delegated to the framework (previously every
     * key was swallowed by returning {@code true} unconditionally).
     */
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        if (keyCode == KeyEvent.KEYCODE_BACK) {
            // currentDirectory is null until the data has been unpacked and listed.
            if (currentDirectory == null || currentDirectory.getName().equals("busline")) {
                finish();
            } else {
                browseTo(currentDirectory.getParentFile(), 0);
            }
            return true;
        }
        return super.onKeyDown(keyCode, event);
    }

    /**
     * Moves the data files bundled in the assets to /sdcard/busline (or downloads
     * them) and shows an indeterminate progress dialog while that happens. The
     * downloader calls back into {@link #getFileList()} when it is done.
     */
    private void unzip() {
        Log.v(Globals.TAG, "start unzip file or download file");
        tv = new TextView(this);

        // The downloader must be created on the UI thread because it drives the UI.
        class CallBack implements Runnable {
            public MainActivity mParent;

            public void run() {
                if (mParent.downloader == null) {
                    mParent.downloader = new DataDownloader(mParent, tv);
                }
            }
        }
        CallBack cb = new CallBack();
        cb.mParent = this;
        runOnUiThread(cb);

        mDialog = CreateDialog();
        mDialog.show();
    }

    /** Builds the modal, indeterminate "please wait" dialog shown while unpacking. */
    protected ProgressDialog CreateDialog() {
        ProgressDialog dialog = new ProgressDialog(this);
        dialog.setMessage(getResources().getString(R.string.pregress_diag));
        dialog.setIndeterminate(true);
        dialog.setCancelable(false);
        return dialog;
    }

    /**
     * Navigates to the given entry. A directory is listed in place; for a data file
     * a confirmation dialog is shown and, on OK, the {@link Traffic} viewer is
     * started with the file path and localized title as extras.
     *
     * @param aDirectory directory to list, or data file to open
     * @param id         list position of the clicked row (used to look up the title)
     */
    private void browseTo(final File aDirectory, final long id) {
        if (aDirectory.isDirectory()) {
            currentDirectory = aDirectory;
            fill(aDirectory.listFiles());
            return;
        }

        DialogInterface.OnClickListener okButtonListener = new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface arg0, int arg1) {
                try {
                    Intent in = new Intent(MainActivity.this, Traffic.class);
                    in.putExtra(Globals.FILENAME, aDirectory.getPath());
                    in.putExtra(Globals.Title, directoryEntries.get((int) id).mChineseName);
                    MainActivity.this.startActivity(in);
                } catch (Exception e) {
                    // Could not start the viewer: tell the user instead of failing silently.
                    Context context = getApplicationContext();
                    CharSequence text = getResources().getString(R.string.diag_err);
                    Toast.makeText(context, text, Toast.LENGTH_SHORT).show();
                }
            }
        };
        DialogInterface.OnClickListener cancelButtonListener = new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int which) {
                dialog.dismiss();
            }
        };
        AlertDialog ad = new AlertDialog.Builder(this)
                .setMessage(R.string.diag_msg)
                .setPositiveButton(android.R.string.ok, okButtonListener)
                .setNegativeButton(android.R.string.cancel, cancelButtonListener)
                .create();
        ad.show();
    }

    /**
     * Rebuilds the list adapter from the given directory contents. Only ".txt" data
     * files (shown without their extension) and sub-directories are listed, sorted
     * by their Chinese display name.
     *
     * @param files contents of the current directory; may be null if listing failed
     */
    private void fill(File[] files) {
        directoryEntries.clear();
        if (files != null) { // File.listFiles() returns null on I/O error
            for (File file : files) {
                final boolean isDir = file.isDirectory();
                if (!isDir && !file.getName().endsWith(".txt")) {
                    continue;
                }
                final String name = isDir
                        ? file.getName()
                        : file.getName().substring(0, file.getName().lastIndexOf('.'));
                directoryEntries.add(new RowModel(isDir ? 1 : 0, name));
            }
        }
        Collections.sort(directoryEntries, new ChinsesCharComp());
        setListAdapter(new IconAdapter(directoryEntries));
    }

    @Override
    protected void onListItemClick(ListView l, View v, int position, long id) {
        RowModel row = directoryEntries.get(position);
        if (row.mRowtype == 1) {
            Log.v(Globals.TAG, "is a directory");
            browseTo(new File(currentDirectory.getAbsolutePath()
                    + File.separator + row.mLabel), id);
            return;
        }
        // Files are displayed without their ".txt" extension; re-append it here.
        File clickedFile = new File(currentDirectory.getAbsolutePath()
                + File.separator + row.mLabel + ".txt");
        try {
            if (clickedFile.isFile()) {
                browseTo(clickedFile, id);
            }
        } catch (Exception e) {
            // Best effort: a stale or unreadable entry must not crash the browser.
        }
    }

    /** List adapter rendering one icon + localized label per row. */
    class IconAdapter extends ArrayAdapter<RowModel> {

        IconAdapter(List<RowModel> _items) {
            super(MainActivity.this, R.layout.row, _items);
        }

        public View getView(int position, View convertView, ViewGroup parent) {
            View row = convertView;
            if (row == null) {
                LayoutInflater inflater = getLayoutInflater();
                row = inflater.inflate(R.layout.row, parent, false);
            }
            TextView tv = (TextView) row.findViewById(R.id.label);
            tv.setText(directoryEntries.get(position).mChineseName);
            ImageView iv = (ImageView) row.findViewById(R.id.icon);
            if (directoryEntries.get(position).mRowtype == 0) {
                iv.setImageResource(R.drawable.alert_dialog_icon);
            } else if (directoryEntries.get(position).mRowtype == 1) {
                iv.setImageResource(R.drawable.icon);
            }
            return row;
        }
    }

    /** Orders rows by their Chinese display name using a Chinese-locale collator. */
    class ChinsesCharComp implements Comparator<RowModel> {

        public int compare(RowModel o1, RowModel o2) {
            Collator myCollator = Collator.getInstance(java.util.Locale.CHINA);
            // Comparator contract only requires the sign, so the collator result
            // can be returned directly (previously compared twice).
            return myCollator.compare(o1.mChineseName, o2.mChineseName);
        }
    }

    /** Callback invoked by the downloader once the data is ready: show the root listing. */
    public void getFileList() {
        mDialog.dismiss();
        browseTo(new File(Globals.DataDir), 0);
    }

    /** One row of the browser list. */
    class RowModel {
        int mRowtype; // 0: file, 1: directory
        String mLabel; // on-disk (English) name, without extension for files
        String mChineseName; // localized display name; falls back to mLabel

        RowModel(int type, String label) {
            mRowtype = type;
            mChineseName = mLabel = label;
            String temp = mFileName.get(label);
            if (temp != null) {
                mChineseName = temp;
            }
        }

        public String toString() {
            return mChineseName;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.connectors.elasticsearch7; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.serialization.SerializationSchema; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.configuration.MemorySize; import org.apache.flink.streaming.api.functions.sink.SinkFunction; import org.apache.flink.streaming.connectors.elasticsearch.ActionRequestFailureHandler; import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkBase; import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchUpsertTableSinkBase; import org.apache.flink.table.api.TableSchema; import org.apache.flink.types.Row; import org.apache.http.HttpHost; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.common.xcontent.XContentType; import javax.annotation.Nullable; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; import 
static org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchUpsertTableSinkBase.SinkOption.BULK_FLUSH_BACKOFF_DELAY; import static org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchUpsertTableSinkBase.SinkOption.BULK_FLUSH_BACKOFF_ENABLED; import static org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchUpsertTableSinkBase.SinkOption.BULK_FLUSH_BACKOFF_RETRIES; import static org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchUpsertTableSinkBase.SinkOption.BULK_FLUSH_BACKOFF_TYPE; import static org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchUpsertTableSinkBase.SinkOption.BULK_FLUSH_INTERVAL; import static org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchUpsertTableSinkBase.SinkOption.BULK_FLUSH_MAX_ACTIONS; import static org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchUpsertTableSinkBase.SinkOption.BULK_FLUSH_MAX_SIZE; import static org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchUpsertTableSinkBase.SinkOption.DISABLE_FLUSH_ON_CHECKPOINT; import static org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchUpsertTableSinkBase.SinkOption.REST_PATH_PREFIX; /** Version-specific upsert table sink for Elasticsearch 7. 
*/ @Internal public class Elasticsearch7UpsertTableSink extends ElasticsearchUpsertTableSinkBase { @VisibleForTesting static final RequestFactory UPDATE_REQUEST_FACTORY = new Elasticsearch7RequestFactory(); public Elasticsearch7UpsertTableSink( boolean isAppendOnly, TableSchema schema, List<Host> hosts, String index, String keyDelimiter, String keyNullLiteral, SerializationSchema<Row> serializationSchema, XContentType contentType, ActionRequestFailureHandler failureHandler, Map<SinkOption, String> sinkOptions) { super( isAppendOnly, schema, hosts, index, "", keyDelimiter, keyNullLiteral, serializationSchema, contentType, failureHandler, sinkOptions, UPDATE_REQUEST_FACTORY); } @VisibleForTesting Elasticsearch7UpsertTableSink( boolean isAppendOnly, TableSchema schema, List<Host> hosts, String index, String docType, String keyDelimiter, String keyNullLiteral, SerializationSchema<Row> serializationSchema, XContentType contentType, ActionRequestFailureHandler failureHandler, Map<SinkOption, String> sinkOptions) { super( isAppendOnly, schema, hosts, index, docType, keyDelimiter, keyNullLiteral, serializationSchema, contentType, failureHandler, sinkOptions, UPDATE_REQUEST_FACTORY); } @Override protected ElasticsearchUpsertTableSinkBase copy( boolean isAppendOnly, TableSchema schema, List<Host> hosts, String index, String docType, String keyDelimiter, String keyNullLiteral, SerializationSchema<Row> serializationSchema, XContentType contentType, ActionRequestFailureHandler failureHandler, Map<SinkOption, String> sinkOptions, RequestFactory requestFactory) { return new Elasticsearch7UpsertTableSink( isAppendOnly, schema, hosts, index, keyDelimiter, keyNullLiteral, serializationSchema, contentType, failureHandler, sinkOptions); } @Override protected SinkFunction<Tuple2<Boolean, Row>> createSinkFunction( List<Host> hosts, ActionRequestFailureHandler failureHandler, Map<SinkOption, String> sinkOptions, ElasticsearchUpsertSinkFunction upsertSinkFunction) { final List<HttpHost> 
httpHosts = hosts.stream() .map((host) -> new HttpHost(host.hostname, host.port, host.protocol)) .collect(Collectors.toList()); final ElasticsearchSink.Builder<Tuple2<Boolean, Row>> builder = createBuilder(upsertSinkFunction, httpHosts); builder.setFailureHandler(failureHandler); Optional.ofNullable(sinkOptions.get(BULK_FLUSH_MAX_ACTIONS)) .ifPresent(v -> builder.setBulkFlushMaxActions(Integer.valueOf(v))); Optional.ofNullable(sinkOptions.get(BULK_FLUSH_MAX_SIZE)) .ifPresent(v -> builder.setBulkFlushMaxSizeMb(MemorySize.parse(v).getMebiBytes())); Optional.ofNullable(sinkOptions.get(BULK_FLUSH_INTERVAL)) .ifPresent(v -> builder.setBulkFlushInterval(Long.valueOf(v))); Optional.ofNullable(sinkOptions.get(BULK_FLUSH_BACKOFF_ENABLED)) .ifPresent(v -> builder.setBulkFlushBackoff(Boolean.valueOf(v))); Optional.ofNullable(sinkOptions.get(BULK_FLUSH_BACKOFF_TYPE)) .ifPresent( v -> builder.setBulkFlushBackoffType( ElasticsearchSinkBase.FlushBackoffType.valueOf(v))); Optional.ofNullable(sinkOptions.get(BULK_FLUSH_BACKOFF_RETRIES)) .ifPresent(v -> builder.setBulkFlushBackoffRetries(Integer.valueOf(v))); Optional.ofNullable(sinkOptions.get(BULK_FLUSH_BACKOFF_DELAY)) .ifPresent(v -> builder.setBulkFlushBackoffDelay(Long.valueOf(v))); builder.setRestClientFactory( new DefaultRestClientFactory(sinkOptions.get(REST_PATH_PREFIX))); final ElasticsearchSink<Tuple2<Boolean, Row>> sink = builder.build(); Optional.ofNullable(sinkOptions.get(DISABLE_FLUSH_ON_CHECKPOINT)) .ifPresent( v -> { if (Boolean.valueOf(v)) { sink.disableFlushOnCheckpoint(); } }); return sink; } @VisibleForTesting ElasticsearchSink.Builder<Tuple2<Boolean, Row>> createBuilder( ElasticsearchUpsertSinkFunction upsertSinkFunction, List<HttpHost> httpHosts) { return new ElasticsearchSink.Builder<>(httpHosts, upsertSinkFunction); } // -------------------------------------------------------------------------------------------- // Helper classes // 
-------------------------------------------------------------------------------------------- /** Serializable {@link RestClientFactory} used by the sink. */ @VisibleForTesting static class DefaultRestClientFactory implements RestClientFactory { private String pathPrefix; public DefaultRestClientFactory(@Nullable String pathPrefix) { this.pathPrefix = pathPrefix; } @Override public void configureRestClientBuilder(RestClientBuilder restClientBuilder) { if (pathPrefix != null) { restClientBuilder.setPathPrefix(pathPrefix); } } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } DefaultRestClientFactory that = (DefaultRestClientFactory) o; return Objects.equals(pathPrefix, that.pathPrefix); } @Override public int hashCode() { return Objects.hash(pathPrefix); } } /** * Version-specific creation of {@link org.elasticsearch.action.ActionRequest}s used by the * sink. */ private static class Elasticsearch7RequestFactory implements RequestFactory { @Override public UpdateRequest createUpdateRequest( String index, String docType, String key, XContentType contentType, byte[] document) { return new UpdateRequest(index, key) .doc(document, contentType) .upsert(document, contentType); } @Override public IndexRequest createIndexRequest( String index, String docType, XContentType contentType, byte[] document) { return new IndexRequest(index).source(document, contentType); } @Override public DeleteRequest createDeleteRequest(String index, String docType, String key) { return new DeleteRequest(index, key); } } }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v10/enums/operating_system_version_operator_type.proto package com.google.ads.googleads.v10.enums; /** * <pre> * Container for enum describing the type of OS operators. * </pre> * * Protobuf type {@code google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum} */ public final class OperatingSystemVersionOperatorTypeEnum extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum) OperatingSystemVersionOperatorTypeEnumOrBuilder { private static final long serialVersionUID = 0L; // Use OperatingSystemVersionOperatorTypeEnum.newBuilder() to construct. private OperatingSystemVersionOperatorTypeEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private OperatingSystemVersionOperatorTypeEnum() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new OperatingSystemVersionOperatorTypeEnum(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private OperatingSystemVersionOperatorTypeEnum( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch 
(java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeProto.internal_static_google_ads_googleads_v10_enums_OperatingSystemVersionOperatorTypeEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeProto.internal_static_google_ads_googleads_v10_enums_OperatingSystemVersionOperatorTypeEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum.class, com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum.Builder.class); } /** * <pre> * The type of operating system version. * </pre> * * Protobuf enum {@code google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum.OperatingSystemVersionOperatorType} */ public enum OperatingSystemVersionOperatorType implements com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Not specified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ UNSPECIFIED(0), /** * <pre> * Used for return value only. Represents value unknown in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ UNKNOWN(1), /** * <pre> * Equals to the specified version. * </pre> * * <code>EQUALS_TO = 2;</code> */ EQUALS_TO(2), /** * <pre> * Greater than or equals to the specified version. * </pre> * * <code>GREATER_THAN_EQUALS_TO = 4;</code> */ GREATER_THAN_EQUALS_TO(4), UNRECOGNIZED(-1), ; /** * <pre> * Not specified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ public static final int UNSPECIFIED_VALUE = 0; /** * <pre> * Used for return value only. Represents value unknown in this version. 
* </pre> * * <code>UNKNOWN = 1;</code> */ public static final int UNKNOWN_VALUE = 1; /** * <pre> * Equals to the specified version. * </pre> * * <code>EQUALS_TO = 2;</code> */ public static final int EQUALS_TO_VALUE = 2; /** * <pre> * Greater than or equals to the specified version. * </pre> * * <code>GREATER_THAN_EQUALS_TO = 4;</code> */ public static final int GREATER_THAN_EQUALS_TO_VALUE = 4; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static OperatingSystemVersionOperatorType valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static OperatingSystemVersionOperatorType forNumber(int value) { switch (value) { case 0: return UNSPECIFIED; case 1: return UNKNOWN; case 2: return EQUALS_TO; case 4: return GREATER_THAN_EQUALS_TO; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<OperatingSystemVersionOperatorType> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap< OperatingSystemVersionOperatorType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<OperatingSystemVersionOperatorType>() { public OperatingSystemVersionOperatorType findValueByNumber(int number) { return OperatingSystemVersionOperatorType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum.getDescriptor().getEnumTypes().get(0); } private static final OperatingSystemVersionOperatorType[] VALUES = values(); public static OperatingSystemVersionOperatorType valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private OperatingSystemVersionOperatorType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum.OperatingSystemVersionOperatorType) } private byte 
memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum)) { return super.equals(obj); } com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum other = (com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Container for enum describing the type of OS operators. 
* </pre> * * Protobuf type {@code google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum) com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnumOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeProto.internal_static_google_ads_googleads_v10_enums_OperatingSystemVersionOperatorTypeEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeProto.internal_static_google_ads_googleads_v10_enums_OperatingSystemVersionOperatorTypeEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum.class, com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum.Builder.class); } // Construct using com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeProto.internal_static_google_ads_googleads_v10_enums_OperatingSystemVersionOperatorTypeEnum_descriptor; } @java.lang.Override public 
com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum getDefaultInstanceForType() { return com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum build() { com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum buildPartial() { com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum result = new com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum) { return 
mergeFrom((com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum other) { if (other == com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum) private static final com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum(); } public static 
com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<OperatingSystemVersionOperatorTypeEnum> PARSER = new com.google.protobuf.AbstractParser<OperatingSystemVersionOperatorTypeEnum>() { @java.lang.Override public OperatingSystemVersionOperatorTypeEnum parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new OperatingSystemVersionOperatorTypeEnum(input, extensionRegistry); } }; public static com.google.protobuf.Parser<OperatingSystemVersionOperatorTypeEnum> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<OperatingSystemVersionOperatorTypeEnum> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v10.enums.OperatingSystemVersionOperatorTypeEnum getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.tinkerpop.gremlin.neo4j.structure.trait;

import org.apache.tinkerpop.gremlin.neo4j.process.traversal.LabelP;
import org.apache.tinkerpop.gremlin.neo4j.structure.Neo4jGraph;
import org.apache.tinkerpop.gremlin.neo4j.structure.Neo4jHelper;
import org.apache.tinkerpop.gremlin.neo4j.structure.Neo4jProperty;
import org.apache.tinkerpop.gremlin.neo4j.structure.Neo4jVertex;
import org.apache.tinkerpop.gremlin.neo4j.structure.Neo4jVertexProperty;
import org.apache.tinkerpop.gremlin.process.traversal.Compare;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.HasContainer;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.structure.Property;
import org.apache.tinkerpop.gremlin.structure.T;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.apache.tinkerpop.gremlin.structure.VertexProperty;
import org.apache.tinkerpop.gremlin.structure.util.ElementHelper;
import org.apache.tinkerpop.gremlin.structure.util.wrapped.WrappedGraph;
import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils;
import org.neo4j.tinkerpop.api.Neo4jDirection;
import org.neo4j.tinkerpop.api.Neo4jGraphAPI;
import org.neo4j.tinkerpop.api.Neo4jNode;
import org.neo4j.tinkerpop.api.Neo4jRelationship;

import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.function.Predicate;
import java.util.stream.Stream;

/**
 * A {@code Neo4jTrait} that supports TinkerPop multi-properties and meta-properties on top of
 * plain Neo4j nodes. When a vertex key holds more than a single plain value, the values are
 * stored on separate nodes labeled {@link #VERTEX_PROPERTY_LABEL}, linked to the vertex via a
 * hidden relationship type, and the vertex node itself carries the sentinel
 * {@link #VERTEX_PROPERTY_TOKEN} under that key (see {@code setVertexProperty}).
 *
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 */
public final class MultiMetaNeo4jTrait implements Neo4jTrait {

    // Stateless singleton; obtain via instance().
    private static final MultiMetaNeo4jTrait INSTANCE = new MultiMetaNeo4jTrait();

    // Node label marking a node that represents a vertex property (not a real vertex).
    public static final String VERTEX_PROPERTY_LABEL = "vertexProperty";
    // Sentinel value stored on the vertex node under a key whose actual values live in
    // separate vertex-property nodes.
    public static final String VERTEX_PROPERTY_TOKEN = Graph.Hidden.hide("vertexProperty");

    // Vertex iteration must not surface the auxiliary vertex-property nodes.
    private static final Predicate<Neo4jNode> NODE_PREDICATE = node -> !node.hasLabel(VERTEX_PROPERTY_LABEL);
    // Edge iteration must not surface the hidden vertex-to-property relationships.
    private static final Predicate<Neo4jRelationship> RELATIONSHIP_PREDICATE = relationship -> !Graph.Hidden.isHidden(relationship.type());

    private MultiMetaNeo4jTrait() {
    }

    /**
     * Returns the shared singleton instance of this trait.
     */
    public static MultiMetaNeo4jTrait instance() {
        return INSTANCE;
    }

    @Override
    public Predicate<Neo4jNode> getNodePredicate() {
        return NODE_PREDICATE;
    }

    @Override
    public Predicate<Neo4jRelationship> getRelationshipPredicate() {
        return RELATIONSHIP_PREDICATE;
    }

    /**
     * Removes the vertex's node along with all incident relationships and any attached
     * vertex-property nodes. Not-found errors after commit are tolerated silently.
     */
    @Override
    public void removeVertex(final Neo4jVertex vertex) {
        try {
            final Neo4jNode node = vertex.getBaseVertex();
            for (final Neo4jRelationship relationship : node.relationships(Neo4jDirection.BOTH)) {
                final Neo4jNode otherNode = relationship.other(node);
                if (otherNode.hasLabel(VERTEX_PROPERTY_LABEL)) {
                    otherNode.delete(); // meta property node
                }
                relationship.delete();
            }
            node.delete();
        } catch (final IllegalStateException ignored) {
            // this one happens if the vertex is still chilling in the tx
        } catch (final RuntimeException ex) {
            if (!Neo4jHelper.isNotFound(ex)) throw ex;
            // this one happens if the vertex is committed
        }
    }

    /**
     * Returns the single vertex property for {@code key}, dereferencing the sentinel to the
     * backing vertex-property node when present.
     *
     * @throws IllegalStateException (via {@link Vertex.Exceptions}) if more than one value
     *         exists for the key — use {@code getVertexProperties} for multi-valued keys
     */
    @Override
    public <V> VertexProperty<V> getVertexProperty(final Neo4jVertex vertex, final String key) {
        final Neo4jNode node = vertex.getBaseVertex();
        if (node.hasProperty(key)) {
            if (node.getProperty(key).equals(VERTEX_PROPERTY_TOKEN)) {
                // values live on vertex-property nodes reached through the hidden relationship
                if (node.degree(Neo4jDirection.OUTGOING, Graph.Hidden.hide(key)) > 1)
                    throw Vertex.Exceptions.multiplePropertiesExistForProvidedKey(key);
                else {
                    return (VertexProperty<V>) new Neo4jVertexProperty<>(vertex, node.relationships(Neo4jDirection.OUTGOING, Graph.Hidden.hide(key)).iterator().next().end());
                }
            } else {
                // plain single-valued property stored directly on the vertex node
                return new Neo4jVertexProperty<>(vertex, key, (V) node.getProperty(key));
            }
        } else
            return VertexProperty.<V>empty();
    }

    /**
     * Streams all vertex properties matching {@code keys} (all keys when empty), expanding
     * sentinel-marked keys into one property per attached vertex-property node.
     */
    @Override
    public <V> Iterator<VertexProperty<V>> getVertexProperties(final Neo4jVertex vertex, final String... keys) {
        if (Neo4jHelper.isDeleted(vertex.getBaseVertex()))
            return Collections.emptyIterator(); // TODO: I believe its because the vertex property is deleted, but then seen again in the iterator. ?
        return IteratorUtils.stream(vertex.getBaseVertex().getKeys())
                .filter(key -> ElementHelper.keyExists(key, keys))
                .flatMap(key -> {
                    if (vertex.getBaseVertex().getProperty(key).equals(VERTEX_PROPERTY_TOKEN))
                        return IteratorUtils.stream(vertex.getBaseVertex().relationships(Neo4jDirection.OUTGOING, Graph.Hidden.hide(key)))
                                .map(relationship -> (VertexProperty<V>) new Neo4jVertexProperty<>(vertex, relationship.end()));
                    else
                        return Stream.of(new Neo4jVertexProperty<>(vertex, key, (V) vertex.getBaseVertex().getProperty(key)));
                }).iterator();
    }

    /**
     * Adds a vertex property. A first value for a key is stored directly on the vertex node;
     * a second value for the same key migrates the existing value into a vertex-property node,
     * replaces the on-vertex value with {@link #VERTEX_PROPERTY_TOKEN}, and adds the new value
     * as another vertex-property node. Meta-properties in {@code keyValues} are attached to the
     * created property. The statement order in the migration branch is significant.
     */
    @Override
    public <V> VertexProperty<V> setVertexProperty(Neo4jVertex vertex, VertexProperty.Cardinality cardinality, String key, V value, Object... keyValues) {
        try {
            // stageVertexProperty may short-circuit (e.g. single-cardinality replace of equal value)
            final Optional<VertexProperty<V>> optionalVertexProperty = ElementHelper.stageVertexProperty(vertex, cardinality, key, value, keyValues);
            if (optionalVertexProperty.isPresent()) return optionalVertexProperty.get();
            final Neo4jNode node = vertex.getBaseVertex();
            final Neo4jGraphAPI graph = ((Neo4jGraph) vertex.graph()).getBaseGraph();
            final String prefixedKey = Graph.Hidden.hide(key);
            if (node.hasProperty(key)) {
                if (node.getProperty(key).equals(VERTEX_PROPERTY_TOKEN)) {
                    // key is already multi-valued: just add one more vertex-property node
                    final Neo4jNode vertexPropertyNode = graph.createNode(VERTEX_PROPERTY_LABEL, key);
                    vertexPropertyNode.setProperty(T.key.getAccessor(), key);
                    vertexPropertyNode.setProperty(T.value.getAccessor(), value);
                    vertexPropertyNode.setProperty(key, value);
                    node.connectTo(vertexPropertyNode, prefixedKey);
                    final Neo4jVertexProperty<V> property = new Neo4jVertexProperty<>(vertex, key, value, vertexPropertyNode);
                    ElementHelper.attachProperties(property, keyValues); // TODO: make this inlined
                    return property;
                } else {
                    // move current key to be a vertex property node
                    Neo4jNode vertexPropertyNode = graph.createNode(VERTEX_PROPERTY_LABEL, key);
                    final Object tempValue = node.removeProperty(key);
                    vertexPropertyNode.setProperty(T.key.getAccessor(), key);
                    vertexPropertyNode.setProperty(T.value.getAccessor(), tempValue);
                    vertexPropertyNode.setProperty(key, tempValue);
                    node.connectTo(vertexPropertyNode, prefixedKey);
                    node.setProperty(key, VERTEX_PROPERTY_TOKEN);
                    // then add the new value as a second vertex-property node
                    vertexPropertyNode = graph.createNode(VERTEX_PROPERTY_LABEL, key);
                    vertexPropertyNode.setProperty(T.key.getAccessor(), key);
                    vertexPropertyNode.setProperty(T.value.getAccessor(), value);
                    vertexPropertyNode.setProperty(key, value);
                    node.connectTo(vertexPropertyNode, prefixedKey);
                    final Neo4jVertexProperty<V> property = new Neo4jVertexProperty<>(vertex, key, value, vertexPropertyNode);
                    ElementHelper.attachProperties(property, keyValues); // TODO: make this inlined
                    return property;
                }
            } else {
                // first value for this key: store it directly on the vertex node
                node.setProperty(key, value);
                final Neo4jVertexProperty<V> property = new Neo4jVertexProperty<>(vertex, key, value);
                ElementHelper.attachProperties(property, keyValues); // TODO: make this inlined
                return property;
            }
        } catch (final IllegalArgumentException iae) {
            // Neo4j rejects unsupported value types with IAE; translate to the TinkerPop exception
            throw Property.Exceptions.dataTypeOfPropertyValueNotSupported(value);
        }
    }

    /**
     * All keys behave as list-cardinality under this trait.
     */
    @Override
    public VertexProperty.Cardinality getCardinality(final String key) {
        return VertexProperty.Cardinality.list;
    }

    @Override
    public boolean supportsMultiProperties() {
        return true;
    }

    @Override
    public boolean supportsMetaProperties() {
        return true;
    }

    /**
     * Removes a single vertex property. When the property is node-backed, the node and its
     * relationships are deleted; in either case, once no vertex-property nodes remain for the
     * key, the on-vertex entry (value or sentinel) is removed as well.
     */
    @Override
    public void removeVertexProperty(final Neo4jVertexProperty vertexProperty) {
        final Neo4jNode vertexPropertyNode = Neo4jHelper.getVertexPropertyNode(vertexProperty);
        final Neo4jNode vertexNode = ((Neo4jVertex) vertexProperty.element()).getBaseVertex();
        if (null == vertexPropertyNode) {
            // property stored directly on the vertex node
            if (vertexNode.degree(Neo4jDirection.OUTGOING, Graph.Hidden.hide(vertexProperty.key())) == 0) {
                if (vertexNode.hasProperty(vertexProperty.key()))
                    vertexNode.removeProperty(vertexProperty.key());
            }
        } else {
            // node-backed property: drop its node, then clean the sentinel if it was the last one
            vertexPropertyNode.relationships(Neo4jDirection.BOTH).forEach(Neo4jRelationship::delete);
            vertexPropertyNode.delete();
            if (vertexNode.degree(Neo4jDirection.OUTGOING, Graph.Hidden.hide(vertexProperty.key())) == 0) {
                if (vertexNode.hasProperty(vertexProperty.key()))
                    vertexNode.removeProperty(vertexProperty.key());
            }
        }
    }

    /**
     * Sets a meta-property on a vertex property. If the vertex property is not yet node-backed,
     * it is first migrated to a vertex-property node (same layout as {@code setVertexProperty})
     * so the meta-property has a node to live on.
     */
    @Override
    public <V> Property<V> setProperty(final Neo4jVertexProperty vertexProperty, final String key, final V value) {
        final Neo4jNode vertexPropertyNode = Neo4jHelper.getVertexPropertyNode(vertexProperty);
        if (null != vertexPropertyNode) {
            vertexPropertyNode.setProperty(key, value);
            return new Neo4jProperty<>(vertexProperty, key, value);
        } else {
            final Neo4jNode vertexNode = ((Neo4jVertex) vertexProperty.element()).getBaseVertex();
            final Neo4jNode newVertexPropertyNode = ((WrappedGraph<Neo4jGraphAPI>) vertexProperty.element().graph()).getBaseGraph().createNode(VERTEX_PROPERTY_LABEL, vertexProperty.label());
            newVertexPropertyNode.setProperty(T.key.getAccessor(), vertexProperty.key());
            newVertexPropertyNode.setProperty(T.value.getAccessor(), vertexProperty.value());
            newVertexPropertyNode.setProperty(vertexProperty.key(), vertexProperty.value());
            newVertexPropertyNode.setProperty(key, value);
            vertexNode.connectTo(newVertexPropertyNode, Graph.Hidden.hide(vertexProperty.key()));
            vertexNode.setProperty(vertexProperty.key(), VERTEX_PROPERTY_TOKEN);
            Neo4jHelper.setVertexPropertyNode(vertexProperty, newVertexPropertyNode);
            return new Neo4jProperty<>(vertexProperty, key, value);
        }
    }

    /**
     * Returns the meta-property {@code key} of a vertex property, or empty when the property is
     * not node-backed or has no such key.
     */
    @Override
    public <V> Property<V> getProperty(final Neo4jVertexProperty vertexProperty, final String key) {
        final Neo4jNode vertexPropertyNode = Neo4jHelper.getVertexPropertyNode(vertexProperty);
        if (null != vertexPropertyNode && vertexPropertyNode.hasProperty(key))
            return new Neo4jProperty<>(vertexProperty, key, (V) vertexPropertyNode.getProperty(key));
        else
            return Property.empty();
    }

    /**
     * Streams the meta-properties of a vertex property matching {@code keys}, excluding the
     * property's own key (which stores the value itself on the backing node).
     */
    @Override
    public <V> Iterator<Property<V>> getProperties(final Neo4jVertexProperty vertexProperty, final String... keys) {
        final Neo4jNode vertexPropertyNode = Neo4jHelper.getVertexPropertyNode(vertexProperty);
        if (null == vertexPropertyNode)
            return Collections.emptyIterator();
        else
            return IteratorUtils.stream(vertexPropertyNode.getKeys())
                    .filter(key -> ElementHelper.keyExists(key, keys))
                    .filter(key -> !key.equals(vertexProperty.key()))
                    .map(key -> (Property<V>) new Neo4jProperty<>(vertexProperty, key, (V) vertexPropertyNode.getProperty(key))).iterator();
    }

    /**
     * Looks up vertices, preferring (1) direct id filtering, then (2) a schema-index scan for an
     * eq-predicate when a label is present in {@code hasContainers} — merging hits on indexed
     * vertex-property nodes back to their owning vertices — then (3) a label scan, and finally
     * (4) a full linear scan. All candidates are re-checked against every {@code HasContainer}.
     */
    @Override
    public Iterator<Vertex> lookupVertices(final Neo4jGraph graph, final List<HasContainer> hasContainers, final Object... ids) {
        // ids are present, filter on them first
        if (ids.length > 0)
            return IteratorUtils.filter(graph.vertices(ids), vertex -> HasContainer.testAll(vertex, hasContainers));
        ////// do index lookups //////
        graph.tx().readWrite();
        // get a label being search on
        Optional<String> label = hasContainers.stream()
                .filter(hasContainer -> hasContainer.getKey().equals(T.label.getAccessor()))
                .filter(hasContainer -> Compare.eq == hasContainer.getBiPredicate())
                .map(hasContainer -> (String) hasContainer.getValue())
                .findAny();
        if (!label.isPresent())
            label = hasContainers.stream()
                    .filter(hasContainer -> hasContainer.getKey().equals(T.label.getAccessor()))
                    .filter(hasContainer -> hasContainer.getPredicate() instanceof LabelP)
                    .map(hasContainer -> (String) hasContainer.getValue())
                    .findAny();
        if (label.isPresent()) {
            // find a vertex by label and key/value
            for (final HasContainer hasContainer : hasContainers) {
                if (Compare.eq == hasContainer.getBiPredicate()) {
                    if (graph.getBaseGraph().hasSchemaIndex(label.get(), hasContainer.getKey())) {
                        return Stream.concat(
                                IteratorUtils.stream(graph.getBaseGraph().findNodes(label.get(), hasContainer.getKey(), hasContainer.getValue()))
                                        .filter(getNodePredicate())
                                        .map(node -> (Vertex) new Neo4jVertex(node, graph))
                                        .filter(vertex -> HasContainer.testAll(vertex, hasContainers)),
                                IteratorUtils.stream(graph.getBaseGraph().findNodes(VERTEX_PROPERTY_LABEL, hasContainer.getKey(), hasContainer.getValue()))
                                        // look up indexed vertex property nodes
                                        .map(node -> node.relationships(Neo4jDirection.INCOMING).iterator().next().start())
                                        .map(node -> (Vertex) new Neo4jVertex(node, graph))
                                        .filter(vertex -> HasContainer.testAll(vertex, hasContainers))).iterator();
                    }
                }
            }
            // find a vertex by label
            return IteratorUtils.stream(graph.getBaseGraph().findNodes(label.get()))
                    .filter(getNodePredicate())
                    .map(node -> (Vertex) new Neo4jVertex(node, graph))
                    .filter(vertex -> HasContainer.testAll(vertex, hasContainers)).iterator();
        } else {
            // linear scan
            return IteratorUtils.filter(graph.vertices(), vertex -> HasContainer.testAll(vertex, hasContainers));
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * This is not the original file distributed by the Apache Software Foundation
 * It has been modified by the Hipparchus project
 */
package org.hipparchus.stat.inference;

import org.hipparchus.distribution.continuous.ChiSquaredDistribution;
import org.hipparchus.exception.LocalizedCoreFormats;
import org.hipparchus.exception.MathIllegalArgumentException;
import org.hipparchus.exception.MathIllegalStateException;
import org.hipparchus.exception.NullArgumentException;
import org.hipparchus.stat.LocalizedStatFormats;
import org.hipparchus.util.FastMath;
import org.hipparchus.util.MathArrays;
import org.hipparchus.util.MathUtils;

/**
 * Implements Chi-Square test statistics.
 * <p>
 * This implementation handles both known and unknown distributions.
 * <p>
 * Two-sample tests can be used when the distribution is unknown <i>a priori</i>
 * but provided by one sample, or when the hypothesis under test is that the two
 * samples come from the same underlying distribution.
 */
public class ChiSquareTest {

    /**
     * Computes the <a href="http://www.itl.nist.gov/div898/handbook/eda/section3/eda35f.htm">
     * Chi-Square statistic</a> comparing <code>observed</code> and <code>expected</code>
     * frequency counts.
     * <p>
     * This statistic can be used to perform a Chi-Square test evaluating the null
     * hypothesis that the observed counts follow the expected distribution.
     * <p>
     * <strong>Preconditions</strong>:
     * <ul>
     * <li>Expected counts must all be positive.</li>
     * <li>Observed counts must all be &ge; 0.</li>
     * <li>The observed and expected arrays must have the same length and
     * their common length must be at least 2.</li>
     * </ul>
     * <p>
     * If any of the preconditions are not met, an
     * <code>IllegalArgumentException</code> is thrown.
     * <p>
     * <strong>Note: </strong>This implementation rescales the
     * <code>expected</code> array if necessary to ensure that the sum of the
     * expected and observed counts are equal.
     *
     * @param expected array of expected frequency counts
     * @param observed array of observed frequency counts
     * @return chiSquare test statistic
     * @throws MathIllegalArgumentException if <code>observed</code> has negative entries
     * @throws MathIllegalArgumentException if <code>expected</code> has entries that are
     * not strictly positive
     * @throws MathIllegalArgumentException if the arrays length is less than 2
     */
    public double chiSquare(final double[] expected, final long[] observed)
        throws MathIllegalArgumentException {

        if (expected.length < 2) {
            throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH,
                                                   expected.length, 2);
        }
        MathUtils.checkDimension(expected.length, observed.length);
        MathArrays.checkPositive(expected);
        MathArrays.checkNonNegative(observed);

        double sumExpected = 0d;
        double sumObserved = 0d;
        for (int i = 0; i < observed.length; i++) {
            sumExpected += expected[i];
            sumObserved += observed[i];
        }
        // Rescale the expected counts when the two totals differ by more than the
        // tolerance (10E-6 == 1e-5), so that sum(expected) matches sum(observed).
        double ratio = 1.0d;
        boolean rescale = false;
        if (FastMath.abs(sumExpected - sumObserved) > 10E-6) {
            ratio = sumObserved / sumExpected;
            rescale = true;
        }
        // sum over cells of (observed - expected)^2 / expected
        double sumSq = 0.0d;
        for (int i = 0; i < observed.length; i++) {
            if (rescale) {
                final double dev = observed[i] - ratio * expected[i];
                sumSq += dev * dev / (ratio * expected[i]);
            } else {
                final double dev = observed[i] - expected[i];
                sumSq += dev * dev / expected[i];
            }
        }
        return sumSq;
    }

    /**
     * Returns the <i>observed significance level</i>, or <a href=
     * "http://www.cas.lancs.ac.uk/glossary_v1.1/hyptest.html#pvalue">
     * p-value</a>, associated with a
     * <a href="http://www.itl.nist.gov/div898/handbook/eda/section3/eda35f.htm">
     * Chi-square goodness of fit test</a> comparing the <code>observed</code>
     * frequency counts to those in the <code>expected</code> array.
     * <p>
     * The number returned is the smallest significance level at which one can reject
     * the null hypothesis that the observed counts conform to the frequency distribution
     * described by the expected counts.
     * <p>
     * <strong>Preconditions</strong>:
     * <ul>
     * <li>Expected counts must all be positive.</li>
     * <li>Observed counts must all be &ge; 0.</li>
     * <li>The observed and expected arrays must have the same length and
     * their common length must be at least 2.</li>
     * </ul>
     * <p>
     * If any of the preconditions are not met, an
     * <code>IllegalArgumentException</code> is thrown.
     * <p>
     * <strong>Note: </strong>This implementation rescales the
     * <code>expected</code> array if necessary to ensure that the sum of the
     * expected and observed counts are equal.
     *
     * @param expected array of expected frequency counts
     * @param observed array of observed frequency counts
     * @return p-value
     * @throws MathIllegalArgumentException if <code>observed</code> has negative entries
     * @throws MathIllegalArgumentException if <code>expected</code> has entries that are
     * not strictly positive
     * @throws MathIllegalArgumentException if the arrays length is less than 2
     * @throws MathIllegalStateException if an error occurs computing the p-value
     */
    public double chiSquareTest(final double[] expected, final long[] observed)
        throws MathIllegalArgumentException, MathIllegalStateException {
        // degrees of freedom = number of bins - 1
        final ChiSquaredDistribution distribution =
                new ChiSquaredDistribution(expected.length - 1.0);
        return 1.0 - distribution.cumulativeProbability(chiSquare(expected, observed));
    }

    /**
     * Performs a <a href="http://www.itl.nist.gov/div898/handbook/eda/section3/eda35f.htm">
     * Chi-square goodness of fit test</a> evaluating the null hypothesis that the
     * observed counts conform to the frequency distribution described by the expected
     * counts, with significance level <code>alpha</code>.  Returns true iff the null
     * hypothesis can be rejected with 100 * (1 - alpha) percent confidence.
     * <p>
     * <strong>Example:</strong><br>
     * To test the hypothesis that <code>observed</code> follows
     * <code>expected</code> at the 99% level, use
     * <code>chiSquareTest(expected, observed, 0.01)</code>
     * <p>
     * <strong>Preconditions</strong>:
     * <ul>
     * <li>Expected counts must all be positive.</li>
     * <li>Observed counts must all be &ge; 0.</li>
     * <li>The observed and expected arrays must have the same length and
     * their common length must be at least 2.</li>
     * <li><code> 0 &lt; alpha &lt; 0.5</code></li>
     * </ul>
     * <p>
     * If any of the preconditions are not met, an
     * <code>IllegalArgumentException</code> is thrown.
     * <p>
     * <strong>Note: </strong>This implementation rescales the
     * <code>expected</code> array if necessary to ensure that the sum of the
     * expected and observed counts are equal.
     *
     * @param expected array of expected frequency counts
     * @param observed array of observed frequency counts
     * @param alpha significance level of the test
     * @return true iff null hypothesis can be rejected with confidence
     * 1 - alpha
     * @throws MathIllegalArgumentException if <code>observed</code> has negative entries
     * @throws MathIllegalArgumentException if <code>expected</code> has entries that are
     * not strictly positive
     * @throws MathIllegalArgumentException if the arrays length is less than 2
     * @throws MathIllegalArgumentException if <code>alpha</code> is not in the range (0, 0.5]
     * @throws MathIllegalStateException if an error occurs computing the p-value
     */
    public boolean chiSquareTest(final double[] expected, final long[] observed,
                                 final double alpha)
        throws MathIllegalArgumentException, MathIllegalStateException {
        if ((alpha <= 0) || (alpha > 0.5)) {
            throw new MathIllegalArgumentException(LocalizedStatFormats.OUT_OF_BOUND_SIGNIFICANCE_LEVEL,
                                                   alpha, 0, 0.5);
        }
        return chiSquareTest(expected, observed) < alpha;
    }

    /**
     * Computes the Chi-Square statistic associated with a
     * <a href="http://www.itl.nist.gov/div898/handbook/prc/section4/prc45.htm">
     * chi-square test of independence</a> based on the input <code>counts</code>
     * array, viewed as a two-way table.
     * <p>
     * The rows of the 2-way table are
     * <code>count[0], ... , count[count.length - 1] </code>
     * <p>
     * <strong>Preconditions</strong>:
     * <ul>
     * <li>All counts must be &ge; 0.</li>
     * <li>The count array must be rectangular (i.e. all count[i] subarrays
     * must have the same length).</li>
     * <li>The 2-way table represented by <code>counts</code> must have at
     * least 2 columns and at least 2 rows.</li>
     * </ul>
     * <p>
     * If any of the preconditions are not met, an
     * <code>IllegalArgumentException</code> is thrown.
     *
     * @param counts array representation of 2-way table
     * @return chiSquare test statistic
     * @throws NullArgumentException if the array is null
     * @throws MathIllegalArgumentException if the array is not rectangular
     * @throws MathIllegalArgumentException if {@code counts} has negative entries
     */
    public double chiSquare(final long[][] counts)
        throws MathIllegalArgumentException, NullArgumentException {

        // checkArray is a private precondition helper defined elsewhere in this class
        checkArray(counts);
        int nRows = counts.length;
        int nCols = counts[0].length;

        // compute row, column and total sums
        double[] rowSum = new double[nRows];
        double[] colSum = new double[nCols];
        double total = 0.0d;
        for (int row = 0; row < nRows; row++) {
            for (int col = 0; col < nCols; col++) {
                rowSum[row] += counts[row][col];
                colSum[col] += counts[row][col];
                total += counts[row][col];
            }
        }

        // compute expected counts and chi-square
        double sumSq = 0.0d;
        for (int row = 0; row < nRows; row++) {
            for (int col = 0; col < nCols; col++) {
                // expected cell count under independence: rowTotal * colTotal / grandTotal
                final double expected = (rowSum[row] * colSum[col]) / total;
                sumSq += ((counts[row][col] - expected) *
                          (counts[row][col] - expected)) / expected;
            }
        }
        return sumSq;
    }

    /**
     * Returns the <i>observed significance level</i>, or <a href=
     * "http://www.cas.lancs.ac.uk/glossary_v1.1/hyptest.html#pvalue">
     * p-value</a>, associated with a
     * <a href="http://www.itl.nist.gov/div898/handbook/prc/section4/prc45.htm">
     * chi-square test of independence</a> based on the input <code>counts</code>
     * array, viewed as a two-way table.
     * <p>
     * The rows of the 2-way table are
     * <code>count[0], ... , count[count.length - 1] </code>
     * <p>
     * <strong>Preconditions</strong>:
     * <ul>
     * <li>All counts must be &ge; 0.</li>
     * <li>The count array must be rectangular (i.e. all count[i] subarrays must have
     * the same length).</li>
     * <li>The 2-way table represented by <code>counts</code> must have at least 2
     * columns and at least 2 rows.</li>
     * </ul>
     * <p>
     * If any of the preconditions are not met, an
     * <code>IllegalArgumentException</code> is thrown.
     *
     * @param counts array representation of 2-way table
     * @return p-value
     * @throws NullArgumentException if the array is null
     * @throws MathIllegalArgumentException if the array is not rectangular
     * @throws MathIllegalArgumentException if {@code counts} has negative entries
     * @throws MathIllegalStateException if an error occurs computing the p-value
     */
    public double chiSquareTest(final long[][] counts)
        throws MathIllegalArgumentException, NullArgumentException, MathIllegalStateException {
        checkArray(counts);
        // degrees of freedom = (rows - 1) * (cols - 1)
        double df = ((double) counts.length -1) * ((double) counts[0].length - 1);
        final ChiSquaredDistribution distribution = new ChiSquaredDistribution(df);
        return 1 - distribution.cumulativeProbability(chiSquare(counts));
    }

    /**
     * Performs a <a href="http://www.itl.nist.gov/div898/handbook/prc/section4/prc45.htm">
     * chi-square test of independence</a> evaluating the null hypothesis that the
     * classifications represented by the counts in the columns of the input 2-way table
     * are independent of the rows, with significance level <code>alpha</code>.
     * Returns true iff the null hypothesis can be rejected with 100 * (1 - alpha) percent
     * confidence.
     * <p>
     * The rows of the 2-way table are
     * <code>count[0], ... , count[count.length - 1] </code>
     * <p>
     * <strong>Example:</strong><br>
     * To test the null hypothesis that the counts in
     * <code>count[0], ... , count[count.length - 1] </code>
     * all correspond to the same underlying probability distribution at the 99% level,
     * use <code>chiSquareTest(counts, 0.01)</code>.
     * <p>
     * <strong>Preconditions</strong>:
     * <ul>
     * <li>All counts must be &ge; 0.</li>
     * <li>The count array must be rectangular (i.e. all count[i] subarrays must have the
     * same length).</li>
     * <li>The 2-way table represented by <code>counts</code> must have at least 2 columns and
     * at least 2 rows.</li>
     * </ul>
     * <p>
     * If any of the preconditions are not met, an
     * <code>IllegalArgumentException</code> is thrown.
     *
     * @param counts array representation of 2-way table
     * @param alpha significance level of the test
     * @return true iff null hypothesis can be rejected with confidence
     * 1 - alpha
     * @throws NullArgumentException if the array is null
     * @throws MathIllegalArgumentException if the array is not rectangular
     * @throws MathIllegalArgumentException if {@code counts} has any negative entries
     * @throws MathIllegalArgumentException if <code>alpha</code> is not in the range (0, 0.5]
     * @throws MathIllegalStateException if an error occurs computing the p-value
     */
    public boolean chiSquareTest(final long[][] counts, final double alpha)
        throws MathIllegalArgumentException, NullArgumentException, MathIllegalStateException {
        if ((alpha <= 0) || (alpha > 0.5)) {
            throw new MathIllegalArgumentException(LocalizedStatFormats.OUT_OF_BOUND_SIGNIFICANCE_LEVEL,
                                                   alpha, 0, 0.5);
        }
        return chiSquareTest(counts) < alpha;
    }

    /**
     * Computes a
     * <a href="http://www.itl.nist.gov/div898/software/dataplot/refman1/auxillar/chi2samp.htm">
     * Chi-Square two sample test statistic</a> comparing bin frequency counts
     * in <code>observed1</code> and <code>observed2</code>.
     * <p>
     * The sums of frequency counts in the two samples are not required to be the
     * same. The formula used to compute the test statistic is
     * <p>
     * <code>
     * &sum;[(K * observed1[i] - observed2[i]/K)<sup>2</sup> / (observed1[i] + observed2[i])]
     * </code>
     * <p>
     * where
     * <p>
     * <code>K = &radic;[&sum;(observed2) / &sum;(observed1)]</code>
     * <p>
     * This statistic can be used to perform a Chi-Square test evaluating the
     * null hypothesis that both observed counts follow the same distribution.
     * <p>
     * <strong>Preconditions</strong>:
     * <ul>
     * <li>Observed counts must be non-negative.</li>
     * <li>Observed counts for a specific bin must not both be zero.</li>
     * <li>Observed counts for a specific sample must not all be 0.</li>
     * <li>The arrays <code>observed1</code> and <code>observed2</code> must have
     * the same length and their common length must be at least 2.</li>
     * </ul>
     * <p>
     * If any of the preconditions are not met, an
     * <code>IllegalArgumentException</code> is thrown.
     *
     * @param observed1 array of observed frequency counts of the first data set
     * @param observed2 array of observed frequency counts of the second data set
     * @return chiSquare test statistic
     * @throws MathIllegalArgumentException if the length of the arrays does not match
     * @throws MathIllegalArgumentException if any entries in <code>observed1</code> or
     * <code>observed2</code> are negative
     * @throws MathIllegalArgumentException if either all counts of <code>observed1</code> or
     * <code>observed2</code> are zero, or if the count at some index is zero
     * for both arrays
     */
    public double chiSquareDataSetsComparison(long[] observed1, long[] observed2)
        throws MathIllegalArgumentException {

        // Make sure lengths are same
        if (observed1.length < 2) {
            throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH,
                                                   observed1.length, 2);
        }
        MathUtils.checkDimension(observed1.length, observed2.length);

        // Ensure non-negative counts
        MathArrays.checkNonNegative(observed1);
        MathArrays.checkNonNegative(observed2);

        // Compute and compare count sums
        long countSum1 = 0;
        long countSum2 = 0;
        for (int i = 0; i < observed1.length; i++) {
            countSum1 += observed1[i];
            countSum2 += observed2[i];
        }
        // Ensure neither sample is uniformly 0
        if (countSum1 == 0 || countSum2 == 0) {
            throw new MathIllegalArgumentException(LocalizedCoreFormats.ZERO_NOT_ALLOWED);
        }
        // Compare and compute weight only if different
        double weight = 0.0;
        boolean unequalCounts = countSum1 != countSum2;
        if (unequalCounts) {
            weight = FastMath.sqrt((double) countSum1 / (double) countSum2);
        }
        // Compute ChiSquare statistic
        double sumSq = 0.0d;
        for (int i = 0; i < observed1.length; i++) {
            if (observed1[i] == 0 && observed2[i] == 0) {
                // NOTE: "BOTTH" is the actual spelling of this enum constant in the
                // upstream Hipparchus LocalizedCoreFormats — do not "fix" it here.
                throw new MathIllegalArgumentException(LocalizedCoreFormats.OBSERVED_COUNTS_BOTTH_ZERO_FOR_ENTRY,
                                                       i);
            } else {
                final double obs1 = observed1[i];
                final double obs2 = observed2[i];
                final double dev;
                if (unequalCounts) { // apply weights
                    dev = obs1/weight - obs2 * weight;
                } else {
                    dev = obs1 - obs2;
                }
                sumSq += (dev * dev) / (obs1 + obs2);
            }
        }
        return sumSq;
    }

    /**
     * Returns the <i>observed significance level</i>, or <a href=
     * "http://www.cas.lancs.ac.uk/glossary_v1.1/hyptest.html#pvalue">
     * p-value</a>, associated with a Chi-Square two sample test comparing
     * bin frequency counts in <code>observed1</code> and
     * <code>observed2</code>.
     * <p>
     * The number returned is the smallest significance level at which one
     * can reject the null hypothesis that the observed counts conform to the
     * same distribution.
     * <p>
     * See {@link #chiSquareDataSetsComparison(long[], long[])} for details
     * on the formula used to compute the test statistic. The degrees
     * of freedom used to perform the test is one less than the common length
     * of the input observed count arrays.
     * <p>
     * <strong>Preconditions</strong>:
     * <ul>
     * <li>Observed counts must be non-negative.</li>
     * <li>Observed counts for a specific bin must not both be zero.</li>
     * <li>Observed counts for a specific sample must not all be 0.</li>
     * <li>The arrays <code>observed1</code> and <code>observed2</code> must
     * have the same length and their common length must be at least 2.</li>
     * </ul>
     * <p>
     * If any of the preconditions are not met, an
     * <code>IllegalArgumentException</code> is thrown.
     *
     * @param observed1 array of observed frequency counts of the first data set
     * @param observed2 array of observed frequency counts of the second data set
     * @return p-value
     * @throws MathIllegalArgumentException if the length of the arrays does not match
     * @throws MathIllegalArgumentException if any entries in <code>observed1</code> or
     * <code>observed2</code> are negative
     * @throws MathIllegalArgumentException if either all counts of <code>observed1</code> or
     * <code>observed2</code> are zero, or if the count at the same index is zero
     * for both arrays
     * @throws MathIllegalStateException if an error occurs computing the p-value
     */
    public double chiSquareTestDataSetsComparison(long[] observed1, long[] observed2)
        throws MathIllegalArgumentException,
        MathIllegalStateException {
        // degrees of freedom = number of bins - 1
        final ChiSquaredDistribution distribution =
                new ChiSquaredDistribution((double) observed1.length - 1);
        return 1 - distribution.cumulativeProbability(
                chiSquareDataSetsComparison(observed1, observed2));
    }

    /**
     * Performs a Chi-Square two sample test comparing two binned data
     * sets. The test evaluates the null hypothesis that the two lists of
     * observed counts conform to the same frequency distribution, with
     * significance level <code>alpha</code>. Returns true iff the null
     * hypothesis can be rejected with 100 * (1 - alpha) percent confidence.
     * <p>
     * See {@link #chiSquareDataSetsComparison(long[], long[])} for
     * details on the formula used to compute the Chisquare statistic used
     * in the test. The degrees of freedom used to perform the test is
     * one less than the common length of the input observed count arrays.
* <p> * <strong>Preconditions</strong>: * <ul> * <li>Observed counts must be non-negative.</li> * <li>Observed counts for a specific bin must not both be zero.</li> * <li>Observed counts for a specific sample must not all be 0.</li> * <li>The arrays <code>observed1</code> and <code>observed2</code> must * have the same length and their common length must be at least 2.</li> * <li><code> 0 &lt; alpha &lt; 0.5</code></li> * </ul> * <p> * If any of the preconditions are not met, an * <code>IllegalArgumentException</code> is thrown. * * @param observed1 array of observed frequency counts of the first data set * @param observed2 array of observed frequency counts of the second data set * @param alpha significance level of the test * @return true iff null hypothesis can be rejected with confidence * 1 - alpha * @throws MathIllegalArgumentException the the length of the arrays does not match * @throws MathIllegalArgumentException if any entries in <code>observed1</code> or * <code>observed2</code> are negative * @throws MathIllegalArgumentException if either all counts of <code>observed1</code> or * <code>observed2</code> are zero, or if the count at the same index is zero * for both arrays * @throws MathIllegalArgumentException if <code>alpha</code> is not in the range (0, 0.5] * @throws MathIllegalStateException if an error occurs performing the test */ public boolean chiSquareTestDataSetsComparison(final long[] observed1, final long[] observed2, final double alpha) throws MathIllegalArgumentException, MathIllegalStateException { if (alpha <= 0 || alpha > 0.5) { throw new MathIllegalArgumentException(LocalizedStatFormats.OUT_OF_BOUND_SIGNIFICANCE_LEVEL, alpha, 0, 0.5); } return chiSquareTestDataSetsComparison(observed1, observed2) < alpha; } /** * Checks to make sure that the input long[][] array is rectangular, * has at least 2 rows and 2 columns, and has all non-negative entries. 
 *
 * @param in input 2-way table to check
 * @throws NullArgumentException if the array is null
 * @throws MathIllegalArgumentException if the array is not valid
 * @throws MathIllegalArgumentException if the array contains any negative entries
 */
private void checkArray(final long[][] in)
    throws MathIllegalArgumentException, NullArgumentException {

    // The contingency table needs at least two rows...
    if (in.length < 2) {
        throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH,
                                               in.length, 2);
    }

    // ...and at least two columns. Only the first row is inspected here;
    // full rectangularity of the remaining rows is verified just below.
    if (in[0].length < 2) {
        throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH,
                                               in[0].length, 2);
    }

    // Every row must have the same length, and every entry must be >= 0.
    MathArrays.checkRectangular(in);
    MathArrays.checkNonNegative(in);
}

}
/* Copyright (c) 2014 Boundless and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Distribution License v1.0
 * which accompanies this distribution, and is available at
 * https://www.eclipse.org/org/documents/edl-v10.html
 *
 * Contributors:
 * Johnathan Garrett (LMN Solutions) - initial implementation
 */
package org.locationtech.geogig.storage.bdbje;

import static com.sleepycat.je.OperationStatus.NOTFOUND;
import static com.sleepycat.je.OperationStatus.SUCCESS;

import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;

import javax.annotation.Nullable;

import org.locationtech.geogig.api.ObjectId;
import org.locationtech.geogig.repository.Hints;
import org.locationtech.geogig.repository.RepositoryConnectionException;
import org.locationtech.geogig.storage.ConfigDatabase;
import org.locationtech.geogig.storage.GraphDatabase;
import org.locationtech.geogig.storage.SynchronizedGraphDatabase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableList.Builder;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.sleepycat.bind.tuple.TupleBinding;
import com.sleepycat.je.CacheMode;
import com.sleepycat.je.Database;
import com.sleepycat.je.DatabaseConfig;
import com.sleepycat.je.DatabaseEntry;
import com.sleepycat.je.Durability;
import com.sleepycat.je.Environment;
import com.sleepycat.je.EnvironmentLockedException;
import com.sleepycat.je.LockMode;
import com.sleepycat.je.OperationStatus;
import com.sleepycat.je.Transaction;
import com.sleepycat.je.TransactionConfig;

/**
 * Implementation of {@link GraphDatabase} backed by a BerkeleyDB Java Edition database.
 * <p>
 * Implementation note: Since this is the only kind of mutable state we maintain, this
 * implementation extends {@link SynchronizedGraphDatabase} to avoid concurrent threads stepping
 * over each other's feet and overriding graph relations. An alternate solution would be to
 * serialize writes and have free threaded reads.
 * </p>
 */
abstract class JEGraphDatabase extends SynchronizedGraphDatabase {

    private static final Logger LOGGER = LoggerFactory.getLogger(JEGraphDatabase.class);

    // Name of the BDB JE environment directory (relative to the repository).
    static final String ENVIRONMENT_NAME = "graph";

    /**
     * Wraps a new {@link Impl} in the synchronized superclass; all graph operations
     * are delegated to that implementation.
     */
    public JEGraphDatabase(final ConfigDatabase config, final EnvironmentBuilder envProvider,
            final TupleBinding<NodeData> binding, final String formatVersion, final Hints hints) {
        super(new Impl(config, envProvider, binding, formatVersion, hints));
    }

    /**
     * The actual BDB JE backed graph database. Stores one {@link NodeData} entry per commit,
     * keyed by the commit's raw {@link ObjectId} bytes.
     */
    private static class Impl implements GraphDatabase {

        // Binding used to (de)serialize NodeData records to/from DatabaseEntry byte arrays.
        private final TupleBinding<NodeData> BINDING;

        private EnvironmentBuilder envProvider;

        /**
         * Lazily loaded, do not access it directly but through {@link #createEnvironment()}
         */
        protected Environment env;

        // The open BDB database; null until open() succeeds, nulled again on close().
        protected Database graphDb;

        private final String envName;

        private final ConfigDatabase configDb;

        private final String databaseName = "GraphDatabase";

        // When true the environment/database are opened read-only (from Hints).
        private final boolean readOnly;

        private final String formatVersion;

        public Impl(final ConfigDatabase config, final EnvironmentBuilder envProvider,
                final TupleBinding<NodeData> binding, final String formatVersion, final Hints hints) {
            this.configDb = config;
            this.envProvider = envProvider;
            this.BINDING = binding;
            this.formatVersion = formatVersion;
            this.envName = JEGraphDatabase.ENVIRONMENT_NAME;
            this.readOnly = hints.getBoolean(Hints.OBJECTS_READ_ONLY);
        }

        /** Opens the environment and database; a no-op if already open. */
        @Override
        public void open() {
            if (isOpen()) {
                LOGGER.trace("Environment {} already open", env.getHome());
                return;
            }
            this.graphDb = createDatabase();
            // System.err.println("---> " + getClass().getName() + ".open() " + env.getHome());
            LOGGER.debug("Graph database opened at {}. Transactional: {}", env.getHome(), graphDb
                    .getConfig().getTransactional());
        }

        /**
         * Opens (creating if necessary) the BDB database, assigning {@link #env} as a side
         * effect. If the database does not exist yet and we are read-only, the environment is
         * temporarily reopened writable so the database can be created, then reopened with the
         * requested read-only mode.
         */
        protected Database createDatabase() {

            Environment environment;
            try {
                environment = createEnvironment(readOnly);
            } catch (EnvironmentLockedException e) {
                throw new IllegalStateException(
                        "The repository is already open by another process for writing", e);
            }

            if (!environment.getDatabaseNames().contains(databaseName)) {
                if (readOnly) {
                    // reopen writable just to create the missing database
                    environment.close();
                    try {
                        environment = createEnvironment(false);
                    } catch (EnvironmentLockedException e) {
                        throw new IllegalStateException(String.format(
                                "Environment open readonly but database %s does not exist.",
                                databaseName));
                    }
                }
                DatabaseConfig dbConfig = new DatabaseConfig();
                dbConfig.setAllowCreate(true);
                Database openDatabase = environment.openDatabase(null, databaseName, dbConfig);
                openDatabase.close();
                environment.flushLog(true);
                // reopen with the originally requested read-only mode
                environment.close();
                environment = createEnvironment(readOnly);
            }

            Database database;
            try {
                LOGGER.debug("Opening GraphDatabase at {}", environment.getHome());
                DatabaseConfig dbConfig = new DatabaseConfig();
                dbConfig.setCacheMode(CacheMode.MAKE_COLD);
                dbConfig.setKeyPrefixing(false);// can result in a slightly smaller db size
                dbConfig.setReadOnly(readOnly);
                boolean transactional = environment.getConfig().getTransactional();
                dbConfig.setTransactional(transactional);
                // deferred write is only legal on non-transactional databases
                dbConfig.setDeferredWrite(!transactional);
                database = environment.openDatabase(null, databaseName, dbConfig);
            } catch (RuntimeException e) {
                if (environment != null) {
                    environment.close();
                }
                throw e;
            }
            this.env = environment;
            return database;

        }

        /**
         * @return creates and returns the environment
         */
        private synchronized Environment createEnvironment(boolean readOnly)
                throws com.sleepycat.je.EnvironmentLockedException {
            Environment env = envProvider.setRelativePath(this.envName).setReadOnly(readOnly)
                    .get();

            return env;
        }

        /** Records this storage backend ("bdbje" + format version) in the repository config. */
        @Override
        public void configure() throws RepositoryConnectionException {
            RepositoryConnectionException.StorageType.GRAPH.configure(configDb, "bdbje",
                    formatVersion);
        }

        /** Verifies the repository config matches this backend and format version. */
        @Override
        public void checkConfig() throws RepositoryConnectionException {
            RepositoryConnectionException.StorageType.GRAPH
                    .verify(configDb, "bdbje", formatVersion);
        }

        @Override
        public boolean isOpen() {
            return graphDb != null;
        }

        /**
         * Closes the database and environment. For writable instances the environment is
         * synced and its log cleaned before closing; the environment is closed (and the
         * field nulled) even if closing the database fails.
         */
        @Override
        public void close() {
            if (env == null) {
                LOGGER.trace("Database already closed.");
                return;
            }
            // System.err.println("<--- " + getClass().getName() + ".close() " + env.getHome());
            final File envHome = env.getHome();
            try {
                LOGGER.debug("Closing graph database at {}", envHome);
                if (graphDb != null) {
                    graphDb.close();
                    graphDb = null;
                }
                LOGGER.trace("GraphDatabase closed. Closing environment...");
                if (!readOnly) {
                    env.sync();
                    env.cleanLog();
                }
            } finally {
                env.close();
                env = null;
            }
            LOGGER.debug("Database {} closed.", envHome);
        }

        /** Safety net: warns and force-closes if the caller forgot to call {@link #close()}. */
        @Override
        protected void finalize() {
            if (isOpen()) {
                LOGGER.warn("JEGraphDatabase {} was not closed. Forcing close at finalize()",
                        env.getHome());
                close();
            }
        }

        /**
         * Reads the {@link NodeData} stored for {@code id}, or returns {@code null} when the
         * node is absent and {@code failIfNotFound} is false. Uses READ_UNCOMMITTED so reads
         * never block on in-flight writers.
         */
        protected NodeData getNodeInternal(final ObjectId id, final boolean failIfNotFound) {
            Preconditions.checkNotNull(id, "id");
            DatabaseEntry key = new DatabaseEntry(id.getRawValue());
            DatabaseEntry data = new DatabaseEntry();

            final LockMode lockMode = LockMode.READ_UNCOMMITTED;
            Transaction transaction = null;
            OperationStatus operationStatus = graphDb.get(transaction, key, data, lockMode);
            if (NOTFOUND.equals(operationStatus)) {
                if (failIfNotFound) {
                    throw new IllegalArgumentException("Graph Object does not exist: "
                            + id.toString() + " at " + env.getHome().getAbsolutePath());
                }
                return null;
            }
            NodeData node = BINDING.entryToObject(data);
            return node;
        }

        /** Serializes and writes {@code node} under {@code id}; true when the put succeeded. */
        private boolean putNodeInternal(final Transaction transaction, final ObjectId id,
                final NodeData node) throws IOException {

            DatabaseEntry key = new DatabaseEntry(id.getRawValue());
            DatabaseEntry data = new DatabaseEntry();
            BINDING.objectToEntry(node, data);

            final OperationStatus status = graphDb.put(transaction, key, data);

            return SUCCESS.equals(status);
        }

        /** Aborts {@code transaction} if non-null, logging (not rethrowing) any failure. */
        private void abort(@Nullable Transaction transaction) {
            if (transaction != null) {
                try {
                    transaction.abort();
                } catch (Exception e) {
                    LOGGER.error("Error aborting transaction", e);
                }
            }
        }

        /** Commits {@code transaction} if non-null, logging (not rethrowing) any failure. */
        private void commit(@Nullable Transaction transaction) {
            if (transaction != null) {
                try {
                    transaction.commit();
                } catch (Exception e) {
                    LOGGER.error("Error committing transaction", e);
                }
            }
        }

        /**
         * Begins a transaction when the database is transactional, otherwise returns null
         * (callers pass the null through to the BDB API, which then runs non-transactionally).
         * Durability is taken from the "bdbje.object_durability" config entry.
         */
        @Nullable
        private Transaction newTransaction() {
            final boolean transactional = graphDb.getConfig().getTransactional();
            if (transactional) {
                TransactionConfig txConfig = new TransactionConfig();
                txConfig.setReadUncommitted(true);
                Optional<String> durability = configDb.get("bdbje.object_durability");
                if ("safe".equals(durability.orNull())) {
                    txConfig.setDurability(Durability.COMMIT_SYNC);
                } else {
                    txConfig.setDurability(Durability.COMMIT_WRITE_NO_SYNC);
                }
                Transaction transaction = env.beginTransaction(null, txConfig);
                return transaction;
            }
            return null;
        }

        /** Key-only lookup (no data retrieval) to test for a commit's presence. */
        @Override
        public boolean exists(ObjectId commitId) {
            Preconditions.checkNotNull(commitId, "id");

            DatabaseEntry key = new DatabaseEntry(commitId.getRawValue());
            DatabaseEntry data = new DatabaseEntry();
            // tell db not to retrieve data
            data.setPartial(0, 0, true);
            final LockMode lockMode = LockMode.READ_UNCOMMITTED;
            Transaction transaction = null;
            OperationStatus status = graphDb.get(transaction, key, data, lockMode);
            return SUCCESS == status;
        }

        /** @return the commit's outgoing edges (its parents), or an empty list if unknown */
        @Override
        public ImmutableList<ObjectId> getParents(ObjectId commitId)
                throws IllegalArgumentException {
            Builder<ObjectId> listBuilder = new ImmutableList.Builder<ObjectId>();

            NodeData node = getNodeInternal(commitId, false);
            if (node != null) {
                return listBuilder.addAll(node.outgoing).build();
            }
            return listBuilder.build();
        }

        /** @return the commit's incoming edges (its children), or an empty list if unknown */
        @Override
        public ImmutableList<ObjectId> getChildren(ObjectId commitId)
                throws IllegalArgumentException {
            Builder<ObjectId> listBuilder = new ImmutableList.Builder<ObjectId>();
            NodeData node = getNodeInternal(commitId, false);
            if (node != null) {
                return listBuilder.addAll(node.incoming).build();
            }
            return listBuilder.build();
        }

        /**
         * Inserts or updates the node for {@code commitId} and links it to each parent
         * (creating parent nodes as needed), all within a single transaction.
         *
         * @return true when anything in the graph actually changed
         */
        @Override
        public boolean put(ObjectId commitId, ImmutableList<ObjectId> parentIds) {
            NodeData node = getNodeInternal(commitId, false);
            boolean updated = false;
            final Transaction transaction = newTransaction();
            try {
                if (node == null) {
                    node = new NodeData(commitId, parentIds);
                    updated = true;
                }
                for (ObjectId parent : parentIds) {
                    if (!node.outgoing.contains(parent)) {
                        node.outgoing.add(parent);
                        updated = true;
                    }
                    NodeData parentNode = getNodeInternal(parent, false);
                    if (parentNode == null) {
                        parentNode = new NodeData(parent);
                        updated = true;
                    }
                    if (!parentNode.incoming.contains(commitId)) {
                        parentNode.incoming.add(commitId);
                        updated = true;
                    }
                    putNodeInternal(transaction, parent, parentNode);
                }
                putNodeInternal(transaction, commitId, node);
                commit(transaction);
            } catch (Exception e) {
                abort(transaction);
                throw Throwables.propagate(e);
            }
            return updated;
        }

        /** Records that {@code mapped} is an alias of {@code original} (sparse clone support). */
        @Override
        public void map(ObjectId mapped, ObjectId original) {
            NodeData node = getNodeInternal(mapped, false);
            if (node == null) {
                // didn't exist
                node = new NodeData(mapped);
            }
            node.mappedTo = original;
            final Transaction transaction = newTransaction();
            try {
                putNodeInternal(transaction, mapped, node);
                commit(transaction);
            } catch (Exception e) {
                abort(transaction);
                throw Throwables.propagate(e);
            }
        }

        /** @return the id {@code commitId} is mapped to; fails if the node does not exist */
        @Override
        public ObjectId getMapping(ObjectId commitId) {
            NodeData node = getNodeInternal(commitId, true);
            return node.mappedTo;
        }

        /**
         * Breadth-first walk along outgoing (parent) edges; returns the number of levels
         * traversed until the first ancestor with no parents is reached.
         */
        @Override
        public int getDepth(ObjectId commitId) {
            int depth = 0;

            Queue<ObjectId> q = Lists.newLinkedList();
            NodeData node = getNodeInternal(commitId, true);
            Iterables.addAll(q, node.outgoing);

            List<ObjectId> next = Lists.newArrayList();
            while (!q.isEmpty()) {
                depth++;
                while (!q.isEmpty()) {
                    ObjectId n = q.poll();
                    NodeData parentNode = getNodeInternal(n, true);
                    List<ObjectId> parents = Lists.newArrayList(parentNode.outgoing);
                    if (parents.size() == 0) {
                        // found a root: the walk stops at the first parentless ancestor
                        return depth;
                    }
                    Iterables.addAll(next, parents);
                }

                q.addAll(next);
                next.clear();
            }

            return depth;
        }

        /** Sets a single property on the node, persisting it transactionally. */
        @Override
        public void setProperty(ObjectId commitId, String propertyName, String propertyValue) {
            NodeData node = getNodeInternal(commitId, true);
            node.properties.put(propertyName, propertyValue);
            final Transaction transaction = newTransaction();
            try {
                putNodeInternal(transaction, commitId, node);
                commit(transaction);
            } catch (Exception e) {
                abort(transaction);
                throw Throwables.propagate(e);
            }
        }

        /**
         * {@link GraphNode} view over a {@link NodeData} record. Edges are resolved lazily
         * on first access and cached for the lifetime of this node object.
         */
        private class JEGraphNode extends GraphNode {
            NodeData node;

            // lazily-built cache of all edges (incoming + outgoing); null until first use
            List<GraphEdge> edges;

            public JEGraphNode(NodeData node) {
                this.node = node;
                this.edges = null;
            }

            @Override
            public ObjectId getIdentifier() {
                return node.id;
            }

            /** Builds the edge cache on demand, then filters it by the requested direction. */
            @Override
            public Iterator<GraphEdge> getEdges(final Direction direction) {
                if (edges == null) {
                    edges = new LinkedList<GraphEdge>();
                    Iterator<ObjectId> nodeEdges = node.incoming.iterator();
                    while (nodeEdges.hasNext()) {
                        ObjectId otherNode = nodeEdges.next();
                        edges.add(new GraphEdge(new JEGraphNode(getNodeInternal(otherNode, true)),
                                this));
                    }
                    nodeEdges = node.outgoing.iterator();
                    while (nodeEdges.hasNext()) {
                        ObjectId otherNode = nodeEdges.next();
                        edges.add(new GraphEdge(this, new JEGraphNode(getNodeInternal(otherNode,
                                true))));
                    }
                }

                final GraphNode myNode = this;

                return Iterators.filter(edges.iterator(), new Predicate<GraphEdge>() {
                    @Override
                    public boolean apply(GraphEdge input) {
                        switch (direction) {
                        case OUT:
                            return input.getFromNode() == myNode;
                        case IN:
                            return input.getToNode() == myNode;
                        default:
                            break;
                        }
                        return true;
                    }
                });
            }

            @Override
            public boolean isSparse() {
                return node.isSparse();
            }

        }

        /** @return a {@link GraphNode} view of the stored node; fails if it does not exist */
        @Override
        public GraphNode getNode(ObjectId id) {
            return new JEGraphNode(getNodeInternal(id, true));
        }

        @Override
        public void truncate() {
            // TODO Auto-generated method stub

        }
    }
}
package org.apache.cassandra.db.lifecycle;

import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.CRC32;

import org.apache.cassandra.io.sstable.SSTable;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.utils.FBUtilities;

/**
 * A decoded line in a transaction log file replica.
 *
 * @see LogReplica and LogFile.
 */
final class LogRecord
{
    public enum Type
    {
        UNKNOWN, // a record that cannot be parsed
        ADD,    // new files to be retained on commit
        REMOVE, // old files to be retained on abort
        COMMIT, // commit flag
        ABORT;  // abort flag

        // Maps the lowercase prefix of a log line ("add", "remove", ...) to its type.
        public static Type fromPrefix(String prefix)
        {
            return valueOf(prefix.toUpperCase());
        }

        // Only ADD and REMOVE records reference sstable files on disk.
        public boolean hasFile()
        {
            return this == Type.ADD || this == Type.REMOVE;
        }

        public boolean matches(LogRecord record)
        {
            return this == record.type;
        }

        // COMMIT and ABORT terminate a transaction log.
        public boolean isFinal()
        {
            return this == Type.COMMIT || this == Type.ABORT;
        }
    }

    /**
     * The status of a record after it has been verified, any parsing errors
     * are also store here.
     */
    public final static class Status
    {
        // if there are any errors, they end up here
        Optional<String> error = Optional.empty();

        // if the record was only partially matched across files this is true
        boolean partial = false;

        // if the status of this record on disk is required (e.g. existing files), it is
        // stored here for caching
        LogRecord onDiskRecord;

        // Only the first error is retained; subsequent errors are ignored.
        void setError(String error)
        {
            if (!this.error.isPresent())
                this.error = Optional.of(error);
        }

        boolean hasError()
        {
            return error.isPresent();
        }
    }

    // the type of record, see Type
    public final Type type;
    // for sstable records, the absolute path of the table desc
    public final Optional<String> absolutePath;
    // for sstable records, the last update time of all files (may not be available for NEW records)
    public final long updateTime;
    // for sstable records, the total number of files (may not be accurate for NEW records)
    public final int numFiles;
    // the raw string as written or read from a file
    public final String raw;
    // the checksum of this record, written at the end of the record string
    public final long checksum;
    // the status of this record, @see Status class
    public final Status status;

    // (add|remove|commit|abort):[*,*,*][checksum]
    static Pattern REGEX = Pattern.compile("^(add|remove|commit|abort):\\[([^,]*),?([^,]*),?([^,]*)\\]\\[(\\d*)\\]$", Pattern.CASE_INSENSITIVE);

    /**
     * Parses one log line into a record. Parse failures never throw: they produce an
     * UNKNOWN record carrying the error in its status.
     */
    public static LogRecord make(String line)
    {
        try
        {
            Matcher matcher = REGEX.matcher(line);
            if (!matcher.matches())
                return new LogRecord(Type.UNKNOWN, null, 0, 0, 0, line)
                       .setError(String.format("Failed to parse [%s]", line));

            Type type = Type.fromPrefix(matcher.group(1));
            return new LogRecord(type, matcher.group(2), Long.valueOf(matcher.group(3)), Integer.valueOf(matcher.group(4)), Long.valueOf(matcher.group(5)), line);
        }
        catch (Throwable t)
        {
            return new LogRecord(Type.UNKNOWN, null, 0, 0, 0, line).setError(t);
        }
    }

    public static LogRecord makeCommit(long updateTime)
    {
        return new LogRecord(Type.COMMIT, updateTime);
    }

    public static LogRecord makeAbort(long updateTime)
    {
        return new LogRecord(Type.ABORT, updateTime);
    }

    /** Builds a file record (ADD/REMOVE) for the given sstable. */
    public static LogRecord make(Type type, SSTable table)
    {
        String absoluteTablePath = FileUtils.getCanonicalPath(table.descriptor.baseFilename());
        return make(type, getExistingFiles(absoluteTablePath), table.getAllFilePaths().size(), absoluteTablePath);
    }

    /** Re-creates this record from the files currently present on disk. */
    public LogRecord withExistingFiles()
    {
        return make(type, getExistingFiles(), 0, absolutePath.get());
    }

    /**
     * Builds a file record from an explicit file list; update time is the newest
     * lastModified among the files, file count at least {@code minFiles}.
     */
    public static LogRecord make(Type type, List<File> files, int minFiles, String absolutePath)
    {
        long lastModified = files.stream().map(File::lastModified).reduce(0L, Long::max);
        return new LogRecord(type, absolutePath, lastModified, Math.max(minFiles, files.size()));
    }

    private LogRecord(Type type, long updateTime)
    {
        this(type, null, updateTime, 0, 0, null);
    }

    private LogRecord(Type type,
                      String absolutePath,
                      long updateTime,
                      int numFiles)
    {
        this(type, absolutePath, updateTime, numFiles, 0, null);
    }

    /**
     * Full constructor. When {@code raw} is null the record is being created (not parsed),
     * so the checksum and textual form are computed here.
     */
    private LogRecord(Type type,
                      String absolutePath,
                      long updateTime,
                      int numFiles,
                      long checksum,
                      String raw)
    {
        assert !type.hasFile() || absolutePath != null : "Expected file path for file records";

        this.type = type;
        this.absolutePath = type.hasFile() ? Optional.of(absolutePath) : Optional.<String>empty();
        // only REMOVE records carry a meaningful update time
        this.updateTime = type == Type.REMOVE ? updateTime : 0;
        this.numFiles = type.hasFile() ? numFiles : 0;
        this.status = new Status();
        if (raw == null)
        {
            assert checksum == 0;
            this.checksum = computeChecksum();
            this.raw = format();
        }
        else
        {
            this.checksum = checksum;
            this.raw = raw;
        }
    }

    LogRecord setError(Throwable t)
    {
        return setError(t.getMessage());
    }

    LogRecord setError(String error)
    {
        status.setError(error);
        return this;
    }

    String error()
    {
        return status.error.orElse("");
    }

    void setPartial()
    {
        status.partial = true;
    }

    boolean partial()
    {
        return status.partial;
    }

    boolean isValid()
    {
        return !status.hasError() && type != Type.UNKNOWN;
    }

    boolean isInvalid()
    {
        return !isValid();
    }

    boolean isInvalidOrPartial()
    {
        return isInvalid() || partial();
    }

    // Renders the record in its on-disk form: type:[path,updateTime,numFiles][checksum]
    private String format()
    {
        return String.format("%s:[%s,%d,%d][%d]", type.toString(), absolutePath(), updateTime, numFiles, checksum);
    }

    public List<File> getExistingFiles()
    {
        assert absolutePath.isPresent() : "Expected a path in order to get existing files";
        return getExistingFiles(absolutePath.get());
    }

    /** Lists sibling files sharing the record's file-name prefix (sstable components). */
    public static List<File> getExistingFiles(String absoluteFilePath)
    {
        Path path = Paths.get(absoluteFilePath);
        File[] files = path.getParent().toFile().listFiles((dir, name) -> name.startsWith(path.getFileName().toString()));
        // files may be null if the directory does not exist yet, e.g. when tracking new files
        return files == null ? Collections.emptyList() : Arrays.asList(files);
    }

    public boolean isFinal()
    {
        return type.isFinal();
    }

    String fileName()
    {
        return absolutePath.isPresent() ? Paths.get(absolutePath.get()).getFileName().toString() : "";
    }

    String absolutePath()
    {
        return absolutePath.isPresent() ? absolutePath.get() : "";
    }

    @Override
    public int hashCode()
    {
        // see comment in equals
        return Objects.hash(type, absolutePath, numFiles, updateTime);
    }

    @Override
    public boolean equals(Object obj)
    {
        if (!(obj instanceof LogRecord))
            return false;

        final LogRecord other = (LogRecord)obj;

        // we exclude on purpose checksum, error and full file path
        // since records must match across log file replicas on different disks
        return type == other.type &&
               absolutePath.equals(other.absolutePath) &&
               numFiles == other.numFiles &&
               updateTime == other.updateTime;
    }

    @Override
    public String toString()
    {
        return raw;
    }

    // CRC32 over path, type, update time and file count; masked to stay non-negative.
    long computeChecksum()
    {
        CRC32 crc32 = new CRC32();
        crc32.update((absolutePath()).getBytes(FileUtils.CHARSET));
        crc32.update(type.toString().getBytes(FileUtils.CHARSET));
        FBUtilities.updateChecksumInt(crc32, (int) updateTime);
        FBUtilities.updateChecksumInt(crc32, (int) (updateTime >>> 32));
        FBUtilities.updateChecksumInt(crc32, numFiles);
        return crc32.getValue() & (Long.MAX_VALUE);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.pulsar.functions.worker.rest.api;

import lombok.extern.slf4j.Slf4j;
import org.apache.pulsar.common.functions.WorkerInfo;
import org.apache.pulsar.common.io.ConnectorDefinition;
import org.apache.pulsar.common.policies.data.FunctionStats;
import org.apache.pulsar.common.policies.data.WorkerFunctionInstanceStats;
import org.apache.pulsar.functions.proto.Function;
import org.apache.pulsar.functions.utils.FunctionCommon;
import org.apache.pulsar.functions.worker.FunctionRuntimeInfo;
import org.apache.pulsar.functions.worker.FunctionRuntimeManager;
import org.apache.pulsar.functions.worker.MembershipManager;
import org.apache.pulsar.functions.worker.WorkerService;
import org.apache.pulsar.functions.worker.WorkerUtils;
import org.apache.pulsar.functions.worker.rest.RestException;

import javax.ws.rs.core.Response.Status;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.pulsar.functions.worker.rest.RestUtils.throwUnavailableException;

/**
 * REST-facing view of a function worker. Exposes cluster membership, leader,
 * assignment, metrics and connector information; every entry point first checks
 * that the worker service is available and, when authorization is enabled, that
 * the caller is a super user.
 */
@Slf4j
public class WorkerImpl {

    private final Supplier<WorkerService> workerServiceSupplier;

    public WorkerImpl(Supplier<WorkerService> workerServiceSupplier) {
        this.workerServiceSupplier = workerServiceSupplier;
    }

    /** Resolves the current worker service, logging and rethrowing any failure. */
    private WorkerService worker() {
        try {
            WorkerService workerService = workerServiceSupplier.get();
            return checkNotNull(workerService);
        } catch (Throwable t) {
            log.info("Failed to get worker service", t);
            throw t;
        }
    }

    /** @return true when the worker service exists and has finished initializing */
    private boolean isWorkerServiceAvailable() {
        WorkerService workerService = workerServiceSupplier.get();
        return workerService != null && workerService.isInitialized();
    }

    /** @return true when the role is a configured super user (null roles never are) */
    private boolean isSuperUser(final String clientRole) {
        return clientRole != null
                && worker().getWorkerConfig().getSuperUserRoles().contains(clientRole);
    }

    /**
     * Lists the workers currently in the cluster.
     *
     * @param clientRole role of the caller, checked when authorization is enabled
     */
    public List<WorkerInfo> getCluster(String clientRole) {
        if (!isWorkerServiceAvailable()) {
            throwUnavailableException();
        }
        if (worker().getWorkerConfig().isAuthorizationEnabled() && !isSuperUser(clientRole)) {
            throw new RestException(Status.UNAUTHORIZED, "client is not authorize to perform operation");
        }
        return worker().getMembershipManager().getCurrentMembership();
    }

    /**
     * Returns the current cluster leader.
     *
     * @throws RestException with INTERNAL_SERVER_ERROR when no leader is known
     */
    public WorkerInfo getClusterLeader(String clientRole) {
        if (!isWorkerServiceAvailable()) {
            throwUnavailableException();
        }
        if (worker().getWorkerConfig().isAuthorizationEnabled() && !isSuperUser(clientRole)) {
            log.error("Client [{}] is not authorized to get cluster leader", clientRole);
            throw new RestException(Status.UNAUTHORIZED, "client is not authorize to perform operation");
        }

        MembershipManager membershipManager = worker().getMembershipManager();
        WorkerInfo currentLeader = membershipManager.getLeader();
        if (currentLeader == null) {
            throw new RestException(Status.INTERNAL_SERVER_ERROR, "Leader cannot be determined");
        }
        return currentLeader;
    }

    /**
     * Maps each worker id to the fully-qualified names of the function instances
     * currently assigned to it.
     */
    public Map<String, Collection<String>> getAssignments(String clientRole) {
        if (!isWorkerServiceAvailable()) {
            throwUnavailableException();
        }
        if (worker().getWorkerConfig().isAuthorizationEnabled() && !isSuperUser(clientRole)) {
            log.error("Client [{}] is not authorized to get cluster assignments", clientRole);
            throw new RestException(Status.UNAUTHORIZED, "client is not authorize to perform operation");
        }

        FunctionRuntimeManager functionRuntimeManager = worker().getFunctionRuntimeManager();
        Map<String, Map<String, Function.Assignment>> assignments =
                functionRuntimeManager.getCurrentAssignments();
        Map<String, Collection<String>> assignmentsByWorker = new HashMap<>();
        assignments.forEach(
                (workerId, workerAssignments) -> assignmentsByWorker.put(workerId, workerAssignments.keySet()));
        return assignmentsByWorker;
    }

    /** Returns worker-level metrics from the metrics generator. */
    public List<org.apache.pulsar.common.stats.Metrics> getWorkerMetrics(final String clientRole) {
        if (!isWorkerServiceAvailable()) {
            throwUnavailableException();
        }
        if (worker().getWorkerConfig().isAuthorizationEnabled() && !isSuperUser(clientRole)) {
            log.error("Client [{}] is not authorized to get worker stats", clientRole);
            throw new RestException(Status.UNAUTHORIZED, "client is not authorize to perform operation");
        }
        return worker().getMetricsGenerator().generate();
    }

    /**
     * Collects per-instance stats for every function running on this worker.
     * Externally managed runtimes host all parallel instances behind one runtime
     * entry, so their stats are expanded per instance id.
     */
    public List<WorkerFunctionInstanceStats> getFunctionsMetrics(String clientRole) throws IOException {
        if (!isWorkerServiceAvailable()) {
            throwUnavailableException();
        }
        if (worker().getWorkerConfig().isAuthorizationEnabled() && !isSuperUser(clientRole)) {
            log.error("Client [{}] is not authorized to get function stats", clientRole);
            throw new RestException(Status.UNAUTHORIZED, "client is not authorize to perform operation");
        }

        WorkerService workerService = worker();
        Map<String, FunctionRuntimeInfo> functionRuntimes =
                workerService.getFunctionRuntimeManager().getFunctionRuntimeInfos();

        List<WorkerFunctionInstanceStats> statsList = new ArrayList<>(functionRuntimes.size());
        for (Map.Entry<String, FunctionRuntimeInfo> runtimeEntry : functionRuntimes.entrySet()) {
            String instanceName = runtimeEntry.getKey();
            FunctionRuntimeInfo runtimeInfo = runtimeEntry.getValue();
            if (workerService.getFunctionRuntimeManager().getRuntimeFactory().externallyManaged()) {
                Function.FunctionDetails details =
                        runtimeInfo.getFunctionInstance().getFunctionMetaData().getFunctionDetails();
                int parallelism = details.getParallelism();
                for (int instanceId = 0; instanceId < parallelism; ++instanceId) {
                    FunctionStats.FunctionInstanceStats instanceStats =
                            WorkerUtils.getFunctionInstanceStats(instanceName, runtimeInfo, instanceId);
                    WorkerFunctionInstanceStats stats = new WorkerFunctionInstanceStats();
                    stats.setName(FunctionCommon.getFullyQualifiedInstanceId(
                            details.getTenant(), details.getNamespace(), details.getName(), instanceId));
                    stats.setMetrics(instanceStats.getMetrics());
                    statsList.add(stats);
                }
            } else {
                FunctionStats.FunctionInstanceStats instanceStats =
                        WorkerUtils.getFunctionInstanceStats(instanceName, runtimeInfo,
                                runtimeInfo.getFunctionInstance().getInstanceId());
                WorkerFunctionInstanceStats stats = new WorkerFunctionInstanceStats();
                stats.setName(instanceName);
                stats.setMetrics(instanceStats.getMetrics());
                statsList.add(stats);
            }
        }
        return statsList;
    }

    /** Lists the connectors known to this worker's connectors manager. */
    public List<ConnectorDefinition> getListOfConnectors(String clientRole) {
        if (!isWorkerServiceAvailable()) {
            throwUnavailableException();
        }
        if (worker().getWorkerConfig().isAuthorizationEnabled() && !isSuperUser(clientRole)) {
            throw new RestException(Status.UNAUTHORIZED, "client is not authorize to perform operation");
        }
        return worker().getConnectorsManager().getConnectors();
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.index.mapper;

import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.HalfFloatPoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
import org.joda.time.DateTimeZone;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/** A {@link FieldMapper} for numeric types: byte, short, int, long, float and double. */
public class NumberFieldMapper extends FieldMapper {

    // this is private since it has a different default
    static final Setting<Boolean> COERCE_SETTING =
            Setting.boolSetting("index.mapping.coerce", true, Property.IndexScope);

    /** Implicit defaults used when neither the mapping nor the index settings are explicit. */
    public static class Defaults {
        public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit<>(false, false);
        public static final Explicit<Boolean> COERCE = new Explicit<>(true, false);
    }

    /**
     * Builder for {@link NumberFieldMapper}. {@code ignore_malformed} and {@code coerce} are
     * tri-state ({@code null} = not set), resolved against index settings at build time.
     */
    public static class Builder extends FieldMapper.Builder<Builder, NumberFieldMapper> {

        private Boolean ignoreMalformed;
        private Boolean coerce;

        public Builder(String name, NumberType type) {
            super(name, new NumberFieldType(type), new NumberFieldType(type));
            builder = this;
        }

        public Builder ignoreMalformed(boolean ignoreMalformed) {
            this.ignoreMalformed = ignoreMalformed;
            return builder;
        }

        // Resolution order: explicit mapping value > index setting > hard default.
        protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
            if (ignoreMalformed != null) {
                return new Explicit<>(ignoreMalformed, true);
            }
            if (context.indexSettings() != null) {
                return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false);
            }
            return Defaults.IGNORE_MALFORMED;
        }

        public Builder coerce(boolean coerce) {
            this.coerce = coerce;
            return builder;
        }

        // Same resolution order as ignoreMalformed(BuilderContext), against COERCE_SETTING.
        protected Explicit<Boolean> coerce(BuilderContext context) {
            if (coerce != null) {
                return new Explicit<>(coerce, true);
            }
            if (context.indexSettings() != null) {
                return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false);
            }
            return Defaults.COERCE;
        }

        @Override
        protected void setupFieldType(BuilderContext context) {
            super.setupFieldType(context);
        }

        @Override
        public NumberFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            return new NumberFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context),
                    coerce(context), includeInAll, context.indexSettings(),
                    multiFieldsBuilder.build(this, context), copyTo);
        }
    }

    /** Parses a mapping definition node into a {@link Builder} for a fixed numeric type. */
    public static class TypeParser implements Mapper.TypeParser {

        final NumberType type;

        public TypeParser(NumberType type) {
            this.type = type;
        }

        @Override
        public Mapper.Builder<?,?> parse(String name, Map<String, Object> node,
                                         ParserContext parserContext) throws MapperParsingException {
            Builder builder = new Builder(name, type);
            TypeParsers.parseField(builder, name, node, parserContext);
            // Consume the properties this mapper understands; unconsumed entries are
            // rejected by the caller, hence iterator.remove() on every handled key.
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String propName = entry.getKey();
                Object propNode = entry.getValue();
                if (propName.equals("null_value")) {
                    if (propNode == null) {
                        throw new MapperParsingException("Property [null_value] cannot be null.");
                    }
                    builder.nullValue(type.parse(propNode));
                    iterator.remove();
                } else if (propName.equals("ignore_malformed")) {
                    builder.ignoreMalformed(TypeParsers.nodeBooleanValue("ignore_malformed", propNode, parserContext));
                    iterator.remove();
                } else if (propName.equals("coerce")) {
                    builder.coerce(TypeParsers.nodeBooleanValue("coerce", propNode, parserContext));
                    iterator.remove();
                }
            }
            return builder;
        }
    }

    /**
     * Per-type strategy: parsing, Lucene query construction, field creation and field stats.
     * BYTE and SHORT delegate querying/indexing to INTEGER and only narrow on the way out.
     */
    public enum NumberType {
        HALF_FLOAT("half_float", NumericType.HALF_FLOAT) {
            @Override
            Float parse(Object value) {
                return (Float) FLOAT.parse(value);
            }

            @Override
            Float parse(XContentParser parser, boolean coerce) throws IOException {
                return parser.floatValue(coerce);
            }

            @Override
            Query termQuery(String field, Object value) {
                float v = parse(value);
                return HalfFloatPoint.newExactQuery(field, v);
            }

            @Override
            Query termsQuery(String field, List<Object> values) {
                float[] v = new float[values.size()];
                for (int i = 0; i < values.size(); ++i) {
                    v[i] = parse(values.get(i));
                }
                return HalfFloatPoint.newSetQuery(field, v);
            }

            @Override
            Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
                             boolean includeLower, boolean includeUpper) {
                float l = Float.NEGATIVE_INFINITY;
                float u = Float.POSITIVE_INFINITY;
                // Bounds are widened to the previous/next representable half-float so that
                // float-precision bounds behave correctly against half-float-encoded points.
                if (lowerTerm != null) {
                    l = parse(lowerTerm);
                    if (includeLower) {
                        l = Math.nextDown(l);
                    }
                    l = HalfFloatPoint.nextUp(l);
                }
                if (upperTerm != null) {
                    u = parse(upperTerm);
                    if (includeUpper) {
                        u = Math.nextUp(u);
                    }
                    u = HalfFloatPoint.nextDown(u);
                }
                return HalfFloatPoint.newRangeQuery(field, l, u);
            }

            @Override
            public List<Field> createFields(String name, Number value,
                                            boolean indexed, boolean docValued, boolean stored) {
                List<Field> fields = new ArrayList<>();
                if (indexed) {
                    fields.add(new HalfFloatPoint(name, value.floatValue()));
                }
                if (docValued) {
                    // Doc values store the sortable short encoding of the half-float.
                    fields.add(new SortedNumericDocValuesField(name,
                        HalfFloatPoint.halfFloatToSortableShort(value.floatValue())));
                }
                if (stored) {
                    fields.add(new StoredField(name, value.floatValue()));
                }
                return fields;
            }

            @Override
            FieldStats.Double stats(IndexReader reader, String fieldName,
                                    boolean isSearchable, boolean isAggregatable) throws IOException {
                FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
                if (fi == null) {
                    return null;
                }
                long size = PointValues.size(reader, fieldName);
                if (size == 0) {
                    return new FieldStats.Double(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
                }
                int docCount = PointValues.getDocCount(reader, fieldName);
                byte[] min = PointValues.getMinPackedValue(reader, fieldName);
                byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
                return new FieldStats.Double(reader.maxDoc(), docCount, -1L, size,
                    isSearchable, isAggregatable,
                    HalfFloatPoint.decodeDimension(min, 0), HalfFloatPoint.decodeDimension(max, 0));
            }
        },
        FLOAT("float", NumericType.FLOAT) {
            @Override
            Float parse(Object value) {
                if (value instanceof Number) {
                    return ((Number) value).floatValue();
                }
                if (value instanceof BytesRef) {
                    value = ((BytesRef) value).utf8ToString();
                }
                return Float.parseFloat(value.toString());
            }

            @Override
            Float parse(XContentParser parser, boolean coerce) throws IOException {
                return parser.floatValue(coerce);
            }

            @Override
            Query termQuery(String field, Object value) {
                float v = parse(value);
                return FloatPoint.newExactQuery(field, v);
            }

            @Override
            Query termsQuery(String field, List<Object> values) {
                float[] v = new float[values.size()];
                for (int i = 0; i < values.size(); ++i) {
                    v[i] = parse(values.get(i));
                }
                return FloatPoint.newSetQuery(field, v);
            }

            @Override
            Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
                             boolean includeLower, boolean includeUpper) {
                float l = Float.NEGATIVE_INFINITY;
                float u = Float.POSITIVE_INFINITY;
                // Exclusive bounds step to the adjacent representable float.
                if (lowerTerm != null) {
                    l = parse(lowerTerm);
                    if (includeLower == false) {
                        l = Math.nextUp(l);
                    }
                }
                if (upperTerm != null) {
                    u = parse(upperTerm);
                    if (includeUpper == false) {
                        u = Math.nextDown(u);
                    }
                }
                return FloatPoint.newRangeQuery(field, l, u);
            }

            @Override
            public List<Field> createFields(String name, Number value,
                                            boolean indexed, boolean docValued, boolean stored) {
                List<Field> fields = new ArrayList<>();
                if (indexed) {
                    fields.add(new FloatPoint(name, value.floatValue()));
                }
                if (docValued) {
                    fields.add(new SortedNumericDocValuesField(name,
                        NumericUtils.floatToSortableInt(value.floatValue())));
                }
                if (stored) {
                    fields.add(new StoredField(name, value.floatValue()));
                }
                return fields;
            }

            @Override
            FieldStats.Double stats(IndexReader reader, String fieldName,
                                    boolean isSearchable, boolean isAggregatable) throws IOException {
                FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
                if (fi == null) {
                    return null;
                }
                long size = PointValues.size(reader, fieldName);
                if (size == 0) {
                    return new FieldStats.Double(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
                }
                int docCount = PointValues.getDocCount(reader, fieldName);
                byte[] min = PointValues.getMinPackedValue(reader, fieldName);
                byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
                return new FieldStats.Double(reader.maxDoc(), docCount, -1L, size,
                    isSearchable, isAggregatable,
                    FloatPoint.decodeDimension(min, 0), FloatPoint.decodeDimension(max, 0));
            }
        },
        DOUBLE("double", NumericType.DOUBLE) {
            @Override
            Double parse(Object value) {
                if (value instanceof Number) {
                    return ((Number) value).doubleValue();
                }
                if (value instanceof BytesRef) {
                    value = ((BytesRef) value).utf8ToString();
                }
                return Double.parseDouble(value.toString());
            }

            @Override
            Double parse(XContentParser parser, boolean coerce) throws IOException {
                return parser.doubleValue(coerce);
            }

            @Override
            Query termQuery(String field, Object value) {
                double v = parse(value);
                return DoublePoint.newExactQuery(field, v);
            }

            @Override
            Query termsQuery(String field, List<Object> values) {
                double[] v = new double[values.size()];
                for (int i = 0; i < values.size(); ++i) {
                    v[i] = parse(values.get(i));
                }
                return DoublePoint.newSetQuery(field, v);
            }

            @Override
            Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
                             boolean includeLower, boolean includeUpper) {
                double l = Double.NEGATIVE_INFINITY;
                double u = Double.POSITIVE_INFINITY;
                if (lowerTerm != null) {
                    l = parse(lowerTerm);
                    if (includeLower == false) {
                        l = Math.nextUp(l);
                    }
                }
                if (upperTerm != null) {
                    u = parse(upperTerm);
                    if (includeUpper == false) {
                        u = Math.nextDown(u);
                    }
                }
                return DoublePoint.newRangeQuery(field, l, u);
            }

            @Override
            public List<Field> createFields(String name, Number value,
                                            boolean indexed, boolean docValued, boolean stored) {
                List<Field> fields = new ArrayList<>();
                if (indexed) {
                    fields.add(new DoublePoint(name, value.doubleValue()));
                }
                if (docValued) {
                    fields.add(new SortedNumericDocValuesField(name,
                        NumericUtils.doubleToSortableLong(value.doubleValue())));
                }
                if (stored) {
                    fields.add(new StoredField(name, value.doubleValue()));
                }
                return fields;
            }

            @Override
            FieldStats.Double stats(IndexReader reader, String fieldName,
                                    boolean isSearchable, boolean isAggregatable) throws IOException {
                FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
                if (fi == null) {
                    return null;
                }
                long size = PointValues.size(reader, fieldName);
                if (size == 0) {
                    return new FieldStats.Double(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
                }
                int docCount = PointValues.getDocCount(reader, fieldName);
                byte[] min = PointValues.getMinPackedValue(reader, fieldName);
                byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
                return new FieldStats.Double(reader.maxDoc(), docCount, -1L, size,
                    isSearchable, isAggregatable,
                    DoublePoint.decodeDimension(min, 0), DoublePoint.decodeDimension(max, 0));
            }
        },
        BYTE("byte", NumericType.BYTE) {
            @Override
            Byte parse(Object value) {
                if (value instanceof Number) {
                    double doubleValue = ((Number) value).doubleValue();
                    if (doubleValue < Byte.MIN_VALUE || doubleValue > Byte.MAX_VALUE) {
                        throw new IllegalArgumentException("Value [" + value + "] is out of range for a byte");
                    }
                    if (doubleValue % 1 != 0) {
                        throw new IllegalArgumentException("Value [" + value + "] has a decimal part");
                    }
                    return ((Number) value).byteValue();
                }
                if (value instanceof BytesRef) {
                    value = ((BytesRef) value).utf8ToString();
                }
                return Byte.parseByte(value.toString());
            }

            // NOTE: declared to return Short (not Byte); the value is range-checked
            // against byte bounds and then widened to short.
            @Override
            Short parse(XContentParser parser, boolean coerce) throws IOException {
                int value = parser.intValue(coerce);
                if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
                    throw new IllegalArgumentException("Value [" + value + "] is out of range for a byte");
                }
                return (short) value;
            }

            // Bytes are indexed as integer points, so all query/index/stats work is
            // delegated to INTEGER.
            @Override
            Query termQuery(String field, Object value) {
                return INTEGER.termQuery(field, value);
            }

            @Override
            Query termsQuery(String field, List<Object> values) {
                return INTEGER.termsQuery(field, values);
            }

            @Override
            Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
                             boolean includeLower, boolean includeUpper) {
                return INTEGER.rangeQuery(field, lowerTerm, upperTerm, includeLower, includeUpper);
            }

            @Override
            public List<Field> createFields(String name, Number value,
                                            boolean indexed, boolean docValued, boolean stored) {
                return INTEGER.createFields(name, value, indexed, docValued, stored);
            }

            @Override
            FieldStats.Long stats(IndexReader reader, String fieldName,
                                  boolean isSearchable, boolean isAggregatable) throws IOException {
                return (FieldStats.Long) INTEGER.stats(reader, fieldName, isSearchable, isAggregatable);
            }

            @Override
            Number valueForSearch(Number value) {
                return value.byteValue();
            }
        },
        SHORT("short", NumericType.SHORT) {
            @Override
            Short parse(Object value) {
                if (value instanceof Number) {
                    double doubleValue = ((Number) value).doubleValue();
                    if (doubleValue < Short.MIN_VALUE || doubleValue > Short.MAX_VALUE) {
                        throw new IllegalArgumentException("Value [" + value + "] is out of range for a short");
                    }
                    if (doubleValue % 1 != 0) {
                        throw new IllegalArgumentException("Value [" + value + "] has a decimal part");
                    }
                    return ((Number) value).shortValue();
                }
                if (value instanceof BytesRef) {
                    value = ((BytesRef) value).utf8ToString();
                }
                return Short.parseShort(value.toString());
            }

            @Override
            Short parse(XContentParser parser, boolean coerce) throws IOException {
                int value = parser.intValue(coerce);
                if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) {
                    throw new IllegalArgumentException("Value [" + value + "] is out of range for a short");
                }
                return (short) value;
            }

            // Shorts are indexed as integer points, so delegate to INTEGER.
            @Override
            Query termQuery(String field, Object value) {
                return INTEGER.termQuery(field, value);
            }

            @Override
            Query termsQuery(String field, List<Object> values) {
                return INTEGER.termsQuery(field, values);
            }

            @Override
            Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
                             boolean includeLower, boolean includeUpper) {
                return INTEGER.rangeQuery(field, lowerTerm, upperTerm, includeLower, includeUpper);
            }

            @Override
            public List<Field> createFields(String name, Number value,
                                            boolean indexed, boolean docValued, boolean stored) {
                return INTEGER.createFields(name, value, indexed, docValued, stored);
            }

            @Override
            FieldStats.Long stats(IndexReader reader, String fieldName,
                                  boolean isSearchable, boolean isAggregatable) throws IOException {
                return (FieldStats.Long) INTEGER.stats(reader, fieldName, isSearchable, isAggregatable);
            }

            @Override
            Number valueForSearch(Number value) {
                return value.shortValue();
            }
        },
        INTEGER("integer", NumericType.INT) {
            @Override
            Integer parse(Object value) {
                if (value instanceof Number) {
                    double doubleValue = ((Number) value).doubleValue();
                    if (doubleValue < Integer.MIN_VALUE || doubleValue > Integer.MAX_VALUE) {
                        throw new IllegalArgumentException("Value [" + value + "] is out of range for an integer");
                    }
                    if (doubleValue % 1 != 0) {
                        throw new IllegalArgumentException("Value [" + value + "] has a decimal part");
                    }
                    return ((Number) value).intValue();
                }
                if (value instanceof BytesRef) {
                    value = ((BytesRef) value).utf8ToString();
                }
                return Integer.parseInt(value.toString());
            }

            @Override
            Integer parse(XContentParser parser, boolean coerce) throws IOException {
                return parser.intValue(coerce);
            }

            @Override
            Query termQuery(String field, Object value) {
                int v = parse(value);
                return IntPoint.newExactQuery(field, v);
            }

            @Override
            Query termsQuery(String field, List<Object> values) {
                int[] v = new int[values.size()];
                for (int i = 0; i < values.size(); ++i) {
                    v[i] = parse(values.get(i));
                }
                return IntPoint.newSetQuery(field, v);
            }

            @Override
            Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
                             boolean includeLower, boolean includeUpper) {
                int l = Integer.MIN_VALUE;
                int u = Integer.MAX_VALUE;
                if (lowerTerm != null) {
                    l = parse(lowerTerm);
                    if (includeLower == false) {
                        // Exclusive bound above MAX_VALUE cannot match anything;
                        // short-circuit instead of overflowing ++l.
                        if (l == Integer.MAX_VALUE) {
                            return new MatchNoDocsQuery();
                        }
                        ++l;
                    }
                }
                if (upperTerm != null) {
                    u = parse(upperTerm);
                    if (includeUpper == false) {
                        if (u == Integer.MIN_VALUE) {
                            return new MatchNoDocsQuery();
                        }
                        --u;
                    }
                }
                return IntPoint.newRangeQuery(field, l, u);
            }

            @Override
            public List<Field> createFields(String name, Number value,
                                            boolean indexed, boolean docValued, boolean stored) {
                List<Field> fields = new ArrayList<>();
                if (indexed) {
                    fields.add(new IntPoint(name, value.intValue()));
                }
                if (docValued) {
                    fields.add(new SortedNumericDocValuesField(name, value.intValue()));
                }
                if (stored) {
                    fields.add(new StoredField(name, value.intValue()));
                }
                return fields;
            }

            @Override
            FieldStats.Long stats(IndexReader reader, String fieldName,
                                  boolean isSearchable, boolean isAggregatable) throws IOException {
                FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
                if (fi == null) {
                    return null;
                }
                long size = PointValues.size(reader, fieldName);
                if (size == 0) {
                    return new FieldStats.Long(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
                }
                int docCount = PointValues.getDocCount(reader, fieldName);
                byte[] min = PointValues.getMinPackedValue(reader, fieldName);
                byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
                return new FieldStats.Long(reader.maxDoc(), docCount, -1L, size,
                    isSearchable, isAggregatable,
                    IntPoint.decodeDimension(min, 0), IntPoint.decodeDimension(max, 0));
            }
        },
        LONG("long", NumericType.LONG) {
            @Override
            Long parse(Object value) {
                if (value instanceof Number) {
                    double doubleValue = ((Number) value).doubleValue();
                    if (doubleValue < Long.MIN_VALUE || doubleValue > Long.MAX_VALUE) {
                        throw new IllegalArgumentException("Value [" + value + "] is out of range for a long");
                    }
                    if (doubleValue % 1 != 0) {
                        throw new IllegalArgumentException("Value [" + value + "] has a decimal part");
                    }
                    return ((Number) value).longValue();
                }
                if (value instanceof BytesRef) {
                    value = ((BytesRef) value).utf8ToString();
                }
                return Long.parseLong(value.toString());
            }

            @Override
            Long parse(XContentParser parser, boolean coerce) throws IOException {
                return parser.longValue(coerce);
            }

            @Override
            Query termQuery(String field, Object value) {
                long v = parse(value);
                return LongPoint.newExactQuery(field, v);
            }

            @Override
            Query termsQuery(String field, List<Object> values) {
                long[] v = new long[values.size()];
                for (int i = 0; i < values.size(); ++i) {
                    v[i] = parse(values.get(i));
                }
                return LongPoint.newSetQuery(field, v);
            }

            @Override
            Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
                             boolean includeLower, boolean includeUpper) {
                long l = Long.MIN_VALUE;
                long u = Long.MAX_VALUE;
                if (lowerTerm != null) {
                    l = parse(lowerTerm);
                    if (includeLower == false) {
                        // Same overflow guard as INTEGER, at long bounds.
                        if (l == Long.MAX_VALUE) {
                            return new MatchNoDocsQuery();
                        }
                        ++l;
                    }
                }
                if (upperTerm != null) {
                    u = parse(upperTerm);
                    if (includeUpper == false) {
                        if (u == Long.MIN_VALUE) {
                            return new MatchNoDocsQuery();
                        }
                        --u;
                    }
                }
                return LongPoint.newRangeQuery(field, l, u);
            }

            @Override
            public List<Field> createFields(String name, Number value,
                                            boolean indexed, boolean docValued, boolean stored) {
                List<Field> fields = new ArrayList<>();
                if (indexed) {
                    fields.add(new LongPoint(name, value.longValue()));
                }
                if (docValued) {
                    fields.add(new SortedNumericDocValuesField(name, value.longValue()));
                }
                if (stored) {
                    fields.add(new StoredField(name, value.longValue()));
                }
                return fields;
            }

            @Override
            FieldStats.Long stats(IndexReader reader, String fieldName,
                                  boolean isSearchable, boolean isAggregatable) throws IOException {
                FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
                if (fi == null) {
                    return null;
                }
                long size = PointValues.size(reader, fieldName);
                if (size == 0) {
                    return new FieldStats.Long(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
                }
                int docCount = PointValues.getDocCount(reader, fieldName);
                byte[] min = PointValues.getMinPackedValue(reader, fieldName);
                byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
                return new FieldStats.Long(reader.maxDoc(), docCount, -1L, size,
                    isSearchable, isAggregatable,
                    LongPoint.decodeDimension(min, 0), LongPoint.decodeDimension(max, 0));
            }
        };

        private final String name;
        private final NumericType numericType;

        NumberType(String name, NumericType numericType) {
            this.name = name;
            this.numericType = numericType;
        }

        /** Get the associated type name. */
        public final String typeName() {
            return name;
        }
        /** Get the associated numeric type */
        final NumericType numericType() {
            return numericType;
        }
        abstract Query termQuery(String field, Object value);
        abstract Query termsQuery(String field, List<Object> values);
        abstract Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
                                  boolean includeLower, boolean includeUpper);
        abstract Number parse(XContentParser parser, boolean coerce) throws IOException;
        abstract Number parse(Object value);
        public abstract List<Field> createFields(String name, Number value, boolean indexed,
                                                 boolean docValued, boolean stored);
        abstract FieldStats<? extends Number> stats(IndexReader reader, String fieldName,
                                                    boolean isSearchable, boolean isAggregatable) throws IOException;
        // Hook for narrowing stored values back to the declared width (see BYTE/SHORT).
        Number valueForSearch(Number value) {
            return value;
        }
    }

    /** Mapped field type that routes all query/stats construction through its {@link NumberType}. */
    public static final class NumberFieldType extends MappedFieldType {

        NumberType type;

        public NumberFieldType(NumberType type) {
            super();
            this.type = Objects.requireNonNull(type);
            setTokenized(false);
            setHasDocValues(true);
            setOmitNorms(true);
        }

        NumberFieldType(NumberFieldType other) {
            super(other);
            this.type = other.type;
        }

        @Override
        public MappedFieldType clone() {
            return new NumberFieldType(this);
        }

        @Override
        public String typeName() {
            return type.name;
        }

        @Override
        public Query termQuery(Object value, QueryShardContext context) {
            failIfNotIndexed();
            Query query = type.termQuery(name(), value);
            if (boost() != 1f) {
                query = new BoostQuery(query, boost());
            }
            return query;
        }

        @Override
        public Query termsQuery(List values, QueryShardContext context) {
            failIfNotIndexed();
            Query query = type.termsQuery(name(), values);
            if (boost() != 1f) {
                query = new BoostQuery(query, boost());
            }
            return query;
        }

        @Override
        public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower,
                                boolean includeUpper, QueryShardContext context) {
            failIfNotIndexed();
            Query query = type.rangeQuery(name(), lowerTerm, upperTerm, includeLower, includeUpper);
            if (boost() != 1f) {
                query = new BoostQuery(query, boost());
            }
            return query;
        }

        @Override
        public FieldStats stats(IndexReader reader) throws IOException {
            return type.stats(reader, name(), isSearchable(), isAggregatable());
        }

        @Override
        public IndexFieldData.Builder fielddataBuilder() {
            failIfNoDocValues();
            return new DocValuesIndexFieldData.Builder().numericType(type.numericType());
        }

        @Override
        public Object valueForDisplay(Object value) {
            if (value == null) {
                return null;
            }
            return type.valueForSearch((Number) value);
        }

        @Override
        public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
            if (timeZone != null) {
                throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName()
                    + "] does not support custom time zones");
            }
            if (format == null) {
                return DocValueFormat.RAW;
            } else {
                return new DocValueFormat.Decimal(format);
            }
        }
    }

    private Boolean includeInAll;

    private Explicit<Boolean> ignoreMalformed;

    private Explicit<Boolean> coerce;

    private NumberFieldMapper(
            String simpleName,
            MappedFieldType fieldType,
            MappedFieldType defaultFieldType,
            Explicit<Boolean> ignoreMalformed,
            Explicit<Boolean> coerce,
            Boolean includeInAll,
            Settings indexSettings,
            MultiFields multiFields,
            CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
        this.ignoreMalformed = ignoreMalformed;
        this.coerce = coerce;
        this.includeInAll = includeInAll;
    }

    @Override
    public NumberFieldType fieldType() {
        return (NumberFieldType) super.fieldType();
    }

    @Override
    protected String contentType() {
        return fieldType.typeName();
    }

    @Override
    protected NumberFieldMapper clone() {
        return (NumberFieldMapper) super.clone();
    }

    @Override
    protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
        final boolean includeInAll = context.includeInAll(this.includeInAll, this);

        XContentParser parser = context.parser();
        Object value;
        Number numericValue = null;
        if (context.externalValueSet()) {
            value = context.externalValue();
        } else if (parser.currentToken() == Token.VALUE_NULL) {
            value = null;
        } else if (coerce.value()
                && parser.currentToken() == Token.VALUE_STRING
                && parser.textLength() == 0) {
            // With coercion on, an empty string is treated like null.
            value = null;
        } else {
            try {
                numericValue = fieldType().type.parse(parser, coerce.value());
            } catch (IllegalArgumentException e) {
                // Malformed input: silently skip the field if configured to, else propagate.
                if (ignoreMalformed.value()) {
                    return;
                } else {
                    throw e;
                }
            }
            if (includeInAll) {
                value = parser.textOrNull(); // preserve formatting
            } else {
                value = numericValue;
            }
        }

        if (value == null) {
            value = fieldType().nullValue();
        }
        if (value == null) {
            return;
        }
        if (numericValue == null) {
            numericValue = fieldType().type.parse(value);
        }
        if (includeInAll) {
            context.allEntries().addText(fieldType().name(), value.toString(), fieldType().boost());
        }
        boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
        boolean docValued = fieldType().hasDocValues();
        boolean stored = fieldType().stored();
        fields.addAll(fieldType().type.createFields(fieldType().name(), numericValue, indexed, docValued, stored));
    }

    @Override
    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
        super.doMerge(mergeWith, updateAllTypes);
        NumberFieldMapper other = (NumberFieldMapper) mergeWith;
        this.includeInAll = other.includeInAll;
        // Only explicit settings from the incoming mapping override the current ones.
        if (other.ignoreMalformed.explicit()) {
            this.ignoreMalformed = other.ignoreMalformed;
        }
        if (other.coerce.explicit()) {
            this.coerce = other.coerce;
        }
    }

    @Override
    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
        super.doXContentBody(builder, includeDefaults, params);

        if (includeDefaults || ignoreMalformed.explicit()) {
            builder.field("ignore_malformed", ignoreMalformed.value());
        }
        if (includeDefaults || coerce.explicit()) {
            builder.field("coerce", coerce.value());
        }
        if (includeDefaults || fieldType().nullValue() != null) {
            builder.field("null_value", fieldType().nullValue());
        }

        if (includeInAll != null) {
            builder.field("include_in_all", includeInAll);
        } else if (includeDefaults) {
            builder.field("include_in_all", false);
        }
    }
}
package ml.alternet.parser.step2;

import java.io.IOException;
import java.util.Deque;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;

import ml.alternet.parser.Grammar.Rule;
import ml.alternet.parser.handlers.TreeHandler;
import ml.alternet.parser.step2.Expression;
import ml.alternet.parser.step2.Calc.Additive;
import ml.alternet.parser.step2.Calc.Multiplicative;
import ml.alternet.parser.step2.Expression.Constant;
import ml.alternet.parser.step2.Expression.Exponent;
import ml.alternet.parser.step2.Expression.Product;
import ml.alternet.parser.step2.Expression.Sum;
import ml.alternet.parser.step2.Expression.Term;
import ml.alternet.parser.step2.Expression.Variable;
import ml.alternet.scan.Scanner;

/**
 * Build evaluable expressions based on the {@link Calc}.
 *
 * <p>This handler receives parse events from the grammar and maps each token
 * and each rule to an {@link Expression} node, by name, via the
 * {@link Tokens} and {@link Rules} enums below.</p>
 *
 * @see #build(String)
 *
 * @author Philippe Poulard
 */
public class ExpressionBuilder extends TreeHandler<Expression, Expression> {

    /**
     * Maps a matched token to a value: either a ready-made {@link Expression}
     * (target) or the raw token (source) when it cannot be transformed yet
     * (e.g. the "^" operator, which needs its surrounding rule context).
     */
    @Override
    public Value<Expression> tokenToValue(TokenValue<?> token, Deque<Value<Expression>> next) {
        String tokenName = token.getRule().getName(); // e.g. "FUNCTION"
        // find it with the same name and ask it to transform the token to an expression
        Expression expr = Tokens.valueOf(tokenName).asExpression(token, next);
        if (expr == null) {
            // no transformations was made
            return new Value<Expression>().setSource(token);
        } else {
            // we have it
            return new Value<Expression>().setTarget(expr);
        }
    }

    /**
     * Maps a completed rule to an {@link Expression}, consuming the values
     * already produced for its arguments; returns {@code null} values are
     * discarded by the framework.
     */
    @Override
    public Value<Expression> ruleToValue(Rule rule, Deque<Value<Expression>> args) {
        String ruleName = rule.getName(); // e.g. "Product"
        // find it with the same name and ask it to transform the rule to an expression
        Expression expr = Rules.valueOf(ruleName).asExpression(rule, args);
        if (expr == null) {
            return null; // discard
        } else {
            return new Value<Expression>().setTarget(expr);
        }
    }

    /**
     * Entry point for building an expression.
     *
     * @param input An expression that follows the grammar, e.g. "sin(x)*(1+var_12)"
     *
     * @return An evaluable expression, or empty when the input does not parse.
     *
     * @throws IOException When the underlying scanner fails.
     *
     * @see Calc
     */
    public static Optional<Expression> build(String input) throws IOException {
        ExpressionBuilder eb = new ExpressionBuilder();
        if (Calc.$.parse(Scanner.of(input), eb, true)) {
            return Optional.of(eb.get());
        } else {
            return Optional.empty();
        }
    }

    /**
     * Rules to Expression mapper. Enum constant names MUST match the grammar
     * rule names, since dispatch is done with {@code Rules.valueOf(name)}.
     *
     * @author Philippe Poulard
     */
    enum Rules {
        Sum {
            @SuppressWarnings("unchecked")
            @Override
            public Expression asExpression(Rule rule, Deque<Value<Expression>> args) {
                // Sum ::= SignedTerm (ADDITIVE Product)*
                if (args.size() == 1) {
                    // a single term is not a sum
                    return args.pollFirst().getTarget();
                } else {
                    Expression signedTerm = args.removeFirst().getTarget();
                    if (! (signedTerm instanceof Term<?>)
                        || ! (((Term<?>) signedTerm).operation instanceof Additive)) {
                        // force "x" to be "+x"
                        signedTerm = new Term<>(Additive.PLUS, signedTerm);
                    }
                    List<Term<Additive>> arguments = new LinkedList<>();
                    arguments.add((Term<Additive>) signedTerm);
                    args.stream()
                        // next arguments are all Term<Additive>
                        .map(v -> (Term<Additive>) v.getTarget())
                        .forEachOrdered(arguments::add);
                    return new Sum(arguments);
                }
            }
        },
        Product {
            @SuppressWarnings("unchecked")
            @Override
            public Expression asExpression(Rule rule, Deque<Value<Expression>> args) {
                // Product ::= Factor (MULTIPLICATIVE SignedFactor)*
                if (args.size() == 1) {
                    // a single term is not a product
                    return args.pollFirst().getTarget();
                } else {
                    // assume x to be *x, because the product will start by 1*x
                    Term<Multiplicative> factor = new Term<>(Multiplicative.MULT, args.removeFirst().getTarget());
                    List<Term<Multiplicative>> arguments = new LinkedList<>();
                    arguments.add(factor);
                    args.stream()
                        // next arguments are all Term<Multiplicative>
                        .map(v -> (Term<Multiplicative>) v.getTarget())
                        .forEachOrdered(arguments::add);
                    return new Product(arguments);
                }
            }
        },
        Factor {
            @Override
            public Expression asExpression(Rule rule, Deque<Value<Expression>> args) {
                // Factor ::= Argument ('^' SignedFactor)?
                Expression base = args.pollFirst().getTarget();
                Value<Expression> raised = args.peekFirst();
                // The "^" token was left untransformed (see Tokens.RAISED), so
                // its presence is detected here as a source value.
                if (raised != null && raised.isSource()
                        && raised.getSource().getRule() == Calc.RAISED) {
                    args.pollFirst(); // ^
                    Expression exponent = args.pollFirst().getTarget();
                    return new Exponent(base, exponent);
                } else {
                    // a single term is not a factor
                    return base;
                }
            }
        };

        public abstract Expression asExpression(Rule rule, Deque<Value<Expression>> args);
    }

    /**
     * Tokens to Expression mapper. Enum constant names MUST match the grammar
     * token names, since dispatch is done with {@code Tokens.valueOf(name)}.
     *
     * @author Philippe Poulard
     */
    enum Tokens {
        FUNCTION {
            @Override
            public Expression asExpression(TokenValue<?> token, Deque<Value<Expression>> next) {
                // e.g. sin x
                // function argument
                Calc.Function function = token.getValue(); // e.g. CalcGrammar.Function.sin
                Expression argument = next.pollFirst().getTarget(); // e.g. Expression.Variable("x")
                return new Expression.Function(function, argument);
            }
        },
        RAISED {
            @Override
            public Expression asExpression(TokenValue<?> token, Deque<Value<Expression>> next) {
                // e.g. a ^ b
                return null; // we don't know how to process it here => keep the source value
            }
        },
        ADDITIVE {
            @Override
            public Expression asExpression(TokenValue<?> token, Deque<Value<Expression>> next) {
                // e.g. a + b
                Additive op = token.getValue(); // + | -
                // + is always followed by an argument
                Expression arg = next.pollFirst().getTarget(); // b argument
                Term<Additive> term = new Term<>(op, arg);
                return term;
            }
        },
        MULTIPLICATIVE {
            @Override
            public Expression asExpression(TokenValue<?> token, Deque<Value<Expression>> next) {
                // e.g. a * b
                Multiplicative op = token.getValue(); // * | /
                // * is always followed by an argument
                Expression arg = next.pollFirst().getTarget(); // b argument
                Term<Multiplicative> term = new Term<>(op, arg);
                return term;
            }
        },
        NUMBER {
            @Override
            public Expression asExpression(TokenValue<?> token, Deque<Value<Expression>> next) {
                // A literal numeric constant.
                Number n = token.getValue();
                return new Constant(n);
            }
        },
        VARIABLE {
            @Override
            public Expression asExpression(TokenValue<?> token, Deque<Value<Expression>> next) {
                // A named variable, resolved at evaluation time.
                String name = token.getValue();
                return new Variable(name);
            }
        };

        public abstract Expression asExpression(TokenValue<?> token, Deque<Value<Expression>> next);
    }
}
/**
 * Copyright (C) 2013
 * by 52 North Initiative for Geospatial Open Source Software GmbH
 *
 * Contact: Andreas Wytzisk
 * 52 North Initiative for Geospatial Open Source Software GmbH
 * Martin-Luther-King-Weg 24
 * 48155 Muenster, Germany
 * info@52north.org
 *
 * This program is free software; you can redistribute and/or modify it under
 * the terms of the GNU General Public License version 2 as published by the
 * Free Software Foundation.
 *
 * This program is distributed WITHOUT ANY WARRANTY; even without the implied
 * WARRANTY OF MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program (see gnu-gpl v2.txt). If not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA or
 * visit the Free Software Foundation web page, http://www.fsf.org.
 */
package org.n52.sos.decode;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

import net.opengis.gml.x32.FeaturePropertyType;
import net.opengis.sos.x20.SosInsertionMetadataPropertyType;
import net.opengis.sos.x20.SosInsertionMetadataType;
import net.opengis.swes.x20.DeleteSensorDocument;
import net.opengis.swes.x20.DescribeSensorDocument;
import net.opengis.swes.x20.DescribeSensorType;
import net.opengis.swes.x20.InsertSensorDocument;
import net.opengis.swes.x20.InsertSensorType;
import net.opengis.swes.x20.InsertSensorType.Metadata;
import net.opengis.swes.x20.InsertSensorType.RelatedFeature;
import net.opengis.swes.x20.SensorDescriptionType;
import net.opengis.swes.x20.UpdateSensorDescriptionDocument;
import net.opengis.swes.x20.UpdateSensorDescriptionType;
import net.opengis.swes.x20.UpdateSensorDescriptionType.Description;

import org.apache.xmlbeans.XmlException;
import org.apache.xmlbeans.XmlObject;
import org.n52.sos.coding.CodingRepository;
import org.n52.sos.exception.ows.InvalidParameterValueException;
import org.n52.sos.exception.ows.NoApplicableCodeException;
import org.n52.sos.exception.ows.concrete.DecoderResponseUnsupportedException;
import org.n52.sos.exception.ows.concrete.UnsupportedDecoderInputException;
import org.n52.sos.ogc.OGCConstants;
import org.n52.sos.ogc.gml.CodeType;
import org.n52.sos.ogc.gml.CodeWithAuthority;
import org.n52.sos.ogc.gml.time.Time;
import org.n52.sos.ogc.om.features.samplingFeatures.SamplingFeature;
import org.n52.sos.ogc.ows.OwsExceptionReport;
import org.n52.sos.ogc.sos.Sos2Constants;
import org.n52.sos.ogc.sos.Sos2Constants.UpdateSensorDescriptionParams;
import org.n52.sos.ogc.sos.SosConstants;
import org.n52.sos.ogc.sos.SosInsertionMetadata;
import org.n52.sos.ogc.sos.SosProcedureDescription;
import org.n52.sos.ogc.swes.SwesConstants;
import org.n52.sos.ogc.swes.SwesFeatureRelationship;
import org.n52.sos.request.AbstractServiceRequest;
import org.n52.sos.request.DeleteSensorRequest;
import org.n52.sos.request.DescribeSensorRequest;
import org.n52.sos.request.InsertSensorRequest;
import org.n52.sos.request.UpdateSensorRequest;
import org.n52.sos.service.AbstractServiceCommunicationObject;
import org.n52.sos.service.ServiceConstants.SupportedTypeKey;
import org.n52.sos.util.CodingHelper;
import org.n52.sos.util.CollectionHelper;
import org.n52.sos.util.SosHelper;
import org.n52.sos.util.XmlHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import com.google.common.base.Joiner;
import com.google.common.collect.Lists;

/**
 * Decoder for SWES 2.0 requests (DescribeSensor, InsertSensor,
 * UpdateSensorDescription, DeleteSensor) from their XmlBeans representation
 * into the internal SOS request objects.
 *
 * @since 4.0.0
 *
 */
public class SwesDecoderv20 implements Decoder<AbstractServiceCommunicationObject, XmlObject> {

    private static final Logger LOGGER = LoggerFactory.getLogger(SwesDecoderv20.class);

    @SuppressWarnings("unchecked")
    private static final Set<DecoderKey> DECODER_KEYS = CollectionHelper.union(CodingHelper.decoderKeysForElements(
            SwesConstants.NS_SWES_20, DescribeSensorDocument.class, InsertSensorDocument.class,
            UpdateSensorDescriptionDocument.class, DeleteSensorDocument.class), CodingHelper
            .xmlDecoderKeysForOperation(SosConstants.SOS, Sos2Constants.SERVICEVERSION,
                    SosConstants.Operations.DescribeSensor, Sos2Constants.Operations.InsertSensor,
                    Sos2Constants.Operations.UpdateSensorDescription, Sos2Constants.Operations.DeleteSensor));

    public SwesDecoderv20() {
        LOGGER.debug("Decoder for the following keys initialized successfully: {}!", Joiner.on(", ")
                .join(DECODER_KEYS));
    }

    @Override
    public Set<DecoderKey> getDecoderKeyTypes() {
        return Collections.unmodifiableSet(DECODER_KEYS);
    }

    @Override
    public Map<SupportedTypeKey, Set<String>> getSupportedTypes() {
        return Collections.emptyMap();
    }

    @Override
    public Set<String> getConformanceClasses() {
        return Collections.emptySet();
    }

    /**
     * Dispatches the XmlBeans document to the matching parse method.
     *
     * @param xmlObject the validated request document
     * @return the corresponding internal request object
     * @throws OwsExceptionReport if validation fails or the document type is unsupported
     */
    @Override
    public AbstractServiceRequest decode(final XmlObject xmlObject) throws OwsExceptionReport {
        // Parameterized logging instead of string concatenation: the argument
        // is only rendered when debug is enabled.
        LOGGER.debug("REQUESTTYPE: {}", xmlObject.getClass());
        XmlHelper.validateDocument(xmlObject);
        if (xmlObject instanceof DescribeSensorDocument) {
            return parseDescribeSensor((DescribeSensorDocument) xmlObject);
        } else if (xmlObject instanceof InsertSensorDocument) {
            return parseInsertSensor((InsertSensorDocument) xmlObject);
        } else if (xmlObject instanceof UpdateSensorDescriptionDocument) {
            return parseUpdateSensorDescription((UpdateSensorDescriptionDocument) xmlObject);
        } else if (xmlObject instanceof DeleteSensorDocument) {
            return parseDeleteSensor((DeleteSensorDocument) xmlObject);
        } else {
            throw new UnsupportedDecoderInputException(this, xmlObject);
        }
    }

    /**
     * parses the passes XmlBeans document and creates a SOS describeSensor
     * request
     *
     * @param xbDescSenDoc
     *            XmlBeans document representing the describeSensor request
     * @return Returns SOS describeSensor request
     *
     * @throws OwsExceptionReport
     *             if validation of the request failed
     */
    private AbstractServiceRequest parseDescribeSensor(final DescribeSensorDocument xbDescSenDoc)
            throws OwsExceptionReport {
        final DescribeSensorRequest descSensorRequest = new DescribeSensorRequest();
        final DescribeSensorType xbDescSensor = xbDescSenDoc.getDescribeSensor();
        descSensorRequest.setService(xbDescSensor.getService());
        descSensorRequest.setVersion(xbDescSensor.getVersion());
        descSensorRequest.setProcedure(xbDescSensor.getProcedure());
        descSensorRequest.setProcedureDescriptionFormat(xbDescSensor.getProcedureDescriptionFormat());
        if (xbDescSensor.isSetValidTime()) {
            descSensorRequest.setValidTime(getValidTime(xbDescSensor.getValidTime()));
        }
        return descSensorRequest;
    }

    /**
     * Parses an InsertSensor document into an {@link InsertSensorRequest},
     * decoding the embedded procedure description with the decoder registered
     * for the requested description format.
     *
     * @param xbInsSensDoc XmlBeans document representing the InsertSensor request
     * @return the populated InsertSensor request
     * @throws OwsExceptionReport if the procedure description format is
     *             unsupported or the description cannot be parsed
     */
    private AbstractServiceRequest parseInsertSensor(final InsertSensorDocument xbInsSensDoc)
            throws OwsExceptionReport {
        final InsertSensorRequest request = new InsertSensorRequest();
        final InsertSensorType xbInsertSensor = xbInsSensDoc.getInsertSensor();
        request.setService(xbInsertSensor.getService());
        request.setVersion(xbInsertSensor.getVersion());
        // format
        request.setProcedureDescriptionFormat(xbInsertSensor.getProcedureDescriptionFormat());
        // observable properties
        if (xbInsertSensor.getObservablePropertyArray() != null
                && xbInsertSensor.getObservablePropertyArray().length > 0) {
            request.setObservableProperty(Arrays.asList(xbInsertSensor.getObservablePropertyArray()));
        }
        // related features
        if (xbInsertSensor.getRelatedFeatureArray() != null && xbInsertSensor.getRelatedFeatureArray().length > 0) {
            request.setRelatedFeature(parseRelatedFeature(xbInsertSensor.getRelatedFeatureArray()));
        }
        // metadata
        if (xbInsertSensor.getMetadataArray() != null && xbInsertSensor.getMetadataArray().length > 0) {
            request.setMetadata(parseMetadata(xbInsertSensor.getMetadataArray()));
        }
        try {
            final XmlObject xbProcedureDescription =
                    XmlObject.Factory.parse(getNodeFromNodeList(xbInsertSensor.getProcedureDescription().getDomNode()
                            .getChildNodes()));
            final Decoder<?, XmlObject> decoder = CodingRepository.getInstance().getDecoder(
                    new XmlNamespaceDecoderKey(xbInsertSensor.getProcedureDescriptionFormat(),
                            xbProcedureDescription.getClass()));
            if (decoder == null) {
                // This helper is expected to throw when the format is not
                // supported for the given service/version.
                SosHelper.checkProcedureDescriptionFormat(xbInsertSensor.getProcedureDescriptionFormat(),
                        request.getService(), request.getVersion());
                // BUGFIX: previously execution fell through to
                // decoder.decode(...) below and failed with a
                // NullPointerException whenever the check above did not throw.
                // Fail with a meaningful OWS exception instead.
                throw new InvalidParameterValueException().at(
                        Sos2Constants.InsertSensorParams.procedureDescriptionFormat).withMessage(
                        "The requested %s is not supported!",
                        Sos2Constants.InsertSensorParams.procedureDescriptionFormat.name());
            }
            final Object decodedProcedureDescription = decoder.decode(xbProcedureDescription);
            if (decodedProcedureDescription instanceof SosProcedureDescription) {
                request.setProcedureDescription((SosProcedureDescription) decodedProcedureDescription);
            }
        } catch (final XmlException xmle) {
            throw new NoApplicableCodeException().causedBy(xmle).withMessage(
                    "Error while parsing procedure description of InsertSensor request!");
        }
        return request;
    }

    /**
     * Parses a DeleteSensor document into a {@link DeleteSensorRequest}.
     *
     * @param xbDelSenDoc XmlBeans document representing the DeleteSensor request
     * @return the populated DeleteSensor request
     */
    private AbstractServiceRequest parseDeleteSensor(final DeleteSensorDocument xbDelSenDoc) {
        final DeleteSensorRequest request = new DeleteSensorRequest();
        request.setService(xbDelSenDoc.getDeleteSensor().getService());
        request.setVersion(xbDelSenDoc.getDeleteSensor().getVersion());
        request.setProcedureIdentifier(xbDelSenDoc.getDeleteSensor().getProcedure());
        return request;
    }

    /**
     * parses the Xmlbeans UpdateSensorDescription document to a SOS request.
     *
     * @param xbUpSenDoc
     *            UpdateSensorDescription document
     * @return SOS UpdateSensor request
     *
     * @throws OwsExceptionReport
     *             if an error occurs.
     */
    private AbstractServiceRequest parseUpdateSensorDescription(final UpdateSensorDescriptionDocument xbUpSenDoc)
            throws OwsExceptionReport {
        final UpdateSensorRequest request = new UpdateSensorRequest();
        final UpdateSensorDescriptionType xbUpdateSensor = xbUpSenDoc.getUpdateSensorDescription();
        request.setService(xbUpdateSensor.getService());
        request.setVersion(xbUpdateSensor.getVersion());
        request.setProcedureIdentifier(xbUpdateSensor.getProcedure());
        request.setProcedureDescriptionFormat(xbUpdateSensor.getProcedureDescriptionFormat());
        for (final Description description : xbUpdateSensor.getDescriptionArray()) {
            SensorDescriptionType sensorDescription = description.getSensorDescription();
            try {
                // TODO exception if valid time is set
                final XmlObject xmlObject =
                        XmlObject.Factory.parse(getNodeFromNodeList(sensorDescription.getData().getDomNode()
                                .getChildNodes()));
                final Decoder<?, XmlObject> decoder =
                        CodingRepository.getInstance().getDecoder(CodingHelper.getDecoderKey(xmlObject));
                if (decoder == null) {
                    throw new InvalidParameterValueException().at(
                            UpdateSensorDescriptionParams.procedureDescriptionFormat).withMessage(
                            "The requested procedureDescritpionFormat is not supported!");
                }
                final Object decodedObject = decoder.decode(xmlObject);
                if (decodedObject instanceof SosProcedureDescription) {
                    SosProcedureDescription sosProcedureDescription = (SosProcedureDescription) decodedObject;
                    if (sensorDescription.isSetValidTime()) {
                        sosProcedureDescription.setValidTime(getValidTime(sensorDescription.getValidTime()));
                    }
                    request.addProcedureDescriptionString(sosProcedureDescription);
                }
            } catch (final XmlException xmle) {
                throw new NoApplicableCodeException().causedBy(xmle).withMessage(
                        "Error while parsing procedure description of UpdateSensor request!");
            }
        }
        return request;
    }

    /**
     * Extracts the insertion metadata (featureOfInterest types, observation
     * types) from the InsertSensor metadata elements. Supports both a typed
     * SosInsertionMetadataType payload and a raw DOM child that must be
     * re-parsed.
     *
     * @param metadataArray metadata elements of the InsertSensor request
     * @return the collected insertion metadata
     * @throws OwsExceptionReport if the raw metadata XML cannot be parsed
     */
    private SosInsertionMetadata parseMetadata(final Metadata[] metadataArray) throws OwsExceptionReport {
        final SosInsertionMetadata sosMetadata = new SosInsertionMetadata();
        try {
            for (final Metadata metadata : metadataArray) {
                SosInsertionMetadataType xbSosInsertionMetadata = null;
                if (metadata.getInsertionMetadata() != null
                        && metadata.getInsertionMetadata().schemaType() == SosInsertionMetadataType.type) {
                    xbSosInsertionMetadata = (SosInsertionMetadataType) metadata.getInsertionMetadata();
                } else {
                    // Fallback: the metadata arrived as an untyped DOM child.
                    if (metadata.getDomNode().hasChildNodes()) {
                        final Node node = getNodeFromNodeList(metadata.getDomNode().getChildNodes());
                        final SosInsertionMetadataPropertyType xbMetadata =
                                SosInsertionMetadataPropertyType.Factory.parse(node);
                        xbSosInsertionMetadata = xbMetadata.getSosInsertionMetadata();
                    }
                }
                if (xbSosInsertionMetadata != null) {
                    // featureOfInterest types
                    if (xbSosInsertionMetadata.getFeatureOfInterestTypeArray() != null) {
                        sosMetadata.setFeatureOfInterestTypes(Arrays.asList(xbSosInsertionMetadata
                                .getFeatureOfInterestTypeArray()));
                    }
                    // observation types
                    if (xbSosInsertionMetadata.getObservationTypeArray() != null) {
                        sosMetadata
                                .setObservationTypes(Arrays.asList(xbSosInsertionMetadata.getObservationTypeArray()));
                    }
                }
            }
        } catch (final XmlException xmle) {
            throw new NoApplicableCodeException().causedBy(xmle).withMessage(
                    "An error occurred while parsing the metadata in the http post request");
        }
        return sosMetadata;
    }

    /**
     * Converts the related-feature elements into internal feature
     * relationships. Referenced features (xlink:href) become lightweight
     * {@link SamplingFeature}s; inline features are decoded fully.
     *
     * @param relatedFeatureArray related features of the InsertSensor request
     * @return list of feature relationships
     * @throws OwsExceptionReport if an inline feature cannot be decoded
     */
    private List<SwesFeatureRelationship> parseRelatedFeature(final RelatedFeature[] relatedFeatureArray)
            throws OwsExceptionReport {
        final List<SwesFeatureRelationship> sosRelatedFeatures =
                new ArrayList<SwesFeatureRelationship>(relatedFeatureArray.length);
        for (final RelatedFeature relatedFeature : relatedFeatureArray) {
            final SwesFeatureRelationship sosFeatureRelationship = new SwesFeatureRelationship();
            final FeaturePropertyType fpt = relatedFeature.getFeatureRelationship().getTarget();
            if (fpt.getHref() != null && !fpt.getHref().isEmpty()) {
                // Feature is given by reference only.
                final String identifier = fpt.getHref();
                final SamplingFeature feature = new SamplingFeature(new CodeWithAuthority(identifier));
                if (fpt.getTitle() != null && !fpt.getTitle().isEmpty()) {
                    feature.setName(Lists.newArrayList(new CodeType(fpt.getTitle())));
                }
                if (checkForRequestUrl(fpt.getHref())) {
                    feature.setUrl(fpt.getHref());
                }
                feature.setFeatureType(OGCConstants.UNKNOWN);
                sosFeatureRelationship.setFeature(feature);
            } else {
                // Feature is given inline; decode the full element.
                final Object decodedObject = CodingHelper.decodeXmlElement(fpt);
                if (decodedObject instanceof SamplingFeature) {
                    sosFeatureRelationship.setFeature((SamplingFeature) decodedObject);
                } else {
                    throw new DecoderResponseUnsupportedException(fpt.xmlText(), decodedObject);
                }
            }
            sosFeatureRelationship.setRole(relatedFeature.getFeatureRelationship().getRole());
            sosRelatedFeatures.add(sosFeatureRelationship);
        }
        return sosRelatedFeatures;
    }

    /**
     * Checks whether the href looks like a GET request URL (contains a
     * "request=" query parameter).
     */
    private boolean checkForRequestUrl(final String href) {
        // Locale.ROOT keeps the comparison locale-independent (e.g. avoids
        // the Turkish dotless-i lowercasing surprise).
        return href.toLowerCase(Locale.ROOT).contains("request=");
    }

    /**
     * Returns the first ELEMENT node of the list, or {@code null} when the
     * list is null, empty, or contains no element nodes (e.g. only text).
     */
    private Node getNodeFromNodeList(final NodeList nodeList) {
        if (nodeList != null && nodeList.getLength() > 0) {
            for (int i = 0; i < nodeList.getLength(); i++) {
                if (nodeList.item(i).getNodeType() == Node.ELEMENT_NODE) {
                    return nodeList.item(i);
                }
            }
        }
        return null;
    }

    /**
     * Decodes the validTime of a DescribeSensor request.
     *
     * @throws OwsExceptionReport if the contained time primitive is unsupported
     */
    private Time getValidTime(net.opengis.swes.x20.DescribeSensorType.ValidTime validTime) throws OwsExceptionReport {
        Object decodeXmlElement = CodingHelper.decodeXmlElement(validTime.getAbstractTimeGeometricPrimitive());
        if (decodeXmlElement instanceof Time) {
            return (Time) decodeXmlElement;
        } else {
            throw new InvalidParameterValueException().at(Sos2Constants.DescribeSensorParams.validTime).withMessage(
                    "The validTime element ({}) is not supported",
                    validTime.getAbstractTimeGeometricPrimitive().schemaType());
        }
    }

    /**
     * Decodes the validTime of a sensor description (UpdateSensorDescription).
     *
     * @throws OwsExceptionReport if the contained time primitive is unsupported
     */
    private Time getValidTime(net.opengis.swes.x20.SensorDescriptionType.ValidTime validTime)
            throws OwsExceptionReport {
        Object decodeXmlElement = CodingHelper.decodeXmlElement(validTime.getAbstractTimeGeometricPrimitive());
        if (decodeXmlElement instanceof Time) {
            return (Time) decodeXmlElement;
        } else {
            throw new InvalidParameterValueException().at(Sos2Constants.UpdateSensorDescriptionParams.validTime)
                    .withMessage("The validTime element ({}) is not supported",
                            validTime.getAbstractTimeGeometricPrimitive().schemaType());
        }
    }
}
package cs.si.stavor.fragments;

import org.xwalk.core.XWalkView;
import cs.si.stavor.MainActivity;
import cs.si.stavor.R;
import cs.si.stavor.StavorApplication;
import cs.si.stavor.app.Parameters;
import cs.si.stavor.model.Browsers;
import cs.si.stavor.simulator.Simulator;
import cs.si.stavor.web.MyResourceClient;
import cs.si.stavor.web.MyUIClient;
import cs.si.stavor.web.WebAppInterface;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.os.Bundle;
import android.app.Fragment;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.PopupMenu;
import android.widget.ProgressBar;
import android.widget.SlidingDrawer;
import android.widget.SlidingDrawer.OnDrawerCloseListener;
import android.widget.SlidingDrawer.OnDrawerOpenListener;

/**
 * Fragment with the visualization browser for the map
 * @author Xavier Gibert
 *
 */
@SuppressWarnings("deprecation")
public final class MapFragment extends Fragment {
    /**
     * The fragment argument representing the section number for this
     * fragment.
     */
    private static final String ARG_SECTION_NUMBER = "section_number";
    // Analytics screen name (value unused in this chunk).
    private static String screenName = "Map";

    /**
     * Returns a new instance of this fragment for the given section number.
     * @param simulation
     */
    public static MapFragment newInstance(int sectionNumber) {
        MapFragment fragment = new MapFragment();
        Bundle args = new Bundle();
        args.putInt(ARG_SECTION_NUMBER, sectionNumber);
        fragment.setArguments(args);
        return fragment;
    }

    public MapFragment() {
    }

    // Simulator backing this view; obtained from the hosting MainActivity.
    private Simulator simulator;
    LinearLayout browserLayout, slider_content;
    Button views_menu;
    SlidingDrawer drawer;
    /**
     * WebView from XWalk project to increase compatibility of WebGL
     */
    private XWalkView browser;

    /**
     * Builds the map view: HUD sliding drawer, XWalk browser, simulator
     * control buttons and the view-mode popup menu. The browser page itself
     * is loaded in a post() runnable so layout is complete first.
     */
    @SuppressLint({ "JavascriptInterface", "SetJavaScriptEnabled", "NewApi" })
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.map_display, container, false);
        ((MainActivity)getActivity()).refreshActionBarIcons();
        //((MainActivity)getActivity()).showTutorialMap();
        //Browser
        /*if(mXwalkView==null){
            mXwalkView = ((MainActivity)getActivity()).getBrowserMap();
        }*/

        //Hud Panel
        drawer = (SlidingDrawer) rootView.findViewById(R.id.slidingDrawer1);
        drawer.setOnDrawerOpenListener(new OnDrawerOpenListener() {
            // When the HUD opens, shrink the browser so both are visible:
            // vertically in portrait, horizontally in landscape.
            public void onDrawerOpened() {
                LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(browser.getLayoutParams());
                if(getResources().getConfiguration().orientation==android.content.res.Configuration.ORIENTATION_PORTRAIT){
                    layoutParams.height = browser.getHeight()-slider_content.getHeight();
                    layoutParams.width = LayoutParams.MATCH_PARENT;
                }else{
                    layoutParams.width = browser.getWidth()-slider_content.getWidth();
                    layoutParams.height = LayoutParams.MATCH_PARENT;
                }
                browser.setLayoutParams(layoutParams);
                ((MainActivity)getActivity()).setHudPanelOpen(true);
            }
        });
        drawer.setOnDrawerCloseListener(new OnDrawerCloseListener() {
            // Restore the browser to full size when the HUD closes.
            public void onDrawerClosed() {
                LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(browser.getLayoutParams());
                layoutParams.height = LayoutParams.MATCH_PARENT;
                layoutParams.width = LayoutParams.MATCH_PARENT;
                browser.setLayoutParams(layoutParams);
                ((MainActivity)getActivity()).setHudPanelOpen(false);
            }
        });
        slider_content = (LinearLayout) rootView.findViewById(R.id.content);

        // Browser is created programmatically (not from XML) and added to the
        // layout below; transparent background so the map shows through.
        browser = new XWalkView(getActivity().getApplicationContext(), getActivity());
        //mXwalkView.setBackgroundResource(R.color.black);
        browser.setBackgroundColor(0x00000000);
        browser.setResourceClient(new MyResourceClient(browser));
        browser.setUIClient(new MyUIClient(browser));
        browser.clearCache(true);
        /*WebSettings browserSettingsMap = browser.getSettings();
        browserSettingsMap.setJavaScriptEnabled(true);
        browserSettingsMap.setUseWideViewPort(false);
        browserSettingsMap.setAllowFileAccessFromFileURLs(true);
        browserSettingsMap.setAllowUniversalAccessFromFileURLs(true);*/
        simulator = ((MainActivity)getActivity()).getSimulator();
        // Bridge object exposed to the page's JavaScript as "Android".
        browser.addJavascriptInterface(new WebAppInterface(getActivity(), simulator.getSimulationResults()), "Android");
        simulator.setHudView(Browsers.Map, rootView, browser);
        browserLayout=(LinearLayout)rootView.findViewById(R.id.simLayout);
        LayoutParams browser_params = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
        browser.setLayoutParams(browser_params);
        browserLayout.addView(browser);

        //Play/Pause/Stop buttons
        ImageButton but_play = (ImageButton)rootView.findViewById(R.id.imageButtonPlay);
        ImageButton but_stop = (ImageButton)rootView.findViewById(R.id.imageButtonStop);
        simulator.setControlButtons(but_play,but_stop);
        simulator.setCorrectSimulatorControls();

        views_menu = (Button) rootView.findViewById(R.id.buttonMissionNew);
        views_menu.setOnClickListener(new OnClickListener(){
            @Override
            public void onClick(View arg0) {
                showPopup(arg0);
            }
        });
        // Show the currently selected view mode as the button label.
        views_menu.setText(titleOfViewId(((StavorApplication)getActivity().getApplication()).follow_sc));

        ImageButton but_clear = (ImageButton)rootView.findViewById(R.id.imageButtonClear);
        but_clear.setOnClickListener(new OnClickListener(){
            @Override
            public void onClick(View arg0) {
                // Clear both the buffered path data and the on-screen path.
                simulator.getSimulationResults().resetMapPathBuffer();
                browser.load("javascript:clearPath()",null);
            }
        });

        ProgressBar progressBar = (ProgressBar) rootView.findViewById(R.id.progressBarBrowser);
        FrameLayout progressBarLayout = (FrameLayout) rootView.findViewById(R.id.frameLayoutProgress);
        progressBar.setProgress(10);
        ((MainActivity)getActivity()).setBrowserProgressBarMap(progressBar,progressBarLayout);

        //needs to have browser defined but not loaded yet
        rootView.post(new Runnable() {
            @Override
            public void run() {
                if(((MainActivity)getActivity()).getHudPanelOpen())
                    drawer.open();
                /*
                if(((MainActivity)getActivity()).getLoadBrowserFlagMap()){
                    //mXwalkView.load(Parameters.Web.STARTING_PAGE,null);
                    //mXwalkView.load("javascript:showLoadingScreen()",null);
                    mXwalkView.loadUrl("javascript:reloadModel()");
                    ((MainActivity)getActivity()).resetLoadBrowserFlagMap();
                }else{
                    mXwalkView.loadUrl("javascript:setLoaded()");
                }*/
                browser.load(Parameters.Web.STARTING_PAGE_MAP,null);
            }
        });

        return rootView;
    }

    /**
     * Maps a view-mode menu item id to its display title. Unknown ids fall
     * back to the "locked" title.
     */
    private String titleOfViewId(int id){
        switch (id) {
            case R.id.menu_mapviews_free:
                return getString(R.string.menu_mapviews_free);
            case R.id.menu_mapviews_locked:
                return getString(R.string.menu_mapviews_locked);
            default:
                return getString(R.string.menu_mapviews_locked);
        }
    }

    /**
     * Shows the visualization Views menu
     * @param v
     */
    private void showPopup(View v) {
        PopupMenu popup = new PopupMenu(getActivity(), v);

        // This activity implements OnMenuItemClickListener
        popup.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() {
            @Override
            public boolean onMenuItemClick(MenuItem item) {
                String com_view = (String)item.getTitle();
                String command;
                // Persist the selection app-wide, then forward it to the page.
                ((StavorApplication)getActivity().getApplication()).follow_sc = item.getItemId();
                switch (item.getItemId()) {
                    case R.id.menu_mapviews_free:
                        command = getString(R.string.key_mapviews_free);
                        break;
                    case R.id.menu_mapviews_locked:
                        command = getString(R.string.key_mapviews_locked);
                        break;
                    default:
                        return false;
                }
                views_menu.setText(com_view);
                browser.load("javascript:changeView('"+command+"')",null);
                return true;
            }
        });
        popup.inflate(R.menu.views_map);
        popup.show();
    }

    @Override
    public void onDestroyView(){
        simulator.setBrowserLoaded(false);
        super.onDestroyView();
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        ((MainActivity) activity).onSectionAttached(getArguments().getInt(
                ARG_SECTION_NUMBER));
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        browser.onDestroy();
    }

    @Override
    public void onPause() {//Pause simulator and browser
        super.onPause();
        if(simulator!=null){
            simulator.temporaryPause();
        }
        if (browser != null) {
            // Let the page persist its camera position before pausing.
            browser.load("javascript:updateMapCenter()",null);
            browser.pauseTimers();
            browser.onHide();
            //mXwalkView.pauseTimers();
            //mXwalkView.onHide();
        }
    }

    @Override
    public void onResume() {//Resume browser
        super.onResume();
        if (browser != null) {
            browser.resumeTimers();
            browser.onShow();
            //mXwalkView.resumeTimers();
            //mXwalkView.onShow();
        }
        if(simulator!=null){
            simulator.resumeTemporaryPause();
        }
    }

    @Override
    public void onDetach() {
        ((MainActivity)getActivity()).resetBrowserProgressBarMap();
        simulator.clearHud();
        //XWalk
        if (browser != null) {
            //mXwalkView.onDestroy();
            //System.gc();
            // NOTE(review): the view is removed and its drawing cache cleared,
            // but browser.destroy() stays commented out — presumably the
            // XWalkView is torn down in onDestroy() instead; verify there is
            // no leak across detach/attach cycles.
            browserLayout.removeView(browser);
            browser.destroyDrawingCache();
            //browser.destroy();
        }
        //unbindDrawables(getView());
        super.onDetach();
    }

    /*private void unbindDrawables(View view) {
        if (view.getBackground() != null) {
            view.getBackground().setCallback(null);
        }
        if (view instanceof ViewGroup) {
            for (int i = 0; i < ((ViewGroup) view).getChildCount(); i++) {
                unbindDrawables(((ViewGroup) view).getChildAt(i));
            }
            ((ViewGroup) view).removeAllViews();
        }
    }*/
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.openhris.payroll;

import com.openhris.commons.OpenHrisUtilities;
import com.openhris.model.Adjustment;
import com.openhris.service.PayrollService;
import com.openhris.serviceprovider.PayrollServiceImpl;
import com.vaadin.Application;
import com.vaadin.data.Item;
import com.vaadin.event.ItemClickEvent;
import com.vaadin.ui.Button;
import com.vaadin.ui.Label;
import com.vaadin.ui.TabSheet;
import com.vaadin.ui.Table;
import com.vaadin.ui.TextField;
import com.vaadin.ui.VerticalLayout;
import com.vaadin.ui.Window;
import java.util.List;

/**
 * Vaadin sub-window for posting and reviewing payroll adjustments.
 * One tab posts a new adjustment (amount + remarks); the other lists
 * existing adjustments and lets the user delete one by clicking its id.
 *
 * @author jetdario
 */
public class AdjustmentWindow extends Window {

    PayrollService payrollService = new PayrollServiceImpl();
    OpenHrisUtilities utilities = new OpenHrisUtilities();

    // Payroll row this window operates on, plus its monetary context.
    private int payrollId;
    private double amountToBeReceive;
    private double amountReceived;
    private double adjustment;
    // Single shared table instance; adjustmentTable() repopulates it in place.
    private Table adjustmentTbl = new Table();

    /**
     * Builds the two-tab window ("Post Adjustments" / "Adjustments Table").
     * NOTE(review): getters such as getPayrollId() used below are defined
     * further down the class, beyond this chunk.
     */
    public AdjustmentWindow(int payrollId, double amountToBeReceive,
            double amountReceived, double adjustment) {
        this.payrollId = payrollId;
        this.amountToBeReceive = amountToBeReceive;
        this.amountReceived = amountReceived;
        this.adjustment = adjustment;

        setCaption("ADJUSTMENTS");
        setWidth("400px");

        TabSheet ts = new TabSheet();
        ts.addStyleName("bar");

        VerticalLayout vlayout = new VerticalLayout();
        vlayout.setMargin(true);
        vlayout.setSpacing(true);
        vlayout.setCaption("Post Adjustments");

        final TextField amount = new TextField("Amount: ");
        amount.setWidth("100%");
        vlayout.addComponent(amount);

        final TextField remarks = new TextField("Remarks");
        remarks.setWidth("100%");
        vlayout.addComponent(remarks);

        Button saveAdjustments = new Button("POST ADJUSTMENTS");
        saveAdjustments.setWidth("100%");
        saveAdjustments.addListener(new Button.ClickListener() {

            @Override
            public void buttonClick(Button.ClickEvent event) {
                // Validate: amount present and numeric.
                if(amount.getValue() == null || amount.getValue().toString().trim().isEmpty()){
                    getWindow().showNotification("Enter Amount for adjustment.",
                            Window.Notification.TYPE_WARNING_MESSAGE);
                    return;
                } else {
                    if(!utilities.checkInputIfDouble(amount.getValue().toString().trim())){
                        getWindow().showNotification("Enter a numeric value for amount.",
                                Window.Notification.TYPE_ERROR_MESSAGE);
                        return;
                    }
                }

                // Validate: remarks are mandatory.
                if(remarks.getValue() == null || remarks.getValue().toString().trim().isEmpty()){
                    getWindow().showNotification("Add remarks for adjustment.",
                            Window.Notification.TYPE_ERROR_MESSAGE);
                    return;
                }

                double amountForAdjustment = utilities.convertStringToDouble(amount.getValue().toString().trim());
                String remarksForAdjustment = remarks.getValue().toString().trim().toLowerCase();

                boolean result = payrollService.insertAdjustmentToPayroll(getPayrollId(),
                        getAmountToBeReceive(), getAmountReceived(), amountForAdjustment, remarksForAdjustment);
                if(result){
                    // Refresh the table, then close this window and notify.
                    adjustmentTable();
                    close();
                    getWindow().showNotification("Successfully added adjustment.",
                            Window.Notification.TYPE_HUMANIZED_MESSAGE);
                }
            }
        });
        vlayout.addComponent(saveAdjustments);

        ts.addComponent(vlayout);

        vlayout = new VerticalLayout();
        vlayout.setMargin(true);
        vlayout.setSpacing(true);
        vlayout.setCaption("Adjustments Table");

        Label label = new Label("Remarks: Click ID Column to delete Adjustment");
        vlayout.addComponent(label);
        vlayout.addComponent(adjustmentTable());

        Button closeBtn = new Button("CLOSE");
        closeBtn.setWidth("100%");
        // NOTE(review): closeBtnListener is declared beyond this chunk.
        closeBtn.addListener(closeBtnListener);
        vlayout.addComponent(closeBtn);

        ts.addComponent(vlayout);

        addComponent(ts);
    }

    /**
     * (Re)populates the shared adjustments table from the service and wires
     * a click listener: clicking the "id" cell opens a confirmation window
     * to remove that adjustment. Returns the shared table instance.
     */
    private Table adjustmentTable(){
        adjustmentTbl.removeAllItems();
        adjustmentTbl.setWidth("100%");
        adjustmentTbl.setImmediate(true);
        adjustmentTbl.setSelectable(true);

        adjustmentTbl.addContainerProperty("id", Integer.class, null);
        adjustmentTbl.addContainerProperty("amount", Double.class, null);
        adjustmentTbl.addContainerProperty("remarks", String.class, null);
        adjustmentTbl.addContainerProperty("date posted", String.class, null);

        int i = 0;
        List<Adjustment> adjustmentList = payrollService.getListOfAdjustmentFromPayrollId(getPayrollId());
        for(Adjustment adj : adjustmentList){
            adjustmentTbl.addItem(new Object[]{
                adj.getAdjustmentId(),
                adj.getAmount(),
                adj.getRemarks(),
                utilities.convertDateFormat(adj.getDatePosted().toString())
            }, i);
            i++;
        }
        adjustmentTbl.setPageLength(adjustmentTbl.size());

        // Drop previously registered click listeners so repeated calls to
        // this method do not stack duplicate handlers.
        // NOTE(review): assumes getListeners() returns a copy; if it returned
        // the live collection this would be concurrent modification — verify
        // against the Vaadin version in use.
        for(Object listener : adjustmentTbl.getListeners(ItemClickEvent.class)){
            adjustmentTbl.removeListener(ItemClickEvent.class, listener);
        }

        adjustmentTbl.addListener(new ItemClickEvent.ItemClickListener() {

            @Override
            public void itemClick(ItemClickEvent event) {
                Object itemId = event.getItemId();
                final Item item = adjustmentTbl.getItem(itemId);
                double adjustments = utilities.convertStringToDouble(item.getItemProperty("amount").getValue().toString());
                String remarks = item.getItemProperty("remarks").getValue().toString();

                // Adjustments carried over from a previous payroll run are
                // read-only (marker remark set elsewhere in the system).
                if(remarks.equals("edit timekeeping table")){
                    getWindow().showNotification("You cannot delete adjustment from previous Payroll!",
                            Window.Notification.TYPE_WARNING_MESSAGE);
                    return;
                }

                // Only clicks on the "id" column trigger the removal dialog.
                if(event.getPropertyId().equals("id")){
                    Window subWindow = removeAdjustment(utilities.convertStringToInteger(item.getItemProperty("id").getValue().toString()),
                            getAmountToBeReceive(),
                            getAmountReceived(),
                            adjustments,
                            getPayrollId());
                    if(subWindow.getParent() == null){
                        getApplication().getMainWindow().addWindow(subWindow);
                    }
                    subWindow.setModal(true);
                    subWindow.center();
                }
            }
        });

        return adjustmentTbl;
    }

    /**
     * Builds the confirmation sub-window for removing one adjustment.
     * NOTE(review): this method continues beyond the visible chunk; code
     * below is the visible prefix, unchanged.
     */
    private Window removeAdjustment(final int adjustmentId,
            final double amountToBeReceive,
            final double amountReceived,
            final double adjustment,
            final int payrollId){
        VerticalLayout vlayout = new VerticalLayout();
        vlayout.setMargin(true);
        vlayout.setSpacing(true);

        final Window subWindow = new Window("REMOVE ADVANCES", vlayout);
        subWindow.setWidth("200px");

        Button removeAdjBtn = new Button("REMOVE ADJUSTMENT?");
        removeAdjBtn.setWidth("100%");
removeAdjBtn.addListener(new Button.ClickListener() { @Override public void buttonClick(Button.ClickEvent event) { boolean result = payrollService.removeAdjustmentById(adjustmentId, amountToBeReceive, amountReceived, adjustment, payrollId); if(result){ (subWindow.getParent()).removeWindow(subWindow); adjustmentTable(); } } }); subWindow.addComponent(removeAdjBtn); return subWindow; } public int getPayrollId() { return payrollId; } public double getAmountToBeReceive() { return amountToBeReceive; } public double getAmountReceived() { return amountReceived; } public double getAdjustment() { return adjustment; } Button.ClickListener closeBtnListener = new Button.ClickListener() { @Override public void buttonClick(Button.ClickEvent event) { close(); } }; }
/* * Copyright (c) 2015, salesforce.com, inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted provided * that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this list of conditions and the * following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and * the following disclaimer in the documentation and/or other materials provided with the distribution. * * Neither the name of salesforce.com, inc. nor the names of its contributors may be used to endorse or * promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ /********************************** * A code snippet showing Bulk v2 Ingest using BulkV2Connection. * Requires dataloader-<version>-uber.jar in the classpath to compile. 
* import java.net.URL; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.FileOutputStream; import java.io.IOException; import com.salesforce.dataloader.action.visitor.BulkV2Connection; import com.salesforce.dataloader.client.HttpClientTransport; import com.sforce.async.JobInfo; import com.sforce.async.JobStateEnum; import com.sforce.async.OperationEnum; import com.sforce.soap.partner.Connector; import com.sforce.soap.partner.LoginResult; import com.sforce.soap.partner.PartnerConnection; import com.sforce.ws.ConnectorConfig; public class TestBulkV2 { public static void main(String[] args) { String insertFilename = "./insertAccountCsv.csv"; String deleteFilename = "./deleteAccountCsv.csv"; String successFilename = "./ingestSuccessResults.csv"; failureFilename = "./ingestFailureResults.csv"; String unprocessedFilename = "./ingestUnprocessedRecords.csv"; String bulkQueryResultsFilename = "./queryResults.csv"; String username = ""; String password = ""; static final String myDomainURLString = "https://<mydomain prefix>.my.salesforce.com"; static final String restEndpoint = myDomainURLString + "/services/data/v52.0/jobs/"; try { URL DEFAULT_AUTH_ENDPOINT_URL = new URL(Connector.END_POINT); URL serverUrl = new URL(myDomainURLString); ConnectorConfig cc = new ConnectorConfig(); cc.setTransport(HttpClientTransport.class); cc.setUsername(username); cc.setPassword(password); cc.setAuthEndpoint(serverUrl + DEFAULT_AUTH_ENDPOINT_URL.getPath()); cc.setServiceEndpoint(serverUrl + DEFAULT_AUTH_ENDPOINT_URL.getPath()); cc.setRestEndpoint(restEndpoint); final PartnerConnection conn = Connector.newConnection(cc); // bulkv2 insert BulkV2Connection v2conn = new BulkV2Connection(cc); JobInfo job = executeJob("account", OperationEnum.insert, v2conn, insertFilename); v2conn.saveIngestSuccessResults(job.getId(), successFilename); v2conn.saveIngestFailureResults(job.getId(), failureFilename); v2conn.saveIngestUnprocessedRecords(job.getId(), 
unprocessedFilename); // bulkv2 query job = new JobInfo(); job.setOperation(OperationEnum.query); job.setObject("account"); job.setContentType(ContentType.CSV); job.setObject("select id from Account"); job = v2conn.createJob(job); // wait for the job to complete while (job.getState() != JobStateEnum.JobComplete) { Thread.sleep(10,000); job = v2conn.getExtractJobStatus(job.getId()); } // download query results BufferedOutputStream csvFileStream = new BufferedOutputStream(new FileOutputStream(bulkQueryResultsFilename)); String locator = v2conn.getQueryLocator(); while (!"null".equalsIgnoreCase(locator)) { BufferedInputStream resultsStream = new BufferedInputStream(v2conn.getQueryResultStream(job.getId(), locator)); writeTo(resultsStream, csvFileStream); resultsStream.close(); locator = v2conn.getQueryLocator(); } csvFileStream.close(); } catch (Exception ex) { ex.printStackTrace(); System.exit(-1); } } private static JobInfo executeJob(String objectName, OperationEnum operation, BulkV2Connection v2conn, String ingestFilename) throws Exception { JobInfo job = new JobInfo(); job.setObject(objectName); job.setOperation(operation); job = v2conn.createJob(job); job = v2conn.startIngest(job.getId(), ingestFilename); while (job.getState() != JobStateEnum.JobComplete) { Thread.sleep(10,000); job = v2conn.getIngestJobStatus(job.getId()); } return job; } private static void writeTo(BufferedInputStream bis, BufferedOutputStream bos) throws IOException { byte[] buffer = new byte[2048]; for(int len; (len = bis.read(buffer)) > 0;) { bos.write(buffer, 0, len); } } } **************************/ package com.salesforce.dataloader.action.visitor; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Serializable; import 
java.io.FileOutputStream; import java.net.HttpURLConnection; import java.net.URL; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TimeZone; import java.util.zip.GZIPInputStream; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLContext; import javax.xml.namespace.QName; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.salesforce.dataloader.client.HttpTransportInterface; import com.sforce.async.AsyncApiException; import com.sforce.async.AsyncExceptionCode; import com.sforce.async.ContentType; import com.sforce.async.JobInfo; import com.sforce.async.JobStateEnum; import com.sforce.async.OperationEnum; import com.sforce.ws.ConnectionException; import com.sforce.ws.ConnectorConfig; import com.sforce.ws.MessageHandler; import com.sforce.ws.MessageHandlerWithHeaders; import com.sforce.ws.bind.CalendarCodec; import com.sforce.ws.bind.TypeMapper; import com.sforce.ws.parser.PullParserException; import com.sforce.ws.parser.XmlInputStream; import com.sforce.ws.util.FileUtil; enum HttpMethod { GET, POST, PATCH, PUT } public class BulkV2Connection { private static final String URI_STEM_QUERY = "query/"; private static final String URI_STEM_INGEST = "ingest/"; private static final String AUTH_HEADER = "Authorization"; private static final String AUTH_HEADER_VALUE_PREFIX = "Bearer "; public static final String NAMESPACE = "http://www.force.com/2009/06/asyncapi/dataload"; public static final String SESSION_ID = "X-SFDC-Session"; public static final String XML_CONTENT_TYPE = "application/xml"; public static final String CSV_CONTENT_TYPE = "text/csv"; public static final String JSON_CONTENT_TYPE = "application/json"; public static final String ZIP_XML_CONTENT_TYPE = "zip/xml"; public static final String ZIP_CSV_CONTENT_TYPE = "zip/csv"; public 
static final String ZIP_JSON_CONTENT_TYPE = "zip/json"; public static final String UTF_8 = "UTF-8"; public static final String INGEST_RESULTS_SUCCESSFUL = "successfulResults"; public static final String INGEST_RESULTS_UNSUCCESSFUL = "failedResults"; public static final String INGEST_RECORDS_UNPROCESSED = "unprocessedrecords"; public static final QName JOB_QNAME = new QName(NAMESPACE, "jobInfo"); private String authHeaderValue = ""; private String queryLocator = ""; private int numberOfRecordsInQueryResult = 0; private ConnectorConfig config; public static final TypeMapper typeMapper = new TypeMapper(null, null, false); /********************************** * * public, common methods * **********************************/ public BulkV2Connection(ConnectorConfig connectorConfig) throws AsyncApiException { this.config = connectorConfig; this.authHeaderValue = AUTH_HEADER_VALUE_PREFIX + getConfig().getSessionId(); } public JobInfo createJob(JobInfo job) throws AsyncApiException { ContentType type = job.getContentType(); if (type != null && type != ContentType.CSV) { throw new AsyncApiException("Unsupported Content Type", AsyncExceptionCode.FeatureNotEnabled); } return createJob(job, ContentType.CSV); } public JobInfo getJobStatus(String jobId, boolean isQuery) throws AsyncApiException { return getJobStatus(jobId, isQuery, ContentType.JSON); } public JobInfo getJobStatus(String jobId, boolean isQuery, ContentType contentType) throws AsyncApiException { String urlString = constructRequestURL(jobId, isQuery); HashMap<String, String> headers = getHeaders(JSON_CONTENT_TYPE, JSON_CONTENT_TYPE); // there is nothing in the request body. 
return doSendJobRequestToServer(urlString, headers, HttpMethod.GET, ContentType.JSON, null, true, "Failed to get job status for job " + jobId); } public JobInfo abortJob(String jobId, boolean isQuery) throws AsyncApiException { return setJobState(jobId, isQuery, JobStateEnum.Aborted, "Failed to abort job " + jobId); } public JobInfo setJobState(String jobId, boolean isQuery, JobStateEnum state, String errorMessage) throws AsyncApiException { String urlString = constructRequestURL(jobId, isQuery); HashMap<String, String> headers = getHeaders(JSON_CONTENT_TYPE, JSON_CONTENT_TYPE); HashMap<Object, Object> requestBodyMap = new HashMap<Object, Object>(); requestBodyMap.put("state", state.toString()); return doSendJobRequestToServer(urlString, headers, HttpMethod.PATCH, ContentType.JSON, requestBodyMap, true, errorMessage); } /********************************** * * public, extract (aka query) methods * **********************************/ public JobInfo getExtractJobStatus(String jobId) throws AsyncApiException { return getJobStatus(jobId, true); } public InputStream getQueryResultStream(String jobId, String locator) throws AsyncApiException { String urlString = constructRequestURL(jobId, true) + "results/"; if (locator != null && !locator.isEmpty() && !"null".equalsIgnoreCase(locator)) { urlString += "?locator=" + locator; } try { return doGetQueryResultStream(new URL(urlString), getHeaders(JSON_CONTENT_TYPE, CSV_CONTENT_TYPE)); } catch (IOException e) { throw new AsyncApiException("Failed to get query results for job " + jobId, AsyncExceptionCode.ClientInputError, e); } } public String getQueryLocator() { return this.queryLocator; } public int getNumberOfRecordsInQueryResult() { return this.numberOfRecordsInQueryResult; } /********************************** * * public, ingest (create, update, upsert, delete) methods * **********************************/ // needed for all upload operations (non-query operations) public JobInfo startIngest(String jobId, String 
csvFileName) throws AsyncApiException { File csvFile = new File(csvFileName); if (!csvFile.exists()) { throw new AsyncApiException(csvFileName + " not found.", AsyncExceptionCode.ClientInputError); } // Bulk V2 ingest does not accept CSV exceeding 150 MB in size if (csvFile.length() > 150 * 1024 * 1024) { throw new AsyncApiException(csvFileName + " size exceeds the max file size accepted by Bulk V2 (150 MB)", AsyncExceptionCode.ClientInputError); } String urlString = constructRequestURL(jobId, false) + "batches/"; HashMap<String, String> headers = getHeaders(CSV_CONTENT_TYPE, JSON_CONTENT_TYPE); try { HttpTransportInterface transport = (HttpTransportInterface)getConfig().createTransport(); transport.connect(urlString, headers, true, HttpTransportInterface.SupportedHttpMethodType.PUT, new FileInputStream(csvFile), CSV_CONTENT_TYPE); // Following is needed to actually send the request to the server InputStream serverResponseStream = transport.getContent(); if (!transport.isSuccessful()) { parseAndThrowException(serverResponseStream, ContentType.JSON); } }catch (IOException e) { throw new AsyncApiException("Failed to send contents of " + csvFileName + " to server for job " + jobId, AsyncExceptionCode.ClientInputError, e); } catch (ConnectionException e) { throw new AsyncApiException("Failed to send contents of " + csvFileName + " to server for job " + jobId, AsyncExceptionCode.ClientInputError, e); } // Mark upload as completed urlString = constructRequestURL(jobId, false); headers = getHeaders(JSON_CONTENT_TYPE, JSON_CONTENT_TYPE); setJobState(jobId, false, JobStateEnum.UploadComplete, "Failed to mark completion of the upload"); return getIngestJobStatus(jobId); } public JobInfo getIngestJobStatus(String jobId) throws AsyncApiException { return getJobStatus(jobId, false); } public void saveIngestSuccessResults(String jobId, String filename) throws AsyncApiException { doSaveIngestResults(jobId, filename, INGEST_RESULTS_SUCCESSFUL); } public void 
saveIngestFailureResults(String jobId, String filename) throws AsyncApiException { doSaveIngestResults(jobId, filename, INGEST_RESULTS_UNSUCCESSFUL); } public void saveIngestUnprocessedRecords(String jobId, String filename) throws AsyncApiException { doSaveIngestResults(jobId, filename, INGEST_RECORDS_UNPROCESSED); } public InputStream getIngestSuccessResultsStream(String jobId) throws AsyncApiException { return doGetIngestResultsStream(jobId, INGEST_RESULTS_SUCCESSFUL); } public InputStream getIngestFailedResultsStream(String jobId) throws AsyncApiException { return doGetIngestResultsStream(jobId, INGEST_RESULTS_UNSUCCESSFUL); } public InputStream getIngestUnprocessedRecordsStream(String jobId) throws AsyncApiException { return doGetIngestResultsStream(jobId, INGEST_RECORDS_UNPROCESSED); } /********************************** * * private, common methods * **********************************/ private String constructRequestURL(String jobId, boolean isQuery) { String urlString = getConfig().getRestEndpoint(); if (jobId == null) { jobId = ""; } if (isQuery) { urlString += URI_STEM_QUERY + jobId + "/"; } else { urlString += URI_STEM_INGEST + jobId + "/"; } return urlString; } private JobInfo createJob(JobInfo job, ContentType contentType) throws AsyncApiException { ContentType type = job.getContentType(); if (type != null && type != ContentType.CSV) { throw new AsyncApiException("Unsupported Content Type", AsyncExceptionCode.FeatureNotEnabled); } OperationEnum operation = job.getOperation(); String urlString = constructRequestURL(job.getId(), operation.equals(OperationEnum.query)); HashMap<String, String>headers = null; HashMap<Object, Object> requestBodyMap = new HashMap<Object, Object>(); requestBodyMap.put("operation", job.getOperation().toString()); if (operation.equals(OperationEnum.query)) { headers = getHeaders(JSON_CONTENT_TYPE, CSV_CONTENT_TYPE); requestBodyMap.put("query", job.getObject()); } else { headers = getHeaders(JSON_CONTENT_TYPE, JSON_CONTENT_TYPE); 
requestBodyMap.put("object", job.getObject()); requestBodyMap.put("contentType", "CSV"); } return doSendJobRequestToServer(urlString, headers, HttpMethod.POST, ContentType.JSON, requestBodyMap, true, "Failed to create job"); } private JobInfo doSendJobRequestToServer(String urlString, HashMap<String, String> headers, HttpMethod requestMethod, ContentType responseContentType, HashMap<Object, Object> requestBodyMap, boolean processServerResponse, String exceptionMessageString) throws AsyncApiException { if (headers == null) { headers = getHeaders(JSON_CONTENT_TYPE, JSON_CONTENT_TYPE); } try { InputStream in; boolean successfulRequest = true; if (requestMethod == HttpMethod.GET) { if (requestBodyMap != null && !requestBodyMap.isEmpty()) { Set<Object> paramNameSet = requestBodyMap.keySet(); boolean firstParam = true; for (Object paramName : paramNameSet) { if (firstParam) { urlString += "?" + paramName.toString() + "=" + requestBodyMap.get(paramName); firstParam = false; } else { urlString += "&" + paramName.toString() + "=" + requestBodyMap.get(paramName); } } } // make a get request HttpURLConnection httpConnection = openHttpConnection(new URL(urlString), headers); in = doHttpGet(httpConnection, new URL(urlString)); } else { HttpTransportInterface transport = (HttpTransportInterface) getConfig().createTransport(); OutputStream out; if (requestMethod == HttpMethod.PATCH) { out = transport.connect(urlString, headers, true, HttpTransportInterface.SupportedHttpMethodType.PATCH); } else if (requestMethod == HttpMethod.PUT) { out = transport.connect(urlString, headers, true, HttpTransportInterface.SupportedHttpMethodType.PUT); } else { // assume post method out = transport.connect(urlString, headers, true, HttpTransportInterface.SupportedHttpMethodType.POST); } String requestContent = serializeToJson(requestBodyMap); out.write(requestContent.getBytes(UTF_8)); out.close(); in = transport.getContent(); successfulRequest = transport.isSuccessful(); } if 
(!processServerResponse) { // sent the request to server, return without processing the response return null; } JobInfo result = null; if (successfulRequest) { if (responseContentType == ContentType.ZIP_XML || responseContentType == ContentType.XML) { XmlInputStream xin = new XmlInputStream(); xin.setInput(in, UTF_8); result = new JobInfo(); result.load(xin, typeMapper); } else { result = deserializeJsonToObject(in, JobInfo.class); } } else { parseAndThrowException(in, responseContentType); } return result; } catch (IOException e) { throw new AsyncApiException(exceptionMessageString, AsyncExceptionCode.ClientInputError, e); } catch (ConnectionException e) { throw new AsyncApiException(exceptionMessageString, AsyncExceptionCode.ClientInputError, e); } catch (PullParserException e) { throw new AsyncApiException(exceptionMessageString, AsyncExceptionCode.ClientInputError, e); } } private ConnectorConfig getConfig() { return config; } static void parseAndThrowException(InputStream in, ContentType type) throws AsyncApiException { try { AsyncApiException exception; BulkV2Error[] errorList = deserializeJsonToObject(in, BulkV2Error[].class); if (errorList[0].message.contains("Aggregate Relationships not supported in Bulk Query")) { exception = new AsyncApiException(errorList[0].message, AsyncExceptionCode.FeatureNotEnabled); } else { exception = new AsyncApiException(errorList[0].errorCode + " : " + errorList[0].message, AsyncExceptionCode.Unknown); } throw exception; } catch (IOException e) { throw new AsyncApiException("Failed to parse exception", AsyncExceptionCode.ClientInputError, e); } } private HashMap<String, String> getHeaders(String requestContentType, String acceptContentType) { HashMap<String, String> newMap = new HashMap<String, String>(); newMap.put("Content-Type", requestContentType); newMap.put("ACCEPT", acceptContentType); newMap.put(AUTH_HEADER, this.authHeaderValue); return newMap; } static String serializeToJson(HashMap<Object, Object> nameValueMap) 
throws JsonProcessingException { ObjectMapper mapper = new ObjectMapper(); mapper.setDateFormat(CalendarCodec.getDateFormat()); return mapper.writeValueAsString(nameValueMap); } static <T> T deserializeJsonToObject (InputStream in, Class<T> tmpClass) throws IOException { ObjectMapper mapper = new ObjectMapper(); mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true); // By default, ObjectMapper generates Calendar instances with UTC TimeZone. // Here, override that to "GMT" to better match the behavior of the WSC XML parser. mapper.setTimeZone(TimeZone.getTimeZone("GMT")); return mapper.readValue(in, tmpClass); } private HttpURLConnection openHttpConnection(URL url, HashMap<String, String> headers) throws IOException { HttpURLConnection connection = getConfig().createConnection(url, null); SSLContext sslContext = getConfig().getSslContext(); if (sslContext != null && connection instanceof HttpsURLConnection) { ((HttpsURLConnection)connection).setSSLSocketFactory(sslContext.getSocketFactory()); } if (headers != null && !headers.isEmpty()) { Set<String> headerNameSet = headers.keySet(); for (String headerName : headerNameSet) { connection.setRequestProperty(headerName, headers.get(headerName)); } } connection.setRequestProperty(AUTH_HEADER, this.authHeaderValue); return connection; } private InputStream doHttpGet(HttpURLConnection connection, URL url) throws IOException, AsyncApiException { boolean success = true; InputStream in; try { in = connection.getInputStream(); } catch (IOException e) { success = false; in = connection.getErrorStream(); } String encoding = connection.getHeaderField("Content-Encoding"); if ("gzip".equals(encoding)) { in = new GZIPInputStream(in); } if (getConfig().isTraceMessage() || getConfig().hasMessageHandlers()) { byte[] bytes = FileUtil.toBytes(in); in = new ByteArrayInputStream(bytes); if (getConfig().hasMessageHandlers()) { Iterator<MessageHandler> it = getConfig().getMessagerHandlers(); while (it.hasNext()) { 
MessageHandler handler = it.next(); if (handler instanceof MessageHandlerWithHeaders) { ((MessageHandlerWithHeaders)handler).handleRequest(url, new byte[0], null); ((MessageHandlerWithHeaders)handler).handleResponse(url, bytes, connection.getHeaderFields()); } else { handler.handleRequest(url, new byte[0]); handler.handleResponse(url, bytes); } } } if (getConfig().isTraceMessage()) { getConfig().getTraceStream().println(url.toExternalForm()); Map<String, List<String>> headers = connection.getHeaderFields(); for (Map.Entry<String, List<String>>entry : headers.entrySet()) { StringBuffer sb = new StringBuffer(); List<String> values = entry.getValue(); if (values != null) { for (String v : values) { sb.append(v); } } getConfig().getTraceStream().println(entry.getKey() + ": " + sb.toString()); } getConfig().teeInputStream(bytes); } } if (!success) { ContentType type = null; String contentTypeHeader = connection.getContentType(); if (contentTypeHeader != null) { if (contentTypeHeader.contains(XML_CONTENT_TYPE)) { type = ContentType.XML; } else if (contentTypeHeader.contains(JSON_CONTENT_TYPE)) { type = ContentType.JSON; } } parseAndThrowException(in, type); } return in; } /********************************** * * private, extract (aka query) methods * **********************************/ private InputStream doGetQueryResultStream(URL resultsURL, HashMap<String, String> headers) throws IOException, AsyncApiException { HttpURLConnection httpConnection = openHttpConnection(resultsURL, headers); InputStream is = doHttpGet(httpConnection, resultsURL); this.queryLocator = httpConnection.getHeaderField("Sforce-Locator"); this.numberOfRecordsInQueryResult = Integer.valueOf(httpConnection.getHeaderField("Sforce-NumberOfRecords")); return is; } /********************************** * * private, ingest methods * @throws AsyncApiException * **********************************/ private InputStream doGetIngestResultsStream(String jobId, String resultsType) throws AsyncApiException { String 
resultsURLString = constructRequestURL(jobId, false) + resultsType; try { URL resultsURL = new URL(resultsURLString); HttpURLConnection httpConnection = openHttpConnection(resultsURL, getHeaders(JSON_CONTENT_TYPE, CSV_CONTENT_TYPE)); return doHttpGet(httpConnection, resultsURL); } catch (IOException e) { throw new AsyncApiException("Failed to get " + resultsType + " for job id " + jobId, AsyncExceptionCode.ClientInputError, e); } } private void doSaveIngestResults(String jobId, String filename, String resultsType) throws AsyncApiException { BufferedOutputStream bos; try { bos = new BufferedOutputStream(new FileOutputStream(filename)); } catch (FileNotFoundException e) { throw new AsyncApiException("File " + filename + " not found", AsyncExceptionCode.ClientInputError, e); } BufferedInputStream bis = new BufferedInputStream(doGetIngestResultsStream(jobId, resultsType)); try { byte[] buffer = new byte[2048]; for(int len; (len = bis.read(buffer)) > 0;) { bos.write(buffer, 0, len); } bis.close(); bos.flush(); bos.close(); } catch (IOException e) { throw new AsyncApiException("Failed to get " + resultsType + " for job " + jobId, AsyncExceptionCode.ClientInputError, e); } } } class BulkV2Error implements Serializable { private static final long serialVersionUID = 3L; public String errorCode = ""; public String message = ""; }
/* * Title: GridSim Toolkit * Description: GridSim (Grid Simulation) Toolkit for Modeling and Simulation * of Parallel and Distributed Systems such as Clusters and Grids * License: GPL - http://www.gnu.org/copyleft/gpl.html */ package gridsim.parallel.gui; import java.awt.BasicStroke; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Composite; import java.awt.Dimension; import java.awt.Font; import java.awt.FontMetrics; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.GridLayout; import java.awt.Rectangle; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.ComponentAdapter; import java.awt.event.ComponentEvent; import java.awt.event.ItemEvent; import java.awt.event.ItemListener; import java.awt.font.FontRenderContext; import java.awt.font.LineMetrics; import java.util.ArrayList; import java.util.LinkedList; import java.util.Vector; import javax.swing.AbstractListModel; import javax.swing.BorderFactory; import javax.swing.BoxLayout; import javax.swing.ButtonGroup; import javax.swing.JButton; import javax.swing.JCheckBoxMenuItem; import javax.swing.JComponent; import javax.swing.JFrame; import javax.swing.JList; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JRadioButtonMenuItem; import javax.swing.JScrollPane; import javax.swing.JSlider; import javax.swing.JTextArea; import javax.swing.JTextField; import javax.swing.SwingUtilities; import javax.swing.border.BevelBorder; import javax.swing.border.Border; import javax.swing.border.CompoundBorder; import javax.swing.border.EtchedBorder; import javax.swing.border.TitledBorder; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import gridsim.GridResource; import gridsim.GridSim; import gridsim.Gridlet; import 
gridsim.parallel.profile.PERange;
import gridsim.parallel.profile.PERangeList;
import gridsim.parallel.profile.ScheduleItem;
import gridsim.parallel.reservation.ReservationStatus;

/**
 * {@link ResourceWindow} class represents the window that shows the
 * scheduling queue of a given resource allocation policy. The left side
 * holds a scrollable Gantt-style chart of the schedule; the right side
 * lists the gridlets/reservations and a colour legend. This interface
 * was initially created for PajFit (<b>http://pajfit.sourceforge.net/</b>).
 * <p>
 * NOTE(review): all updates arrive via {@link #allocationActionPerformed(AllocationAction)};
 * this class appears to assume calls happen on a single thread — confirm
 * whether the simulation thread or the EDT invokes it.
 *
 * @author Marco A. S. Netto and Marcos Dias de Assuncao
 *
 * @since 5.0
 */
public class ResourceWindow extends JFrame
                implements AllocationListener, ActionListener {

    private static final long serialVersionUID = 4453814344309889376L;

    private int numPE; // number of processing elements the Grid resource has

    // default control options included in the left side of the window
    private JSlider sliderX, sliderY;                           // zoom for the X (time) and Y (PE) axes
    private JRadioButtonMenuItem btSecond, btMinute, btHour;    // time-unit selection
    private boolean drawID_ = true;         // draw item IDs inside their boxes
    private boolean autoScroll_ = true;     // keep the view scrolled to the end of the queue
    private boolean animate_ = true;        // repaint the chart on updates
    private boolean showPartition_ = false; // colour boxes by partition instead of by status
    private JButton btSetSdWindowSize;
    private JTextField fdSdWindowSize;
    // Double.MAX_VALUE acts as the "no sliding window" sentinel
    private double slidingWindowSize = Double.MAX_VALUE;

    // the left panel itself, the scroller for the scheduling queue panel
    // and the panel where the jobs are drawn
    private JComponent pnLeft;
    private JScrollPane sclGraph;
    private GraphPanel pnGraph;
    private double currentTime; // last simulation clock value seen

    // the panel that shows the list of gridlets or advance reservations
    private ItemPanel pnItem;
    private JobTypePanel pnColor;

    // the jobs or advance reservations displayed by this window
    private ArrayList<ScheduleItem> scheduledItems = new ArrayList<ScheduleItem>();

    // the settings object (shared singleton: colours, strokes, time span)
    private static GUISettings settings = GUISettings.getInstance();

    // time unit used to display information on the screen
    private int timeUnit = ScheduleItem.TIME_UNIT_SECOND;

    private static final int WINDOW_WIDTH = 900;
    private static final int WINDOW_HEIGHT = 350;
    // pixel offsets of the chart area inside the graph panel
    private static final int SHIFT_X = 30;
    private static final int SHIFT_Y = 25;
    private static final int SHIFT_BOTTOM = 25;
    // how the window width is divided between chart (left) and lists (right)
    private static final float PROPORTION_LEFT_PANEL = 0.6f;
    private static final float PROPORTION_RIGHT_PANEL = 1f - PROPORTION_LEFT_PANEL;
    private static final int HEIGHT_COLOR_PANEL = 90;

    /**
     * Creates the scheduling window.
     * @param resource the characteristics of the grid resource
     * @param windowId an id for the window (also used to stagger windows vertically)
     * @param hPos Horizontal position of the window
     */
    public ResourceWindow(GridResource resource, int windowId, int hPos) {
        numPE = resource.getResourceCharacteristics().getNumPE();

        // null layout: panels are positioned/sized manually (see FrameResizer)
        super.getContentPane().setLayout(null);
        super.setSize(WINDOW_WIDTH, WINDOW_HEIGHT);

        // initialise the left and right panels
        initPanels();

        FrameResizer adapter = new FrameResizer();
        super.addComponentListener(adapter);

        super.setLocation(hPos, windowId * 200);
        super.setTitle("Resource Information Window - " + resource.get_name());
    }

    // -------------------------- PUBLIC METHODS -----------------------

    /**
     * Handles allocation actions. Adds/removes items from the local list,
     * forwards them to the item panel and repaints the chart as needed.
     * @param action an allocation action performed
     * @return <code>true<code> if action was handled
     */
    public boolean allocationActionPerformed(AllocationAction action) {
        ActionType type = action.getActionType();
        LinkedList<ScheduleItem> list = action.getScheduleItems();

        double previousTime = currentTime;
        currentTime = GridSim.clock();

        if (type == ActionType.ITEM_ARRIVED) {
            for(ScheduleItem item : list){
                scheduledItems.add(item);
                pnItem.insertNewItem(item);
            }
            updateResourceWindow();
        }
        else if (type == ActionType.ITEM_STATUS_CHANGED) {
            pnItem.updateItem(list.getLast());
            updateResourceWindow();
        }
        else if (type == ActionType.ITEM_SCHEDULED) {
            // widen the global time span so the chart can show the new finish times
            for(ScheduleItem item : list){
                long finishTime = (long)item.getActualFinishTime();
                settings.setTimeSpan(finishTime);
            }
            pnItem.updateItem(list.getLast());
            updateResourceWindow();
        }
        else if (type == ActionType.ITEM_CANCELLED) {
            // NOTE(review): items are removed from scheduledItems but not from
            // the ItemPanel's list model — cancelled items stay listed; confirm intended.
            for(ScheduleItem item : list){
                scheduledItems.remove(item);
            }
            pnItem.updateItem(list.getLast());
            updateResourceWindow();
        }
        else if (type == ActionType.SIMULATION_TIME_CHANGED) {
            // only repaint if the clock actually advanced
            if(currentTime > previousTime) {
                updateResourceWindow();
            }
        }
        else {
            updateResourceWindow();
        }

        return true;
    }

    /**
     * Handles the action events triggered by interface components:
     * time-unit radio buttons and the "Change" sliding-window-size button.
     * @param e the event received
     */
    public void actionPerformed(ActionEvent e) {
        if (e.getSource() == btSecond && btSecond.isSelected()) {
            timeUnit = ScheduleItem.TIME_UNIT_SECOND;
        }
        else if (e.getSource() == btMinute && btMinute.isSelected()) {
            timeUnit = ScheduleItem.TIME_UNIT_MINUTE;
        }
        else if (e.getSource() == btHour && btHour.isSelected()) {
            timeUnit = ScheduleItem.TIME_UNIT_HOUR;
        }
        else if(e.getSource() == btSetSdWindowSize) {
            // parse and validate the user-entered window size (minimum 60 s)
            double newSize = 0;
            boolean success = true;
            try {
                newSize = Double.parseDouble(fdSdWindowSize.getText());
                if(newSize >= 60) {
                    slidingWindowSize = newSize;
                }
                else {
                    success = false;
                }
            }
            catch (NumberFormatException nfe) {
                success = false;
            }

            if(!success) {
                String message = "The value informed for the size of the " +
                    "sliding window is invalid.\nThe " +
                    (Double.compare(slidingWindowSize, Double.MAX_VALUE) == 0 ?
                            "default" : "current") +
                    " value will be used instead.\n\n" +
                    "Note: the minimum size is 60 seconds.";

                JOptionPane.showMessageDialog(this, message,
                        "Error Setting the Sliding Window Size",
                        JOptionPane.ERROR_MESSAGE);
            }
        }

        if(scheduledItems.size() > 0) {
            pnItem.updatePanel();
        }
        updateResourceWindow();
    }

    // -------------------------- PRIVATE METHODS -----------------------

    /**
     * Initialises the panels. That is, the left side where the control buttons
     * and the panel where the scheduling queue is shown and the right side
     * where the information about the gridlets is displayed
     */
    private void initPanels() {
        // calculates the size of the two panels
        // to be added to the window
        int leftPanelWidth = (int)((super.getWidth()) * PROPORTION_LEFT_PANEL);
        int panelsHeight = (int)(((float)super.getHeight()) - 40);
        int gridletPanelWidth = (int)((super.getWidth()) * PROPORTION_RIGHT_PANEL) - 10;
        int leftPanelXPos = 0;
        int gridletPanelXPos = leftPanelXPos + leftPanelWidth;

        pnLeft = new JPanel();
        pnLeft.setOpaque(true);
        pnLeft.setLayout(new BorderLayout());
        pnLeft.setLocation(leftPanelXPos, 0);
        pnLeft.setSize(leftPanelWidth, panelsHeight);

        Border raisedetched = BorderFactory.createEtchedBorder(EtchedBorder.RAISED);

        // top strip: axis-scaling sliders + sliding-window-size controls
        JPanel instructionPanel = new JPanel();
        instructionPanel.setLayout(new BoxLayout(instructionPanel, BoxLayout.X_AXIS));
        instructionPanel.setBorder(raisedetched);

        JPanel sliderPanel = new JPanel(new GridLayout(1, 2));
        sliderPanel.setBorder(new TitledBorder("Scale X and Y Axes"));

        // slider range 10..100, initial 10; GraphPanel multiplies value by 0.1
        sliderX = new JSlider(10, 100, 10);
        sliderY = new JSlider(10, 100, 10);

        ChangeListener graphResizer = new ChangeListener() {
            public synchronized void stateChanged(ChangeEvent e) {
                pnGraph.repaint();
            }
        };

        sliderX.addChangeListener(graphResizer);
        sliderY.addChangeListener(graphResizer);
        sliderPanel.add(sliderX);
        sliderPanel.add(sliderY);

        JPanel pnWindowProp = new JPanel(new GridLayout(1, 2));
        pnWindowProp.setBorder(new TitledBorder("Sliding Window Size (Sec.):"));
        fdSdWindowSize = new JTextField(8);
        pnWindowProp.add(fdSdWindowSize);
        btSetSdWindowSize = new JButton("Change");
        btSetSdWindowSize.addActionListener(this);
        pnWindowProp.add(btSetSdWindowSize);

        instructionPanel.add(sliderPanel);
        instructionPanel.add(pnWindowProp);

        //Set up the drawing area.
        pnGraph = new GraphPanel();

        //Put the drawing area in a scroll pane.
        sclGraph = new JScrollPane(pnGraph);
        pnGraph.setBorder(new BevelBorder(BevelBorder.LOWERED));

        pnLeft.add(instructionPanel, BorderLayout.NORTH);
        pnLeft.add(sclGraph, BorderLayout.CENTER);

        Border paneBorder = BorderFactory.createEmptyBorder(10, 20, 10, 20);
        pnLeft.setBorder(paneBorder);

        // right side: item list on top, colour legend below
        pnItem = new ItemPanel();
        pnItem.setLocation(gridletPanelXPos, 0);
        pnItem.setSize(gridletPanelWidth, panelsHeight - HEIGHT_COLOR_PANEL - 10);

        pnColor = new JobTypePanel();
        pnColor.setLocation(gridletPanelXPos, pnItem.getHeight());
        pnColor.setSize(gridletPanelWidth, HEIGHT_COLOR_PANEL);

        this.getContentPane().add(pnLeft);
        this.getContentPane().add(pnItem);
        this.getContentPane().add(pnColor);

        createMenuBar();

        pnItem.setMinimumSize(new Dimension(
                (int)(WINDOW_WIDTH/2.7), (int)(super.getMaximumSize().height)));
    }

    /**
     * Creates the menu bar of the main window. All check-box items toggle the
     * corresponding boolean flag and trigger a repaint of the chart.
     */
    private void createMenuBar() {
        JMenuBar menuBar = new JMenuBar();
        JMenu menuCommand = new JMenu("Options");

        JMenu mnGridlet = new JMenu("Gridlet");
        JCheckBoxMenuItem miShowGridID = new JCheckBoxMenuItem("Show ID");
        miShowGridID.setSelected(true);
        mnGridlet.add(miShowGridID);
        miShowGridID.addItemListener(new ItemListener(){
            public void itemStateChanged(ItemEvent e){
                if (e.getStateChange() == ItemEvent.DESELECTED){
                    drawID_ = false;
                }
                else if (e.getStateChange() == ItemEvent.SELECTED){
                    drawID_ = true;
                }
                pnGraph.repaint();
            }
        });
        menuCommand.add(mnGridlet);

        JMenu mnTime = new JMenu("Time Unit");
        btSecond = new JRadioButtonMenuItem("Second");
        btSecond.setActionCommand("time_second");
        btSecond.setSelected(true);
        btMinute = new JRadioButtonMenuItem("Minute");
        btMinute.setActionCommand("time_minutes");
        btHour = new JRadioButtonMenuItem("Hour");
        btHour.setActionCommand("time_hour");

        // group guarantees exactly one time unit is active
        ButtonGroup timeButtonGroup = new ButtonGroup();
        timeButtonGroup.add(btSecond);
        timeButtonGroup.add(btMinute);
        timeButtonGroup.add(btHour);

        btSecond.addActionListener(this);
        btMinute.addActionListener(this);
        btHour.addActionListener(this);

        mnTime.add(btSecond);
        mnTime.add(btMinute);
        mnTime.add(btHour);
        menuCommand.add(mnTime);

        JMenu mnScroll = new JMenu("Scrolling");
        JCheckBoxMenuItem miAutoScroll = new JCheckBoxMenuItem("Auto Scroll to End of Queue");
        miAutoScroll.setSelected(true);
        mnScroll.add(miAutoScroll);
        miAutoScroll.addItemListener(new ItemListener(){
            public void itemStateChanged(ItemEvent e){
                if (e.getStateChange() == ItemEvent.DESELECTED){
                    autoScroll_ = false;
                }
                else if (e.getStateChange() == ItemEvent.SELECTED){
                    autoScroll_ = true;
                }
                updateResourceWindow();
            }
        });
        menuCommand.add(mnScroll);

        JMenu mnAnimation = new JMenu("Animation");
        JCheckBoxMenuItem miAnimation = new JCheckBoxMenuItem("Animate this Window");
        miAnimation.setSelected(true);
        mnAnimation.add(miAnimation);
        miAnimation.addItemListener(new ItemListener(){
            public void itemStateChanged(ItemEvent e){
                if (e.getStateChange() == ItemEvent.DESELECTED){
                    animate_ = false;
                }
                else if (e.getStateChange() == ItemEvent.SELECTED){
                    animate_ = true;
                }
                updateResourceWindow();
            }
        });
        menuCommand.add(mnAnimation);

        JMenu mnPartition = new JMenu("Partitions");
        JCheckBoxMenuItem miPartition = new JCheckBoxMenuItem("Show Partition Informations");
        miPartition.setSelected(false);
        mnPartition.add(miPartition);
        miPartition.addItemListener(new ItemListener(){
            public void itemStateChanged(ItemEvent e){
                if (e.getStateChange() == ItemEvent.DESELECTED){
                    showPartition_ = false;
                }
                else if (e.getStateChange() == ItemEvent.SELECTED){
                    showPartition_ = true;
                }
                updateResourceWindow();
            }
        });
        menuCommand.add(mnPartition);

        menuBar.add(menuCommand);
        setJMenuBar(menuBar);
    }

    /**
     * Repaints the chart and, when a sliding window is in effect and
     * auto-scroll is on, scrolls the horizontal scroll bar to the end.
     */
    private void updateResourceWindow() {
        pnGraph.repaint();
        if(slidingWindowSize != Double.MAX_VALUE) {
            int max = sclGraph.getHorizontalScrollBar().getMaximum();
            if(autoScroll_) {
                Rectangle visRect = sclGraph.getVisibleRect();
                Rectangle rect = new Rectangle(max - visRect.width, 0,
                        visRect.width, sclGraph.getHeight());
                sclGraph.getHorizontalScrollBar().setValue(max - visRect.width);
                sclGraph.scrollRectToVisible(rect);
            }
        }
    }

    /*
     * Converts the time to the time unit in use
     * @param time in seconds
     * @return the time in the unit in use
     */
    private double convertTime(double time) {
        // timeUnit is one of the ScheduleItem.TIME_UNIT_* divisors
        return time / timeUnit;
    }

    // -------------------------- PRIVATE CLASSES -----------------------

    /**
     * Class responsible for resizing the two main panels
     * that compose the resource window interface
     */
    class FrameResizer extends ComponentAdapter {
        public void componentResized(ComponentEvent evt) {
            // calculates the size of the two panels
            // to be added to the window (mirrors the math in initPanels())
            int leftPanelWidth = (int)((ResourceWindow.this.getWidth()) * PROPORTION_LEFT_PANEL);
            int panelsHeight = (int)((ResourceWindow.this.getHeight()) - 40);
            int gridletPanelWidth = (int)((ResourceWindow.this.getWidth()) * PROPORTION_RIGHT_PANEL) - 10;
            int leftPanelXPos = 0;
            int gridletPanelXPos = leftPanelXPos + leftPanelWidth;

            pnLeft.setLocation(leftPanelXPos, 0);
            pnLeft.setSize(leftPanelWidth, panelsHeight);
            pnLeft.updateUI();

            pnItem.setLocation(gridletPanelXPos, 0);
            pnItem.setSize(gridletPanelWidth, panelsHeight - HEIGHT_COLOR_PANEL - 10);
            pnItem.updateUI();

            pnColor.setLocation(gridletPanelXPos, pnItem.getHeight());
            pnColor.setSize(gridletPanelWidth, HEIGHT_COLOR_PANEL);
            pnColor.updateUI();
        }
    }

    /**
     * The panel inside the scroll pane where the jobs are shown.
     */
    class GraphPanel extends JPanel {

        private int panelHeight_;
        private int panelWidth_;
        // pixels per PE (Y) and per simulated second (X)
        private float scaleY_;
        private float scaleX_;

        // appearance settings cached from the shared GUISettings singleton
        private BasicStroke dashedStk = settings.getDashedStroke();
        private BasicStroke normalStk = settings.getNormalStroke();
        private Composite transpComp = settings.getTransparentComposite();
        private Color bgColor = settings.getGraphBGColor();
        private Color color = settings.getGraphAreaColor();
        private Color bdColor = settings.getGraphBDColor();
        private Color gridColor = settings.getTimeGridColor();
        private Color topTxtColor = settings.getLabelColor();
        private Color xTxtColor = settings.getXAxisTextColor();
        private Color ctLnColor = settings.getTimeLineColor();
        private Font grFont = settings.getGraphFont();
        private Color[] colorsQueued = settings.getJobQueuedColors();
        private Color[] colorsDone = settings.getJobDoneColors();
        private Color[] colorsInExec = settings.getJobInExecColors();
        private Color[] colorsARNonCommitted = settings.getARNonCommittedColors();
        private Color[] colorsARCommitted = settings.getARCommittedColors();
        private Color[] colorsARInProgress = settings.getARInProgressColors();
        private Color[] colorQueues = settings.getQueueColors();

        // a job to be highlited. That is, a gridlet or advance reservation
        // selected by the user on the right panel
        ScheduleItem hlItem = null;

        protected GraphPanel() {
            super.setBackground(bgColor);
        }

        /**
         * Recomputes the panel size and axis scales from the current time
         * span, sliders and sliding-window size, then paints the queue,
         * grid and axes.
         * NOTE(review): when animation is off this returns before calling
         * super.paintComponent(), so the previous frame stays on screen.
         */
        protected synchronized void paintComponent(Graphics g2) {
            if(!animate_) {
                return;
            }

            super.paintComponent(g2);
            Graphics2D g2D = (Graphics2D)g2;
            g2D.setFont(grFont);

            double timeSpan = settings.getTimeSpan();

            panelHeight_ = pnLeft.getHeight() - 100 - SHIFT_Y - SHIFT_BOTTOM;
            int minWidth = pnLeft.getWidth() - 50 - 2 * SHIFT_X;
            panelWidth_ = minWidth;

            // with a sliding window, the panel widens so a fixed window of
            // time occupies the visible area (never narrower than minWidth)
            double sdWindowSize = ResourceWindow.this.slidingWindowSize;
            if(Double.compare(sdWindowSize, Double.MAX_VALUE) != 0) {
                panelWidth_ = (int)(minWidth * (settings.getTimeSpan() / sdWindowSize));
            }
            panelWidth_ = (panelWidth_ < minWidth) ? minWidth : panelWidth_;

            scaleY_ = panelHeight_ / (float) numPE;
            scaleX_ = panelWidth_ / (float) (timeSpan);

            // sliders (10..100) scale the axes by a factor of 1x..10x
            scaleY_ *= sliderY.getValue() * (float) 0.1;
            scaleX_ *= sliderX.getValue() * (float) 0.1;

            super.setPreferredSize(new Dimension((int) (timeSpan * scaleX_) + 2 * SHIFT_X,
                    (int) ((numPE) * scaleY_) + SHIFT_Y + SHIFT_BOTTOM));

            drawSchedulingQueue(timeSpan, g2D);
            drawGridsAndAxes(timeSpan, g2D);
            super.revalidate();
        }

        /**
         * Draws the lines and time scale on the scheduling window
         * @param timeSpan the time span of the simulation
         * @param g2D the graphics 2D context
         */
        private void drawGridsAndAxes(double timeSpan, Graphics2D g2D) {
            String text = null;
            FontMetrics metrics = g2D.getFontMetrics();

            // translucent dashed vertical grid lines every 50 px
            g2D.setColor(gridColor);
            g2D.setStroke(dashedStk);
            Composite previousComposite = g2D.getComposite();
            g2D.setComposite(transpComp);

            int heightGph = (int)(numPE * scaleY_);
            int widthGph = (int)(timeSpan * scaleX_);

            int x = SHIFT_X, y = SHIFT_Y;
            for(int i=0; i<=widthGph; i+=50) {
                x = SHIFT_X + i;
                g2D.drawLine(x, SHIFT_Y, x, SHIFT_Y + heightGph);
            }

            g2D.setComposite(previousComposite);
            g2D.setStroke(normalStk);

            // chart border and axis tick marks
            g2D.setColor(bdColor);
            g2D.drawRect(SHIFT_X, SHIFT_Y, widthGph, heightGph);

            for(int i=0; i <= widthGph; i+=50) {
                x = SHIFT_X + i;
                g2D.drawLine(x, SHIFT_Y + heightGph - 5, x, SHIFT_Y + heightGph + 3);
            }

            // X-axis labels, converted to the selected time unit
            g2D.setColor(xTxtColor);
            y = SHIFT_Y + heightGph + 20;
            for(int i=0; i<=widthGph; i+=50) {
                text = "" + (int)convertTime((i/scaleX_));
                g2D.drawString(text,
                        SHIFT_X + i - SwingUtilities.computeStringWidth(metrics, text) / 2, y);
            }

            // current time ("CT") marker label above the chart
            g2D.setColor(topTxtColor);
            text = "CT: "+ (int)(convertTime(currentTime));
            g2D.drawString(text,
                    SHIFT_X + (int)(currentTime * scaleX_) -
                    SwingUtilities.computeStringWidth(metrics, text) / 2,
                    SHIFT_Y - 10);

            // vertical labels on the right (time span) and left (PE count);
            // 1.571 rad ~= 90 degrees
            text = "Time Span: " + (int)(convertTime(timeSpan));
            y = SHIFT_Y + SwingUtilities.computeStringWidth(metrics, text);
            x = widthGph + SHIFT_X + 15;
            rotateAndPaint(g2D, x, y, 1.571, text);

            x = SHIFT_X - 5;
            y = heightGph + SHIFT_Y - 10;
            rotateAndPaint(g2D, x, y, 1.571, " Processing Elements: " + numPE);

            // the current-time line across the chart
            g2D.setColor(ctLnColor);
            x = SHIFT_X + (int)(currentTime * scaleX_);
            g2D.drawLine(x, SHIFT_Y - 7, x, SHIFT_Y + heightGph + 10);
        }

        /*
         * Rotate the graphics, print the string and rotate the graphics back
         */
        private void rotateAndPaint(Graphics2D g2D, int x, int y,
                double theta, String text) {
            g2D.rotate(-theta, x, y);
            g2D.drawString(text, x, y);
            g2D.rotate(theta, x, y);
        }

        /*
         * Draws the boxes representing the gridlets or advance reservations.
         * Box colour depends on the display mode (partition vs. status) and
         * the item's status; font colour is black for gridlets, white for
         * advance reservations.
         */
        private void drawSchedulingQueue(double timeSpan, Graphics2D g2D) {
            Color boxColor = null;
            Color fontColor = null;

            // fill the chart background
            g2D.setColor(color);
            int heightGph = (int)(numPE * scaleY_);
            int widthGph = (int) (timeSpan * scaleX_);
            g2D.fillRect(SHIFT_X, SHIFT_Y, widthGph, heightGph);

            int size = scheduledItems.size();
            for(int i=0; i<size; i++) {
                ScheduleItem item = (ScheduleItem)scheduledItems.get(i);
                // skip items that have not been scheduled yet
                if(item == null || item.getStartTime() < 0) {
                    continue;
                }

                int itemId = item.getID();
                if (item.getPERangeList() != null) {
                    // the color of the font for normal gridlets is black
                    fontColor = Color.BLACK;

                    if(showPartition_) {
                        boxColor = colorQueues[item.getPartitionID() % colorQueues.length];
                    }
                    else if(!item.isAdvanceReservation()) {
                        // Gridlet is in execution
                        if(item.getStatus() == Gridlet.INEXEC) {
                            boxColor = colorsInExec[(itemId % colorsInExec.length)];
                        }
                        // Gridlet has finished
                        else if(item.getStatus() == Gridlet.SUCCESS) {
                            boxColor = colorsDone[(itemId % colorsDone.length)];
                        }
                        else {
                            boxColor = colorsQueued[(itemId % colorsQueued.length)];
                        }
                    }
                    else {
                        // the color of the font for advance reservations is white
                        fontColor = Color.WHITE;
                        if(item.getStatus() == ReservationStatus.IN_PROGRESS.intValue()) {
                            boxColor = colorsARInProgress[(itemId % colorsARInProgress.length)];
                        }
                        else if (item.getStatus() == ReservationStatus.NOT_COMMITTED.intValue()
                                || item.getStatus() == ReservationStatus.UNKNOWN.intValue()) {
                            boxColor = colorsARNonCommitted[(itemId % colorsARNonCommitted.length)];
                        }
                        else if (item.getStatus() == ReservationStatus.COMMITTED.intValue()) {
                            boxColor = colorsARCommitted[(itemId % colorsARCommitted.length)];
                        }
                        else if (item.getStatus() == ReservationStatus.FINISHED.intValue()) {
                            boxColor = colorsDone[(itemId % colorsDone.length)];
                        }
                        else {
                            boxColor = colorsDone[(itemId % colorsDone.length)];
                        }
                    }
                    drawItem((Graphics2D) g2D, item, boxColor, fontColor);
                }
            }

            // if there is an item to be highlighted, then do it
            if(hlItem != null) {
                highlightItem((Graphics2D) g2D, hlItem);
            }
        }

        /*
         * Draws a gridlet or advance reservation in the scheduling window.
         * This method assumes that the gridlet has a range of PEs
         */
        private void drawItem(Graphics2D g2D, ScheduleItem item,
                Color boxColor, Color fontColor) {
            int y;
            int h = 0; //controls the height to draw the gridlet

            PERangeList gridletPERanges = item.getPERangeList();
            gridletPERanges.sortRanges();

            int firstX, firstY;
            int width, height;
            int textX, textY;
            int textHeight, textWidth;

            // gets the time duration of the gridlet
            double duration = item.getActualFinishTime() - item.getStartTime();
            width = (int) (duration * scaleX_);
            firstX = SHIFT_X + (int) (item.getStartTime() * scaleX_);

            String boxText;
            LineMetrics lineMetrics;
            Font font = g2D.getFont().deriveFont(10f);
            g2D.setFont(font);
            FontRenderContext frc = g2D.getFontRenderContext();

            // A gridlet can have the nodes 0-2, 5-7, etc.
            // So it must be painted in parts
            for(PERange range : gridletPERanges){
                y = range.getEnd();
                h = range.getNumPE();

                // PE 0 is at the bottom of the chart, hence the inversion
                firstY = SHIFT_Y + (int) ((numPE - (y + 1)) * scaleY_);
                height = (int) ((h) * scaleY_);

                // if it is a gridlet that reserved resources, then make it
                // transparent to show the advance reservation as well
                boolean reservedGridlet = !item.isAdvanceReservation() && item.hasReserved();
                Composite previousComposite = null;
                if(reservedGridlet) {
                    previousComposite = g2D.getComposite();
                    g2D.setComposite(transpComp);
                }

                g2D.setColor(boxColor);
                if(!reservedGridlet)
                    g2D.fillRect(firstX, firstY, width, height);

                g2D.setColor(Color.black);
                g2D.drawRect(firstX, firstY, width, height);

                //draw the label in the center of the box
                // NOTE(review): new Integer(...) is deprecated boxing;
                // Integer.toString(...) would be preferable.
                boxText = new Integer(item.getID()).toString();
                textWidth = (int)font.getStringBounds(boxText, frc).getWidth();
                lineMetrics = font.getLineMetrics(boxText, frc);
                textHeight = (int)(lineMetrics.getAscent() + lineMetrics.getDescent());
                textX = firstX + (width - textWidth)/2;
                textY = (int)(firstY + (height + textHeight)/2 - lineMetrics.getDescent());
                g2D.setColor(fontColor);
                if(drawID_){
                    g2D.drawString(boxText, textX, textY);
                }

                if(reservedGridlet) {
                    g2D.setComposite(previousComposite);
                }
            }
        }

        /*
         * Highlights a schedule item. This method basically draws the item
         * in the resource window with red lines.
         */
        private void highlightItem(Graphics2D g2D, ScheduleItem item) {
            int y;
            int h = 0; //controls the height to draw the gridlet

            PERangeList gridletPERanges = item.getPERangeList();
            if(gridletPERanges == null)
                return;

            gridletPERanges.sortRanges();

            int firstX, firstY;
            int width, height;

            // gets the time duration of the gridlet
            double duration = item.getActualFinishTime() - item.getStartTime();
            width = (int) (duration * scaleX_);
            firstX = SHIFT_X + (int) (item.getStartTime() * scaleX_);

            // A gridlet can have the nodes 0-2, 5-7, etc.
            // So it must be painted in parts
            for(PERange range : gridletPERanges){
                y = range.getEnd();
                h = range.getNumPE();
                firstY = SHIFT_Y + (int) ((numPE - (y + 1)) * scaleY_);
                height = (int) ((h) * scaleY_);
                g2D.setColor(Color.RED);
                g2D.drawRect(firstX, firstY, width, height);
            }
        }
    }

    /**
     * This class corresponds to the panel that contains information about the
     * {@link ScheduleItem}s (i.e. Gridlets and Reservations) received by a Grid
     * resource. This panel displays the list of items. If the user clicks on an
     * item, additional information is shown. <br>
     * This interface was initially created for another simulator called PajFit
     * available at <b>http://pajfit.sourceforge.net/</b>.
     *
     * @author Marco A. S. Netto (created this class)
     * @author Marcos Dias de Assuncao (modified this class to be used by GridSim
     *         and receive updates from the {@link ResourceWindow})
     *
     * @since GridSim Turbo Alpha 0.1
     * @see ResourceWindow
     */
    class ItemPanel extends JPanel implements ListSelectionListener {

        private JList itemQueueJList_;
        private JTextArea itemInfoArea_;
        private ItemListModel itemModel_ = new ItemListModel();

        protected ItemPanel() {
            // creates the list that contains the gridlets
            itemQueueJList_ = new JList();
            itemQueueJList_.setModel(itemModel_);
            itemQueueJList_.addListSelectionListener(this);

            JScrollPane scrollPaneJobs = new JScrollPane();
            scrollPaneJobs.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS);
            scrollPaneJobs.setViewportView(itemQueueJList_);
            scrollPaneJobs.setBorder(new TitledBorder("List"));

            super.setLayout(new GridLayout(1, 2));
            itemQueueJList_.setFont(itemQueueJList_.getFont().deriveFont(9.5f));
            itemQueueJList_.setBackground(super.getBackground());
            scrollPaneJobs.setBackground(super.getBackground());

            // the list that contains the details of an item
            itemInfoArea_ = new JTextArea();
            itemInfoArea_.setFont(itemInfoArea_.getFont().deriveFont(9.5f));

            Border panelBorder = new CompoundBorder(
                    BorderFactory.createEmptyBorder(10, 0, 10, 0),
                    BorderFactory.createTitledBorder(
                            BorderFactory.createEtchedBorder(EtchedBorder.RAISED),
                            "Information About Gridlets and Reservations"));
            super.setBorder(panelBorder);

            itemInfoArea_.setBorder(new TitledBorder("Details"));
            itemInfoArea_.setBackground(super.getBackground());
            itemInfoArea_.setEditable(false);
            super.add(scrollPaneJobs);
            super.add(itemInfoArea_);
        }

        /**
         * Handles events triggered by the change of the list of Gridlets.
         * Shows the selected item's details and highlights it on the chart.
         * NOTE(review): if the selection is cleared, getSelectedIndex() is -1
         * and itemModel_.get(-1) would throw — confirm selection is never cleared.
         * @see ListSelectionListener#valueChanged(ListSelectionEvent)
         */
        public void valueChanged(ListSelectionEvent event) {
            int selectedIndex = (int) itemQueueJList_.getSelectedIndex();
            ScheduleItem item = itemModel_.get(selectedIndex);
            if (item != null) {
                updateItemDetails(item);
                pnGraph.hlItem = item;
                pnGraph.repaint();
            }
        }

        /**
         * Inserts a Gridlet to the JList and the vector of Gridlets
         * @param insertItem the item to be inserted in the vector of
         *        schedule items and the JList
         */
        public void insertNewItem(final ScheduleItem insertItem) {
            if(insertItem == null)
                return;

            itemModel_.addItem(insertItem);
            // auto-select the newly inserted (last) item
            itemQueueJList_.setSelectedIndex(itemModel_.getSize() - 1);
            updateItemDetails(insertItem);
        }

        /**
         * Updates information in the list. This method checks whether
         * the Item is already in the list or not. If it is, just update
         * the details window. Otherwise, inserts the item in
         * the list and updates the details.
         * @param item the item whose information has to be updated
         */
        protected void updateItem(ScheduleItem item) {
            if(item == null)
                return;

            int position = itemModel_.getPosition(item.getID(),
                    item.getSenderID(), item.isAdvanceReservation());
            int selectedIndex = (int) itemQueueJList_.getSelectedIndex();
            // only refresh the details pane if this item is the one selected
            if(selectedIndex == position) {
                updateItemDetails(item);
            }
        }

        /**
         * Called when an update of the whole panel is needed
         */
        protected void updatePanel() {
            int selectedIndex = (int) itemQueueJList_.getSelectedIndex();
            ScheduleItem item = itemModel_.get(selectedIndex);
            if (item != null)
                updateItemDetails(item);
        }

        /**
         * Updates the details about the selected gridlet
         * in the gridlet details panel
         */
        private void updateItemDetails(ScheduleItem item){
            // item renders itself using the currently selected time unit
            itemInfoArea_.setText(item.toString(timeUnit));
        }

        // the list model. This is a wrapper around the vector
        // of elements to be shown in the list
        private class ItemListModel extends AbstractListModel {
            private Vector<ScheduleItem> items_ = new Vector<ScheduleItem>();

            public Object getElementAt(int index) {
                ScheduleItem item = items_.get(index);
                return item == null ? "" : itemSummary(item);
            }

            // Creates a small summary of an item
            private String itemSummary(ScheduleItem item) {
                return (item.isAdvanceReservation() ? "Res. " : "Grl. ")
                    + "ID: " + item.getID() + ", User: " + item.getSenderID();
            }

            public int getSize() {
                return items_.size();
            }

            public ScheduleItem get(int index) {
                return items_.get(index);
            }

            /*
             * Gets the position of an item in the list
             * @param itemId the item id
             * @param userId the user id
             * @param ar <tt>true</tt> if it is an advance reservation or
             *        <tt>false</tt> otherwise
             * @return the index of the element or <tt>-1</tt> if not found
             */
            private int getPosition(int itemId, int userId, boolean ar) {
                int sizeVector = items_.size();
                ScheduleItem item;
                // linear scan; list is small enough that this is fine
                for (int i = 0; i < sizeVector; i++) {
                    item = items_.get(i);
                    if (item.getID() == itemId
                            && item.getSenderID() == userId
                            && item.isAdvanceReservation() == ar)
                        return i;
                }
                return -1;
            }

            /*
             * Inserts an item to the model
             * @param insertItem the item to be inserted in the model
             */
            public void addItem(ScheduleItem insertItem) {
                int index = items_.size();
                items_.add(index, insertItem);
                // notify the JList so it repaints the new row
                super.fireIntervalAdded(this, index, index);
            }
        }
    }
}
//
//   Copyright 2016  Cityzen Data
//
//   Licensed under the Apache License, Version 2.0 (the "License");
//   you may not use this file except in compliance with the License.
//   You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
//   Unless required by applicable law or agreed to in writing, software
//   distributed under the License is distributed on an "AS IS" BASIS,
//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//   See the License for the specific language governing permissions and
//   limitations under the License.
//

package io.warp10.continuum.gts;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;

/**
 * Comparator class used to sort GTS instances.
 *
 * The sorting order is by name, then by labels, then by last timestamp and finally by most recent value.
 *
 * The side effect of using this comparator is that if sorting needs to check timestamps or values,
 * the GTS instances might end up being sorted internally (via {@code GTSHelper.sort}).
 */
public class GTSComparator implements Comparator<GeoTimeSerie> {

  @Override
  public int compare(GeoTimeSerie gts1, GeoTimeSerie gts2) {
    //
    // Compare names first; if they differ the comparison is settled.
    //
    int nameCompare = gts1.getName().compareTo(gts2.getName());

    if (0 != nameCompare) {
      return nameCompare;
    }

    //
    // Names are equal, compare labels.
    //
    int labelsCompare = compareLabels(gts1.getLabels(), gts2.getLabels());

    if (0 != labelsCompare) {
      return labelsCompare;
    }

    //
    // Names and labels are equal, compare ticks and values, most recent first.
    //
    return compareContent(gts1, gts2);
  }

  /**
   * Compares two label maps: first the values of the common leading (sorted) label
   * names, then the label count (fewer labels sorts first), and finally the first
   * differing label name.
   *
   * @param labels1 labels of the first GTS
   * @param labels2 labels of the second GTS
   * @return a negative, zero or positive value per the {@link Comparator} contract
   */
  private static int compareLabels(Map<String,String> labels1, Map<String,String> labels2) {
    List<String> keys1 = new ArrayList<String>(labels1.keySet());
    Collections.sort(keys1);

    List<String> keys2 = new ArrayList<String>(labels2.keySet());
    Collections.sort(keys2);

    // Index of the first differing label name (or the end of the shorter key list).
    // Past this point the label names differ and the comparison is that of the names.
    int differingLabel = 0;

    while (differingLabel < keys1.size() && differingLabel < keys2.size()
           && 0 == keys1.get(differingLabel).compareTo(keys2.get(differingLabel))) {
      differingLabel++;
    }

    // Compare the values of the labels whose names match.
    for (int i = 0; i < differingLabel; i++) {
      String key = keys1.get(i); // identical to keys2.get(i) for i < differingLabel
      int valueCompare = labels1.get(key).compareTo(labels2.get(key));
      if (0 != valueCompare) {
        return valueCompare;
      }
    }

    if (differingLabel == keys1.size() && differingLabel == keys2.size()) {
      // All labels are equal
      return 0;
    }

    // GTS1 has fewer labels than GTS2, it's therefore smaller
    if (differingLabel == keys1.size()) {
      return -1;
    }

    // GTS2 has fewer labels than GTS1, it's therefore smaller
    if (differingLabel == keys2.size()) {
      return 1;
    }

    // Compare the first differing label names
    return keys1.get(differingLabel).compareTo(keys2.get(differingLabel));
  }

  /**
   * Compares ticks (and, at equal ticks, values) starting from the most recent
   * datapoint and walking backwards. Sorts both instances as a side effect.
   * Returns 0 when the compared suffixes are identical.
   *
   * @param gts1 first GTS (sorted in place)
   * @param gts2 second GTS (sorted in place)
   * @return a negative, zero or positive value per the {@link Comparator} contract
   */
  private static int compareContent(GeoTimeSerie gts1, GeoTimeSerie gts2) {
    GTSHelper.sort(gts1);
    GTSHelper.sort(gts2);

    for (int idx = 0; idx < gts1.values && idx < gts2.values; idx++) {
      long tick1 = gts1.ticks[gts1.values - 1 - idx];
      long tick2 = gts2.ticks[gts2.values - 1 - idx];

      if (tick1 < tick2) {
        return -1;
      }
      if (tick1 > tick2) {
        return 1;
      }

      // Ticks are equal, compare values
      Object value1 = GTSHelper.valueAtIndex(gts1, gts1.values - 1 - idx);
      // BUGFIX: the original indexed gts2 with 'gts1.values - 1 - idx', reading the
      // wrong datapoint (or overflowing) whenever the two instances hold different
      // numbers of values.
      Object value2 = GTSHelper.valueAtIndex(gts2, gts2.values - 1 - idx);

      if (!value1.equals(value2)) {
        int valueCompare = compareValues(value1, value2);
        if (0 != valueCompare) {
          return valueCompare;
        }
      }
      // BUGFIX: the original used 'continue' when the values were equal, which skipped
      // the trailing 'idx++' and looped forever; the for-loop increment above makes
      // progress unconditional.
    }

    return 0;
  }

  /**
   * Compares two datapoint values already known to be non-equal.
   * Strings compare lexically (any String operand forces a String comparison),
   * numbers compare numerically, and booleans sort FALSE before everything else
   * and TRUE after everything else.
   *
   * @param value1 value from the first GTS
   * @param value2 value from the second GTS
   * @return a negative, zero or positive value per the {@link Comparator} contract
   */
  private static int compareValues(Object value1, Object value2) {
    if (value1 instanceof String || value2 instanceof String) {
      return value1.toString().compareTo(value2.toString());
    }

    if (value1 instanceof Number && value2 instanceof Number) {
      if (value1 instanceof Long && value2 instanceof Long) {
        long l1 = ((Number) value1).longValue();
        long l2 = ((Number) value2).longValue();
        return l1 < l2 ? -1 : (l1 > l2 ? 1 : 0);
      }

      if (value1 instanceof Double && value2 instanceof Double) {
        double d1 = ((Number) value1).doubleValue();
        double d2 = ((Number) value2).doubleValue();
        // Mirrors the original '<' / '>' comparison (NaN therefore compares as equal)
        return d1 < d2 ? -1 : (d1 > d2 ? 1 : 0);
      }

      // Mixed Long/Double operands: compare exactly via BigDecimal to avoid
      // precision loss on large longs, as in the original implementation.
      BigDecimal bd1 = value1 instanceof Long
          ? new BigDecimal(((Number) value1).longValue())
          : new BigDecimal(((Number) value1).doubleValue());
      BigDecimal bd2 = value2 instanceof Long
          ? new BigDecimal(((Number) value2).longValue())
          : new BigDecimal(((Number) value2).doubleValue());
      return bd1.compareTo(bd2);
    }

    if (value1 instanceof Boolean) {
      // False is less than anything else, true is more than anything else.
      // BUGFIX: the original tested 'value2' in the TRUE branch, so TRUE vs FALSE
      // (and TRUE vs any non boolean) fell through and was silently treated as equal.
      return Boolean.FALSE.equals(value1) ? -1 : 1;
    }

    if (value2 instanceof Boolean) {
      return Boolean.FALSE.equals(value2) ? 1 : -1;
    }

    // Unrecognized value types: treat as equal (matches the original fall-through).
    return 0;
  }
}
/*
 * Copyright 2014-2017 Richard Linsdale.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package uk.theretiredprogrammer.marktimingsrecorder.nodes;

import uk.theretiredprogrammer.marktimingsrecorder.dataobjects.Timing;
import uk.theretiredprogrammer.marktimingsrecorder.dataobjects.Coursemark;
import java.awt.datatransfer.DataFlavor;
import java.io.IOException;
import java.util.List;
import javax.swing.Action;
import uk.theretiredprogrammer.nbpcglibrary.annotations.UseCommonNodeAction;
import uk.theretiredprogrammer.nbpcglibrary.annotations.UseCommonNodeActions;
import uk.theretiredprogrammer.nbpcglibrary.data.entity.*;
import uk.theretiredprogrammer.nbpcglibrary.node.nodes.*;
import uk.theretiredprogrammer.nbpcglibrary.node.properties.*;
import uk.theretiredprogrammer.nbpcglibrary.api.LogicException;
import org.openide.nodes.PropertySupport;
import uk.theretiredprogrammer.marktimingsrecorder.dataobjects.Timing.TimingField;

/**
 * Node for TimingNode.
 *
 * (Class generated by NetBeans Platform Code Generator tools using script.xml.
 * Do not edit this file. Apply any changes to the definition file and
 * regenerate all files.)
 *
 * @author Richard Linsdale (richard at theretiredprogrammer.uk)
 */
public class TimingNode extends TreeNode<Integer, Timing, Coursemark, TimingField> {

    /**
     * Data Flavor for Nodes of type TimingNode
     */
    public static final DataFlavor TIMINGNODE_FLAVOR = new DataFlavor(TimingNode.class, "TimingNode");

    /**
     * Constructor
     *
     * @param e the entity associated with this node
     */
    public TimingNode(Timing e) {
        super("TimingNode", e, Timing.EM.class, CAN_COPY | CAN_DELETE | CAN_CUT);
    }

    // Declares the common cut/copy/delete actions for this node type via the
    // nbpcglibrary annotation framework; the annotated method body is never invoked.
    @UseCommonNodeActions({
        @UseCommonNodeAction(id = "uk.theretiredprogrammer.nbpcglibrary.node.actions.NodeCutAction", node = "TimingNode", position = 850, separator = 800),
        @UseCommonNodeAction(id = "uk.theretiredprogrammer.nbpcglibrary.node.actions.NodeCopyAction", node = "TimingNode", position = 870),
        @UseCommonNodeAction(id = "uk.theretiredprogrammer.nbpcglibrary.node.actions.NodeDeleteAction", node = "TimingNode", position = 890),})
    private static void useCommonNodeActions() {
    } // a dummy method

    /**
     * Get the data flavor used when this node takes part in clipboard or
     * drag-and-drop transfers.
     *
     * @return the TimingNode data flavor
     */
    @Override
    protected DataFlavor nodeGetDataFlavor() {
        return TIMINGNODE_FLAVOR;
    }

    /**
     * Get the node display name
     *
     * @return the display name
     */
    @Override
    public String getDisplayName() {
        return getEntity() != null ? getEntity().getDisplayName() : "#undefined#";
    }

    /**
     * Get the node display title
     *
     * @return the display title
     */
    @Override
    public String getDisplayTitle() {
        return getEntity() != null ? getEntity().getDisplayTitle() : "#undefined#";
    }

    // Read-only property wrappers backing this node's property sheet; populated in
    // createPropertyItems and refreshed in nodeProcessFieldChange.
    StringReadonlyProperty raceentryproperty;
    StringReadonlyProperty lapproperty;
    StringReadonlyProperty atproperty;
    StringReadonlyProperty estimatedproperty;
    StringReadonlyProperty coursemarkproperty;
    StringReadonlyProperty idproperty;
    StringReadonlyProperty createdbyproperty;
    StringReadonlyProperty createdonproperty;
    StringReadonlyProperty updatedbyproperty;
    StringReadonlyProperty updatedonproperty;

    /**
     * Create the property items shown in this node's property sheet, caching each
     * property wrapper in a field so it can be refreshed on entity field changes.
     *
     * @param props the list to which the property items are appended
     * @return the same list, with this node's properties added (unchanged if the
     * entity is null)
     */
    @Override
    protected List<PropertySupport.ReadOnly<?>> createPropertyItems(List<PropertySupport.ReadOnly<?>> props) {
        Timing e = getEntity();
        if (e != null) {
            props.add(new StringReadonlyProperty("Node Type", "TimingNode"));
            props.add(raceentryproperty = new StringReadonlyProperty("Raceentry", e.formatRaceentry()));
            props.add(lapproperty = new StringReadonlyProperty("Lap", e.formatLap()));
            props.add(atproperty = new StringReadonlyProperty("At", e.formatAt()));
            props.add(estimatedproperty = new StringReadonlyProperty("Estimated", e.formatEstimated()));
            props.add(coursemarkproperty = new StringReadonlyProperty("Coursemark", e.formatCoursemark()));
            props.add(idproperty = new StringReadonlyProperty("Id", e.formatId()));
            props.add(createdbyproperty = new StringReadonlyProperty("Created by", e.formatCreatedby()));
            props.add(createdonproperty = new StringReadonlyProperty("Created on", e.formatCreatedon()));
            props.add(updatedbyproperty = new StringReadonlyProperty("Updated by", e.formatUpdatedby()));
            props.add(updatedonproperty = new StringReadonlyProperty("Updated on", e.formatUpdatedon()));
        }
        return props;
    }

    /**
     * Refresh the cached property values after a change to the underlying entity.
     *
     * @param field the entity field that changed, or null to refresh all
     * properties (the id and audit properties are only refreshed in that case)
     */
    @Override
    protected void nodeProcessFieldChange(TimingField field) {
        Timing e = getEntity();
        if (field == null) {
            // NOTE(review): idproperty appears to act as a sentinel for "property
            // sheet has been built" — presumably all wrappers are assigned together
            // in createPropertyItems; confirm before relying on it.
            if (e != null && idproperty != null) {
                raceentryproperty.update(e.formatRaceentry());
                lapproperty.update(e.formatLap());
                atproperty.update(e.formatAt());
                estimatedproperty.update(e.formatEstimated());
                coursemarkproperty.update(e.formatCoursemark());
                idproperty.update(e.formatId());
                createdbyproperty.update(e.formatCreatedby());
                createdonproperty.update(e.formatCreatedon());
                updatedbyproperty.update(e.formatUpdatedby());
                updatedonproperty.update(e.formatUpdatedon());
                propertyChange();
            }
        } else {
            // A single field changed: update just that property and fire a named change.
            switch (field) {
                case RACEENTRY:
                    if (e != null && raceentryproperty != null) {
                        raceentryproperty.update(e.formatRaceentry());
                        propertyChange("Raceentry");
                    }
                    break;
                case LAP:
                    if (e != null && lapproperty != null) {
                        lapproperty.update(e.formatLap());
                        propertyChange("Lap");
                    }
                    break;
                case AT:
                    if (e != null && atproperty != null) {
                        atproperty.update(e.formatAt());
                        propertyChange("At");
                    }
                    break;
                case ESTIMATED:
                    if (e != null && estimatedproperty != null) {
                        estimatedproperty.update(e.formatEstimated());
                        propertyChange("Estimated");
                    }
                    break;
                case COURSEMARK:
                    if (e != null && coursemarkproperty != null) {
                        coursemarkproperty.update(e.formatCoursemark());
                        propertyChange("Coursemark");
                    }
                    break;
            }
        }
    }

    /**
     * Handle a cut-and-paste of a child node onto this node.
     * Timing entities never accept children, so this always reports a logic error.
     *
     * @param child the node being pasted
     * @throws IOException declared by the superclass contract
     */
    @Override
    protected void nodeCutPaste(BasicNode child) throws IOException {
        CoreEntity e = child.getEntity();
        addNewChild(e);
    }

    /**
     * Handle a copy-and-paste of a child node onto this node.
     * Timing entities never accept children, so this always reports a logic error.
     *
     * @param child the node being pasted
     * @throws IOException declared by the superclass contract
     */
    @Override
    protected void nodeCopyPaste(BasicNode child) throws IOException {
        CoreEntity e = child.getEntity();
        addNewChild(e);
    }

    // Timing nodes have no child entity classes, so any paste attempt is a logic error.
    private void addNewChild(CoreEntity e) {
        throw new LogicException("Timing Entity does not have a child of the requested class (" + e.getClass().getSimpleName() + ")");
    }

    /**
     * Delete this node by removing the underlying entity.
     *
     * @throws IOException declared by the superclass contract
     */
    @Override
    protected void nodeDelete() throws IOException {
        getEntity().remove();
    }

    // No reorderable children, so this is intentionally a no-op.
    @Override
    protected void nodeReorderChildByFlavor(DataFlavor df, int[] perm) {
    }

    /**
     * Get the default (double-click) action registered for this node type.
     *
     * @return the preferred action
     */
    @Override
    public Action getPreferredAction() {
        return findDefaultAction("TimingNode");
    }

    /**
     * Get the context-menu actions registered for this node type.
     *
     * @param context true if the actions are for a context menu
     * @return the registered actions
     */
    @Override
    public Action[] getActions(boolean context) {
        return findActions("TimingNode");
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math3.stat.inference; import java.math.BigDecimal; import java.util.Arrays; import java.util.Iterator; import org.apache.commons.math3.distribution.RealDistribution; import org.apache.commons.math3.exception.InsufficientDataException; import org.apache.commons.math3.exception.MathArithmeticException; import org.apache.commons.math3.exception.NullArgumentException; import org.apache.commons.math3.exception.NumberIsTooLargeException; import org.apache.commons.math3.exception.OutOfRangeException; import org.apache.commons.math3.exception.TooManyIterationsException; import org.apache.commons.math3.exception.util.LocalizedFormats; import org.apache.commons.math3.fraction.BigFraction; import org.apache.commons.math3.fraction.BigFractionField; import org.apache.commons.math3.fraction.FractionConversionException; import org.apache.commons.math3.linear.Array2DRowFieldMatrix; import org.apache.commons.math3.linear.Array2DRowRealMatrix; import org.apache.commons.math3.linear.FieldMatrix; import org.apache.commons.math3.linear.RealMatrix; import org.apache.commons.math3.random.RandomGenerator; import org.apache.commons.math3.random.Well19937c; import 
org.apache.commons.math3.util.CombinatoricsUtils; import org.apache.commons.math3.util.FastMath; import org.apache.commons.math3.util.MathArrays; /** * Implementation of the <a href="http://en.wikipedia.org/wiki/Kolmogorov-Smirnov_test"> * Kolmogorov-Smirnov (K-S) test</a> for equality of continuous distributions. * <p> * The K-S test uses a statistic based on the maximum deviation of the empirical distribution of * sample data points from the distribution expected under the null hypothesis. For one-sample tests * evaluating the null hypothesis that a set of sample data points follow a given distribution, the * test statistic is \(D_n=\sup_x |F_n(x)-F(x)|\), where \(F\) is the expected distribution and * \(F_n\) is the empirical distribution of the \(n\) sample data points. The distribution of * \(D_n\) is estimated using a method based on [1] with certain quick decisions for extreme values * given in [2]. * </p> * <p> * Two-sample tests are also supported, evaluating the null hypothesis that the two samples * {@code x} and {@code y} come from the same underlying distribution. In this case, the test * statistic is \(D_{n,m}=\sup_t | F_n(t)-F_m(t)|\) where \(n\) is the length of {@code x}, \(m\) is * the length of {@code y}, \(F_n\) is the empirical distribution that puts mass \(1/n\) at each of * the values in {@code x} and \(F_m\) is the empirical distribution of the {@code y} values. The * default 2-sample test method, {@link #kolmogorovSmirnovTest(double[], double[])} works as * follows: * <ul> * <li>For very small samples (where the product of the sample sizes is less than * {@value #SMALL_SAMPLE_PRODUCT}), the exact distribution is used to compute the p-value for the * 2-sample test.</li> * <li>For mid-size samples (product of sample sizes greater than or equal to * {@value #SMALL_SAMPLE_PRODUCT} but less than {@value #LARGE_SAMPLE_PRODUCT}), Monte Carlo * simulation is used to compute the p-value. 
The simulation randomly generates partitions of \(m + * n\) into an \(m\)-set and an \(n\)-set and reports the proportion that give \(D\) values * exceeding the observed value.</li> * <li>When the product of the sample sizes exceeds {@value #LARGE_SAMPLE_PRODUCT}, the asymptotic * distribution of \(D_{n,m}\) is used. See {@link #approximateP(double, int, int)} for details on * the approximation.</li> * </ul> * </p> * <p> * In the two-sample case, \(D_{n,m}\) has a discrete distribution. This makes the p-value * associated with the null hypothesis \(H_0 : D_{n,m} \ge d \) differ from \(H_0 : D_{n,m} > d \) * by the mass of the observed value \(d\). To distinguish these, the two-sample tests use a boolean * {@code strict} parameter. This parameter is ignored for large samples. * </p> * <p> * The methods used by the 2-sample default implementation are also exposed directly: * <ul> * <li>{@link #exactP(double, int, int, boolean)} computes exact 2-sample p-values</li> * <li>{@link #monteCarloP(double, int, int, boolean, int)} computes 2-sample p-values by Monte * Carlo simulation</li> * <li>{@link #approximateP(double, int, int)} uses the asymptotic distribution The {@code boolean} * arguments in the first two methods allow the probability used to estimate the p-value to be * expressed using strict or non-strict inequality. See * {@link #kolmogorovSmirnovTest(double[], double[], boolean)}.</li> * </ul> * </p> * <p> * References: * <ul> * <li>[1] <a href="http://www.jstatsoft.org/v08/i18/"> Evaluating Kolmogorov's Distribution</a> by * George Marsaglia, Wai Wan Tsang, and Jingbo Wang</li> * <li>[2] <a href="http://www.jstatsoft.org/v39/i11/"> Computing the Two-Sided Kolmogorov-Smirnov * Distribution</a> by Richard Simard and Pierre L'Ecuyer</li> * </ul> * <br/> * Note that [1] contains an error in computing h, refer to <a * href="https://issues.apache.org/jira/browse/MATH-437">MATH-437</a> for details. 
* </p> * * @since 3.3 * @version $Id: KolmogorovSmirnovTest.java 1591211 2014-04-30 08:20:51Z luc $ */ public class KolmogorovSmirnovTest { /** * Bound on the number of partial sums in {@link #ksSum(double, double, int)} */ protected static final int MAXIMUM_PARTIAL_SUM_COUNT = 100000; /** Convergence criterion for {@link #ksSum(double, double, int)} */ protected static final double KS_SUM_CAUCHY_CRITERION = 1E-20; /** When product of sample sizes is less than this value, 2-sample K-S test is exact */ protected static final int SMALL_SAMPLE_PRODUCT = 200; /** * When product of sample sizes exceeds this value, 2-sample K-S test uses asymptotic * distribution for strict inequality p-value. */ protected static final int LARGE_SAMPLE_PRODUCT = 10000; /** Default number of iterations used by {@link #monteCarloP(double, int, int, boolean, int)} */ protected static final int MONTE_CARLO_ITERATIONS = 1000000; /** Random data generator used by {@link #monteCarloP(double, int, int, boolean, int)} */ private final RandomGenerator rng; /** * Construct a KolmogorovSmirnovTest instance with a default random data generator. */ public KolmogorovSmirnovTest() { rng = new Well19937c(); } /** * Construct a KolmogorovSmirnovTest with the provided random data generator. * * @param rng random data generator used by {@link #monteCarloP(double, int, int, boolean, int)} */ public KolmogorovSmirnovTest(RandomGenerator rng) { this.rng = rng; } /** * Computes the <i>p-value</i>, or <i>observed significance level</i>, of a one-sample <a * href="http://en.wikipedia.org/wiki/Kolmogorov-Smirnov_test"> Kolmogorov-Smirnov test</a> * evaluating the null hypothesis that {@code data} conforms to {@code distribution}. If * {@code exact} is true, the distribution used to compute the p-value is computed using * extended precision. See {@link #cdfExact(double, int)}. 
* * @param distribution reference distribution * @param data sample being being evaluated * @param exact whether or not to force exact computation of the p-value * @return the p-value associated with the null hypothesis that {@code data} is a sample from * {@code distribution} * @throws InsufficientDataException if {@code data} does not have length at least 2 * @throws NullArgumentException if {@code data} is null */ public double kolmogorovSmirnovTest(RealDistribution distribution, double[] data, boolean exact) { return 1d - cdf(kolmogorovSmirnovStatistic(distribution, data), data.length, exact); } /** * Computes the one-sample Kolmogorov-Smirnov test statistic, \(D_n=\sup_x |F_n(x)-F(x)|\) where * \(F\) is the distribution (cdf) function associated with {@code distribution}, \(n\) is the * length of {@code data} and \(F_n\) is the empirical distribution that puts mass \(1/n\) at * each of the values in {@code data}. * * @param distribution reference distribution * @param data sample being evaluated * @return Kolmogorov-Smirnov statistic \(D_n\) * @throws InsufficientDataException if {@code data} does not have length at least 2 * @throws NullArgumentException if {@code data} is null */ public double kolmogorovSmirnovStatistic(RealDistribution distribution, double[] data) { checkArray(data); final int n = data.length; final double nd = n; final double[] dataCopy = new double[n]; System.arraycopy(data, 0, dataCopy, 0, n); Arrays.sort(dataCopy); double d = 0d; for (int i = 1; i <= n; i++) { final double yi = distribution.cumulativeProbability(dataCopy[i - 1]); final double currD = FastMath.max(yi - (i - 1) / nd, i / nd - yi); if (currD > d) { d = currD; } } return d; } /** * Computes the <i>p-value</i>, or <i>observed significance level</i>, of a two-sample <a * href="http://en.wikipedia.org/wiki/Kolmogorov-Smirnov_test"> Kolmogorov-Smirnov test</a> * evaluating the null hypothesis that {@code x} and {@code y} are samples drawn from the same * probability 
distribution. Specifically, what is returned is an estimate of the probability * that the {@link #kolmogorovSmirnovStatistic(double[], double[])} associated with a randomly * selected partition of the combined sample into subsamples of sizes {@code x.length} and * {@code y.length} will strictly exceed (if {@code strict} is {@code true}) or be at least as * large as {@code strict = false}) as {@code kolmogorovSmirnovStatistic(x, y)}. * <ul> * <li>For very small samples (where the product of the sample sizes is less than * {@value #SMALL_SAMPLE_PRODUCT}), the exact distribution is used to compute the p-value. This * is accomplished by enumerating all partitions of the combined sample into two subsamples of * the respective sample sizes, computing \(D_{n,m}\) for each partition and returning the * proportion of partitions that give \(D\) values exceeding the observed value.</li> * <li>For mid-size samples (product of sample sizes greater than or equal to * {@value #SMALL_SAMPLE_PRODUCT} but less than {@value #LARGE_SAMPLE_PRODUCT}), Monte Carlo * simulation is used to compute the p-value. The simulation randomly generates partitions and * reports the proportion that give \(D\) values exceeding the observed value.</li> * <li>When the product of the sample sizes exceeds {@value #LARGE_SAMPLE_PRODUCT}, the * asymptotic distribution of \(D_{n,m}\) is used. 
See {@link #approximateP(double, int, int)} * for details on the approximation.</li> * </ul> * * @param x first sample dataset * @param y second sample dataset * @param strict whether or not the probability to compute is expressed as a strict inequality * (ignored for large samples) * @return p-value associated with the null hypothesis that {@code x} and {@code y} represent * samples from the same distribution * @throws InsufficientDataException if either {@code x} or {@code y} does not have length at * least 2 * @throws NullArgumentException if either {@code x} or {@code y} is null */ public double kolmogorovSmirnovTest(double[] x, double[] y, boolean strict) { if (x.length * y.length < SMALL_SAMPLE_PRODUCT) { return exactP(kolmogorovSmirnovStatistic(x, y), x.length, y.length, strict); } if (x.length * y.length < LARGE_SAMPLE_PRODUCT) { return monteCarloP(kolmogorovSmirnovStatistic(x, y), x.length, y.length, strict, MONTE_CARLO_ITERATIONS); } return approximateP(kolmogorovSmirnovStatistic(x, y), x.length, y.length); } /** * Computes the <i>p-value</i>, or <i>observed significance level</i>, of a two-sample <a * href="http://en.wikipedia.org/wiki/Kolmogorov-Smirnov_test"> Kolmogorov-Smirnov test</a> * evaluating the null hypothesis that {@code x} and {@code y} are samples drawn from the same * probability distribution. Assumes the strict form of the inequality used to compute the * p-value. See {@link #kolmogorovSmirnovTest(RealDistribution, double[], boolean)}. 
* * @param x first sample dataset * @param y second sample dataset * @return p-value associated with the null hypothesis that {@code x} and {@code y} represent * samples from the same distribution * @throws InsufficientDataException if either {@code x} or {@code y} does not have length at * least 2 * @throws NullArgumentException if either {@code x} or {@code y} is null */ public double kolmogorovSmirnovTest(double[] x, double[] y) { return kolmogorovSmirnovTest(x, y, true); } /** * Computes the two-sample Kolmogorov-Smirnov test statistic, \(D_{n,m}=\sup_x |F_n(x)-F_m(x)|\) * where \(n\) is the length of {@code x}, \(m\) is the length of {@code y}, \(F_n\) is the * empirical distribution that puts mass \(1/n\) at each of the values in {@code x} and \(F_m\) * is the empirical distribution of the {@code y} values. * * @param x first sample * @param y second sample * @return test statistic \(D_{n,m}\) used to evaluate the null hypothesis that {@code x} and * {@code y} represent samples from the same underlying distribution * @throws InsufficientDataException if either {@code x} or {@code y} does not have length at * least 2 * @throws NullArgumentException if either {@code x} or {@code y} is null */ public double kolmogorovSmirnovStatistic(double[] x, double[] y) { checkArray(x); checkArray(y); // Copy and sort the sample arrays final double[] sx = MathArrays.copyOf(x); final double[] sy = MathArrays.copyOf(y); Arrays.sort(sx); Arrays.sort(sy); final int n = sx.length; final int m = sy.length; // Find the max difference between cdf_x and cdf_y double supD = 0d; // First walk x points for (int i = 0; i < n; i++) { final double cdf_x = (i + 1d) / n; final int yIndex = Arrays.binarySearch(sy, sx[i]); final double cdf_y = yIndex >= 0 ? 
(yIndex + 1d) / m : (-yIndex - 1d) / m; final double curD = FastMath.abs(cdf_x - cdf_y); if (curD > supD) { supD = curD; } } // Now look at y for (int i = 0; i < m; i++) { final double cdf_y = (i + 1d) / m; final int xIndex = Arrays.binarySearch(sx, sy[i]); final double cdf_x = xIndex >= 0 ? (xIndex + 1d) / n : (-xIndex - 1d) / n; final double curD = FastMath.abs(cdf_x - cdf_y); if (curD > supD) { supD = curD; } } return supD; } /** * Computes the <i>p-value</i>, or <i>observed significance level</i>, of a one-sample <a * href="http://en.wikipedia.org/wiki/Kolmogorov-Smirnov_test"> Kolmogorov-Smirnov test</a> * evaluating the null hypothesis that {@code data} conforms to {@code distribution}. * * @param distribution reference distribution * @param data sample being being evaluated * @return the p-value associated with the null hypothesis that {@code data} is a sample from * {@code distribution} * @throws InsufficientDataException if {@code data} does not have length at least 2 * @throws NullArgumentException if {@code data} is null */ public double kolmogorovSmirnovTest(RealDistribution distribution, double[] data) { return kolmogorovSmirnovTest(distribution, data, false); } /** * Performs a <a href="http://en.wikipedia.org/wiki/Kolmogorov-Smirnov_test"> Kolmogorov-Smirnov * test</a> evaluating the null hypothesis that {@code data} conforms to {@code distribution}. 
* * @param distribution reference distribution * @param data sample being being evaluated * @param alpha significance level of the test * @return true iff the null hypothesis that {@code data} is a sample from {@code distribution} * can be rejected with confidence 1 - {@code alpha} * @throws InsufficientDataException if {@code data} does not have length at least 2 * @throws NullArgumentException if {@code data} is null */ public boolean kolmogorovSmirnovTest(RealDistribution distribution, double[] data, double alpha) { if ((alpha <= 0) || (alpha > 0.5)) { throw new OutOfRangeException(LocalizedFormats.OUT_OF_BOUND_SIGNIFICANCE_LEVEL, alpha, 0, 0.5); } return kolmogorovSmirnovTest(distribution, data) < alpha; } /** * Calculates \(P(D_n < d)\) using the method described in [1] with quick decisions for extreme * values given in [2] (see above). The result is not exact as with * {@link #cdfExact(double, int)} because calculations are based on * {@code double} rather than {@link org.apache.commons.math3.fraction.BigFraction}. * * @param d statistic * @param n sample size * @return \(P(D_n < d)\) * @throws MathArithmeticException if algorithm fails to convert {@code h} to a * {@link org.apache.commons.math3.fraction.BigFraction} in expressing {@code d} as \((k * - h) / m\) for integer {@code k, m} and \(0 \le h < 1\) */ public double cdf(double d, int n) throws MathArithmeticException { return cdf(d, n, false); } /** * Calculates {@code P(D_n < d)}. The result is exact in the sense that BigFraction/BigReal is * used everywhere at the expense of very slow execution time. Almost never choose this in real * applications unless you are very sure; this is almost solely for verification purposes. * Normally, you would choose {@link #cdf(double, int)}. See the class * javadoc for definitions and algorithm description. 
* * @param d statistic * @param n sample size * @return \(P(D_n < d)\) * @throws MathArithmeticException if the algorithm fails to convert {@code h} to a * {@link org.apache.commons.math3.fraction.BigFraction} in expressing {@code d} as \((k * - h) / m\) for integer {@code k, m} and \(0 \le h < 1\) */ public double cdfExact(double d, int n) throws MathArithmeticException { return cdf(d, n, true); } /** * Calculates {@code P(D_n < d)} using method described in [1] with quick decisions for extreme * values given in [2] (see above). * * @param d statistic * @param n sample size * @param exact whether the probability should be calculated exact using * {@link org.apache.commons.math3.fraction.BigFraction} everywhere at the expense of * very slow execution time, or if {@code double} should be used convenient places to * gain speed. Almost never choose {@code true} in real applications unless you are very * sure; {@code true} is almost solely for verification purposes. * @return \(P(D_n < d)\) * @throws MathArithmeticException if algorithm fails to convert {@code h} to a * {@link org.apache.commons.math3.fraction.BigFraction} in expressing {@code d} as \((k * - h) / m\) for integer {@code k, m} and \(0 \le h < 1\). */ public double cdf(double d, int n, boolean exact) throws MathArithmeticException { final double ninv = 1 / ((double) n); final double ninvhalf = 0.5 * ninv; if (d <= ninvhalf) { return 0; } else if (ninvhalf < d && d <= ninv) { double res = 1; final double f = 2 * d - ninv; // n! f^n = n*f * (n-1)*f * ... * 1*x for (int i = 1; i <= n; ++i) { res *= i * f; } return res; } else if (1 - ninv <= d && d < 1) { return 1 - 2 * Math.pow(1 - d, n); } else if (1 <= d) { return 1; } return exact ? exactK(d, n) : roundedK(d, n); } /** * Calculates the exact value of {@code P(D_n < d)} using the method described in [1] (reference * in class javadoc above) and {@link org.apache.commons.math3.fraction.BigFraction} (see * above). 
* * @param d statistic * @param n sample size * @return the two-sided probability of \(P(D_n < d)\) * @throws MathArithmeticException if algorithm fails to convert {@code h} to a * {@link org.apache.commons.math3.fraction.BigFraction} in expressing {@code d} as \((k * - h) / m\) for integer {@code k, m} and \(0 \le h < 1\). */ private double exactK(double d, int n) throws MathArithmeticException { final int k = (int) Math.ceil(n * d); final FieldMatrix<BigFraction> H = this.createH(d, n); final FieldMatrix<BigFraction> Hpower = H.power(n); BigFraction pFrac = Hpower.getEntry(k - 1, k - 1); for (int i = 1; i <= n; ++i) { pFrac = pFrac.multiply(i).divide(n); } /* * BigFraction.doubleValue converts numerator to double and the denominator to double and * divides afterwards. That gives NaN quite easy. This does not (scale is the number of * digits): */ return pFrac.bigDecimalValue(20, BigDecimal.ROUND_HALF_UP).doubleValue(); } /** * Calculates {@code P(D_n < d)} using method described in [1] and doubles (see above). * * @param d statistic * @param n sample size * @return the two-sided probability of \(P(D_n < d)\) * @throws MathArithmeticException if algorithm fails to convert {@code h} to a * {@link org.apache.commons.math3.fraction.BigFraction} in expressing {@code d} as \((k * - h) / m\ for integer {@code k, m} and \(0 <= h < 1\). 
*/ private double roundedK(double d, int n) throws MathArithmeticException { final int k = (int) Math.ceil(n * d); final FieldMatrix<BigFraction> HBigFraction = this.createH(d, n); final int m = HBigFraction.getRowDimension(); /* * Here the rounding part comes into play: use RealMatrix instead of * FieldMatrix<BigFraction> */ final RealMatrix H = new Array2DRowRealMatrix(m, m); for (int i = 0; i < m; ++i) { for (int j = 0; j < m; ++j) { H.setEntry(i, j, HBigFraction.getEntry(i, j).doubleValue()); } } final RealMatrix Hpower = H.power(n); double pFrac = Hpower.getEntry(k - 1, k - 1); for (int i = 1; i <= n; ++i) { pFrac *= (double) i / (double) n; } return pFrac; } /*** * Creates {@code H} of size {@code m x m} as described in [1] (see above). * * @param d statistic * @param n sample size * @return H matrix * @throws NumberIsTooLargeException if fractional part is greater than 1 * @throws FractionConversionException if algorithm fails to convert {@code h} to a * {@link org.apache.commons.math3.fraction.BigFraction} in expressing {@code d} as \((k * - h) / m\) for integer {@code k, m} and \(0 <= h < 1\). */ private FieldMatrix<BigFraction> createH(double d, int n) throws NumberIsTooLargeException, FractionConversionException { final int k = (int) Math.ceil(n * d); final int m = 2 * k - 1; final double hDouble = k - n * d; if (hDouble >= 1) { throw new NumberIsTooLargeException(hDouble, 1.0, false); } BigFraction h = null; try { h = new BigFraction(hDouble, 1.0e-20, 10000); } catch (final FractionConversionException e1) { try { h = new BigFraction(hDouble, 1.0e-10, 10000); } catch (final FractionConversionException e2) { h = new BigFraction(hDouble, 1.0e-5, 10000); } } final BigFraction[][] Hdata = new BigFraction[m][m]; /* * Start by filling everything with either 0 or 1. 
*/ for (int i = 0; i < m; ++i) { for (int j = 0; j < m; ++j) { if (i - j + 1 < 0) { Hdata[i][j] = BigFraction.ZERO; } else { Hdata[i][j] = BigFraction.ONE; } } } /* * Setting up power-array to avoid calculating the same value twice: hPowers[0] = h^1 ... * hPowers[m-1] = h^m */ final BigFraction[] hPowers = new BigFraction[m]; hPowers[0] = h; for (int i = 1; i < m; ++i) { hPowers[i] = h.multiply(hPowers[i - 1]); } /* * First column and last row has special values (each other reversed). */ for (int i = 0; i < m; ++i) { Hdata[i][0] = Hdata[i][0].subtract(hPowers[i]); Hdata[m - 1][i] = Hdata[m - 1][i].subtract(hPowers[m - i - 1]); } /* * [1] states: "For 1/2 < h < 1 the bottom left element of the matrix should be (1 - 2*h^m + * (2h - 1)^m )/m!" Since 0 <= h < 1, then if h > 1/2 is sufficient to check: */ if (h.compareTo(BigFraction.ONE_HALF) == 1) { Hdata[m - 1][0] = Hdata[m - 1][0].add(h.multiply(2).subtract(1).pow(m)); } /* * Aside from the first column and last row, the (i, j)-th element is 1/(i - j + 1)! if i - * j + 1 >= 0, else 0. 1's and 0's are already put, so only division with (i - j + 1)! is * needed in the elements that have 1's. There is no need to calculate (i - j + 1)! and then * divide - small steps avoid overflows. Note that i - j + 1 > 0 <=> i + 1 > j instead of * j'ing all the way to m. Also note that it is started at g = 2 because dividing by 1 isn't * really necessary. */ for (int i = 0; i < m; ++i) { for (int j = 0; j < i + 1; ++j) { if (i - j + 1 > 0) { for (int g = 2; g <= i - j + 1; ++g) { Hdata[i][j] = Hdata[i][j].divide(g); } } } } return new Array2DRowFieldMatrix<BigFraction>(BigFractionField.getInstance(), Hdata); } /** * Verifies that {@code array} has length at least 2. 
* * @param array array to test * @throws NullArgumentException if array is null * @throws InsufficientDataException if array is too short */ private void checkArray(double[] array) { if (array == null) { throw new NullArgumentException(LocalizedFormats.NULL_NOT_ALLOWED); } if (array.length < 2) { throw new InsufficientDataException(LocalizedFormats.INSUFFICIENT_OBSERVED_POINTS_IN_SAMPLE, array.length, 2); } } /** * Computes \( 1 + 2 \sum_{i=1}^\infty (-1)^i e^{-2 i^2 t^2} \) stopping when successive partial * sums are within {@code tolerance} of one another, or when {@code maxIterations} partial sums * have been computed. If the sum does not converge before {@code maxIterations} iterations a * {@link TooManyIterationsException} is thrown. * * @param t argument * @param tolerance Cauchy criterion for partial sums * @param maxIterations maximum number of partial sums to compute * @return Kolmogorov sum evaluated at t * @throws TooManyIterationsException if the series does not converge */ public double ksSum(double t, double tolerance, int maxIterations) { // TODO: for small t (say less than 1), the alternative expansion in part 3 of [1] // from class javadoc should be used. final double x = -2 * t * t; int sign = -1; long i = 1; double partialSum = 0.5d; double delta = 1; while (delta > tolerance && i < maxIterations) { delta = FastMath.exp(x * i * i); partialSum += sign * delta; sign *= -1; i++; } if (i == maxIterations) { throw new TooManyIterationsException(maxIterations); } return partialSum * 2; } /** * Computes \(P(D_{n,m} > d)\) if {@code strict} is {@code true}; otherwise \(P(D_{n,m} \ge * d)\), where \(D_{n,m}\) is the 2-sample Kolmogorov-Smirnov statistic. See * {@link #kolmogorovSmirnovStatistic(double[], double[])} for the definition of \(D_{n,m}\). 
* <p> * The returned probability is exact, obtained by enumerating all partitions of {@code m + n} * into {@code m} and {@code n} sets, computing \(D_{n,m}\) for each partition and counting the * number of partitions that yield \(D_{n,m}\) values exceeding (resp. greater than or equal to) * {@code d}. * </p> * <p> * <strong>USAGE NOTE</strong>: Since this method enumerates all combinations in \({m+n} \choose * {n}\), it is very slow if called for large {@code m, n}. For this reason, * {@link #kolmogorovSmirnovTest(double[], double[])} uses this only for {@code m * n < } * {@value #SMALL_SAMPLE_PRODUCT}. * </p> * * @param d D-statistic value * @param n first sample size * @param m second sample size * @param strict whether or not the probability to compute is expressed as a strict inequality * @return probability that a randomly selected m-n partition of m + n generates \(D_{n,m}\) * greater than (resp. greater than or equal to) {@code d} */ public double exactP(double d, int n, int m, boolean strict) { Iterator<int[]> combinationsIterator = CombinatoricsUtils.combinationsIterator(n + m, n); long tail = 0; final double[] nSet = new double[n]; final double[] mSet = new double[m]; while (combinationsIterator.hasNext()) { // Generate an n-set final int[] nSetI = combinationsIterator.next(); // Copy the n-set to nSet and its complement to mSet int j = 0; int k = 0; for (int i = 0; i < n + m; i++) { if (j < n && nSetI[j] == i) { nSet[j++] = i; } else { mSet[k++] = i; } } final double curD = kolmogorovSmirnovStatistic(nSet, mSet); if (curD > d) { tail++; } else if (curD == d && !strict) { tail++; } } return (double) tail / (double) CombinatoricsUtils.binomialCoefficient(n + m, n); } /** * Uses the Kolmogorov-Smirnov distribution to approximate \(P(D_{n,m} > d)\) where \(D_{n,m}\) * is the 2-sample Kolmogorov-Smirnov statistic. See * {@link #kolmogorovSmirnovStatistic(double[], double[])} for the definition of \(D_{n,m}\). 
* <p> * Specifically, what is returned is \(1 - k(d \sqrt{mn / (m + n)})\) where \(k(t) = 1 + 2 * \sum_{i=1}^\infty (-1)^i e^{-2 i^2 t^2}\). See {@link #ksSum(double, double, int)} for * details on how convergence of the sum is determined. This implementation passes {@code ksSum} * {@value #KS_SUM_CAUCHY_CRITERION} as {@code tolerance} and * {@value #MAXIMUM_PARTIAL_SUM_COUNT} as {@code maxIterations}. * </p> * * @param d D-statistic value * @param n first sample size * @param m second sample size * @return approximate probability that a randomly selected m-n partition of m + n generates * \(D_{n,m}\) greater than {@code d} */ public double approximateP(double d, int n, int m) { final double dm = m; final double dn = n; return 1 - ksSum(d * FastMath.sqrt((dm * dn) / (dm + dn)), KS_SUM_CAUCHY_CRITERION, MAXIMUM_PARTIAL_SUM_COUNT); } /** * Uses Monte Carlo simulation to approximate \(P(D_{n,m} > d)\) where \(D_{n,m}\) is the * 2-sample Kolmogorov-Smirnov statistic. See * {@link #kolmogorovSmirnovStatistic(double[], double[])} for the definition of \(D_{n,m}\). * <p> * The simulation generates {@code iterations} random partitions of {@code m + n} into an * {@code n} set and an {@code m} set, computing \(D_{n,m}\) for each partition and returning * the proportion of values that are greater than {@code d}, or greater than or equal to * {@code d} if {@code strict} is {@code false}. * </p> * * @param d D-statistic value * @param n first sample size * @param m second sample size * @param iterations number of random partitions to generate * @param strict whether or not the probability to compute is expressed as a strict inequality * @return proportion of randomly generated m-n partitions of m + n that result in \(D_{n,m}\) * greater than (resp. 
     *         greater than or equal to) {@code d}
     */
    public double monteCarloP(double d, int n, int m, boolean strict, int iterations) {
        // nPlusMSet holds 0..n+m-1; its first n entries define the current n-set.
        final int[] nPlusMSet = MathArrays.natural(m + n);
        final double[] nSet = new double[n];
        final double[] mSet = new double[m];
        int tail = 0;
        for (int i = 0; i < iterations; i++) {
            copyPartition(nSet, mSet, nPlusMSet, n, m);
            final double curD = kolmogorovSmirnovStatistic(nSet, mSet);
            if (curD > d) {
                tail++;
            } else if (curD == d && !strict) {
                tail++;
            }
            // Randomize for the NEXT iteration, then sort the first n entries to restore
            // copyPartition's sorted-prefix precondition.
            // NOTE(review): because the shuffle happens after the statistic is computed, the
            // first iteration always uses the identity partition [0..n-1] vs [n..n+m-1] —
            // confirm this is intended rather than shuffling before the first draw.
            MathArrays.shuffle(nPlusMSet, rng);
            Arrays.sort(nPlusMSet, 0, n);
        }
        return (double) tail / iterations;
    }

    /**
     * Copies the first {@code n} elements of {@code nSetI} into {@code nSet} and its complement
     * relative to {@code m + n} into {@code mSet}. For example, if {@code m = 3}, {@code n = 3} and
     * {@code nSetI = [1,4,5,2,3,0]} then after this method returns, we will have
     * {@code nSet = [1,4,5], mSet = [0,2,3]}.
     * <p>
     * <strong>Precondition:</strong> The first {@code n} elements of {@code nSetI} must be sorted
     * in ascending order.
     * </p>
     *
     * @param nSet array to fill with the first {@code n} elements of {@code nSetI}
     * @param mSet array to fill with the {@code m} complementary elements of {@code nSet} relative
     *        to {@code m + n}
     * @param nSetI array whose first {@code n} elements specify the members of {@code nSet}
     * @param n number of elements in the first output array
     * @param m number of elements in the second output array
     */
    private void copyPartition(double[] nSet, double[] mSet, int[] nSetI, int n, int m) {
        // Single pass over 0..n+m-1: j walks the sorted n-prefix of nSetI, k fills the complement.
        int j = 0;
        int k = 0;
        for (int i = 0; i < n + m; i++) {
            if (j < n && nSetI[j] == i) {
                nSet[j++] = i;
            } else {
                mSet[k++] = i;
            }
        }
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v10/enums/interaction_type.proto

// NOTE(review): this is protoc-generated code. Do not hand-edit; regenerate from the
// .proto definition if the enum needs to change.
package com.google.ads.googleads.v10.enums;

/**
 * <pre>
 * Container for enum describing possible interaction types.
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v10.enums.InteractionTypeEnum}
 */
public final class InteractionTypeEnum extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v10.enums.InteractionTypeEnum)
    InteractionTypeEnumOrBuilder {
private static final long serialVersionUID = 0L;
  // Use InteractionTypeEnum.newBuilder() to construct.
  private InteractionTypeEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private InteractionTypeEnum() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new InteractionTypeEnum();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  private InteractionTypeEnum(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v10.enums.InteractionTypeProto.internal_static_google_ads_googleads_v10_enums_InteractionTypeEnum_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v10.enums.InteractionTypeProto.internal_static_google_ads_googleads_v10_enums_InteractionTypeEnum_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v10.enums.InteractionTypeEnum.class, com.google.ads.googleads.v10.enums.InteractionTypeEnum.Builder.class);
  }

  /**
   * <pre>
   * Enum describing possible interaction types.
   * </pre>
   *
   * Protobuf enum {@code google.ads.googleads.v10.enums.InteractionTypeEnum.InteractionType}
   */
  public enum InteractionType
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * <pre>
     * Not specified.
     * </pre>
     *
     * <code>UNSPECIFIED = 0;</code>
     */
    UNSPECIFIED(0),
    /**
     * <pre>
     * Used for return value only. Represents value unknown in this version.
     * </pre>
     *
     * <code>UNKNOWN = 1;</code>
     */
    UNKNOWN(1),
    /**
     * <pre>
     * Calls.
     * </pre>
     *
     * <code>CALLS = 8000;</code>
     */
    CALLS(8000),
    UNRECOGNIZED(-1),
    ;

    /**
     * <pre>
     * Not specified.
     * </pre>
     *
     * <code>UNSPECIFIED = 0;</code>
     */
    public static final int UNSPECIFIED_VALUE = 0;
    /**
     * <pre>
     * Used for return value only. Represents value unknown in this version.
     * </pre>
     *
     * <code>UNKNOWN = 1;</code>
     */
    public static final int UNKNOWN_VALUE = 1;
    /**
     * <pre>
     * Calls.
     * </pre>
     *
     * <code>CALLS = 8000;</code>
     */
    public static final int CALLS_VALUE = 8000;

    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        // UNRECOGNIZED carries no wire value; callers must handle it explicitly.
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static InteractionType valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static InteractionType forNumber(int value) {
      switch (value) {
        case 0: return UNSPECIFIED;
        case 1: return UNKNOWN;
        case 8000: return CALLS;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<InteractionType>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<
        InteractionType> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<InteractionType>() {
            public InteractionType findValueByNumber(int number) {
              return InteractionType.forNumber(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return com.google.ads.googleads.v10.enums.InteractionTypeEnum.getDescriptor().getEnumTypes().get(0);
    }

    private static final InteractionType[] VALUES = values();

    public static InteractionType valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private InteractionType(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.ads.googleads.v10.enums.InteractionTypeEnum.InteractionType)
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v10.enums.InteractionTypeEnum)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v10.enums.InteractionTypeEnum other = (com.google.ads.googleads.v10.enums.InteractionTypeEnum) obj;

    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v10.enums.InteractionTypeEnum parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.enums.InteractionTypeEnum parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.enums.InteractionTypeEnum parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.enums.InteractionTypeEnum parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.enums.InteractionTypeEnum parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.enums.InteractionTypeEnum parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.enums.InteractionTypeEnum parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.enums.InteractionTypeEnum parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.enums.InteractionTypeEnum parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.enums.InteractionTypeEnum parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.enums.InteractionTypeEnum parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.enums.InteractionTypeEnum parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v10.enums.InteractionTypeEnum prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Container for enum describing possible interaction types.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v10.enums.InteractionTypeEnum}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.enums.InteractionTypeEnum)
      com.google.ads.googleads.v10.enums.InteractionTypeEnumOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v10.enums.InteractionTypeProto.internal_static_google_ads_googleads_v10_enums_InteractionTypeEnum_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v10.enums.InteractionTypeProto.internal_static_google_ads_googleads_v10_enums_InteractionTypeEnum_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v10.enums.InteractionTypeEnum.class, com.google.ads.googleads.v10.enums.InteractionTypeEnum.Builder.class);
    }

    // Construct using com.google.ads.googleads.v10.enums.InteractionTypeEnum.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v10.enums.InteractionTypeProto.internal_static_google_ads_googleads_v10_enums_InteractionTypeEnum_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v10.enums.InteractionTypeEnum getDefaultInstanceForType() {
      return com.google.ads.googleads.v10.enums.InteractionTypeEnum.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v10.enums.InteractionTypeEnum build() {
      com.google.ads.googleads.v10.enums.InteractionTypeEnum result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v10.enums.InteractionTypeEnum buildPartial() {
      com.google.ads.googleads.v10.enums.InteractionTypeEnum result = new com.google.ads.googleads.v10.enums.InteractionTypeEnum(this);
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v10.enums.InteractionTypeEnum) {
        return mergeFrom((com.google.ads.googleads.v10.enums.InteractionTypeEnum)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v10.enums.InteractionTypeEnum other) {
      if (other == com.google.ads.googleads.v10.enums.InteractionTypeEnum.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v10.enums.InteractionTypeEnum parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v10.enums.InteractionTypeEnum) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.enums.InteractionTypeEnum)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v10.enums.InteractionTypeEnum)
  private static final com.google.ads.googleads.v10.enums.InteractionTypeEnum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v10.enums.InteractionTypeEnum();
  }

  public static com.google.ads.googleads.v10.enums.InteractionTypeEnum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<InteractionTypeEnum>
      PARSER = new com.google.protobuf.AbstractParser<InteractionTypeEnum>() {
    @java.lang.Override
    public InteractionTypeEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new InteractionTypeEnum(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<InteractionTypeEnum> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<InteractionTypeEnum> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v10.enums.InteractionTypeEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.applicationsmanager; import java.util.Collection; import java.util.List; import java.util.Map; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.yarn.MockApps; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEvent; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; import org.apache.hadoop.yarn.util.Records; import 
com.google.common.collect.Lists;

/**
 * Factory of mock {@link RMApp} instances for ApplicationsManager tests.
 * <p>
 * {@link ApplicationBase} is a "throw on everything" base implementation;
 * tests (and {@link #newApplication(int)}) override only the accessors they
 * actually need.
 */
@InterfaceAudience.Private
public abstract class MockAsm extends MockApps {

  /**
   * {@link RMApp} implementation whose every method throws
   * {@link UnsupportedOperationException}. Subclass it and override just the
   * methods a test exercises, so unexpected calls fail loudly.
   */
  public static class ApplicationBase implements RMApp {
    @Override
    public String getUser() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public ApplicationSubmissionContext getApplicationSubmissionContext() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public String getName() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public String getQueue() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public long getStartTime() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public long getSubmitTime() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public long getFinishTime() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public StringBuilder getDiagnostics() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public ApplicationId getApplicationId() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public RMAppAttempt getCurrentAppAttempt() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public Map<ApplicationAttemptId, RMAppAttempt> getAppAttempts() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public float getProgress() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public RMAppAttempt getRMAppAttempt(ApplicationAttemptId appAttemptId) {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public RMAppState getState() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public String getTrackingUrl() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public int getMaxAppAttempts() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public ApplicationReport createAndGetApplicationReport(
        String clientUserName, boolean allowAccess) {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public void handle(RMAppEvent event) {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public FinalApplicationStatus getFinalApplicationStatus() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public int pullRMNodeUpdates(Collection<RMNode> updatedNodes) {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public String getApplicationType() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public void setQueue(String name) {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public boolean isAppSafeToTerminate() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public YarnApplicationState createApplicationState() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
  }

  /**
   * Creates a mock application whose attributes are derived
   * deterministically from {@code i} (except {@link RMApp#getProgress()},
   * which is random on every call).
   *
   * @param i seed used to vary ids, names, times and state
   * @return a partially implemented {@link RMApp}; unimplemented accessors
   *         still throw {@link UnsupportedOperationException}
   */
  public static RMApp newApplication(int i) {
    final ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(newAppID(i), 0);
    // NOTE: an earlier version also built an unused mock master Container
    // here; it was dead code and has been removed.
    final String user = newUserName();
    final String name = newAppName();
    final String queue = newQueue();
    final long start = 123456 + i * 1000;
    final long finish = 234567 + i * 1000;
    final String type = YarnConfiguration.DEFAULT_APPLICATION_TYPE;
    YarnApplicationState[] allStates = YarnApplicationState.values();
    // cycle deterministically through all application states
    final YarnApplicationState state = allStates[i % allStates.length];
    final int maxAppAttempts = i % 1000;
    return new ApplicationBase() {
      @Override
      public ApplicationId getApplicationId() {
        return appAttemptId.getApplicationId();
      }

      @Override
      public String getUser() {
        return user;
      }

      @Override
      public String getName() {
        return name;
      }

      @Override
      public String getApplicationType() {
        return type;
      }

      @Override
      public String getQueue() {
        return queue;
      }

      @Override
      public long getStartTime() {
        return start;
      }

      @Override
      public long getFinishTime() {
        return finish;
      }

      @Override
      public String getTrackingUrl() {
        return null;
      }

      @Override
      public YarnApplicationState createApplicationState() {
        return state;
      }

      @Override
      public StringBuilder getDiagnostics() {
        return new StringBuilder();
      }

      @Override
      public float getProgress() {
        // intentionally nondeterministic: tests only need "some" progress
        return (float) Math.random();
      }

      @Override
      public FinalApplicationStatus getFinalApplicationStatus() {
        return FinalApplicationStatus.UNDEFINED;
      }

      @Override
      public RMAppAttempt getCurrentAppAttempt() {
        return null;
      }

      @Override
      public int getMaxAppAttempts() {
        return maxAppAttempts;
      }
    };
  }

  /**
   * Creates {@code n} mock applications via {@link #newApplication(int)}.
   *
   * @param n number of applications to create
   * @return a mutable list of {@code n} mock applications
   */
  public static List<RMApp> newApplications(int n) {
    List<RMApp> list = Lists.newArrayList();
    for (int i = 0; i < n; ++i) {
      list.add(newApplication(i));
    }
    return list;
  }
}
package com.example.alonsiwek.demomap; import android.app.AlarmManager; import android.app.PendingIntent; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.Bundle; import android.os.Handler; import android.support.v4.app.Fragment; import android.support.v4.content.LocalBroadcastManager; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.view.Gravity; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageButton; import android.widget.LinearLayout; import android.widget.TableLayout; import android.widget.TableRow; import android.widget.TextView; import android.widget.Toast; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.Timer; import java.util.TimerTask; /** * Created by dor on 1/11/2017. 
* This class is the Fragment calls of the main screen */ public class MainPageFrag extends Fragment { Boolean mIsRunning = false; private static final int UPDATE_RECYCLE_VIEW_DURATION = 5000; // 3200 is Toast.Length long private static final int SWIPE_TO_MAPS_FRAG_DURATION_GO_BUTTON = 3200 + 50; private static final int SWIPE_TO_MAPS_FRAG_DURATION_RIGHT_ARROW_BUTTON = 150; @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, final Bundle savedInstanceState) { // Inflate the layout for this fragment final View view = inflater.inflate(R.layout.main_screen_frag, null); // set Timer to recycle list TimerTask task = new UserAtAppTimer(getActivity(), view, R.id.users_list); new Timer().scheduleAtFixedRate(task,0,UPDATE_RECYCLE_VIEW_DURATION); // get the widgets reference from Fragment XML layout final ImageButton btn_go = (ImageButton) view.findViewById(R.id.go_walking_btn); final ImageButton btn_rightArrow = (ImageButton) view.findViewById(R.id.right_arrow); final ImageButton btn_leftArrow = (ImageButton) view.findViewById(R.id.left_arrow); final RecyclerView mRecyleView = (RecyclerView) view.findViewById(R.id.users_list); final TextView tv = (TextView) view.findViewById(R.id.your_friends_tv); final ImageButton btn_bell = (ImageButton) view.findViewById(R.id.bell); final TextView numOfNotification = (TextView) view.findViewById(R.id.red_cycle); numOfNotification.setText(String.valueOf(" 1")); // set the viability functionality btn_bell.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { btn_bell.setVisibility(View.GONE); numOfNotification.setVisibility(View.GONE); tv.setVisibility(View.VISIBLE); mRecyleView.setVisibility(View.VISIBLE); // make GO button + arrow invisible btn_go.setVisibility(View.GONE); btn_rightArrow.setVisibility(View.GONE); btn_leftArrow.setVisibility(View.GONE); } }); //make Invitation text clickable and return the normal layout tv.setOnClickListener(new View.OnClickListener() { 
@Override public void onClick(View v) { btn_go.setVisibility(View.VISIBLE); btn_leftArrow.setVisibility(View.VISIBLE); btn_rightArrow.setVisibility(View.VISIBLE); } }); /* set functionality as Go button * Toast of the Main button * Set a click listener for Fragment button * Auto swipe to the next screen */ btn_go.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { // show the Toast activateToast(R.layout.go_massage_toast ,savedInstanceState, Toast.LENGTH_LONG); // UPDATE DB mIsRunning = true; /* update DB only when mIsRunning == true. * update DB only when mIsRunning == false will be with FINISH button. */ Log.d(MainPageFrag.class.toString(),"mIsRunning:" + String.valueOf(mIsRunning)); activate_GoButton(mIsRunning); // auto swipe to next screen activateOnClickSwipe(SWIPE_TO_MAPS_FRAG_DURATION_GO_BUTTON); } }); // Swipe to map fragment btn_rightArrow.setOnClickListener(new View.OnClickListener(){ @Override public void onClick(View v) { activateOnClickSwipe(SWIPE_TO_MAPS_FRAG_DURATION_RIGHT_ARROW_BUTTON); } }); return view; } /** * Swipe to Maps fragemnt after delay * @param duration - delay duration */ public void activateOnClickSwipe(int duration){ new Handler(getActivity().getMainLooper()).postDelayed(new Runnable() { @Override public void run() { ((MainScreen.PageAdapter)getActivity()).setCurrentItem (MainScreen.PageAdapter.FRAGMENT_TWO_MAP , true); } } , duration); } /** * Call the Thread that activate updateRunningState Method * @param mIsRunningStatus */ public static void activate_GoButton(final boolean mIsRunningStatus){ if (mIsRunningStatus) { new Thread(new Runnable() { @Override public void run() { try { updateRunningState(mIsRunningStatus); } catch (IOException e) { Log.e(MainPageFrag.class.toString(), e.toString()); e.printStackTrace(); return; } } }).start(); } } /** * Display Toast to the screen * @param toastLayout - the Toast layot * @param savedInstanceState * @param toastLength - duration of Toast */ public void 
activateToast(int toastLayout, Bundle savedInstanceState, int toastLength){ // Get the application context Toast toast = new Toast(getContext()); // Set the Toast display position layout center toast.setGravity(Gravity.CENTER, 0, 0); LayoutInflater inflater = getLayoutInflater(savedInstanceState); View layout = inflater.inflate(toastLayout, null); // Set the Toast duration toast.setDuration(toastLength); // Set the Toast custom layout toast.setView(layout); toast.show(); } /** * Update the DB with the boolean state field of "is_running" * @param state - true or flase * @throws IOException */ static void updateRunningState(Boolean state) throws IOException { JSONObject json = new JSONObject(); URL url; BufferedReader bufferedReader = null; BufferedWriter bufferedWriter = null; try { json.put("is_running", state); } catch (JSONException e) { Log.e(MainPageFrag.class.toString(), "json error: " + e.toString()); e.printStackTrace(); return; } try { url = new URL(Constants.SERVER_URL + Constants.LOC_STATUS_PATH + Constants.user_id); } catch (MalformedURLException e) { Log.e(MainPageFrag.class.toString(), "error at url: " + e.toString()); e.printStackTrace(); return; } HttpURLConnection urlConnection = null; urlConnection = (HttpURLConnection) url.openConnection(); urlConnection.setDoOutput(true); //set the time to read from url - in miliSec urlConnection.setReadTimeout(10000); //set time to be used when opening a communications link // to the resource referenced by this URLConnection connect to url urlConnection.setConnectTimeout(10000); urlConnection.setRequestMethod("PUT"); // enable output urlConnection.setDoOutput(true); //set header urlConnection.setRequestProperty("Content-Type","application/json"); urlConnection.connect(); Log.d(MainPageFrag.class.toString(),"Connecting"); //Post data to server OutputStream outputStream = null; outputStream = urlConnection.getOutputStream(); bufferedWriter = new BufferedWriter((new OutputStreamWriter(outputStream))); 
bufferedWriter.write(json.toString()); Log.d(MainPageFrag.class.toString(),"written to server"); bufferedWriter.flush(); if ( urlConnection.getResponseCode() != 200) { Log.e(MainPageFrag.class.toString()," response code error:" + urlConnection.getResponseCode()); return; } // disconnect urlConnection.disconnect(); } @Override public void onResume(){ super.onResume(); } @Override public void onPause() { super.onPause(); } @Override public void onStart(){ super.onStart(); } @Override public void onDestroy() { super.onDestroy(); } }
package org.apache.lucene.search.postingshighlight; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.text.BreakIterator; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.PriorityQueue; import java.util.SortedSet; import java.util.TreeSet; import org.apache.lucene.index.AtomicReader; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.index.DocsAndPositionsEnum; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; import org.apache.lucene.util.BytesRef; import 
org.apache.lucene.util.InPlaceMergeSorter; import org.apache.lucene.util.UnicodeUtil; /** * Simple highlighter that does not analyze fields nor use * term vectors. Instead it requires * {@link IndexOptions#DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS}. * <p> * PostingsHighlighter treats the single original document as the whole corpus, and then scores individual * passages as if they were documents in this corpus. It uses a {@link BreakIterator} to find * passages in the text; by default it breaks using {@link BreakIterator#getSentenceInstance(Locale) * getSentenceInstance(Locale.ROOT)}. It then iterates in parallel (merge sorting by offset) through * the positions of all terms from the query, coalescing those hits that occur in a single passage * into a {@link Passage}, and then scores each Passage using a separate {@link PassageScorer}. * Passages are finally formatted into highlighted snippets with a {@link PassageFormatter}. * <p> * <b>WARNING</b>: The code is very new and probably still has some exciting bugs! * <p> * Example usage: * <pre class="prettyprint"> * // configure field with offsets at index time * FieldType offsetsType = new FieldType(TextField.TYPE_STORED); * offsetsType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); * Field body = new Field("body", "foobar", offsetsType); * * // retrieve highlights at query time * PostingsHighlighter highlighter = new PostingsHighlighter(); * Query query = new TermQuery(new Term("body", "highlighting")); * TopDocs topDocs = searcher.search(query, n); * String highlights[] = highlighter.highlight("body", query, searcher, topDocs); * </pre> * <p> * This is thread-safe, and can be used across different readers. * @lucene.experimental */ public class PostingsHighlighter { // TODO: maybe allow re-analysis for tiny fields? currently we require offsets, // but if the analyzer is really fast and the field is tiny, this might really be // unnecessary. 
/** for rewriting: we don't want slow processing from MTQs */ private static final IndexReader EMPTY_INDEXREADER = new MultiReader(); /** Default maximum content size to process. Typically snippets * closer to the beginning of the document better summarize its content */ public static final int DEFAULT_MAX_LENGTH = 10000; private final int maxLength; /** Set the first time {@link #getFormatter} is called, * and then reused. */ private PassageFormatter defaultFormatter; /** Set the first time {@link #getScorer} is called, * and then reused. */ private PassageScorer defaultScorer; /** * Creates a new highlighter with {@link #DEFAULT_MAX_LENGTH}. */ public PostingsHighlighter() { this(DEFAULT_MAX_LENGTH); } /** * Creates a new highlighter, specifying maximum content length. * @param maxLength maximum content size to process. * @throws IllegalArgumentException if <code>maxLength</code> is negative or <code>Integer.MAX_VALUE</code> */ public PostingsHighlighter(int maxLength) { if (maxLength < 0 || maxLength == Integer.MAX_VALUE) { // two reasons: no overflow problems in BreakIterator.preceding(offset+1), // our sentinel in the offsets queue uses this value to terminate. throw new IllegalArgumentException("maxLength must be < Integer.MAX_VALUE"); } this.maxLength = maxLength; } /** Returns the {@link BreakIterator} to use for * dividing text into passages. This returns * {@link BreakIterator#getSentenceInstance(Locale)} by default; * subclasses can override to customize. */ protected BreakIterator getBreakIterator(String field) { return BreakIterator.getSentenceInstance(Locale.ROOT); } /** Returns the {@link PassageFormatter} to use for * formatting passages into highlighted snippets. This * returns a new {@code PassageFormatter} by default; * subclasses can override to customize. 
*/ protected PassageFormatter getFormatter(String field) { if (defaultFormatter == null) { defaultFormatter = new DefaultPassageFormatter(); } return defaultFormatter; } /** Returns the {@link PassageScorer} to use for * ranking passages. This * returns a new {@code PassageScorer} by default; * subclasses can override to customize. */ protected PassageScorer getScorer(String field) { if (defaultScorer == null) { defaultScorer = new PassageScorer(); } return defaultScorer; } /** * Highlights the top passages from a single field. * * @param field field name to highlight. * Must have a stored string value and also be indexed with offsets. * @param query query to highlight. * @param searcher searcher that was previously used to execute the query. * @param topDocs TopDocs containing the summary result documents to highlight. * @return Array of formatted snippets corresponding to the documents in <code>topDocs</code>. * If no highlights were found for a document, the * first sentence for the field will be returned. * @throws IOException if an I/O error occurred during processing * @throws IllegalArgumentException if <code>field</code> was indexed without * {@link IndexOptions#DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS} */ public String[] highlight(String field, Query query, IndexSearcher searcher, TopDocs topDocs) throws IOException { return highlight(field, query, searcher, topDocs, 1); } /** * Highlights the top-N passages from a single field. * * @param field field name to highlight. * Must have a stored string value and also be indexed with offsets. * @param query query to highlight. * @param searcher searcher that was previously used to execute the query. * @param topDocs TopDocs containing the summary result documents to highlight. * @param maxPassages The maximum number of top-N ranked passages used to * form the highlighted snippets. * @return Array of formatted snippets corresponding to the documents in <code>topDocs</code>. 
* If no highlights were found for a document, the * first {@code maxPassages} sentences from the * field will be returned. * @throws IOException if an I/O error occurred during processing * @throws IllegalArgumentException if <code>field</code> was indexed without * {@link IndexOptions#DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS} */ public String[] highlight(String field, Query query, IndexSearcher searcher, TopDocs topDocs, int maxPassages) throws IOException { Map<String,String[]> res = highlightFields(new String[] { field }, query, searcher, topDocs, new int[] { maxPassages }); return res.get(field); } /** * Highlights the top passages from multiple fields. * <p> * Conceptually, this behaves as a more efficient form of: * <pre class="prettyprint"> * Map m = new HashMap(); * for (String field : fields) { * m.put(field, highlight(field, query, searcher, topDocs)); * } * return m; * </pre> * * @param fields field names to highlight. * Must have a stored string value and also be indexed with offsets. * @param query query to highlight. * @param searcher searcher that was previously used to execute the query. * @param topDocs TopDocs containing the summary result documents to highlight. * @return Map keyed on field name, containing the array of formatted snippets * corresponding to the documents in <code>topDocs</code>. * If no highlights were found for a document, the * first sentence from the field will be returned. * @throws IOException if an I/O error occurred during processing * @throws IllegalArgumentException if <code>field</code> was indexed without * {@link IndexOptions#DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS} */ public Map<String,String[]> highlightFields(String fields[], Query query, IndexSearcher searcher, TopDocs topDocs) throws IOException { int maxPassages[] = new int[fields.length]; Arrays.fill(maxPassages, 1); return highlightFields(fields, query, searcher, topDocs, maxPassages); } /** * Highlights the top-N passages from multiple fields. 
   * <p>
   * Conceptually, this behaves as a more efficient form of:
   * <pre class="prettyprint">
   * Map m = new HashMap();
   * for (String field : fields) {
   *   m.put(field, highlight(field, query, searcher, topDocs, maxPassages));
   * }
   * return m;
   * </pre>
   *
   * @param fields field names to highlight.
   *        Must have a stored string value and also be indexed with offsets.
   * @param query query to highlight.
   * @param searcher searcher that was previously used to execute the query.
   * @param topDocs TopDocs containing the summary result documents to highlight.
   * @param maxPassages The maximum number of top-N ranked passages per-field used to
   *        form the highlighted snippets.
   * @return Map keyed on field name, containing the array of formatted snippets
   *         corresponding to the documents in <code>topDocs</code>.
   *         If no highlights were found for a document, the
   *         first {@code maxPassages} sentences from the
   *         field will be returned.
   * @throws IOException if an I/O error occurred during processing
   * @throws IllegalArgumentException if <code>field</code> was indexed without
   *         {@link IndexOptions#DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS}
   */
  public Map<String,String[]> highlightFields(String fields[], Query query, IndexSearcher searcher, TopDocs topDocs, int maxPassages[]) throws IOException {
    // extract the docids from the TopDocs and delegate to the docid-based API
    final ScoreDoc scoreDocs[] = topDocs.scoreDocs;
    int docids[] = new int[scoreDocs.length];
    for (int i = 0; i < docids.length; i++) {
      docids[i] = scoreDocs[i].doc;
    }
    return highlightFields(fields, query, searcher, docids, maxPassages);
  }

  /**
   * Highlights the top-N passages from multiple fields,
   * for the provided int[] docids.
   *
   * @param fieldsIn field names to highlight.
   *        Must have a stored string value and also be indexed with offsets.
   * @param query query to highlight.
   * @param searcher searcher that was previously used to execute the query.
   * @param docidsIn containing the document IDs to highlight.
   * @param maxPassagesIn The maximum number of top-N ranked passages per-field used to
   *        form the highlighted snippets.
   * @return Map keyed on field name, containing the array of formatted snippets
   *         corresponding to the documents in <code>docidsIn</code>.
   *         If no highlights were found for a document, the
   *         first {@code maxPassages} from the field will
   *         be returned.
   * @throws IOException if an I/O error occurred during processing
   * @throws IllegalArgumentException if <code>field</code> was indexed without
   *         {@link IndexOptions#DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS}
   */
  public Map<String,String[]> highlightFields(String fieldsIn[], Query query, IndexSearcher searcher, int[] docidsIn, int maxPassagesIn[]) throws IOException {
    Map<String,String[]> snippets = new HashMap<String,String[]>();
    // convert each formatted snippet Object to its String form
    // (entries with no highlights stay null)
    for(Map.Entry<String,Object[]> ent : highlightFieldsAsObjects(fieldsIn, query, searcher, docidsIn, maxPassagesIn).entrySet()) {
      Object[] snippetObjects = ent.getValue();
      String[] snippetStrings = new String[snippetObjects.length];
      snippets.put(ent.getKey(), snippetStrings);
      for(int i=0;i<snippetObjects.length;i++) {
        Object snippet = snippetObjects[i];
        if (snippet != null) {
          snippetStrings[i] = snippet.toString();
        }
      }
    }

    return snippets;
  }

  /**
   * Expert: highlights the top-N passages from multiple fields,
   * for the provided int[] docids, to custom Object as
   * returned by the {@link PassageFormatter}.  Use
   * this API to render to something other than String.
   *
   * @param fieldsIn field names to highlight.
   *        Must have a stored string value and also be indexed with offsets.
   * @param query query to highlight.
   * @param searcher searcher that was previously used to execute the query.
   * @param docidsIn containing the document IDs to highlight.
   * @param maxPassagesIn The maximum number of top-N ranked passages per-field used to
   *        form the highlighted snippets.
   * @return Map keyed on field name, containing the array of formatted snippets
   *         corresponding to the documents in <code>docidsIn</code>.
   *         If no highlights were found for a document, the
   *         first {@code maxPassages} from the field will
   *         be returned.
   * @throws IOException if an I/O error occurred during processing
   * @throws IllegalArgumentException if <code>field</code> was indexed without
   *         {@link IndexOptions#DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS}
   */
  protected Map<String,Object[]> highlightFieldsAsObjects(String fieldsIn[], Query query, IndexSearcher searcher, int[] docidsIn, int maxPassagesIn[]) throws IOException {
    if (fieldsIn.length < 1) {
      throw new IllegalArgumentException("fieldsIn must not be empty");
    }
    if (fieldsIn.length != maxPassagesIn.length) {
      throw new IllegalArgumentException("invalid number of maxPassagesIn");
    }
    final IndexReader reader = searcher.getIndexReader();
    // rewrite against an empty reader so MultiTermQueries expand to plain terms
    query = rewrite(query);
    SortedSet<Term> queryTerms = new TreeSet<Term>();
    query.extractTerms(queryTerms);

    IndexReaderContext readerContext = reader.getContext();
    List<AtomicReaderContext> leaves = readerContext.leaves();

    // Make our own copies because we sort in-place:
    int[] docids = new int[docidsIn.length];
    System.arraycopy(docidsIn, 0, docids, 0, docidsIn.length);
    final String fields[] = new String[fieldsIn.length];
    System.arraycopy(fieldsIn, 0, fields, 0, fieldsIn.length);
    final int maxPassages[] = new int[maxPassagesIn.length];
    System.arraycopy(maxPassagesIn, 0, maxPassages, 0, maxPassagesIn.length);

    // sort for sequential io
    Arrays.sort(docids);
    // sort the fields, keeping each maxPassages entry aligned with its field
    new InPlaceMergeSorter() {

      @Override
      protected void swap(int i, int j) {
        String tmp = fields[i];
        fields[i] = fields[j];
        fields[j] = tmp;
        int tmp2 = maxPassages[i];
        maxPassages[i] = maxPassages[j];
        maxPassages[j] = tmp2;
      }

      @Override
      protected int compare(int i, int j) {
        return fields[i].compareTo(fields[j]);
      }

    }.sort(0, fields.length);

    // pull stored data:
    String[][] contents = loadFieldValues(searcher, fields, docids, maxLength);

    Map<String,Object[]> highlights = new HashMap<String,Object[]>();
    for (int i = 0; i < fields.length; i++) {
      String field = fields[i];
      int numPassages = maxPassages[i];
      // restrict the query terms to those belonging to this field
      Term floor = new Term(field, "");
      Term ceiling = new Term(field, UnicodeUtil.BIG_TERM);
      SortedSet<Term> fieldTerms = queryTerms.subSet(floor, ceiling);
      // TODO: should we have some reasonable defaults for term pruning? (e.g. stopwords)

      // Strip off the redundant field:
      BytesRef terms[] = new BytesRef[fieldTerms.size()];
      int termUpto = 0;
      for(Term term : fieldTerms) {
        terms[termUpto++] = term.bytes();
      }
      Map<Integer,Object> fieldHighlights = highlightField(field, contents[i], getBreakIterator(field), terms, docids, leaves, numPassages);

      // map highlights back to the caller's original (unsorted) docid order
      Object[] result = new Object[docids.length];
      for (int j = 0; j < docidsIn.length; j++) {
        result[j] = fieldHighlights.get(docidsIn[j]);
      }
      highlights.put(field, result);
    }
    return highlights;
  }

  /** Loads the String values for each field X docID to be
   *  highlighted.  By default this loads from stored
   *  fields, but a subclass can change the source.  This
   *  method should allocate the String[fields.length][docids.length]
   *  and fill all values.  The returned Strings must be
   *  identical to what was indexed. */
  protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException {
    String contents[][] = new String[fields.length][docids.length];
    char valueSeparators[] = new char[fields.length];
    for (int i = 0; i < fields.length; i++) {
      valueSeparators[i] = getMultiValuedSeparator(fields[i]);
    }
    LimitedStoredFieldVisitor visitor = new LimitedStoredFieldVisitor(fields, valueSeparators, maxLength);
    for (int i = 0; i < docids.length; i++) {
      searcher.doc(docids[i], visitor);
      for (int j = 0; j < fields.length; j++) {
        contents[j][i] = visitor.getValue(j).toString();
      }
      visitor.reset();
    }
    return contents;
  }

  /**
   * Returns the logical separator between values for multi-valued fields.
   * The default value is a space character, which means passages can span across values,
   * but a subclass can override, for example with {@code U+2029 PARAGRAPH SEPARATOR (PS)}
   * if each value holds a discrete passage for highlighting.
   */
  protected char getMultiValuedSeparator(String field) {
    return ' ';
  }

  private Map<Integer,Object> highlightField(String field, String contents[], BreakIterator bi, BytesRef terms[], int[] docids, List<AtomicReaderContext> leaves, int maxPassages) throws IOException {
    Map<Integer,Object> highlights = new HashMap<Integer,Object>();

    // reuse in the real sense... for docs in same segment we just advance our old enum
    DocsAndPositionsEnum postings[] = null;
    TermsEnum termsEnum = null;
    int lastLeaf = -1;

    PassageFormatter fieldFormatter = getFormatter(field);
    if (fieldFormatter == null) {
      throw new NullPointerException("PassageFormatter cannot be null");
    }

    for (int i = 0; i < docids.length; i++) {
      String content = contents[i];
      if (content.length() == 0) {
        continue; // nothing to do
      }
      bi.setText(content);
      int doc = docids[i];
      int leaf = ReaderUtil.subIndex(doc, leaves);
      AtomicReaderContext subContext = leaves.get(leaf);
      AtomicReader r = subContext.reader();
      Terms t = r.terms(field);
      if (t == null) {
        continue; // nothing to do
      }
      // crossing into a new segment: fresh enums are needed
      if (leaf != lastLeaf) {
        termsEnum = t.iterator(null);
        postings = new DocsAndPositionsEnum[terms.length];
      }
      // highlightDoc works on segment-local doc ids, hence the docBase shift
      Passage passages[] = highlightDoc(field, terms, content.length(), bi, doc - subContext.docBase, termsEnum, postings, maxPassages);
      if (passages.length == 0) {
        passages = getEmptyHighlight(field, bi, maxPassages);
      }
      if (passages.length > 0) {
        // otherwise a null snippet (eg if field is missing
        // entirely from the doc)
        highlights.put(doc, fieldFormatter.format(passages, content));
      }
      lastLeaf = leaf;
    }

    return highlights;
  }

  // algorithm: treat sentence snippets as miniature documents
  // we can intersect these with the postings lists via BreakIterator.preceding(offset),s
  // score each sentence as norm(sentenceStartOffset) * sum(weight * tf(freq))
  private Passage[] highlightDoc(String field, BytesRef terms[], int contentLength, BreakIterator bi, int doc,
      TermsEnum termsEnum, DocsAndPositionsEnum[] postings, int n) throws IOException {
    PassageScorer scorer = getScorer(field);
    if (scorer == null) {
      throw new NullPointerException("PassageScorer cannot be null");
    }
    // merge-iterate all term postings ordered by start offset
    PriorityQueue<OffsetsEnum> pq = new PriorityQueue<OffsetsEnum>();
    float weights[] = new float[terms.length];
    // initialize postings
    for (int i = 0; i < terms.length; i++) {
      DocsAndPositionsEnum de = postings[i];
      int pDoc;
      if (de == EMPTY) {
        continue;
      } else if (de == null) {
        postings[i] = EMPTY; // initially
        if (!termsEnum.seekExact(terms[i])) {
          continue; // term not found
        }
        de = postings[i] = termsEnum.docsAndPositions(null, null, DocsAndPositionsEnum.FLAG_OFFSETS);
        if (de == null) {
          // no positions available
          throw new IllegalArgumentException("field '" + field + "' was indexed without offsets, cannot highlight");
        }
        pDoc = de.advance(doc);
      } else {
        pDoc = de.docID();
        if (pDoc < doc) {
          pDoc = de.advance(doc);
        }
      }

      if (doc == pDoc) {
        weights[i] = scorer.weight(contentLength, de.freq());
        de.nextPosition();
        pq.add(new OffsetsEnum(de, i));
      }
    }

    pq.add(new OffsetsEnum(EMPTY, Integer.MAX_VALUE)); // a sentinel for termination

    // keeps the n best passages seen so far (min-heap on score)
    PriorityQueue<Passage> passageQueue = new PriorityQueue<Passage>(n, new Comparator<Passage>() {
      @Override
      public int compare(Passage left, Passage right) {
        if (left.score < right.score) {
          return -1;
        } else if (left.score > right.score) {
          return 1;
        } else {
          return left.startOffset - right.startOffset;
        }
      }
    });
    Passage current = new Passage();

    OffsetsEnum off;
    while ((off = pq.poll()) != null) {
      final DocsAndPositionsEnum dp = off.dp;
      int start = dp.startOffset();
      if (start == -1) {
        throw new IllegalArgumentException("field '" + field + "' was indexed without offsets, cannot highlight");
      }
      int end = dp.endOffset();
      // LUCENE-5166: this hit would span the content limit... however more valid
      // hits may exist (they are sorted by start). so we pretend like we never
      // saw this term, it won't cause a passage to be added to passageQueue or anything.
      assert EMPTY.startOffset() == Integer.MAX_VALUE;
      if (start < contentLength && end > contentLength) {
        continue;
      }
      if (start >= current.endOffset) {
        if (current.startOffset >= 0) {
          // finalize current
          current.score *= scorer.norm(current.startOffset);
          // new sentence: first add 'current' to queue
          if (passageQueue.size() == n && current.score < passageQueue.peek().score) {
            current.reset(); // can't compete, just reset it
          } else {
            passageQueue.offer(current);
            if (passageQueue.size() > n) {
              current = passageQueue.poll();
              current.reset();
            } else {
              current = new Passage();
            }
          }
        }
        // if we exceed limit, we are done
        if (start >= contentLength) {
          Passage passages[] = new Passage[passageQueue.size()];
          passageQueue.toArray(passages);
          for (Passage p : passages) {
            p.sort();
          }
          // sort in ascending order
          Arrays.sort(passages, new Comparator<Passage>() {
            @Override
            public int compare(Passage left, Passage right) {
              return left.startOffset - right.startOffset;
            }
          });
          return passages;
        }
        // advance breakiterator
        assert BreakIterator.DONE < 0;
        current.startOffset = Math.max(bi.preceding(start+1), 0);
        current.endOffset = Math.min(bi.next(), contentLength);
      }
      // accumulate all occurrences of this term that fall inside the passage
      int tf = 0;
      while (true) {
        tf++;
        current.addMatch(start, end, terms[off.id]);
        if (off.pos == dp.freq()) {
          break; // removed from pq
        } else {
          off.pos++;
          dp.nextPosition();
          start = dp.startOffset();
          end = dp.endOffset();
        }
        if (start >= current.endOffset || end > contentLength) {
          pq.offer(off);
          break;
        }
      }
      current.score += weights[off.id] * scorer.tf(tf, current.endOffset - current.startOffset);
    }

    // Dead code but compiler disagrees:
    assert false;
    return null;
  }

  /** Called to summarize a document when no hits were
   *  found.  By default this just returns the first
   *  {@code maxPassages} sentences; subclasses can override
   *  to customize.
 */
protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) {
  // BreakIterator should be un-next'd:
  List<Passage> passages = new ArrayList<Passage>();
  int pos = bi.current();
  assert pos == 0;
  // take the first maxPassages segments as unscored passages (score = NaN marks "no hit")
  while (passages.size() < maxPassages) {
    int next = bi.next();
    if (next == BreakIterator.DONE) {
      break;
    }
    Passage passage = new Passage();
    passage.score = Float.NaN;
    passage.startOffset = pos;
    passage.endOffset = next;
    passages.add(passage);
    pos = next;
  }
  return passages.toArray(new Passage[passages.size()]);
}

/**
 * A per-term position cursor: wraps a postings enum plus the index {@code id} of its term
 * and the 1-based count {@code pos} of positions consumed so far.
 * Ordered by the cursor's current start offset, ties broken by term index,
 * so a PriorityQueue of these yields term occurrences in document order.
 */
private static class OffsetsEnum implements Comparable<OffsetsEnum> {
  DocsAndPositionsEnum dp;
  int pos;
  int id;

  OffsetsEnum(DocsAndPositionsEnum dp, int id) throws IOException {
    this.dp = dp;
    this.id = id;
    this.pos = 1; // caller has already consumed the first position via nextPosition()
  }

  @Override
  public int compareTo(OffsetsEnum other) {
    try {
      int off = dp.startOffset();
      int otherOff = other.dp.startOffset();
      if (off == otherOff) {
        return id - other.id;
      } else {
        // widen to long before subtracting: MAX_VALUE sentinel offsets would overflow int
        return Long.signum(((long)off) - otherOff);
      }
    } catch (IOException e) {
      // compareTo cannot declare IOException; surface it unchecked
      throw new RuntimeException(e);
    }
  }
}

/**
 * Sentinel postings enum. Its {@code Integer.MAX_VALUE} start offset sorts it after every
 * real cursor, which is what terminates the passage-collection loop in highlightDoc.
 * Also used in the postings cache to mark "term absent from this segment".
 */
private static final DocsAndPositionsEnum EMPTY = new DocsAndPositionsEnum() {

  @Override
  public int nextPosition() throws IOException { return 0; }

  @Override
  public int startOffset() throws IOException { return Integer.MAX_VALUE; }

  @Override
  public int endOffset() throws IOException { return Integer.MAX_VALUE; }

  @Override
  public BytesRef getPayload() throws IOException { return null; }

  @Override
  public int freq() throws IOException { return 0; }

  @Override
  public int docID() { return NO_MORE_DOCS; }

  @Override
  public int nextDoc() throws IOException { return NO_MORE_DOCS; }

  @Override
  public int advance(int target) throws IOException { return NO_MORE_DOCS; }

  @Override
  public long cost() { return 0; }
};

/**
 * we rewrite against an empty indexreader: as we don't want things like
 * rangeQueries that don't summarize the document
 */
private static Query rewrite(Query original) throws IOException {
  Query query = original;
  // rewrite() returns the same instance once fully rewritten; loop until fixpoint
  for (Query rewrittenQuery = query.rewrite(EMPTY_INDEXREADER); rewrittenQuery != query;
       rewrittenQuery = query.rewrite(EMPTY_INDEXREADER)) {
    query = rewrittenQuery;
  }
  return query;
}

/**
 * Stored-field visitor that accumulates the requested fields' string values,
 * joining multiple values with a per-field separator and truncating each
 * accumulated buffer at {@code maxLength} characters.
 * NOTE(review): {@code fields} must be sorted — needsField looks names up with binarySearch.
 */
private static class LimitedStoredFieldVisitor extends StoredFieldVisitor {
  private final String fields[];
  private final char valueSeparators[]; // parallel to fields
  private final int maxLength;
  private final StringBuilder builders[]; // parallel to fields; reused across docs via reset()
  private int currentField = -1;

  public LimitedStoredFieldVisitor(String fields[], char valueSeparators[], int maxLength) {
    assert fields.length == valueSeparators.length;
    this.fields = fields;
    this.valueSeparators = valueSeparators;
    this.maxLength = maxLength;
    builders = new StringBuilder[fields.length];
    for (int i = 0; i < builders.length; i++) {
      builders[i] = new StringBuilder();
    }
  }

  @Override
  public void stringField(FieldInfo fieldInfo, String value) throws IOException {
    assert currentField >= 0;
    StringBuilder builder = builders[currentField];
    // separator between multiple values, but never as the character that exceeds the limit
    if (builder.length() > 0 && builder.length() < maxLength) {
      builder.append(valueSeparators[currentField]);
    }
    if (builder.length() + value.length() > maxLength) {
      // keep only as much of this value as still fits
      builder.append(value, 0, maxLength - builder.length());
    } else {
      builder.append(value);
    }
  }

  @Override
  public Status needsField(FieldInfo fieldInfo) throws IOException {
    currentField = Arrays.binarySearch(fields, fieldInfo.name);
    if (currentField < 0) {
      return Status.NO;
    } else if (builders[currentField].length() > maxLength) {
      // this field's buffer is already full; with a single field we can stop the whole doc
      return fields.length == 1 ? Status.STOP : Status.NO;
    }
    return Status.YES;
  }

  String getValue(int i) {
    return builders[i].toString();
  }

  void reset() {
    currentField = -1;
    for (int i = 0; i < fields.length; i++) {
      builders[i].setLength(0);
    }
  }
}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.pinot.plugin.inputformat.orc;

import com.google.common.collect.Sets;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.MapColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.OrcFile;
import org.apache.orc.TypeDescription;
import org.apache.orc.Writer;
import org.apache.pinot.spi.data.readers.AbstractRecordExtractorTest;
import org.apache.pinot.spi.data.readers.RecordReader;
import org.apache.pinot.spi.utils.StringUtils;


/**
 * Tests record extraction via the {@link ORCRecordReader} against a schema covering
 * primitive, list, map, and (nested) struct field types.
 */
public
class ORCRecordExtractorTest extends AbstractRecordExtractorTest {
  // ORC file written by createInputFile() and read back by the record reader under test.
  private final File _dataFile = new File(_tempDir, "events.orc");

  /**
   * Create an ORCRecordReader
   */
  @Override
  protected RecordReader createRecordReader(Set<String> fieldsToRead)
      throws IOException {
    ORCRecordReader orcRecordReader = new ORCRecordReader();
    orcRecordReader.init(_dataFile, fieldsToRead, null);
    return orcRecordReader;
  }

  /**
   * Create an ORC input file using the input records.
   * Writes one VectorizedRowBatch holding all records; nullable columns have
   * noNulls = false and use isNull[i] to mark missing values; list/map columns
   * track their flattened children via offsets/lengths/childCount.
   */
  @Override
  protected void createInputFile()
      throws IOException {
    TypeDescription schema = TypeDescription.fromString(
        "struct<" + "userID:int," + "firstName:string," + "bids:array<int>," + "cost:double," + "timestamp:bigint,"
            + "simpleStruct:struct<structString:string,structLong:bigint,structDouble:double>,"
            + "complexStruct:struct<structString:string,nestedStruct:struct<nestedStructInt:int,nestedStructLong:bigint>>,"
            + "complexList:array<struct<complexListInt:int,complexListDouble:double>>,"
            + "simpleMap:map<string,int>,"
            + "complexMap:map<string,struct<doubleField:double,stringField:string>>" + ">");

    int numRecords = _inputRecords.size();
    VectorizedRowBatch rowBatch = schema.createRowBatch(numRecords);
    LongColumnVector userIdVector = (LongColumnVector) rowBatch.cols[0];
    userIdVector.noNulls = false;
    BytesColumnVector firstNameVector = (BytesColumnVector) rowBatch.cols[1];
    firstNameVector.noNulls = false;

    // simple list containing long
    ListColumnVector bidsVector = (ListColumnVector) rowBatch.cols[2];
    bidsVector.noNulls = false;
    LongColumnVector bidsElementVector = (LongColumnVector) bidsVector.child;
    // ensureSize calls are sized to the total flattened child counts of the fixture data below
    bidsElementVector.ensureSize(6, false);

    DoubleColumnVector costVector = (DoubleColumnVector) rowBatch.cols[3];
    LongColumnVector timestampVector = (LongColumnVector) rowBatch.cols[4];

    // simple struct - string, long, and double
    StructColumnVector simpleStructVector = (StructColumnVector) rowBatch.cols[5];
    simpleStructVector.noNulls = false;
    BytesColumnVector simpleStructBytesVector = (BytesColumnVector) simpleStructVector.fields[0];
    LongColumnVector simpleStructLongVector = (LongColumnVector) simpleStructVector.fields[1];
    DoubleColumnVector simpleStructDoubleVector = (DoubleColumnVector) simpleStructVector.fields[2];

    // complex struct - string and struct containing int and long
    StructColumnVector complexStructVector = (StructColumnVector) rowBatch.cols[6];
    complexStructVector.noNulls = false;
    BytesColumnVector complexStructBytesVector = (BytesColumnVector) complexStructVector.fields[0];
    StructColumnVector complexStructInnerVector = (StructColumnVector) complexStructVector.fields[1];
    LongColumnVector complexStructIntVector = (LongColumnVector) complexStructInnerVector.fields[0];
    LongColumnVector complexStructLongVector = (LongColumnVector) complexStructInnerVector.fields[1];

    // complex list elements - each element is a struct containing int and long
    ListColumnVector complexListVector = (ListColumnVector) rowBatch.cols[7];
    complexListVector.noNulls = false;
    StructColumnVector complexListElementVector = (StructColumnVector) complexListVector.child;
    LongColumnVector complexListIntVector = (LongColumnVector) complexListElementVector.fields[0];
    complexListIntVector.ensureSize(5, false);
    DoubleColumnVector complexListDoubleVector = (DoubleColumnVector) complexListElementVector.fields[1];
    complexListDoubleVector.ensureSize(5, false);

    // simple map - string key and value long
    MapColumnVector simpleMapVector = (MapColumnVector) rowBatch.cols[8];
    simpleMapVector.noNulls = false;
    BytesColumnVector simpleMapKeysVector = (BytesColumnVector) simpleMapVector.keys;
    LongColumnVector simpleMapValuesVector = (LongColumnVector) simpleMapVector.values;
    simpleMapKeysVector.ensureSize(6, false);
    simpleMapValuesVector.ensureSize(6, false);

    // complex map - string key and struct value containing double and string
    MapColumnVector complexMapVector = (MapColumnVector) rowBatch.cols[9];
    complexMapVector.noNulls = false;
    BytesColumnVector complexMapKeysVector = (BytesColumnVector) complexMapVector.keys;
    complexMapKeysVector.ensureSize(6, false);
    StructColumnVector complexMapValuesVector = (StructColumnVector) complexMapVector.values;
    DoubleColumnVector complexMapValueDoubleVector = (DoubleColumnVector) complexMapValuesVector.fields[0];
    complexMapValueDoubleVector.ensureSize(6, false);
    BytesColumnVector complexMapValueBytesVector = (BytesColumnVector) complexMapValuesVector.fields[1];
    complexMapValueBytesVector.ensureSize(6, false);

    Writer writer = OrcFile.createWriter(new Path(_dataFile.getAbsolutePath()),
        OrcFile.writerOptions(new Configuration()).setSchema(schema));
    for (int i = 0; i < numRecords; i++) {
      Map<String, Object> record = _inputRecords.get(i);

      Integer userId = (Integer) record.get("userID");
      if (userId != null) {
        userIdVector.vector[i] = userId;
      } else {
        userIdVector.isNull[i] = true;
      }

      String firstName = (String) record.get("firstName");
      if (firstName != null) {
        firstNameVector.setVal(i, StringUtils.encodeUtf8(firstName));
      } else {
        firstNameVector.isNull[i] = true;
      }

      List<Integer> bids = (List<Integer>) record.get("bids");
      if (bids != null) {
        // record this row's slice of the flattened child vector, then append the elements
        bidsVector.offsets[i] = bidsVector.childCount;
        bidsVector.lengths[i] = bids.size();
        for (int bid : bids) {
          bidsElementVector.vector[bidsVector.childCount++] = bid;
        }
      } else {
        bidsVector.isNull[i] = true;
      }

      // cost and timestamp are non-nullable in the fixture data
      costVector.vector[i] = (double) record.get("cost");
      timestampVector.vector[i] = (long) record.get("timestamp");

      // simple map with string key and int value
      Map<String, Integer> simpleMap = (Map<String, Integer>) record.get("simpleMap");
      if (simpleMap != null) {
        simpleMapVector.offsets[i] = simpleMapVector.childCount;
        simpleMapVector.lengths[i] = simpleMap.size();
        for (Map.Entry<String, Integer> entry : simpleMap.entrySet()) {
          simpleMapKeysVector.setVal(simpleMapVector.childCount, StringUtils.encodeUtf8(entry.getKey()));
          simpleMapValuesVector.vector[simpleMapVector.childCount] = entry.getValue();
          simpleMapVector.childCount++;
        }
      } else {
        simpleMapVector.isNull[i] = true;
      }

      // simple struct with long and double values
      Map<String, Object> struct1 = (Map<String, Object>) record.get("simpleStruct");
      if (struct1 != null) {
        simpleStructBytesVector.setVal(i, StringUtils.encodeUtf8((String) struct1.get("structString")));
        simpleStructLongVector.vector[i] = (long) struct1.get("structLong");
        simpleStructDoubleVector.vector[i] = (double) struct1.get("structDouble");
      } else {
        simpleStructVector.isNull[i] = true;
      }

      // complex struct - string, struct containing int and long
      Map<String, Object> complexStruct = (Map<String, Object>) record.get("complexStruct");
      if (complexStruct != null) {
        complexStructBytesVector.setVal(i, StringUtils.encodeUtf8((String) complexStruct.get("structString")));
        // Set nested struct vector
        complexStructIntVector.vector[i] =
            (Integer) ((Map<String, Object>) complexStruct.get("nestedStruct")).get("nestedStructInt");
        complexStructLongVector.vector[i] =
            (Long) ((Map<String, Object>) complexStruct.get("nestedStruct")).get("nestedStructLong");
      } else {
        complexStructVector.isNull[i] = true;
      }

      // complex list elements
      List<Map<String, Object>> complexList = (List<Map<String, Object>>) record.get("complexList");
      if (complexList != null) {
        complexListVector.offsets[i] = complexListVector.childCount;
        complexListVector.lengths[i] = complexList.size();
        for (Map<String, Object> complexElement : complexList) {
          complexListIntVector.vector[complexListVector.childCount] = (int) complexElement.get("complexListInt");
          complexListDoubleVector.vector[complexListVector.childCount] =
              (double) complexElement.get("complexListDouble");
          complexListVector.childCount++;
        }
      } else {
        complexListVector.isNull[i] = true;
      }

      // complex map with key string and struct. struct contains double and string.
      Map<String, Map<String, Object>> complexMap = (Map<String, Map<String, Object>>) record.get("complexMap");
      if (complexMap != null) {
        complexMapVector.offsets[i] = complexMapVector.childCount;
        complexMapVector.lengths[i] = complexMap.size();
        for (Map.Entry<String, Map<String, Object>> entry : complexMap.entrySet()) {
          complexMapKeysVector.setVal(complexMapVector.childCount, StringUtils.encodeUtf8(entry.getKey()));
          complexMapValueDoubleVector.vector[complexMapVector.childCount] =
              (double) entry.getValue().get("doubleField");
          complexMapValueBytesVector.setVal(complexMapVector.childCount,
              StringUtils.encodeUtf8((String) entry.getValue().get("stringField")));
          complexMapVector.childCount++;
        }
      } else {
        complexMapVector.isNull[i] = true;
      }

      rowBatch.size++;
    }
    writer.addRowBatch(rowBatch);
    rowBatch.reset();
    writer.close();
  }

  /**
   * Builds the 4 fixture records; nulls are sprinkled across columns to exercise
   * null handling for every nullable field type.
   */
  @Override
  protected List<Map<String, Object>> getInputRecords() {
    // simple struct - contains a string, long and double array
    Map[] simpleStructs = new Map[]{
        null, createStructInput("structString", "abc", "structLong", 1000L, "structDouble", 5.99999),
        createStructInput("structString", "def", "structLong", 2000L, "structDouble", 6.99999),
        createStructInput("structString", "ghi", "structLong", 3000L, "structDouble", 7.99999)
    };

    // complex struct - contains a string and nested struct of int and long
    Map[] complexStructs = new Map[]{
        createStructInput("structString", "abc", "nestedStruct",
            createStructInput("nestedStructInt", 4, "nestedStructLong", 4000L)),
        createStructInput("structString", "def", "nestedStruct",
            createStructInput("nestedStructInt", 5, "nestedStructLong", 5000L)),
        null,
        createStructInput("structString", "ghi", "nestedStruct",
            createStructInput("nestedStructInt", 6, "nestedStructLong", 6000L))
    };

    // complex list element - each element contains a struct of int and double
    List[] complexLists = new List[]{
        Arrays.asList(createStructInput("complexListInt", 10, "complexListDouble", 100.0),
            createStructInput("complexListInt", 20, "complexListDouble", 200.0)),
        null,
        Collections.singletonList(createStructInput("complexListInt", 30, "complexListDouble", 300.0)),
        Arrays.asList(createStructInput("complexListInt", 40, "complexListDouble", 400.0),
            createStructInput("complexListInt", 50, "complexListDouble", 500.0))
    };

    // single value integer
    Integer[] userID = new Integer[]{1, 2, null, 4};

    // single value string
    String[] firstName = new String[]{null, "John", "Ringo", "George"};

    // collection of integers
    List[] bids = new List[]{Arrays.asList(10, 20), null, Collections.singletonList(1), Arrays.asList(1, 2, 3)};

    // single value double
    double[] cost = new double[]{10000, 20000, 30000, 25000};

    // single value long
    long[] timestamp = new long[]{1570863600000L, 1571036400000L, 1571900400000L, 1574000000000L};

    // simple map with string keys and integer values
    Map[] simpleMaps = new Map[]{
        createStructInput("key1", 10, "key2", 20), null, createStructInput("key3", 30),
        createStructInput("key4", 40, "key5", 50)
    };

    // complex map with struct values - struct contains double and string
    Map[] complexMap = new Map[]{
        createStructInput("key1", createStructInput("doubleField", 2.0, "stringField", "abc")),
        null,
        createStructInput("key1", createStructInput("doubleField", 3.0, "stringField", "xyz"),
            "key2", createStructInput("doubleField", 4.0, "stringField", "abc123")),
        createStructInput("key1", createStructInput("doubleField", 3.0, "stringField", "xyz"),
            "key2", createStructInput("doubleField", 4.0, "stringField", "abc123"),
            "key3", createStructInput("doubleField", 4.0, "stringField", "asdf"))
    };

    List<Map<String, Object>> inputRecords = new ArrayList<>(4);
    for (int i = 0; i < 4; i++) {
      Map<String, Object> record = new HashMap<>();
      record.put("userID", userID[i]);
      record.put("firstName", firstName[i]);
      record.put("bids", bids[i]);
      record.put("cost", cost[i]);
      record.put("timestamp", timestamp[i]);
      record.put("simpleStruct", simpleStructs[i]);
      record.put("complexStruct", complexStructs[i]);
      record.put("complexList", complexLists[i]);
      record.put("simpleMap", simpleMaps[i]);
      record.put("complexMap", complexMap[i]);
      inputRecords.add(record);
    }
    return inputRecords;
  }

  /** Names of all fields present in the ORC schema above. */
  @Override
  protected Set<String> getSourceFields() {
    return Sets.newHashSet("userID", "firstName", "bids", "cost", "timestamp", "simpleMap", "simpleStruct",
        "complexStruct", "complexList", "complexMap");
  }

  // Convenience builders for 1-, 2-, and 3-entry insertion-keyed maps used as struct/map fixtures.
  private Map<String, Object> createStructInput(String fieldName1, Object value1) {
    Map<String, Object> struct = new HashMap<>(1);
    struct.put(fieldName1, value1);
    return struct;
  }

  private Map<String, Object> createStructInput(String fieldName1, Object value1, String fieldName2, Object value2) {
    Map<String, Object> struct = new HashMap<>(2);
    struct.put(fieldName1, value1);
    struct.put(fieldName2, value2);
    return struct;
  }

  private Map<String, Object> createStructInput(String fieldName1, Object value1, String fieldName2, Object value2,
      String fieldName3, Object value3) {
    Map<String, Object> struct = new HashMap<>(3);
    struct.put(fieldName1, value1);
    struct.put(fieldName2, value2);
    struct.put(fieldName3, value3);
    return struct;
  }
}
/* * ==================================================================== * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
*/ package org.apache.http.impl.client; import java.io.ByteArrayInputStream; import java.io.IOException; import java.net.ConnectException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import junit.framework.Test; import junit.framework.TestSuite; import org.apache.http.Header; import org.apache.http.HttpClientConnection; import org.apache.http.HttpEntity; import org.apache.http.HttpException; import org.apache.http.HttpHost; import org.apache.http.HttpRequest; import org.apache.http.HttpRequestInterceptor; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.ProtocolVersion; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.HttpRequestRetryHandler; import org.apache.http.client.NonRepeatableRequestException; import org.apache.http.client.methods.AbortableHttpRequest; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.params.ClientPNames; import org.apache.http.conn.ClientConnectionManager; import org.apache.http.conn.ClientConnectionRequest; import org.apache.http.conn.ConnectionPoolTimeoutException; import org.apache.http.conn.ConnectionReleaseTrigger; import org.apache.http.conn.ManagedClientConnection; import org.apache.http.conn.routing.HttpRoute; import org.apache.http.conn.scheme.PlainSocketFactory; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.entity.InputStreamEntity; import org.apache.http.entity.StringEntity; import org.apache.http.impl.conn.ClientConnAdapterMockup; import org.apache.http.impl.conn.SingleClientConnManager; import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager; import org.apache.http.localserver.BasicServerTestBase; import org.apache.http.localserver.LocalTestServer; import org.apache.http.message.BasicHeader; import 
org.apache.http.mockup.SocketFactoryMockup; import org.apache.http.params.BasicHttpParams; import org.apache.http.params.HttpParams; import org.apache.http.protocol.BasicHttpContext; import org.apache.http.protocol.ExecutionContext; import org.apache.http.protocol.HttpContext; import org.apache.http.protocol.HttpRequestExecutor; import org.apache.http.protocol.HttpRequestHandler; /** * Unit tests for {@link DefaultRequestDirector} */ public class TestDefaultClientRequestDirector extends BasicServerTestBase { public TestDefaultClientRequestDirector(final String testName) { super(testName); } public static void main(String args[]) { String[] testCaseName = { TestDefaultClientRequestDirector.class.getName() }; junit.textui.TestRunner.main(testCaseName); } public static Test suite() { return new TestSuite(TestDefaultClientRequestDirector.class); } @Override protected void setUp() throws Exception { localServer = new LocalTestServer(null, null); localServer.registerDefaultHandlers(); localServer.start(); } /** * Tests that if abort is called on an {@link AbortableHttpRequest} while * {@link DefaultRequestDirector} is allocating a connection, that the * connection is properly aborted. 
     */
    public void testAbortInAllocate() throws Exception {
        // connLatch: released by the conn manager when allocation begins.
        // awaitLatch: never released here, so allocation blocks until aborted.
        CountDownLatch connLatch = new CountDownLatch(1);
        CountDownLatch awaitLatch = new CountDownLatch(1);
        final ConMan conMan = new ConMan(connLatch, awaitLatch);
        final AtomicReference<Throwable> throwableRef = new AtomicReference<Throwable>();
        final CountDownLatch getLatch = new CountDownLatch(1);
        final DefaultHttpClient client = new DefaultHttpClient(conMan, new BasicHttpParams());
        final HttpContext context = new BasicHttpContext();

        final HttpGet httpget = new HttpGet("http://www.example.com/a");

        // Execute on a background thread; it will park inside connection allocation.
        new Thread(new Runnable() {
            public void run() {
                try {
                    client.execute(httpget, context);
                } catch(Throwable t) {
                    throwableRef.set(t);
                } finally {
                    getLatch.countDown();
                }
            }
        }).start();

        assertTrue("should have tried to get a connection", connLatch.await(1, TimeUnit.SECONDS));

        // Abort while the request thread is blocked in allocation; the abort is expected
        // to interrupt that thread, surfacing as IOException caused by InterruptedException.
        httpget.abort();

        assertTrue("should have finished get request", getLatch.await(1, TimeUnit.SECONDS));

        assertTrue("should be instanceof IOException, was: " + throwableRef.get(),
                throwableRef.get() instanceof IOException);
        assertTrue("cause should be InterruptedException, was: " + throwableRef.get().getCause(),
                throwableRef.get().getCause() instanceof InterruptedException);
    }

    /**
     * Tests that an abort called after the connection has been retrieved
     * but before a release trigger is set does still abort the request.
     */
    public void testAbortAfterAllocateBeforeRequest() throws Exception {
        this.localServer.register("*", new BasicService());

        // releaseLatch holds the request (inside CustomGet) between connection
        // allocation and the point where a release trigger would be installed.
        CountDownLatch releaseLatch = new CountDownLatch(1);
        SchemeRegistry registry = new SchemeRegistry();
        registry.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80));

        SingleClientConnManager conMan = new SingleClientConnManager(new BasicHttpParams(), registry);
        final AtomicReference<Throwable> throwableRef = new AtomicReference<Throwable>();
        final CountDownLatch getLatch = new CountDownLatch(1);
        final DefaultHttpClient client = new DefaultHttpClient(conMan, new BasicHttpParams());
        final HttpContext context = new BasicHttpContext();

        final HttpGet httpget = new CustomGet("a", releaseLatch);

        new Thread(new Runnable() {
            public void run() {
                try {
                    client.execute(getServerHttp(), httpget, context);
                } catch(Throwable t) {
                    throwableRef.set(t);
                } finally {
                    getLatch.countDown();
                }
            }
        }).start();

        Thread.sleep(100); // Give it a little time to proceed to release...

        // Abort in the window before the release trigger is set, then let the request continue.
        httpget.abort();

        releaseLatch.countDown();

        assertTrue("should have finished get request", getLatch.await(1, TimeUnit.SECONDS));

        assertTrue("should be instanceof IOException, was: " + throwableRef.get(),
                throwableRef.get() instanceof IOException);
    }

    /**
     * Tests that an abort called completely before execute
     * still aborts the request.
     */
    public void testAbortBeforeExecute() throws Exception {
        this.localServer.register("*", new BasicService());

        SchemeRegistry registry = new SchemeRegistry();
        registry.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80));

        SingleClientConnManager conMan = new SingleClientConnManager(new BasicHttpParams(), registry);
        final AtomicReference<Throwable> throwableRef = new AtomicReference<Throwable>();
        final CountDownLatch getLatch = new CountDownLatch(1);
        // startLatch ensures execute() is not entered until after abort() below.
        final CountDownLatch startLatch = new CountDownLatch(1);
        final DefaultHttpClient client = new DefaultHttpClient(conMan, new BasicHttpParams());
        final HttpContext context = new BasicHttpContext();

        final HttpGet httpget = new HttpGet("a");

        new Thread(new Runnable() {
            public void run() {
                try {
                    try {
                        if(!startLatch.await(1, TimeUnit.SECONDS))
                            throw new RuntimeException("Took too long to start!");
                    } catch(InterruptedException interrupted) {
                        throw new RuntimeException("Never started!", interrupted);
                    }
                    client.execute(getServerHttp(), httpget, context);
                } catch(Throwable t) {
                    throwableRef.set(t);
                } finally {
                    getLatch.countDown();
                }
            }
        }).start();

        // Abort strictly before execute() runs, then release the request thread.
        httpget.abort();
        startLatch.countDown();

        assertTrue("should have finished get request", getLatch.await(1, TimeUnit.SECONDS));

        assertTrue("should be instanceof IOException, was: " + throwableRef.get(),
                throwableRef.get() instanceof IOException);
    }

    /**
     * Tests that an abort called after a redirect has found a new host
     * still aborts in the correct place (while trying to get the new
     * host's route, not while doing the subsequent request).
     */
    public void testAbortAfterRedirectedRoute() throws Exception {
        final int port = this.localServer.getServicePort();
        // Every request to the test server is answered with a redirect to "localhost",
        // which ConnMan4 treats specially: allocation for that route blocks on awaitLatch.
        this.localServer.register("*", new BasicRedirectService(port));

        SchemeRegistry registry = new SchemeRegistry();
        registry.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80));

        CountDownLatch connLatch = new CountDownLatch(1);
        CountDownLatch awaitLatch = new CountDownLatch(1);
        ConnMan4 conMan = new ConnMan4(new BasicHttpParams(), registry, connLatch, awaitLatch);
        final AtomicReference<Throwable> throwableRef = new AtomicReference<Throwable>();
        final CountDownLatch getLatch = new CountDownLatch(1);
        final DefaultHttpClient client = new DefaultHttpClient(conMan, new BasicHttpParams());
        final HttpContext context = new BasicHttpContext();

        final HttpGet httpget = new HttpGet("a");

        new Thread(new Runnable() {
            public void run() {
                try {
                    // Initial request goes to 127.0.0.1 so the redirect target
                    // ("localhost") is seen as a different route by ConnMan4.
                    HttpHost host = new HttpHost("127.0.0.1", port);
                    client.execute(host, httpget, context);
                } catch(Throwable t) {
                    throwableRef.set(t);
                } finally {
                    getLatch.countDown();
                }
            }
        }).start();

        // Wait until the redirected route's connection allocation has started, then abort.
        assertTrue("should have tried to get a connection", connLatch.await(1, TimeUnit.SECONDS));

        httpget.abort();

        assertTrue("should have finished get request", getLatch.await(1, TimeUnit.SECONDS));

        assertTrue("should be instanceof IOException, was: " + throwableRef.get(),
                throwableRef.get() instanceof IOException);
        assertTrue("cause should be InterruptedException, was: " + throwableRef.get().getCause(),
                throwableRef.get().getCause() instanceof InterruptedException);
    }

    /**
     * Tests that if a socket fails to connect, the allocated connection is
     * properly released back to the connection manager.
*/ public void testSocketConnectFailureReleasesConnection() throws Exception { final ConnMan2 conMan = new ConnMan2(); final DefaultHttpClient client = new DefaultHttpClient(conMan, new BasicHttpParams()); final HttpContext context = new BasicHttpContext(); final HttpGet httpget = new HttpGet("http://www.example.com/a"); try { client.execute(httpget, context); fail("expected IOException"); } catch(IOException expected) {} assertNotNull(conMan.allocatedConnection); assertSame(conMan.allocatedConnection, conMan.releasedConnection); } public void testRequestFailureReleasesConnection() throws Exception { this.localServer.register("*", new ThrowingService()); SchemeRegistry registry = new SchemeRegistry(); registry.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80)); ConnMan3 conMan = new ConnMan3(new BasicHttpParams(), registry); DefaultHttpClient client = new DefaultHttpClient(conMan, new BasicHttpParams()); HttpGet httpget = new HttpGet("/a"); try { client.execute(getServerHttp(), httpget); fail("expected IOException"); } catch (IOException expected) {} assertNotNull(conMan.allocatedConnection); assertSame(conMan.allocatedConnection, conMan.releasedConnection); } private static class ThrowingService implements HttpRequestHandler { public void handle( final HttpRequest request, final HttpResponse response, final HttpContext context) throws HttpException, IOException { throw new IOException(); } } private static class BasicService implements HttpRequestHandler { public void handle(final HttpRequest request, final HttpResponse response, final HttpContext context) throws HttpException, IOException { response.setStatusCode(200); response.setEntity(new StringEntity("Hello World")); } } private class BasicRedirectService implements HttpRequestHandler { private int statuscode = HttpStatus.SC_SEE_OTHER; private int port; public BasicRedirectService(int port) { this.port = port; } public void handle(final HttpRequest request, final HttpResponse response, 
final HttpContext context) throws HttpException, IOException { ProtocolVersion ver = request.getRequestLine().getProtocolVersion(); response.setStatusLine(ver, this.statuscode); response.addHeader(new BasicHeader("Location", "http://localhost:" + this.port + "/newlocation/")); response.addHeader(new BasicHeader("Connection", "close")); } } private static class ConnMan4 extends ThreadSafeClientConnManager { private final CountDownLatch connLatch; private final CountDownLatch awaitLatch; public ConnMan4(HttpParams params, SchemeRegistry schreg, CountDownLatch connLatch, CountDownLatch awaitLatch) { super(params, schreg); this.connLatch = connLatch; this.awaitLatch = awaitLatch; } @Override public ClientConnectionRequest requestConnection(HttpRoute route, Object state) { // If this is the redirect route, stub the return value // so-as to pretend the host is waiting on a slot... if(route.getTargetHost().getHostName().equals("localhost")) { final Thread currentThread = Thread.currentThread(); return new ClientConnectionRequest() { public void abortRequest() { currentThread.interrupt(); } public ManagedClientConnection getConnection( long timeout, TimeUnit tunit) throws InterruptedException, ConnectionPoolTimeoutException { connLatch.countDown(); // notify waiter that we're getting a connection // zero usually means sleep forever, but CountDownLatch doesn't interpret it that way. 
if(timeout == 0) timeout = Integer.MAX_VALUE; if(!awaitLatch.await(timeout, tunit)) throw new ConnectionPoolTimeoutException(); return new ClientConnAdapterMockup(ConnMan4.this); } }; } else { return super.requestConnection(route, state); } } } private static class ConnMan3 extends SingleClientConnManager { private ManagedClientConnection allocatedConnection; private ManagedClientConnection releasedConnection; public ConnMan3(HttpParams params, SchemeRegistry schreg) { super(params, schreg); } @Override public ManagedClientConnection getConnection(HttpRoute route, Object state) { allocatedConnection = super.getConnection(route, state); return allocatedConnection; } @Override public void releaseConnection(ManagedClientConnection conn, long validDuration, TimeUnit timeUnit) { releasedConnection = conn; super.releaseConnection(conn, validDuration, timeUnit); } } static class ConnMan2 implements ClientConnectionManager { private ManagedClientConnection allocatedConnection; private ManagedClientConnection releasedConnection; public ConnMan2() { } public void closeIdleConnections(long idletime, TimeUnit tunit) { throw new UnsupportedOperationException("just a mockup"); } public void closeExpiredConnections() { throw new UnsupportedOperationException("just a mockup"); } public ManagedClientConnection getConnection(HttpRoute route) { throw new UnsupportedOperationException("just a mockup"); } public ManagedClientConnection getConnection(HttpRoute route, long timeout, TimeUnit tunit) { throw new UnsupportedOperationException("just a mockup"); } public ClientConnectionRequest requestConnection( final HttpRoute route, final Object state) { return new ClientConnectionRequest() { public void abortRequest() { throw new UnsupportedOperationException("just a mockup"); } public ManagedClientConnection getConnection( long timeout, TimeUnit unit) throws InterruptedException, ConnectionPoolTimeoutException { allocatedConnection = new ClientConnAdapterMockup(ConnMan2.this) { @Override 
public void open(HttpRoute route, HttpContext context, HttpParams params) throws IOException { throw new ConnectException(); } }; return allocatedConnection; } }; } public HttpParams getParams() { throw new UnsupportedOperationException("just a mockup"); } public SchemeRegistry getSchemeRegistry() { SchemeRegistry registry = new SchemeRegistry(); registry.register(new Scheme("http", new SocketFactoryMockup(null), 80)); return registry; } public void releaseConnection(ManagedClientConnection conn, long validDuration, TimeUnit timeUnit) { this.releasedConnection = conn; } public void shutdown() { throw new UnsupportedOperationException("just a mockup"); } } static class ConMan implements ClientConnectionManager { private final CountDownLatch connLatch; private final CountDownLatch awaitLatch; public ConMan(CountDownLatch connLatch, CountDownLatch awaitLatch) { this.connLatch = connLatch; this.awaitLatch = awaitLatch; } public void closeIdleConnections(long idletime, TimeUnit tunit) { throw new UnsupportedOperationException("just a mockup"); } public void closeExpiredConnections() { throw new UnsupportedOperationException("just a mockup"); } public ManagedClientConnection getConnection(HttpRoute route) { throw new UnsupportedOperationException("just a mockup"); } public ManagedClientConnection getConnection(HttpRoute route, long timeout, TimeUnit tunit) { throw new UnsupportedOperationException("just a mockup"); } public ClientConnectionRequest requestConnection( final HttpRoute route, final Object state) { final Thread currentThread = Thread.currentThread(); return new ClientConnectionRequest() { public void abortRequest() { currentThread.interrupt(); } public ManagedClientConnection getConnection( long timeout, TimeUnit tunit) throws InterruptedException, ConnectionPoolTimeoutException { connLatch.countDown(); // notify waiter that we're getting a connection // zero usually means sleep forever, but CountDownLatch doesn't interpret it that way. 
if(timeout == 0) timeout = Integer.MAX_VALUE; if(!awaitLatch.await(timeout, tunit)) throw new ConnectionPoolTimeoutException(); return new ClientConnAdapterMockup(ConMan.this); } }; } public HttpParams getParams() { throw new UnsupportedOperationException("just a mockup"); } public SchemeRegistry getSchemeRegistry() { SchemeRegistry registry = new SchemeRegistry(); registry.register(new Scheme("http", new SocketFactoryMockup(null), 80)); return registry; } public void releaseConnection(ManagedClientConnection conn, long validDuration, TimeUnit timeUnit) { throw new UnsupportedOperationException("just a mockup"); } public void shutdown() { throw new UnsupportedOperationException("just a mockup"); } } private static class CustomGet extends HttpGet { private final CountDownLatch releaseTriggerLatch; public CustomGet(String uri, CountDownLatch releaseTriggerLatch) { super(uri); this.releaseTriggerLatch = releaseTriggerLatch; } @Override public void setReleaseTrigger(ConnectionReleaseTrigger releaseTrigger) throws IOException { try { if(!releaseTriggerLatch.await(1, TimeUnit.SECONDS)) throw new RuntimeException("Waited too long..."); } catch(InterruptedException ie) { throw new RuntimeException(ie); } super.setReleaseTrigger(releaseTrigger); } } private class SimpleService implements HttpRequestHandler { public SimpleService() { super(); } public void handle( final HttpRequest request, final HttpResponse response, final HttpContext context) throws HttpException, IOException { response.setStatusCode(HttpStatus.SC_OK); StringEntity entity = new StringEntity("Whatever"); response.setEntity(entity); } } public void testDefaultHostAtClientLevel() throws Exception { int port = this.localServer.getServicePort(); this.localServer.register("*", new SimpleService()); HttpHost target = new HttpHost("localhost", port); DefaultHttpClient client = new DefaultHttpClient(); client.getParams().setParameter(ClientPNames.DEFAULT_HOST, target); String s = "/path"; HttpGet httpget = new 
HttpGet(s); HttpResponse response = client.execute(httpget); HttpEntity e = response.getEntity(); if (e != null) { e.consumeContent(); } assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); } public void testDefaultHostAtRequestLevel() throws Exception { int port = this.localServer.getServicePort(); this.localServer.register("*", new SimpleService()); HttpHost target1 = new HttpHost("whatever", 80); HttpHost target2 = new HttpHost("localhost", port); DefaultHttpClient client = new DefaultHttpClient(); client.getParams().setParameter(ClientPNames.DEFAULT_HOST, target1); String s = "/path"; HttpGet httpget = new HttpGet(s); httpget.getParams().setParameter(ClientPNames.DEFAULT_HOST, target2); HttpResponse response = client.execute(httpget); HttpEntity e = response.getEntity(); if (e != null) { e.consumeContent(); } assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); } private static class FaultyHttpRequestExecutor extends HttpRequestExecutor { private static final String MARKER = "marker"; private final String failureMsg; public FaultyHttpRequestExecutor(String failureMsg) { this.failureMsg = failureMsg; } @Override public HttpResponse execute( final HttpRequest request, final HttpClientConnection conn, final HttpContext context) throws IOException, HttpException { HttpResponse response = super.execute(request, conn, context); Object marker = context.getAttribute(MARKER); if (marker == null) { context.setAttribute(MARKER, Boolean.TRUE); throw new IOException(failureMsg); } return response; } } private static class FaultyHttpClient extends DefaultHttpClient { private final String failureMsg; public FaultyHttpClient() { this("Oppsie"); } public FaultyHttpClient(String failureMsg) { this.failureMsg = failureMsg; } @Override protected HttpRequestExecutor createRequestExecutor() { return new FaultyHttpRequestExecutor(failureMsg); } } public void testAutoGeneratedHeaders() throws Exception { int port = 
this.localServer.getServicePort(); this.localServer.register("*", new SimpleService()); FaultyHttpClient client = new FaultyHttpClient(); client.addRequestInterceptor(new HttpRequestInterceptor() { public void process( final HttpRequest request, final HttpContext context) throws HttpException, IOException { request.addHeader("my-header", "stuff"); } }) ; client.setHttpRequestRetryHandler(new HttpRequestRetryHandler() { public boolean retryRequest( final IOException exception, int executionCount, final HttpContext context) { return true; } }); HttpContext context = new BasicHttpContext(); String s = "http://localhost:" + port; HttpGet httpget = new HttpGet(s); HttpResponse response = client.execute(getServerHttp(), httpget, context); HttpEntity e = response.getEntity(); if (e != null) { e.consumeContent(); } HttpRequest reqWrapper = (HttpRequest) context.getAttribute( ExecutionContext.HTTP_REQUEST); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); assertTrue(reqWrapper instanceof RequestWrapper); Header[] myheaders = reqWrapper.getHeaders("my-header"); assertNotNull(myheaders); assertEquals(1, myheaders.length); } public void testNonRepeatableEntity() throws Exception { int port = this.localServer.getServicePort(); this.localServer.register("*", new SimpleService()); String failureMsg = "a message showing that this failed"; FaultyHttpClient client = new FaultyHttpClient(failureMsg); client.setHttpRequestRetryHandler(new HttpRequestRetryHandler() { public boolean retryRequest( final IOException exception, int executionCount, final HttpContext context) { return true; } }); HttpContext context = new BasicHttpContext(); String s = "http://localhost:" + port; HttpPost httppost = new HttpPost(s); httppost.setEntity(new InputStreamEntity( new ByteArrayInputStream( new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 } ), -1)); try { client.execute(getServerHttp(), httppost, context); fail("ClientProtocolException should have been thrown"); } catch 
(ClientProtocolException ex) { assertTrue(ex.getCause() instanceof NonRepeatableRequestException); NonRepeatableRequestException nonRepeat = (NonRepeatableRequestException)ex.getCause(); assertTrue(nonRepeat.getCause() instanceof IOException); assertEquals(failureMsg, nonRepeat.getCause().getMessage()); } } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.indices.memory;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.test.ESSingleNodeTestCase;

import java.util.*;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.hamcrest.Matchers.equalTo;

/**
 * Unit tests for {@code IndexingMemoryController}'s division of the indexing
 * and translog buffer budgets across active shards, driven through a mock
 * controller with a fake clock and in-memory per-shard buffer bookkeeping.
 */
public class IndexingMemoryControllerTests extends ESSingleNodeTestCase {

    /**
     * Test double for {@code IndexingMemoryController}: overrides time and
     * shard plumbing so buffer-allocation decisions can be observed directly
     * in maps, without real shards going active/inactive on wall-clock time.
     */
    static class MockController extends IndexingMemoryController {

        // Sentinel recorded in the buffer maps when a shard is marked idle.
        final static ByteSizeValue INACTIVE = new ByteSizeValue(-1);

        // Last buffer sizes the controller pushed to each shard.
        final Map<IndexShard, ByteSizeValue> indexingBuffers = new HashMap<>();
        final Map<IndexShard, ByteSizeValue> translogBuffers = new HashMap<>();

        // Fake "last indexing op" timestamp per shard (nanos of the fake clock).
        final Map<IndexShard, Long> lastIndexTimeNanos = new HashMap<>();
        final Set<IndexShard> activeShards = new HashSet<>();

        // Fake clock, advanced explicitly via incrementTimeSec().
        long currentTimeSec = TimeValue.timeValueNanos(System.nanoTime()).seconds();

        public MockController(Settings settings) {
            super(Settings.builder()
                            .put(SHARD_INACTIVE_INTERVAL_TIME_SETTING, "200h") // disable it
                            .put(SHARD_INACTIVE_TIME_SETTING, "1ms") // nearly immediate
                            .put(settings)
                            .build(),
                    null, null, 100 * 1024 * 1024); // fix jvm mem size to 100mb
        }

        /** Forgets the shard entirely, as if it had been removed from the node. */
        public void deleteShard(IndexShard id) {
            indexingBuffers.remove(id);
            translogBuffers.remove(id);
        }

        /** Asserts the exact buffer sizes most recently assigned to the shard. */
        public void assertBuffers(IndexShard id, ByteSizeValue indexing, ByteSizeValue translog) {
            assertThat(indexingBuffers.get(id), equalTo(indexing));
            assertThat(translogBuffers.get(id), equalTo(translog));
        }

        /** Asserts that both buffers of the shard carry the INACTIVE sentinel. */
        public void assertInactive(IndexShard id) {
            assertThat(indexingBuffers.get(id), equalTo(INACTIVE));
            assertThat(translogBuffers.get(id), equalTo(INACTIVE));
        }

        @Override
        protected long currentTimeInNanos() {
            // Drive the controller from the fake clock instead of System.nanoTime().
            return TimeValue.timeValueSeconds(currentTimeSec).nanos();
        }

        @Override
        protected List<IndexShard> availableShards() {
            return new ArrayList<>(indexingBuffers.keySet());
        }

        @Override
        protected boolean shardAvailable(IndexShard shard) {
            return indexingBuffers.containsKey(shard);
        }

        @Override
        protected void updateShardBuffers(IndexShard shard,
                                          ByteSizeValue shardIndexingBufferSize,
                                          ByteSizeValue shardTranslogBufferSize) {
            // Record instead of applying, so tests can assert the assignment.
            indexingBuffers.put(shard, shardIndexingBufferSize);
            translogBuffers.put(shard, shardTranslogBufferSize);
        }

        @Override
        protected boolean checkIdle(IndexShard shard, long inactiveTimeNS) {
            Long ns = lastIndexTimeNanos.get(shard);
            if (ns == null) {
                // Never indexed into: treat as idle.
                return true;
            } else if (currentTimeInNanos() - ns >= inactiveTimeNS) {
                // Idle for long enough: mark both buffers inactive.
                indexingBuffers.put(shard, INACTIVE);
                translogBuffers.put(shard, INACTIVE);
                activeShards.remove(shard);
                return true;
            } else {
                return false;
            }
        }

        /** Advances the fake clock by the given number of seconds. */
        public void incrementTimeSec(int sec) {
            currentTimeSec += sec;
        }

        /**
         * Simulates an indexing operation into the shard at the current fake
         * time, then forces the controller to re-divide the buffer budgets.
         */
        public void simulateIndexing(IndexShard shard) {
            lastIndexTimeNanos.put(shard, currentTimeInNanos());
            if (indexingBuffers.containsKey(shard) == false) {
                // First time we are seeing this shard; start it off with inactive buffers as IndexShard does:
                indexingBuffers.put(shard, IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER);
                translogBuffers.put(shard, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER);
            }
            activeShards.add(shard);
            forceCheck();
        }
    }

    /**
     * Buffer budgets must be re-divided evenly as shards are added and removed.
     */
    public void testShardAdditionAndRemoval() {
        createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 3)
                .put(SETTING_NUMBER_OF_REPLICAS, 0).build());
        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
        IndexService test = indicesService.indexService("test");

        MockController controller = new MockController(Settings.builder()
                .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
                .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb").build());
        IndexShard shard0 = test.shard(0);
        controller.simulateIndexing(shard0);
        controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB),
                new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K

        // add another shard
        IndexShard shard1 = test.shard(1);
        controller.simulateIndexing(shard1);
        controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
        controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));

        // remove first shard
        controller.deleteShard(shard0);
        controller.forceCheck();
        controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB),
                new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K

        // remove second shard
        controller.deleteShard(shard1);
        controller.forceCheck();

        // add a new one
        IndexShard shard2 = test.shard(2);
        controller.simulateIndexing(shard2);
        controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB),
                new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
    }

    /**
     * Shards must flip between active and inactive buffer assignments as the
     * (fake) clock moves past the configured inactivity threshold.
     */
    public void testActiveInactive() {
        createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2)
                .put(SETTING_NUMBER_OF_REPLICAS, 0).build());
        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
        IndexService test = indicesService.indexService("test");

        MockController controller = new MockController(Settings.builder()
                .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
                .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb")
                .put(IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING, "5s")
                .build());

        IndexShard shard0 = test.shard(0);
        controller.simulateIndexing(shard0);
        IndexShard shard1 = test.shard(1);
        controller.simulateIndexing(shard1);
        controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
        controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));

        // index into both shards, move the clock and see that they are still active
        controller.simulateIndexing(shard0);
        controller.simulateIndexing(shard1);

        controller.incrementTimeSec(10);
        controller.forceCheck();

        // both shards now inactive
        controller.assertInactive(shard0);
        controller.assertInactive(shard1);

        // index into one shard only, see it becomes active
        controller.simulateIndexing(shard0);
        controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
        controller.assertInactive(shard1);

        controller.incrementTimeSec(3); // increment but not enough to become inactive
        controller.forceCheck();
        controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
        controller.assertInactive(shard1);

        controller.incrementTimeSec(3); // increment some more
        controller.forceCheck();
        controller.assertInactive(shard0);
        controller.assertInactive(shard1);

        // index some and shard becomes immediately active
        controller.simulateIndexing(shard1);
        controller.assertInactive(shard0);
        controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
    }

    /** Per-shard minimums must win over the even split of the budget. */
    public void testMinShardBufferSizes() {
        MockController controller = new MockController(Settings.builder()
                .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
                .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb")
                .put(IndexingMemoryController.MIN_SHARD_INDEX_BUFFER_SIZE_SETTING, "6mb")
                .put(IndexingMemoryController.MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "40kb").build());

        assertTwoActiveShards(controller, new ByteSizeValue(6, ByteSizeUnit.MB), new ByteSizeValue(40, ByteSizeUnit.KB));
    }

    /** Per-shard maximums must cap the even split of the budget. */
    public void testMaxShardBufferSizes() {
        MockController controller = new MockController(Settings.builder()
                .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
                .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb")
                .put(IndexingMemoryController.MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, "3mb")
                .put(IndexingMemoryController.MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "10kb").build());

        assertTwoActiveShards(controller, new ByteSizeValue(3, ByteSizeUnit.MB), new ByteSizeValue(10, ByteSizeUnit.KB));
    }

    /** Percentage settings must be resolved against the fixed 100mb heap. */
    public void testRelativeBufferSizes() {
        MockController controller = new MockController(Settings.builder()
                .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "50%")
                .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.5%")
                .build());

        assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(50, ByteSizeUnit.MB)));
        assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));
    }

    /** Absolute minimums must floor tiny percentage-derived budgets. */
    public void testMinBufferSizes() {
        MockController controller = new MockController(Settings.builder()
                .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "0.001%")
                .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.001%")
                .put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb")
                .put(IndexingMemoryController.MIN_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());

        assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB)));
        assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));
    }

    /** Absolute maximums must cap large percentage-derived budgets. */
    public void testMaxBufferSizes() {
        MockController controller = new MockController(Settings.builder()
                .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "90%")
                .put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "90%")
                .put(IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING, "6mb")
                .put(IndexingMemoryController.MAX_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());

        assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB)));
        assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));
    }

    /**
     * Creates a two-shard index, simulates indexing into both shards, and
     * asserts both were assigned the expected per-shard buffer sizes.
     */
    protected void assertTwoActiveShards(MockController controller, ByteSizeValue indexBufferSize, ByteSizeValue translogBufferSize) {
        createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2)
                .put(SETTING_NUMBER_OF_REPLICAS, 0).build());
        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
        IndexService test = indicesService.indexService("test");

        IndexShard shard0 = test.shard(0);
        controller.simulateIndexing(shard0);
        IndexShard shard1 = test.shard(1);
        controller.simulateIndexing(shard1);
        controller.assertBuffers(shard0, indexBufferSize, translogBufferSize);
        controller.assertBuffers(shard1, indexBufferSize, translogBufferSize);
    }
}
/* * Copyright 2015 DuraSpace, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fcrepo.client.impl; import static org.apache.http.HttpStatus.SC_CONFLICT; import static org.apache.http.HttpStatus.SC_CREATED; import static org.apache.http.HttpStatus.SC_FORBIDDEN; import static org.apache.http.HttpStatus.SC_NO_CONTENT; import static org.apache.http.HttpStatus.SC_NOT_FOUND; import static org.slf4j.LoggerFactory.getLogger; import java.io.IOException; import java.io.InputStream; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Collection; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.StatusLine; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpPatch; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.fcrepo.client.FedoraException; import org.fcrepo.client.FedoraRepository; import org.fcrepo.client.FedoraResource; import org.fcrepo.client.ForbiddenException; import org.fcrepo.client.NotFoundException; import org.fcrepo.client.utils.HttpCopy; import org.fcrepo.client.utils.HttpHelper; import org.fcrepo.client.utils.HttpMove; import org.fcrepo.kernel.api.RdfLexicon; import org.slf4j.Logger; import com.hp.hpl.jena.graph.Graph; import com.hp.hpl.jena.graph.Node; import 
com.hp.hpl.jena.graph.NodeFactory; import com.hp.hpl.jena.graph.Triple; import com.hp.hpl.jena.rdf.model.Property; import com.hp.hpl.jena.util.iterator.ExtendedIterator; /** * A Fedora Object Impl. * * @author lsitu * @author escowles * @since 2014-08-11 */ public class FedoraResourceImpl implements FedoraResource { private static final Logger LOGGER = getLogger(FedoraResourceImpl.class); private static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); protected FedoraRepository repository = null; protected HttpHelper httpHelper = null; protected String path = null; protected String oldPath = null; protected Node subject = null; protected Graph graph; private String etagValue = null; /** * FedoraResourceImpl constructor * * @param repository FedoraRepositoryImpl that created this resource * @param httpHelper HTTP helper for making repository requests * @param path Repository path of this resource */ public FedoraResourceImpl(final FedoraRepository repository, final HttpHelper httpHelper, final String path) { this.repository = repository; this.httpHelper = httpHelper; this.path = path; subject = NodeFactory.createURI(repository.getRepositoryUrl() + path); } @Override public void copy(final String destination) throws FedoraException { final HttpCopy copy = httpHelper.createCopyMethod(path,destination); try { final HttpResponse response = httpHelper.execute( copy ); final StatusLine status = response.getStatusLine(); final String uri = copy.getURI().toString(); if (status.getStatusCode() == HttpStatus.SC_CREATED) { // Created LOGGER.debug("resource successfully copied from " + path + " to " + destination, uri); } else if (status.getStatusCode() == HttpStatus.SC_CONFLICT) { // Source path doesn't exists LOGGER.error("error copying resource {}: {} {}", uri, status.getStatusCode(), status.getReasonPhrase()); throw new FedoraException("error copying resource " + uri + ": " + status.getStatusCode() + " " + status.getReasonPhrase()); } else 
if (status.getStatusCode() == HttpStatus.SC_PRECONDITION_FAILED) { // Destination path already exists LOGGER.error("error copying resource {}: {} {}", uri, status.getStatusCode(), status.getReasonPhrase()); throw new FedoraException("error copying resource " + uri + ": " + status.getStatusCode() + " " + status.getReasonPhrase()); } else if (status.getStatusCode() == HttpStatus.SC_BAD_GATEWAY) { // Destination URI isn't a valid resource path LOGGER.error("error copying resource {}: {} {}", uri, status.getStatusCode(), status.getReasonPhrase()); throw new FedoraException("error copying resource " + uri + ": " + status.getStatusCode() + " " + status.getReasonPhrase()); } } catch (final FedoraException e) { throw e; } catch (final Exception e) { LOGGER.error("Could not encode URI parameter: {}", e.getMessage()); throw new FedoraException(e); } finally { copy.releaseConnection(); } } @Override public void delete() throws FedoraException { final HttpDelete delete = httpHelper.createDeleteMethod(path); try { final HttpResponse response = httpHelper.execute( delete ); final StatusLine status = response.getStatusLine(); final String uri = delete.getURI().toString(); if ( status.getStatusCode() == SC_NO_CONTENT) { LOGGER.debug("triples updated successfully for resource {}", uri); } else if ( status.getStatusCode() == SC_NOT_FOUND) { LOGGER.error("resource {} does not exist, cannot update", uri); throw new NotFoundException("resource " + uri + " does not exist, cannot update"); } else { LOGGER.error("error updating resource {}: {} {}", uri, status.getStatusCode(), status.getReasonPhrase()); throw new FedoraException("error updating resource " + uri + ": " + status.getStatusCode() + " " + status.getReasonPhrase()); } } catch (final FedoraException e) { throw e; } catch (final Exception e) { LOGGER.error("Error executing request", e); throw new FedoraException(e); } finally { delete.releaseConnection(); } } @Override public void forceDelete() throws FedoraException { delete(); 
removeTombstone(); }

    /**
     * Remove the tombstone for the current path.
     *
     * @throws FedoraException on server error or request failure
     */
    public void removeTombstone() throws FedoraException {
        removeTombstone(path);
    }

    /**
     * Remove the tombstone located at the given path (deletes {@code path/fcr:tombstone}).
     *
     * @param path repository path whose tombstone child should be deleted
     * @throws FedoraException on server error or request failure
     */
    public void removeTombstone(final String path) throws FedoraException {
        final HttpDelete delete = httpHelper.createDeleteMethod(path + "/fcr:tombstone");
        try {
            final HttpResponse response = httpHelper.execute( delete );
            final StatusLine status = response.getStatusLine();
            final String uri = delete.getURI().toString();

            if ( status.getStatusCode() == SC_NO_CONTENT) {
                // FIX: log/exception text was copy-pasted from updateProperties() and wrongly
                // described this DELETE as a triples update
                LOGGER.debug("tombstone removed successfully for resource {}", uri);
            } else if ( status.getStatusCode() == SC_NOT_FOUND) {
                LOGGER.error("resource {} does not exist, cannot remove tombstone", uri);
                throw new NotFoundException("resource " + uri
                        + " does not exist, cannot remove tombstone");
            } else {
                LOGGER.error("error removing tombstone for resource {}: {} {}", uri,
                        status.getStatusCode(), status.getReasonPhrase());
                throw new FedoraException("error removing tombstone for resource " + uri + ": "
                        + status.getStatusCode() + " " + status.getReasonPhrase());
            }
        } catch (final FedoraException e) {
            throw e;
        } catch (final Exception e) {
            LOGGER.error("Error executing request", e);
            throw new FedoraException(e);
        } finally {
            delete.releaseConnection();
        }
    }

    @Override
    public Date getCreatedDate() {
        return getDate(RdfLexicon.CREATED_DATE);
    }

    @Override
    public String getEtagValue() {
        return etagValue;
    }

    /**
     * set etagValue
     *
     * @param etagValue string of etagvalue to set
     */
    public void setEtagValue(final String etagValue) {
        this.etagValue = etagValue;
    }

    @Override
    public Date getLastModifiedDate() {
        return getDate(RdfLexicon.LAST_MODIFIED_DATE);
    }

    @Override
    public Collection<String> getMixins() {
        return getPropertyValues(RdfLexicon.HAS_MIXIN_TYPE);
    }

    @Override
    public String getName() {
        // strip a trailing slash so "a/b/" and "a/b" both yield "b"
        final String p = path.endsWith("/") ? path.substring(0, path.length() - 1) : path;
        final String[] paths = p.split("/");
        return paths[paths.length - 1];
    }

    @Override
    public String getPath() {
        return path;
    }

    @Override
    public Iterator<Triple> getProperties() {
        // all triples currently cached in the local properties graph
        return graph.find(Node.ANY, Node.ANY, Node.ANY);
    }

    @Override
    public Long getSize() {
        return (long) graph.size();
    }

    @Override
    public void move(final String destination) throws FedoraException {
        final HttpMove move = httpHelper.createMoveMethod(path, destination);
        try {
            final HttpResponse response = httpHelper.execute( move );
            final StatusLine status = response.getStatusLine();
            final String uri = move.getURI().toString();

            if (status.getStatusCode() == HttpStatus.SC_CREATED) {
                // Created: update local state to point at the new location; remember the old
                // path so forceMove() can remove the tombstone left behind
                LOGGER.debug("resource {} successfully moved from {} to {}", uri, path,
                        destination);
                oldPath = path;
                path = destination;
                subject = NodeFactory.createURI(repository.getRepositoryUrl() + path);
            } else {
                // SC_CONFLICT: source path doesn't exist
                // SC_PRECONDITION_FAILED: destination path already exists
                // SC_BAD_GATEWAY: destination URI isn't a valid resource path
                // FIX: any other non-created status previously fell through silently as
                // success without updating the path; all failures now throw uniformly
                LOGGER.error("error moving resource {}: {} {}", uri, status.getStatusCode(),
                        status.getReasonPhrase());
                throw new FedoraException("error moving resource " + uri + ": "
                        + status.getStatusCode() + " " + status.getReasonPhrase());
            }
        } catch (final FedoraException e) {
            oldPath = null;
            throw e;
        } catch (final Exception e) {
            // FIX: message previously claimed a URI-encoding failure for any request error
            LOGGER.error("Error executing request", e);
            throw new FedoraException(e);
        } finally {
            move.releaseConnection();
        }
    }

    @Override
    public void forceMove(final String destination) throws FedoraException {
        move(destination);
        // a successful move leaves a tombstone at the old location; remove it
        removeTombstone(oldPath);
    }

    @Override
    public void updateProperties(final String sparqlUpdate) throws FedoraException {
        final HttpPatch patch = httpHelper.createPatchMethod(getPropertiesPath(), sparqlUpdate);
        try {
            final HttpResponse response = httpHelper.execute( patch );
            checkUpdateStatus(response.getStatusLine(), patch.getURI().toString());
            // update properties from server
            httpHelper.loadProperties(this);
        } catch (final FedoraException e) {
            throw e;
        } catch (final Exception e) {
            // FIX: message previously claimed a URI-encoding failure for any request error
            LOGGER.error("Error executing request", e);
            throw new FedoraException(e);
        } finally {
            patch.releaseConnection();
        }
    }

    @Override
    public void updateProperties(final InputStream updatedProperties, final String contentType)
            throws FedoraException {
        final HttpPut put = httpHelper.createTriplesPutMethod(getPropertiesPath(),
                updatedProperties, contentType);
        try {
            final HttpResponse response = httpHelper.execute( put );
            checkUpdateStatus(response.getStatusLine(), put.getURI().toString());
            // update properties from server
            httpHelper.loadProperties(this);
        } catch (final FedoraException e) {
            throw e;
        } catch (final Exception e) {
            LOGGER.error("Error executing request", e);
            throw new FedoraException(e);
        } finally {
            put.releaseConnection();
        }
    }

    /**
     * Shared status-code handling for both updateProperties variants (previously duplicated).
     *
     * @param status status line of the server response
     * @param uri request URI, for logging and exception messages
     * @throws ForbiddenException on SC_FORBIDDEN
     * @throws NotFoundException on SC_NOT_FOUND
     * @throws FedoraException on SC_CONFLICT (locked) or any other non-success status
     */
    private void checkUpdateStatus(final StatusLine status, final String uri)
            throws FedoraException {
        if ( status.getStatusCode() == SC_NO_CONTENT) {
            LOGGER.debug("triples updated successfully for resource {}", uri);
        } else if ( status.getStatusCode() == SC_FORBIDDEN) {
            LOGGER.error("updating resource {} is not authorized.", uri);
            throw new ForbiddenException("updating resource " + uri + " is not authorized.");
        } else if ( status.getStatusCode() == SC_NOT_FOUND) {
            LOGGER.error("resource {} does not exist, cannot update", uri);
            throw new NotFoundException("resource " + uri + " does not exist, cannot update");
        } else if ( status.getStatusCode() == SC_CONFLICT) {
            LOGGER.error("resource {} is locked", uri);
            throw new FedoraException("resource is locked: " + uri);
        } else {
            LOGGER.error("error updating resource {}: {} {}", uri, status.getStatusCode(),
                    status.getReasonPhrase());
            throw new FedoraException("error updating resource " + uri + ": "
                    + status.getStatusCode() + " " + status.getReasonPhrase());
        }
    }

    @Override
    public boolean isWritable() {
        final Collection<String> values = getPropertyValues(RdfLexicon.WRITABLE);
        if (values != null && values.size() > 0) {
            // any single value decides; order is unspecified if several are present
            return Boolean.parseBoolean(values.iterator().next());
        }
        return false;
    }

    @Override
    public void createVersionSnapshot(final String label) throws FedoraException {
        final HttpPost postVersion = httpHelper.createPostMethod(path + "/fcr:versions", null);
        try {
            // the Slug header carries the requested version label
            postVersion.setHeader("Slug", label);
            final HttpResponse response = httpHelper.execute(postVersion);
            final StatusLine status = response.getStatusLine();
            final String uri = postVersion.getURI().toString();

            if ( status.getStatusCode() == SC_CREATED) {
                LOGGER.debug("new version created for resource at {}", uri);
            } else if ( status.getStatusCode() == SC_CONFLICT) {
                LOGGER.debug("The label {} is in use by another version.", label);
                throw new FedoraException("The label \"" + label
                        + "\" is in use by another version.");
            } else if ( status.getStatusCode() == SC_FORBIDDEN) {
                LOGGER.error("updating resource {} is not authorized.", uri);
                throw new ForbiddenException("updating resource " + uri + " is not authorized.");
            } else if ( status.getStatusCode() == SC_NOT_FOUND) {
                LOGGER.error("resource {} does not exist, cannot create version", uri);
                throw new NotFoundException("resource " + uri
                        + " does not exist, cannot create version");
            } else {
                LOGGER.error("error updating resource {}: {} {}", uri, status.getStatusCode(),
                        status.getReasonPhrase());
                throw new FedoraException("error updating resource " + uri + ": "
                        + status.getStatusCode() + " " + status.getReasonPhrase());
            }
        } catch (final IOException e) {
            LOGGER.error("Error executing request", e);
            throw new FedoraException(e);
        } finally {
            postVersion.releaseConnection();
        }
    }

    /**
     * Get the properties graph
     *
     * @return Graph containing properties for this resource
     */
    public Graph getGraph() {
        return graph;
    }

    /**
     * Update the properties graph
     *
     * @param graph graph to add to this object
     **/
    public void setGraph( final Graph graph ) {
        this.graph = graph;
    }

    /**
     * Read a date-valued property from the cached graph.
     *
     * @param property the date property to look up
     * @return parsed Date, or null if absent or unparseable (parse errors are logged at debug)
     */
    private Date getDate(final Property property) {
        Date date = null;
        final Triple t = getTriple(subject, property);
        if ( t != null ) {
            final String dateValue = t.getObject().getLiteralValue().toString();
            try {
                date = dateFormat.parse(dateValue);
            } catch (final ParseException e) {
                LOGGER.debug("Invalid date format error: " + dateValue);
            }
        }
        return date;
    }

    /**
     * Return all the values of a property
     *
     * @param property The Property to get values for
     * @return Collection of values
     */
    protected Collection<String> getPropertyValues(final Property property) {
        final ExtendedIterator<Triple> iterator = graph.find(Node.ANY,
                property.asNode(), Node.ANY);
        final Set<String> set = new HashSet<>();
        while (iterator.hasNext()) {
            final Node object = iterator.next().getObject();
            if (object.isLiteral()) {
                set.add(object.getLiteralValue().toString());
            } else if (object.isURI()) {
                // getURI() already returns a String; redundant toString() removed
                set.add(object.getURI());
            } else {
                set.add(object.toString());
            }
        }
        return set;
    }

    /**
     * Return the first triple matching (subject, property, any) or null if none.
     * The iterator is always closed.
     */
    protected Triple getTriple( final Node subject, final Property property ) {
        final ExtendedIterator<Triple> it = graph.find( subject, property.asNode(), null );
        try {
            if ( it.hasNext() ) {
                return it.next();
            } else {
                return null;
            }
        } finally {
            it.close();
        }
    }

    /**
     * Gets the path to which properties of this resource may be accessed.
     *
     * @return string containing properties path
     */
    public String getPropertiesPath() {
        return path;
    }
}
/* * Copyright 2015-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.glowroot.agent.live; import java.util.Collections; import java.util.List; import java.util.Set; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Ticker; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import org.checkerframework.checker.nullness.qual.Nullable; import org.glowroot.agent.collector.Collector.TraceReader; import org.glowroot.agent.collector.Collector.TraceVisitor; import org.glowroot.agent.impl.TraceCollector; import org.glowroot.agent.impl.TraceCreator; import org.glowroot.agent.impl.Transaction; import org.glowroot.agent.impl.Transaction.TraceEntryVisitor; import org.glowroot.agent.impl.TransactionRegistry; import org.glowroot.agent.model.ErrorMessage; import org.glowroot.common.live.ImmutableEntries; import org.glowroot.common.live.ImmutableQueries; import org.glowroot.common.live.ImmutableTracePoint; import org.glowroot.common.live.LiveTraceRepository; import org.glowroot.common.util.Clock; import org.glowroot.wire.api.model.AggregateOuterClass.Aggregate; import org.glowroot.wire.api.model.ProfileOuterClass.Profile; import org.glowroot.wire.api.model.TraceOuterClass.Trace; import static 
com.google.common.base.Preconditions.checkNotNull;

/**
 * In-memory view of traces that are still live in the agent: currently active transactions
 * plus completed transactions still pending collection. Backs the "live" portion of trace
 * lookups; stored traces are the caller's responsibility.
 */
public class LiveTraceRepositoryImpl implements LiveTraceRepository {

    // embedded agent has no agent id; points are tagged with the empty string
    private static final String AGENT_ID = "";

    private final TransactionRegistry transactionRegistry;
    private final TraceCollector traceCollector;
    private final Clock clock;
    private final Ticker ticker;

    public LiveTraceRepositoryImpl(TransactionRegistry transactionRegistry,
            TraceCollector traceCollector, Clock clock, Ticker ticker) {
        this.transactionRegistry = transactionRegistry;
        this.traceCollector = traceCollector;
        this.clock = clock;
        this.ticker = ticker;
    }

    // checks active traces first, then pending traces (and finally caller should check stored
    // traces) to make sure that the trace is not missed if it is in transition between these states
    @Override
    public Trace. /*@Nullable*/ Header getHeader(String agentId, String traceId) {
        for (Transaction transaction : Iterables.concat(transactionRegistry.getTransactions(),
                traceCollector.getPendingTransactions())) {
            if (transaction.getTraceId().equals(traceId)) {
                return createTraceHeader(transaction);
            }
        }
        return null;
    }

    /**
     * Returns the trace entries (plus shared query texts) for a live trace,
     * or null if the trace id is not active or pending.
     */
    @Override
    public @Nullable Entries getEntries(String agentId, String traceId) {
        for (Transaction transaction : Iterables.concat(transactionRegistry.getTransactions(),
                traceCollector.getPendingTransactions())) {
            if (transaction.getTraceId().equals(traceId)) {
                CollectingEntryVisitor visitor = new CollectingEntryVisitor();
                // entries are captured as of "now" on the ticker
                transaction.visitEntries(ticker.read(), visitor);
                return ImmutableEntries.builder()
                        .addAllEntries(visitor.entries)
                        .addAllSharedQueryTexts(
                                TraceCreator.toProto(transaction.getSharedQueryTexts()))
                        .build();
            }
        }
        return null;
    }

    /**
     * Returns the aggregated queries (plus shared query texts) for a live trace,
     * or null if the trace id is not active or pending.
     */
    @Override
    public @Nullable Queries getQueries(String agentId, String traceId) {
        for (Transaction transaction : Iterables.concat(transactionRegistry.getTransactions(),
                traceCollector.getPendingTransactions())) {
            if (transaction.getTraceId().equals(traceId)) {
                return ImmutableQueries.builder()
                        .addAllQueries(transaction.getQueries())
                        .addAllSharedQueryTexts(
                                TraceCreator.toProto(transaction.getSharedQueryTexts()))
                        .build();
            }
        }
        return null;
    }

    /** Main-thread profile for a live trace, or null if not found (or not profiled). */
    @Override
    public @Nullable Profile getMainThreadProfile(String agentId, String traceId) {
        for (Transaction transaction : Iterables.concat(transactionRegistry.getTransactions(),
                traceCollector.getPendingTransactions())) {
            if (transaction.getTraceId().equals(traceId)) {
                return transaction.getMainThreadProfileProtobuf();
            }
        }
        return null;
    }

    /** Auxiliary-thread profile for a live trace, or null if not found (or not profiled). */
    @Override
    public @Nullable Profile getAuxThreadProfile(String agentId, String traceId) {
        for (Transaction transaction : Iterables.concat(transactionRegistry.getTransactions(),
                traceCollector.getPendingTransactions())) {
            if (transaction.getTraceId().equals(traceId)) {
                return transaction.getAuxThreadProfileProtobuf();
            }
        }
        return null;
    }

    /**
     * Assembles a complete Trace protobuf (header, entries, queries, profiles, shared query
     * texts) for a live trace, or returns null if the trace id is not active or pending.
     */
    @Override
    public @Nullable Trace getFullTrace(String agentId, String traceId) throws Exception {
        for (Transaction transaction : Iterables.concat(transactionRegistry.getTransactions(),
                traceCollector.getPendingTransactions())) {
            if (transaction.getTraceId().equals(traceId)) {
                CollectingTraceVisitor traceVisitor = new CollectingTraceVisitor();
                TraceReader traceReader = createTraceReader(transaction);
                traceReader.accept(traceVisitor);
                Trace.Builder builder = Trace.newBuilder()
                        .setId(traceId)
                        // update flag signals that a partial version was stored earlier
                        .setUpdate(transaction.isPartiallyStored());
                Profile mainThreadProfile = traceVisitor.mainThreadProfile;
                if (mainThreadProfile != null) {
                    builder.setMainThreadProfile(mainThreadProfile);
                }
                Profile auxThreadProfile = traceVisitor.auxThreadProfile;
                if (auxThreadProfile != null) {
                    builder.setAuxThreadProfile(auxThreadProfile);
                }
                return builder.setHeader(checkNotNull(traceVisitor.header))
                        .addAllEntry(traceVisitor.entries)
                        .addAllQuery(traceVisitor.queries)
                        .addAllSharedQueryText(TraceCreator.toProto(traceVisitor.sharedQueryTexts))
                        .build();
            }
        }
        return null;
    }

    @Override
    public int getMatchingTraceCount(String transactionType, @Nullable String transactionName) {
        // include active traces, this is mostly for the case where there is just a single very
        // long running active trace and it would be misleading to display Traces (0) on the tab
        int count = 0;
        for (Transaction transaction : transactionRegistry.getTransactions()) {
            // don't include partially stored traces since no way to de-dup them with the stored
            // trace count
            if (matchesActive(transaction, transactionType, transactionName)
                    && !transaction.isPartiallyStored()) {
                count++;
            }
        }
        return count;
    }

    /**
     * Returns points for currently-active matching traces, sorted by duration descending and
     * truncated to {@code limit} (0 means unlimited). Only traces started before
     * {@code captureTick} are included.
     */
    @Override
    public List<TracePoint> getMatchingActiveTracePoints(TraceKind traceKind,
            String transactionType, @Nullable String transactionName, TracePointFilter filter,
            int limit, long captureTime, long captureTick) {
        List<TracePoint> activeTracePoints = Lists.newArrayList();
        for (Transaction transaction : transactionRegistry.getTransactions()) {
            long startTick = transaction.getStartTick();
            if (matches(transaction, traceKind, transactionType, transactionName, filter)
                    && startTick < captureTick) {
                activeTracePoints.add(ImmutableTracePoint.builder()
                        .agentId(AGENT_ID)
                        .traceId(transaction.getTraceId())
                        .captureTime(captureTime)
                        // still running, so duration is elapsed time up to the capture tick
                        .durationNanos(captureTick - startTick)
                        .partial(true)
                        .error(transaction.getErrorMessage() != null)
                        .checkLiveTraces(true)
                        .build());
            }
        }
        // longest-running traces first
        Collections.sort(activeTracePoints,
                Ordering.natural().reverse().onResultOf(new Function<TracePoint, Long>() {
                    @Override
                    public Long apply(@Nullable TracePoint tracePoint) {
                        checkNotNull(tracePoint);
                        return tracePoint.durationNanos();
                    }
                }));
        if (limit != 0 && activeTracePoints.size() > limit) {
            activeTracePoints = activeTracePoints.subList(0, limit);
        }
        return activeTracePoints;
    }

    /** Returns points for completed-but-not-yet-stored (pending) matching traces. */
    @Override
    public List<TracePoint> getMatchingPendingPoints(TraceKind traceKind, String transactionType,
            @Nullable String transactionName, TracePointFilter filter, long captureTime) {
        List<TracePoint> points = Lists.newArrayList();
        for (Transaction transaction : traceCollector.getPendingTransactions()) {
            if (matches(transaction, traceKind, transactionType, transactionName, filter)) {
                points.add(ImmutableTracePoint.builder()
                        .agentId(AGENT_ID)
                        .traceId(transaction.getTraceId())
                        // by the time transaction is in pending list, the capture time is set
                        .captureTime(transaction.getCaptureTime())
                        .durationNanos(transaction.getDurationNanos())
                        .partial(false)
                        .error(transaction.getErrorMessage() != null)
                        .checkLiveTraces(true)
                        .build());
            }
        }
        return points;
    }

    /**
     * Distinct transaction types among live traces that would be stored as slow traces.
     */
    @Override
    public Set<String> getTransactionTypes(String agentId) {
        Set<String> transactionTypes = Sets.newHashSet();
        for (Transaction transaction : Iterables.concat(transactionRegistry.getTransactions(),
                traceCollector.getPendingTransactions())) {
            if (traceCollector.shouldStoreSlow(transaction)) {
                transactionTypes.add(transaction.getTransactionType());
            }
        }
        return transactionTypes;
    }

    // a null transactionName matches any name
    @VisibleForTesting
    boolean matchesActive(Transaction transaction, String transactionType,
            @Nullable String transactionName) {
        if (!traceCollector.shouldStoreSlow(transaction)) {
            return false;
        }
        if (!transactionType.equals(transaction.getTransactionType())) {
            return false;
        }
        return transactionName == null
                || transactionName.equals(transaction.getTransactionName());
    }

    private Trace.Header createTraceHeader(Transaction transaction) {
        // capture time before checking if complete to guard against condition where partial
        // trace header is created with captureTime > the real (completed) capture time
        long captureTime = clock.currentTimeMillis();
        long captureTick = ticker.read();
        if (transaction.isFullyCompleted()) {
            return TraceCreator.createCompletedTraceHeader(transaction);
        } else {
            return TraceCreator.createPartialTraceHeader(transaction, captureTime, captureTick);
        }
    }

    private TraceReader createTraceReader(Transaction transaction) {
        if (transaction.isFullyCompleted()) {
            return TraceCreator.createTraceReaderForCompleted(transaction, true);
        } else {
            return TraceCreator.createTraceReaderForPartial(transaction,
                    clock.currentTimeMillis(), ticker.read());
        }
    }

    // full predicate: kind + type + name + all point-filter criteria
    private boolean matches(Transaction transaction, TraceKind traceKind, String transactionType,
            @Nullable String transactionName, TracePointFilter filter) {
        ErrorMessage errorMessage = transaction.getErrorMessage();
        return matchesKind(transaction, traceKind)
                && matchesTransactionType(transaction, transactionType)
                && matchesTransactionName(transaction, transactionName)
                && filter.matchesDuration(transaction.getDurationNanos())
                && filter.matchesHeadline(transaction.getHeadline())
                && filter.matchesError(errorMessage == null ? "" : errorMessage.message())
                && filter.matchesUser(transaction.getUser())
                && filter.matchesAttributes(transaction.getAttributes().asMap());
    }

    private boolean matchesKind(Transaction transaction, TraceKind traceKind) {
        if (traceKind == TraceKind.SLOW) {
            return traceCollector.shouldStoreSlow(transaction);
        } else {
            // TraceKind.ERROR
            return traceCollector.shouldStoreError(transaction);
        }
    }

    private static boolean matchesTransactionType(Transaction transaction,
            String transactionType) {
        return transactionType.equals(transaction.getTransactionType());
    }

    // a null transactionName matches any name
    private static boolean matchesTransactionName(Transaction transaction,
            @Nullable String transactionName) {
        return transactionName == null
                || transactionName.equals(transaction.getTransactionName());
    }

    /** Accumulates every part of a trace as the TraceReader streams it through. */
    private static class CollectingTraceVisitor implements TraceVisitor {

        private final List<Trace.Entry> entries = Lists.newArrayList();
        private List<Aggregate.Query> queries = ImmutableList.of();
        private List<String> sharedQueryTexts = ImmutableList.of();
        private @Nullable Profile mainThreadProfile;
        private @Nullable Profile auxThreadProfile;
        private Trace. /*@Nullable*/ Header header;

        @Override
        public void visitEntry(Trace.Entry entry) {
            entries.add(entry);
        }

        @Override
        public void visitQueries(List<Aggregate.Query> queries) {
            this.queries = queries;
        }

        @Override
        public void visitSharedQueryTexts(List<String> sharedQueryTexts) {
            this.sharedQueryTexts = sharedQueryTexts;
        }

        @Override
        public void visitMainThreadProfile(Profile profile) {
            mainThreadProfile = profile;
        }

        @Override
        public void visitAuxThreadProfile(Profile profile) {
            auxThreadProfile = profile;
        }

        @Override
        public void visitHeader(Trace.Header header) {
            this.header = header;
        }
    }

    /** Accumulates trace entries only (used by getEntries). */
    private static class CollectingEntryVisitor implements TraceEntryVisitor {

        private final List<Trace.Entry> entries = Lists.newArrayList();

        @Override
        public void visitEntry(Trace.Entry entry) {
            entries.add(entry);
        }
    }
}
/** * Copyright (c) 2014-present, Facebook, Inc. All rights reserved. * * You are hereby granted a non-exclusive, worldwide, royalty-free license to use, * copy, modify, and distribute this software in source code or binary form for use * in connection with the web services and APIs provided by Facebook. * * As with any software that integrates with the Facebook platform, your use of * this software is subject to the Facebook Developer Principles and Policies * [http://developers.facebook.com/policy/]. This copyright notice shall be * included in all copies or substantial portions of the software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/
package com.example.scrumptious.usersettings;

import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;

import com.facebook.AccessToken;
import com.facebook.AccessTokenTracker;
import com.facebook.CallbackManager;
import com.facebook.GraphRequest;
import com.facebook.GraphResponse;
import com.facebook.internal.ImageDownloader;
import com.facebook.internal.ImageRequest;
import com.facebook.internal.ImageResponse;
import com.example.scrumptious.R;
import com.facebook.login.widget.LoginButton;

import org.json.JSONObject;

/**
 * A Fragment that displays a Login/Logout button as well as the user's
 * profile picture and name when logged in.
 */
public final class UserSettingsFragment extends Fragment {

    // Graph API field names requested for the current user
    private static final String NAME = "name";
    private static final String ID = "id";
    private static final String PICTURE = "picture";
    private static final String FIELDS = "fields";

    private static final String REQUEST_FIELDS =
            TextUtils.join(",", new String[] {ID, NAME, PICTURE});

    private AccessTokenTracker accessTokenTracker;
    private CallbackManager callbackManager;
    private LoginButton loginButton;
    private TextView connectedStateLabel;
    // last /me response; null when logged out or not yet fetched
    private JSONObject user;
    // cached profile picture drawable and the user id it was fetched for
    private Drawable userProfilePic;
    private String userProfilePicID;

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // re-fetch user info whenever the access token changes (login/logout)
        accessTokenTracker = new AccessTokenTracker() {
            @Override
            protected void onCurrentAccessTokenChanged(AccessToken oldAccessToken,
                                                       AccessToken currentAccessToken) {
                fetchUserInfo();
                updateUI();
            }
        };
        callbackManager = CallbackManager.Factory.create();
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // forward login results to the Facebook SDK
        callbackManager.onActivityResult(requestCode, resultCode, data);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        accessTokenTracker.stopTracking();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.usersettings_fragment, container, false);
        loginButton = (LoginButton) view.findViewById(R.id.usersettings_fragment_login_button);
        loginButton.setFragment(this);

        connectedStateLabel =
                (TextView) view.findViewById(R.id.usersettings_fragment_profile_name);

        // if no background is set for some reason, then default to Facebook blue
        if (view.getBackground() == null) {
            view.setBackgroundColor(getResources().getColor(R.color.com_facebook_blue));
        } else {
            view.getBackground().setDither(true);
        }
        return view;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setRetainInstance(true);
    }

    /**
     * @throws com.facebook.FacebookException if errors occur during the loading of user information
     */
    @Override
    public void onResume() {
        super.onResume();
        fetchUserInfo();
        updateUI();
    }

    // Requests id/name/picture for the current user asynchronously; clears `user` when
    // there is no access token. updateUI() is re-run when the response arrives.
    private void fetchUserInfo() {
        final AccessToken accessToken = AccessToken.getCurrentAccessToken();
        if (accessToken != null) {
            GraphRequest request = GraphRequest.newMeRequest(
                    accessToken, new GraphRequest.GraphJSONObjectCallback() {
                        @Override
                        public void onCompleted(JSONObject me, GraphResponse response) {
                            user = me;
                            updateUI();
                        }
                    });
            Bundle parameters = new Bundle();
            parameters.putString(FIELDS, REQUEST_FIELDS);
            request.setParameters(parameters);
            GraphRequest.executeBatchAsync(request);
        } else {
            user = null;
        }
    }

    // Renders one of three states: logged in with user info, logged in awaiting info,
    // or logged out. No-op when the fragment is not attached.
    private void updateUI() {
        if (!isAdded()) {
            return;
        }
        if (AccessToken.getCurrentAccessToken() != null) {
            connectedStateLabel.setTextColor(getResources().getColor(
                    R.color.usersettings_fragment_connected_text_color));
            connectedStateLabel.setShadowLayer(1f, 0f, -1f,
                    getResources().getColor(
                            R.color.usersettings_fragment_connected_shadow_color));

            if (user != null) {
                ImageRequest request = getImageRequest();
                if (request != null) {
                    Uri requestUri = request.getImageUri();
                    // Do we already have the right picture? If so, leave it alone.
                    if (!requestUri.equals(connectedStateLabel.getTag())) {
                        if (user.optString("id").equals(userProfilePicID)) {
                            // cached drawable matches this user; reuse it
                            connectedStateLabel.setCompoundDrawables(
                                    null, userProfilePic, null, null);
                            connectedStateLabel.setTag(requestUri);
                        } else {
                            // fetch asynchronously; processImageResponse updates the label
                            ImageDownloader.downloadAsync(request);
                        }
                    }
                }
                connectedStateLabel.setText(user.optString("name"));
            } else {
                // logged in, but /me response not yet available: show placeholder icon
                connectedStateLabel.setText(getResources().getString(
                        R.string.usersettings_fragment_logged_in));
                Drawable noProfilePic = getResources().getDrawable(
                        R.drawable.profile_default_icon);
                noProfilePic.setBounds(0, 0,
                        getResources().getDimensionPixelSize(
                                R.dimen.usersettings_fragment_profile_picture_width),
                        getResources().getDimensionPixelSize(
                                R.dimen.usersettings_fragment_profile_picture_height));
                connectedStateLabel.setCompoundDrawables(null, noProfilePic, null, null);
            }
        } else {
            // logged out: reset colors, text, icon, and the cached-uri tag
            int textColor = getResources().getColor(
                    R.color.usersettings_fragment_not_connected_text_color);
            connectedStateLabel.setTextColor(textColor);
            connectedStateLabel.setShadowLayer(0f, 0f, 0f, textColor);
            connectedStateLabel.setText(getResources().getString(
                    R.string.usersettings_fragment_not_logged_in));
            connectedStateLabel.setCompoundDrawables(null, null, null, null);
            connectedStateLabel.setTag(null);
        }
    }

    // Builds a profile-picture request for the current user. Only called from updateUI()
    // after the user != null check.
    private ImageRequest getImageRequest() {
        ImageRequest request = null;
        ImageRequest.Builder requestBuilder = new ImageRequest.Builder(
                getActivity(),
                ImageRequest.getProfilePictureUri(
                        user.optString("id"),
                        getResources().getDimensionPixelSize(
                                R.dimen.usersettings_fragment_profile_picture_width),
                        getResources().getDimensionPixelSize(
                                R.dimen.usersettings_fragment_profile_picture_height)));
        request = requestBuilder.setCallerTag(this)
                .setCallback(
                        new ImageRequest.Callback() {
                            @Override
                            public void onCompleted(ImageResponse response) {
                                processImageResponse(user.optString("id"), response);
                            }
                        })
                .build();
        return request;
    }

    // Caches the downloaded bitmap as a drawable (keyed by user id) and attaches it to the
    // label, remembering the request uri in the tag to avoid redundant downloads.
    private void processImageResponse(String id, ImageResponse response) {
        if (response != null) {
            Bitmap bitmap = response.getBitmap();
            if (bitmap != null) {
                BitmapDrawable drawable = new BitmapDrawable(
                        UserSettingsFragment.this.getResources(), bitmap);
                drawable.setBounds(0, 0,
                        getResources().getDimensionPixelSize(
                                R.dimen.usersettings_fragment_profile_picture_width),
                        getResources().getDimensionPixelSize(
                                R.dimen.usersettings_fragment_profile_picture_height));
                userProfilePic = drawable;
                userProfilePicID = id;
                connectedStateLabel.setCompoundDrawables(null, drawable, null, null);
                connectedStateLabel.setTag(response.getRequest().getImageUri());
            }
        }
    }
}
/*
 */

package com.googlecode.objectify.test;

import org.testng.annotations.Test;

import com.googlecode.objectify.Key;
import com.googlecode.objectify.Ref;
import com.googlecode.objectify.annotation.Entity;
import com.googlecode.objectify.annotation.Id;
import com.googlecode.objectify.annotation.Load;
import com.googlecode.objectify.annotation.Parent;
import com.googlecode.objectify.test.LoadParentTests.ChildWithGroup.Group;
import com.googlecode.objectify.test.util.TestBase;
import com.googlecode.objectify.test.util.TestObjectify;

/**
 * Tests the fetching system for simple parent values.
 *
 * Note: assertions use the bare {@code assert} keyword (TestNG convention here),
 * so these tests require the JVM -ea flag to be meaningful.
 *
 * @author Jeff Schnitzer <jeff@infohazard.org>
 */
public class LoadParentTests extends TestBase
{
    /** Simple parent entity. */
    @Entity
    public static class Father {
        public @Id Long id;
        public String foo;

        @Override
        public String toString() {
            return this.getClass().getSimpleName() + "(" + id + ", " + foo + ")";
        }
    }

    /** Child whose @Parent field is eagerly loaded via @Load. */
    @Entity
    public static class Child {
        public @Id Long id;
        public @Load @Parent Father father;
        public String bar;

        @Override
        public String toString() {
            return this.getClass().getSimpleName() + "(" + id + ", " + father + ", " + bar + ")";
        }
    }

    /** When the parent exists in the datastore, loading the child fully populates it. */
    @Test
    public void testParentExists() throws Exception {
        fact.register(Father.class);
        fact.register(Child.class);

        TestObjectify ofy = fact.begin();

        Father f = new Father();
        f.foo = "foo";
        ofy.put(f);

        Child ch = new Child();
        ch.father = f;
        ch.bar = "bar";
        ofy.put(ch);

        // clear the session cache so the get below actually loads from the datastore
        ofy.clear();
        Child fetched = ofy.get(fact.<Child>getKey(ch));

        assert fetched.bar.equals(ch.bar);
        assert fetched.father.id.equals(f.id);
        assert fetched.father.foo.equals(f.foo);
    }

    /**
     * When the parent key points at a nonexistent entity, the child still loads with a
     * partial parent: the id is present but other fields are null.
     */
    @Test
    public void testParentMissing() throws Exception {
        fact.register(Father.class);
        fact.register(Child.class);

        TestObjectify ofy = fact.begin();

        Father f = new Father();
        f.id = 123L;
        f.foo = "foo";
        // don't put

        Child ch = new Child();
        ch.father = f;
        ch.bar = "bar";
        ofy.put(ch);

        ofy.clear();
        Child fetched = ofy.get(fact.<Child>getKey(ch));

        assert fetched.bar.equals(ch.bar);
        assert fetched.father.id.equals(f.id);
        assert fetched.father.foo == null;	// partial entity doesn't have this part
    }

    /** Self-parented entity used to test multi-level parent chains. */
    @Entity
    public static class TreeNode {
        public @Id Long id;
        public @Load @Parent TreeNode parent;
        public String foo;
    }

    /** A three-deep parent chain is loaded fully, terminating in a null parent at the root. */
    @Test
    public void testTwoLevelsOfFetch() throws Exception {
        fact.register(TreeNode.class);

        TestObjectify ofy = fact.begin();

        TreeNode node1 = new TreeNode();
        node1.foo = "foo1";
        ofy.put(node1);

        TreeNode node2 = new TreeNode();
        node2.parent = node1;
        node2.foo = "foo2";
        ofy.put(node2);

        TreeNode node3 = new TreeNode();
        node3.parent = node2;
        node3.foo = "foo3";
        ofy.put(node3);

        ofy.clear();
        TreeNode fetched3 = ofy.get(fact.<TreeNode>getKey(node3));

        assert fetched3.foo.equals(node3.foo);
        assert fetched3.parent.id.equals(node2.id);
        assert fetched3.parent.foo.equals(node2.foo);
        assert fetched3.parent.parent.id.equals(node1.id);
        assert fetched3.parent.parent.foo.equals(node1.foo);
        assert fetched3.parent.parent.parent == null;
    }

    /**
     * A missing intermediate node in the chain loads as a partial entity (id only),
     * while nodes beyond it still load fully.
     */
    @Test
    public void testMissingIntermediate() throws Exception {
        fact.register(TreeNode.class);

        TestObjectify ofy = fact.begin();

        TreeNode node1 = new TreeNode();
        node1.foo = "foo1";
        ofy.put(node1);

        // Node2 should not exist but should have a concrete id for node3
        TreeNode node2 = new TreeNode();
        node2.id = 999L;
        node2.parent = node1;

        TreeNode node3 = new TreeNode();
        node3.parent = node2;
        node3.foo = "foo3";
        Key<TreeNode> node3Key = ofy.put(node3);

        ofy.clear();
        Ref<TreeNode> fetched3Ref = ofy.load().key(node3Key);
        TreeNode fetched3 = fetched3Ref.get();

        assert fetched3.parent.id.equals(node2.id);
        assert fetched3.parent.foo == null;
        assert fetched3.parent.parent.id.equals(node1.id);
        assert fetched3.parent.parent.foo.equals(node1.foo);
        assert fetched3.parent.parent.parent == null;
    }

    /** Child whose parent is only loaded when the Group load-group is active. */
    @Entity
    public static class ChildWithGroup {
        public static class Group {}

        public @Id Long id;
        public @Load(Group.class) @Parent Father father;
        public String bar;
    }

    /**
     * Without the group, the parent is a hollow entity (id only); with the group,
     * it loads completely.
     */
    @Test
    public void testParentWithGroup() throws Exception {
        fact.register(Father.class);
        fact.register(ChildWithGroup.class);

        TestObjectify ofy = fact.begin();

        Father f = new Father();
        f.foo = "foo";
        ofy.put(f);

        ChildWithGroup ch = new ChildWithGroup();
        ch.father = f;
        ch.bar = "bar";
        ofy.put(ch);

        ofy.clear();
        // This should get a hollow entity
        ChildWithGroup fetched = ofy.get(fact.<ChildWithGroup>getKey(ch));
        assert fetched.father.id.equals(f.id);
        assert fetched.father.foo == null;

        ofy.clear();
        // This should get the complete parent
        ChildWithGroup fetched2 =
                ofy.load().group(Group.class).key(fact.<ChildWithGroup>getKey(ch)).get();
        assert fetched2.father.id.equals(f.id);
        assert fetched2.father.foo.equals(f.foo);
    }
}
package io.flutter.plugin.editing;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

import android.content.Context;
import android.content.res.AssetManager;
import android.os.Build;
import android.provider.Settings;
import android.util.SparseIntArray;
import android.view.KeyEvent;
import android.view.View;
import android.view.inputmethod.CursorAnchorInfo;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputConnection;
import android.view.inputmethod.InputMethodManager;
import android.view.inputmethod.InputMethodSubtype;
import io.flutter.embedding.engine.FlutterJNI;
import io.flutter.embedding.engine.dart.DartExecutor;
import io.flutter.embedding.engine.systemchannels.TextInputChannel;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.JSONMethodCodec;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.platform.PlatformViewsController;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.json.JSONArray;
import org.json.JSONException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.annotation.Config;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
import org.robolectric.shadow.api.Shadow;
import org.robolectric.shadows.ShadowBuild;
import org.robolectric.shadows.ShadowInputMethodManager;

/**
 * Robolectric unit tests for {@link TextInputPlugin}: restart-on-edit behavior,
 * the Samsung-keyboard workaround, enter-key action dispatch, and
 * finishComposingText IMM notifications. The {@link TestImm} shadow replaces
 * the system InputMethodManager to record restart/selection/anchor calls.
 */
@Config(manifest = Config.NONE, shadows = TextInputPluginTest.TestImm.class, sdk = 27)
@RunWith(RobolectricTestRunner.class)
public class TextInputPluginTest {
  // Verifies the method and arguments for a captured method call.
  private void verifyMethodCall(ByteBuffer buffer, String methodName, String[] expectedArgs)
      throws JSONException {
    buffer.rewind();
    MethodCall methodCall = JSONMethodCodec.INSTANCE.decodeMethodCall(buffer);
    assertEquals(methodName, methodCall.method);
    if (expectedArgs != null) {
      JSONArray args = methodCall.arguments();
      assertEquals(expectedArgs.length, args.length());
      for (int i = 0; i < args.length(); i++) {
        assertEquals(expectedArgs[i], args.get(i).toString());
      }
    }
  }

  /** On construction the plugin must ask the framework to re-send any existing input state. */
  @Test
  public void textInputPlugin_RequestsReattachOnCreation() throws JSONException {
    // Initialize a general TextInputPlugin.
    InputMethodSubtype inputMethodSubtype = mock(InputMethodSubtype.class);
    TestImm testImm =
        Shadow.extract(
            RuntimeEnvironment.application.getSystemService(Context.INPUT_METHOD_SERVICE));
    testImm.setCurrentInputMethodSubtype(inputMethodSubtype);
    View testView = new View(RuntimeEnvironment.application);
    FlutterJNI mockFlutterJni = mock(FlutterJNI.class);
    DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJni, mock(AssetManager.class)));
    TextInputPlugin textInputPlugin =
        new TextInputPlugin(testView, dartExecutor, mock(PlatformViewsController.class));
    ArgumentCaptor<String> channelCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
    verify(dartExecutor, times(1))
        .send(
            channelCaptor.capture(), bufferCaptor.capture(), any(BinaryMessenger.BinaryReply.class));
    assertEquals("flutter/textinput", channelCaptor.getValue());
    verifyMethodCall(bufferCaptor.getValue(), "TextInputClient.requestExistingInputState", null);
  }

  /** Setting identical editing state twice must not trigger a second IMM restart. */
  @Test
  public void setTextInputEditingState_doesNotRestartWhenTextIsIdentical() {
    // Initialize a general TextInputPlugin.
    InputMethodSubtype inputMethodSubtype = mock(InputMethodSubtype.class);
    TestImm testImm =
        Shadow.extract(
            RuntimeEnvironment.application.getSystemService(Context.INPUT_METHOD_SERVICE));
    testImm.setCurrentInputMethodSubtype(inputMethodSubtype);
    View testView = new View(RuntimeEnvironment.application);
    TextInputPlugin textInputPlugin =
        new TextInputPlugin(testView, mock(DartExecutor.class), mock(PlatformViewsController.class));
    textInputPlugin.setTextInputClient(
        0,
        new TextInputChannel.Configuration(
            false, false, true, TextInputChannel.TextCapitalization.NONE, null, null, null));
    // There's a pending restart since we initialized the text input client. Flush that now.
    textInputPlugin.setTextInputEditingState(
        testView, new TextInputChannel.TextEditState("", 0, 0));

    // Move the cursor.
    assertEquals(1, testImm.getRestartCount(testView));
    textInputPlugin.setTextInputEditingState(
        testView, new TextInputChannel.TextEditState("", 0, 0));

    // Verify that we haven't restarted the input.
    assertEquals(1, testImm.getRestartCount(testView));
  }

  /** Whenever the text differs from the current Editable, the Editable must be updated. */
  @Test
  public void setTextInputEditingState_alwaysSetEditableWhenDifferent() {
    // Initialize a general TextInputPlugin.
    InputMethodSubtype inputMethodSubtype = mock(InputMethodSubtype.class);
    TestImm testImm =
        Shadow.extract(
            RuntimeEnvironment.application.getSystemService(Context.INPUT_METHOD_SERVICE));
    testImm.setCurrentInputMethodSubtype(inputMethodSubtype);
    View testView = new View(RuntimeEnvironment.application);
    TextInputPlugin textInputPlugin =
        new TextInputPlugin(testView, mock(DartExecutor.class), mock(PlatformViewsController.class));
    textInputPlugin.setTextInputClient(
        0,
        new TextInputChannel.Configuration(
            false, false, true, TextInputChannel.TextCapitalization.NONE, null, null, null));

    // There's a pending restart since we initialized the text input client. Flush that now. With
    // changed text, we should
    // always set the Editable contents.
    textInputPlugin.setTextInputEditingState(
        testView, new TextInputChannel.TextEditState("hello", 0, 0));
    assertEquals(1, testImm.getRestartCount(testView));
    assertTrue(textInputPlugin.getEditable().toString().equals("hello"));

    // No pending restart, set Editable contents anyways.
    textInputPlugin.setTextInputEditingState(
        testView, new TextInputChannel.TextEditState("Shibuyawoo", 0, 0));
    assertEquals(1, testImm.getRestartCount(testView));
    assertTrue(textInputPlugin.getEditable().toString().equals("Shibuyawoo"));
  }

  // See https://github.com/flutter/flutter/issues/29341 and
  // https://github.com/flutter/flutter/issues/31512
  // All modern Samsung keyboards are affected, including non-Korean languages, and thus
  // need the restart.
  @Test
  public void setTextInputEditingState_alwaysRestartsOnAffectedDevices2() {
    // Initialize a TextInputPlugin that needs to be always restarted.
    ShadowBuild.setManufacturer("samsung");
    InputMethodSubtype inputMethodSubtype =
        new InputMethodSubtype(0, 0, /*locale=*/ "en", "", "", false, false);
    Settings.Secure.putString(
        RuntimeEnvironment.application.getContentResolver(),
        Settings.Secure.DEFAULT_INPUT_METHOD,
        "com.sec.android.inputmethod/.SamsungKeypad");
    TestImm testImm =
        Shadow.extract(
            RuntimeEnvironment.application.getSystemService(Context.INPUT_METHOD_SERVICE));
    testImm.setCurrentInputMethodSubtype(inputMethodSubtype);
    View testView = new View(RuntimeEnvironment.application);
    TextInputPlugin textInputPlugin =
        new TextInputPlugin(testView, mock(DartExecutor.class), mock(PlatformViewsController.class));
    textInputPlugin.setTextInputClient(
        0,
        new TextInputChannel.Configuration(
            false, false, true, TextInputChannel.TextCapitalization.NONE, null, null, null));
    // There's a pending restart since we initialized the text input client. Flush that now.
    textInputPlugin.setTextInputEditingState(
        testView, new TextInputChannel.TextEditState("", 0, 0));

    // Move the cursor.
    assertEquals(1, testImm.getRestartCount(testView));
    textInputPlugin.setTextInputEditingState(
        testView, new TextInputChannel.TextEditState("", 0, 0));

    // Verify that we've restarted the input.
    assertEquals(2, testImm.getRestartCount(testView));
  }

  /** Non-Samsung keyboards must NOT receive the workaround restart. */
  @Test
  public void setTextInputEditingState_doesNotRestartOnUnaffectedDevices() {
    // Initialize a TextInputPlugin that needs to be always restarted.
    ShadowBuild.setManufacturer("samsung");
    InputMethodSubtype inputMethodSubtype =
        new InputMethodSubtype(0, 0, /*locale=*/ "en", "", "", false, false);
    Settings.Secure.putString(
        RuntimeEnvironment.application.getContentResolver(),
        Settings.Secure.DEFAULT_INPUT_METHOD,
        "com.fake.test.blah/.NotTheRightKeyboard");
    TestImm testImm =
        Shadow.extract(
            RuntimeEnvironment.application.getSystemService(Context.INPUT_METHOD_SERVICE));
    testImm.setCurrentInputMethodSubtype(inputMethodSubtype);
    View testView = new View(RuntimeEnvironment.application);
    TextInputPlugin textInputPlugin =
        new TextInputPlugin(testView, mock(DartExecutor.class), mock(PlatformViewsController.class));
    textInputPlugin.setTextInputClient(
        0,
        new TextInputChannel.Configuration(
            false, false, true, TextInputChannel.TextCapitalization.NONE, null, null, null));
    // There's a pending restart since we initialized the text input client. Flush that now.
    textInputPlugin.setTextInputEditingState(
        testView, new TextInputChannel.TextEditState("", 0, 0));

    // Move the cursor.
    assertEquals(1, testImm.getRestartCount(testView));
    textInputPlugin.setTextInputEditingState(
        testView, new TextInputChannel.TextEditState("", 0, 0));

    // Verify that we have NOT restarted the input (count unchanged).
    assertEquals(1, testImm.getRestartCount(testView));
  }

  /** A null InputMethodSubtype (no keyboard selected) must not crash the plugin. */
  @Test
  public void setTextInputEditingState_nullInputMethodSubtype() {
    TestImm testImm =
        Shadow.extract(
            RuntimeEnvironment.application.getSystemService(Context.INPUT_METHOD_SERVICE));
    testImm.setCurrentInputMethodSubtype(null);

    View testView = new View(RuntimeEnvironment.application);
    TextInputPlugin textInputPlugin =
        new TextInputPlugin(testView, mock(DartExecutor.class), mock(PlatformViewsController.class));
    textInputPlugin.setTextInputClient(
        0,
        new TextInputChannel.Configuration(
            false, false, true, TextInputChannel.TextCapitalization.NONE, null, null, null));
    // There's a pending restart since we initialized the text input client. Flush that now.
    textInputPlugin.setTextInputEditingState(
        testView, new TextInputChannel.TextEditState("", 0, 0));
    assertEquals(1, testImm.getRestartCount(testView));
  }

  /** ENTER and NUMPAD_ENTER key-downs must each emit a TextInputClient.performAction "done". */
  @Test
  public void inputConnection_createsActionFromEnter() throws JSONException {
    TestImm testImm =
        Shadow.extract(
            RuntimeEnvironment.application.getSystemService(Context.INPUT_METHOD_SERVICE));
    FlutterJNI mockFlutterJni = mock(FlutterJNI.class);
    View testView = new View(RuntimeEnvironment.application);
    DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJni, mock(AssetManager.class)));
    TextInputPlugin textInputPlugin =
        new TextInputPlugin(testView, dartExecutor, mock(PlatformViewsController.class));
    textInputPlugin.setTextInputClient(
        0,
        new TextInputChannel.Configuration(
            false,
            false,
            true,
            TextInputChannel.TextCapitalization.NONE,
            new TextInputChannel.InputType(TextInputChannel.TextInputType.TEXT, false, false),
            null,
            null));
    // There's a pending restart since we initialized the text input client. Flush that now.
    textInputPlugin.setTextInputEditingState(
        testView, new TextInputChannel.TextEditState("", 0, 0));

    ArgumentCaptor<String> channelCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
    verify(dartExecutor, times(1))
        .send(
            channelCaptor.capture(), bufferCaptor.capture(), any(BinaryMessenger.BinaryReply.class));
    assertEquals("flutter/textinput", channelCaptor.getValue());
    verifyMethodCall(bufferCaptor.getValue(), "TextInputClient.requestExistingInputState", null);

    InputConnection connection = textInputPlugin.createInputConnection(testView, new EditorInfo());

    connection.sendKeyEvent(new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_ENTER));
    verify(dartExecutor, times(2))
        .send(
            channelCaptor.capture(), bufferCaptor.capture(), any(BinaryMessenger.BinaryReply.class));
    assertEquals("flutter/textinput", channelCaptor.getValue());
    verifyMethodCall(
        bufferCaptor.getValue(),
        "TextInputClient.performAction",
        new String[] {"0", "TextInputAction.done"});

    // Key-up must not fire an action; a second key-down (numpad enter) must.
    connection.sendKeyEvent(new KeyEvent(KeyEvent.ACTION_UP, KeyEvent.KEYCODE_ENTER));
    connection.sendKeyEvent(new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_NUMPAD_ENTER));
    verify(dartExecutor, times(3))
        .send(
            channelCaptor.capture(), bufferCaptor.capture(), any(BinaryMessenger.BinaryReply.class));
    assertEquals("flutter/textinput", channelCaptor.getValue());
    verifyMethodCall(
        bufferCaptor.getValue(),
        "TextInputClient.performAction",
        new String[] {"0", "TextInputAction.done"});
  }

  /** finishComposingText must clear the composing region via updateCursorAnchorInfo (API 21+). */
  @Test
  public void inputConnection_finishComposingTextUpdatesIMM() throws JSONException {
    ShadowBuild.setManufacturer("samsung");
    InputMethodSubtype inputMethodSubtype =
        new InputMethodSubtype(0, 0, /*locale=*/ "en", "", "", false, false);
    Settings.Secure.putString(
        RuntimeEnvironment.application.getContentResolver(),
        Settings.Secure.DEFAULT_INPUT_METHOD,
        "com.sec.android.inputmethod/.SamsungKeypad");
    TestImm testImm =
        Shadow.extract(
            RuntimeEnvironment.application.getSystemService(Context.INPUT_METHOD_SERVICE));
    testImm.setCurrentInputMethodSubtype(inputMethodSubtype);
    FlutterJNI mockFlutterJni = mock(FlutterJNI.class);
    View testView = new View(RuntimeEnvironment.application);
    DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJni, mock(AssetManager.class)));
    TextInputPlugin textInputPlugin =
        new TextInputPlugin(testView, dartExecutor, mock(PlatformViewsController.class));
    textInputPlugin.setTextInputClient(
        0,
        new TextInputChannel.Configuration(
            false,
            false,
            true,
            TextInputChannel.TextCapitalization.NONE,
            new TextInputChannel.InputType(TextInputChannel.TextInputType.TEXT, false, false),
            null,
            null));
    // There's a pending restart since we initialized the text input client. Flush that now.
    textInputPlugin.setTextInputEditingState(
        testView, new TextInputChannel.TextEditState("", 0, 0));
    InputConnection connection = textInputPlugin.createInputConnection(testView, new EditorInfo());

    connection.finishComposingText();

    if (Build.VERSION.SDK_INT >= 21) {
      // -1 composing start means "no composing region".
      CursorAnchorInfo.Builder builder = new CursorAnchorInfo.Builder();
      builder.setComposingText(-1, "");
      CursorAnchorInfo anchorInfo = builder.build();
      assertEquals(testImm.getLastCursorAnchorInfo(), anchorInfo);
    }
  }

  /** On Samsung keyboards, finishComposingText must emit the expected updateSelection sequence. */
  @Test
  public void inputConnection_samsungFinishComposingTextSetsSelection() throws JSONException {
    ShadowBuild.setManufacturer("samsung");
    InputMethodSubtype inputMethodSubtype =
        new InputMethodSubtype(0, 0, /*locale=*/ "en", "", "", false, false);
    Settings.Secure.putString(
        RuntimeEnvironment.application.getContentResolver(),
        Settings.Secure.DEFAULT_INPUT_METHOD,
        "com.sec.android.inputmethod/.SamsungKeypad");
    TestImm testImm =
        Shadow.extract(
            RuntimeEnvironment.application.getSystemService(Context.INPUT_METHOD_SERVICE));
    testImm.setCurrentInputMethodSubtype(inputMethodSubtype);
    FlutterJNI mockFlutterJni = mock(FlutterJNI.class);
    View testView = new View(RuntimeEnvironment.application);
    DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJni, mock(AssetManager.class)));
    TextInputPlugin textInputPlugin =
        new TextInputPlugin(testView, dartExecutor, mock(PlatformViewsController.class));
    textInputPlugin.setTextInputClient(
        0,
        new TextInputChannel.Configuration(
            false,
            false,
            true,
            TextInputChannel.TextCapitalization.NONE,
            new TextInputChannel.InputType(TextInputChannel.TextInputType.TEXT, false, false),
            null,
            null));
    // There's a pending restart since we initialized the text input client. Flush that now.
    textInputPlugin.setTextInputEditingState(
        testView, new TextInputChannel.TextEditState("", 0, 0));
    InputConnection connection = textInputPlugin.createInputConnection(testView, new EditorInfo());

    // Only record updateSelection calls made during finishComposingText itself.
    testImm.setTrackSelection(true);
    connection.finishComposingText();
    testImm.setTrackSelection(false);

    // Values are (selStart, selEnd, candidatesStart, candidatesEnd) tuples, flattened.
    List<Integer> expectedSelectionValues =
        Arrays.asList(0, 0, -1, -1, -1, -1, -1, -1, 0, 0, -1, -1);
    assertEquals(testImm.getSelectionUpdateValues(), expectedSelectionValues);
  }

  /**
   * Robolectric shadow of {@link InputMethodManager} that records restartInput calls per view,
   * the last CursorAnchorInfo, and (optionally) updateSelection arguments for verification.
   */
  @Implements(InputMethodManager.class)
  public static class TestImm extends ShadowInputMethodManager {
    private InputMethodSubtype currentInputMethodSubtype;
    // Keyed by View hashCode -> number of restartInput calls for that view.
    private SparseIntArray restartCounter = new SparseIntArray();
    private CursorAnchorInfo cursorAnchorInfo;
    // Flattened (selStart, selEnd, candidatesStart, candidatesEnd) tuples, in call order.
    private ArrayList<Integer> selectionUpdateValues;
    private boolean trackSelection = false;

    public TestImm() {
      selectionUpdateValues = new ArrayList<Integer>();
    }

    @Implementation
    public InputMethodSubtype getCurrentInputMethodSubtype() {
      return currentInputMethodSubtype;
    }

    @Implementation
    public void restartInput(View view) {
      int count = restartCounter.get(view.hashCode(), /*defaultValue=*/ 0) + 1;
      restartCounter.put(view.hashCode(), count);
    }

    public void setCurrentInputMethodSubtype(InputMethodSubtype inputMethodSubtype) {
      this.currentInputMethodSubtype = inputMethodSubtype;
    }

    public int getRestartCount(View view) {
      return restartCounter.get(view.hashCode(), /*defaultValue=*/ 0);
    }

    @Implementation
    public void updateCursorAnchorInfo(View view, CursorAnchorInfo cursorAnchorInfo) {
      this.cursorAnchorInfo = cursorAnchorInfo;
    }

    // We simply store the values to verify later.
    @Implementation
    public void updateSelection(
        View view, int selStart, int selEnd, int candidatesStart, int candidatesEnd) {
      if (trackSelection) {
        this.selectionUpdateValues.add(selStart);
        this.selectionUpdateValues.add(selEnd);
        this.selectionUpdateValues.add(candidatesStart);
        this.selectionUpdateValues.add(candidatesEnd);
      }
    }

    // only track values when enabled via this.
    public void setTrackSelection(boolean val) {
      trackSelection = val;
    }

    // Returns the flattened argument values recorded from updateSelection calls.
    public ArrayList<Integer> getSelectionUpdateValues() {
      return selectionUpdateValues;
    }

    public CursorAnchorInfo getLastCursorAnchorInfo() {
      return cursorAnchorInfo;
    }
  }
}
/* -*- Mode: Java; c-basic-offset: 4; tab-width: 20; indent-tabs-mode: nil; -*-
 * Copyright 2018 Mozilla
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use
 * this file except in compliance with the License. You may obtain a copy of the
 * License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software distributed
 * under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License. */
package org.mozilla.mentat;

import android.content.Context;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.RuntimeEnvironment;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.TimeZone;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

/**
 * Instrumentation test, which will execute on an Android device.
 * Exercises the Mentat FFI end to end: store opening, EDN transacts,
 * query binding for every value type, and TypedValue conversions.
 */
@RunWith(RobolectricTestRunner.class)
public class FFIIntegrationTest {
    /** Pair of transaction reports produced when seeding a store (schema + data). */
    class DBSetupResult {
        TxReport schemaReport;
        TxReport dataReport;

        DBSetupResult(TxReport schemaReport, TxReport dataReport) {
            this.schemaReport = schemaReport;
            this.dataReport = dataReport;
        }
    }

    /** Simple nanosecond stopwatch used for ad-hoc query timing. */
    class QueryTimer {
        private long startTime = 0;
        private long endTime = 0;

        void start() {
            this.startTime = System.nanoTime();
        }

        void end() {
            this.endTime = System.nanoTime();
        }

        long duration() {
            return this.endTime - this.startTime;
        }
    }

    // Lazily-initialized, shared Seattle-cities store (see openAndInitializeCitiesStore).
    private Mentat mentat = null;

    @Test
    public void openInMemoryStoreSucceeds() {
        Mentat mentat = Mentat.open();
        assertNotNull(mentat);
    }

    @Test
    public void openStoreInLocationSucceeds() {
        Context context = RuntimeEnvironment.application.getApplicationContext();
        String path = context.getDatabasePath("test.db").getAbsolutePath();
        // NOTE(review): mkdirs() returns false if the directory already exists,
        // so this assertion looks order-dependent across test runs — confirm.
        assertTrue(new File(path).getParentFile().mkdirs());
        Mentat mentat = Mentat.open(path);
        assertNotNull(mentat);
    }

    /**
     * Reads a classpath resource fully into a String (lines re-joined with '\n').
     * Returns null if an IOException occurs.
     */
    public String readFile(String fileName) {
        final File resource = new File(getClass().getClassLoader().getResource(fileName).getFile());
        assertTrue(resource.exists());
        try {
            final FileInputStream inputStream = new FileInputStream(resource);
            BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
            StringBuilder out = new StringBuilder();
            String line;
            while ((line = reader.readLine()) != null) {
                out.append(line).append("\n");
            }
            return out.toString();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }

    /** Transacts the cities schema fixture into the given store. */
    public TxReport transactCitiesSchema(Mentat mentat) {
        String citiesSchema = this.readFile("cities.schema");
        return mentat.transact(citiesSchema);
    }

    /** Transacts the Seattle data fixture into the given store. */
    public TxReport transactSeattleData(Mentat mentat) {
        String seattleData = this.readFile("all_seattle.edn");
        return mentat.transact(seattleData);
    }

    /** Returns the shared in-memory store seeded with the cities schema and Seattle data. */
    public Mentat openAndInitializeCitiesStore() {
        if (this.mentat == null) {
            this.mentat = Mentat.open();
            this.transactCitiesSchema(mentat);
            this.transactSeattleData(mentat);
        }
        return this.mentat;
    }

    /**
     * Within one transaction, installs a :foo/* attribute for every Mentat value type
     * and asserts two sample entities ("a" and "b") covering all of them.
     * "b"'s :foo/ref points at the entid minted for the :foo/string attribute.
     */
    public DBSetupResult populateWithTypesSchema(Mentat mentat) {
        InProgress transaction = mentat.beginTransaction();
        String schema = "[\n" +
                " [:db/add \"b\" :db/ident :foo/boolean]\n" +
                " [:db/add \"b\" :db/valueType :db.type/boolean]\n" +
                " [:db/add \"b\" :db/cardinality :db.cardinality/one]\n" +
                " [:db/add \"l\" :db/ident :foo/long]\n" +
                " [:db/add \"l\" :db/valueType :db.type/long]\n" +
                " [:db/add \"l\" :db/cardinality :db.cardinality/one]\n" +
                " [:db/add \"r\" :db/ident :foo/ref]\n" +
                " [:db/add \"r\" :db/valueType :db.type/ref]\n" +
                " [:db/add \"r\" :db/cardinality :db.cardinality/one]\n" +
                " [:db/add \"i\" :db/ident :foo/instant]\n" +
                " [:db/add \"i\" :db/valueType :db.type/instant]\n" +
                " [:db/add \"i\" :db/cardinality :db.cardinality/one]\n" +
                " [:db/add \"d\" :db/ident :foo/double]\n" +
                " [:db/add \"d\" :db/valueType :db.type/double]\n" +
                " [:db/add \"d\" :db/cardinality :db.cardinality/one]\n" +
                " [:db/add \"s\" :db/ident :foo/string]\n" +
                " [:db/add \"s\" :db/valueType :db.type/string]\n" +
                " [:db/add \"s\" :db/cardinality :db.cardinality/one]\n" +
                " [:db/add \"k\" :db/ident :foo/keyword]\n" +
                " [:db/add \"k\" :db/valueType :db.type/keyword]\n" +
                " [:db/add \"k\" :db/cardinality :db.cardinality/one]\n" +
                " [:db/add \"u\" :db/ident :foo/uuid]\n" +
                " [:db/add \"u\" :db/valueType :db.type/uuid]\n" +
                " [:db/add \"u\" :db/cardinality :db.cardinality/one]\n" +
                " ]";
        TxReport report = transaction.transact(schema);
        // Resolve the entid minted for the :foo/string attribute so "b" can reference it.
        Long stringEntid = report.getEntidForTempId("s");

        String data = "[\n" +
                " [:db/add \"a\" :foo/boolean true]\n" +
                " [:db/add \"a\" :foo/long 25]\n" +
                " [:db/add \"a\" :foo/instant #inst \"2017-01-01T11:00:00.000Z\"]\n" +
                " [:db/add \"a\" :foo/double 11.23]\n" +
                " [:db/add \"a\" :foo/string \"The higher we soar the smaller we appear to those who cannot fly.\"]\n" +
                " [:db/add \"a\" :foo/keyword :foo/string]\n" +
                " [:db/add \"a\" :foo/uuid #uuid \"550e8400-e29b-41d4-a716-446655440000\"]\n" +
                " [:db/add \"b\" :foo/boolean false]\n" +
                " [:db/add \"b\" :foo/ref " + stringEntid + "]\n" +
                " [:db/add \"b\" :foo/keyword :foo/string]\n" +
                " [:db/add \"b\" :foo/long 50]\n" +
                " [:db/add \"b\" :foo/instant #inst \"2018-01-01T11:00:00.000Z\"]\n" +
                " [:db/add \"b\" :foo/double 22.46]\n" +
                " [:db/add \"b\" :foo/string \"Silence is worse; all truths that are kept silent become poisonous.\"]\n" +
                " [:db/add \"b\" :foo/uuid #uuid \"4cb3f828-752d-497a-90c9-b1fd516d5644\"]\n" +
                " ]";
        TxReport dataReport = transaction.transact(data);
        transaction.commit();
        return new DBSetupResult(report, dataReport);
    }

    @Test
    public void transactingVocabularySucceeds() {
        Mentat mentat = Mentat.open();
        TxReport schemaReport = this.transactCitiesSchema(mentat);
        assertNotNull(schemaReport);
        assertTrue(schemaReport.getTxId() > 0);
    }

    @Test
    public void transactingEntitiesSucceeds() {
        Mentat mentat = Mentat.open();
        this.transactCitiesSchema(mentat);
        TxReport dataReport = this.transactSeattleData(mentat);
        assertNotNull(dataReport);
        assertTrue(dataReport.getTxId() > 0);
        // Fixture tempid resolved to a known entid in a fresh store.
        Long entid = dataReport.getEntidForTempId("a17592186045605");
        assertEquals(65733, entid.longValue());
    }

    @Test
    public void runScalarSucceeds() throws InterruptedException {
        Mentat mentat = openAndInitializeCitiesStore();
        String query = "[:find ?n . :in ?name :where [(fulltext $ :community/name ?name) [[?e ?n]]]]";
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).bind("?name", "Wallingford").run(new ScalarResultHandler() {
            @Override
            public void handleValue(TypedValue value) {
                assertNotNull(value);
                assertEquals("KOMO Communities - Wallingford", value.asString());
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void runCollSucceeds() throws InterruptedException {
        Mentat mentat = openAndInitializeCitiesStore();
        String query = "[:find [?when ...] :where [_ :db/txInstant ?when] :order (asc ?when)]";
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).run(new CollResultHandler() {
            @Override
            public void handleList(CollResult list) {
                assertNotNull(list);
                for (int i = 0; i < 3; ++i) {
                    assertNotNull(list.asDate(i));
                }
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void runCollResultIteratorSucceeds() throws InterruptedException {
        Mentat mentat = openAndInitializeCitiesStore();
        String query = "[:find [?when ...] :where [_ :db/txInstant ?when] :order (asc ?when)]";
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).run(new CollResultHandler() {
            @Override
            public void handleList(CollResult list) {
                assertNotNull(list);
                for (TypedValue value : list) {
                    assertNotNull(value.asDate());
                }
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void runTupleSucceeds() throws InterruptedException {
        Mentat mentat = openAndInitializeCitiesStore();
        String query = "[:find [?name ?cat]\n" +
                " :where\n" +
                " [?c :community/name ?name]\n" +
                " [?c :community/type :community.type/website]\n" +
                " [(fulltext $ :community/category \"food\") [[?c ?cat]]]]";
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).run(new TupleResultHandler() {
            @Override
            public void handleRow(TupleResult row) {
                assertNotNull(row);
                String name = row.asString(0);
                String category = row.asString(1);
                assertEquals("Community Harvest of Southwest Seattle", name);
                assertEquals("sustainable food", category);
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void runRelIteratorSucceeds() throws InterruptedException {
        Mentat mentat = openAndInitializeCitiesStore();
        String query = "[:find ?name ?cat\n" +
                " :where\n" +
                " [?c :community/name ?name]\n" +
                " [?c :community/type :community.type/website]\n" +
                " [(fulltext $ :community/category \"food\") [[?c ?cat]]]]";

        final LinkedHashMap<String, String> expectedResults = new LinkedHashMap<>();
        expectedResults.put("InBallard", "food");
        expectedResults.put("Seattle Chinatown Guide", "food");
        expectedResults.put("Community Harvest of Southwest Seattle", "sustainable food");
        expectedResults.put("University District Food Bank", "food bank");

        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).run(new RelResultHandler() {
            @Override
            public void handleRows(RelResult rows) {
                assertNotNull(rows);
                int index = 0;
                for (TupleResult row : rows) {
                    String name = row.asString(0);
                    assertNotNull(name);
                    String category = row.asString(1);
                    assertNotNull(category);
                    String expectedCategory = expectedResults.get(name);
                    assertNotNull(expectedCategory);
                    assertEquals(expectedCategory, category);
                    ++index;
                }
                // Every expected row was seen exactly once.
                assertEquals(expectedResults.size(), index);
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void runRelSucceeds() throws InterruptedException {
        Mentat mentat = openAndInitializeCitiesStore();
        String query = "[:find ?name ?cat\n" +
                " :where\n" +
                " [?c :community/name ?name]\n" +
                " [?c :community/type :community.type/website]\n" +
                " [(fulltext $ :community/category \"food\") [[?c ?cat]]]]";

        final LinkedHashMap<String, String> expectedResults = new LinkedHashMap<>();
        expectedResults.put("InBallard", "food");
        expectedResults.put("Seattle Chinatown Guide", "food");
        expectedResults.put("Community Harvest of Southwest Seattle", "sustainable food");
        expectedResults.put("University District Food Bank", "food bank");

        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).run(new RelResultHandler() {
            @Override
            public void handleRows(RelResult rows) {
                assertNotNull(rows);
                // Indexed access variant of runRelIteratorSucceeds.
                for (int i = 0; i < expectedResults.size(); ++i) {
                    TupleResult row = rows.rowAtIndex(i);
                    assertNotNull(row);
                    String name = row.asString(0);
                    assertNotNull(name);
                    String category = row.asString(1);
                    assertNotNull(category);
                    String expectedCategory = expectedResults.get(name);
                    assertNotNull(expectedCategory);
                    assertEquals(expectedCategory, category);
                }
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void bindingLongValueSucceeds() throws InterruptedException {
        Mentat mentat = Mentat.open();
        TxReport report = this.populateWithTypesSchema(mentat).dataReport;
        final Long aEntid = report.getEntidForTempId("a");
        String query = "[:find ?e . :in ?long :where [?e :foo/long ?long]]";
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).bind("?long", 25).run(new ScalarResultHandler() {
            @Override
            public void handleValue(TypedValue value) {
                assertNotNull(value);
                assertEquals(aEntid, value.asEntid());
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void bindingRefValueSucceeds() throws InterruptedException {
        Mentat mentat = Mentat.open();
        TxReport report = this.populateWithTypesSchema(mentat).dataReport;
        // "b" :foo/ref was asserted to point at the :foo/string attribute entid.
        long stringEntid = mentat.entIdForAttribute(":foo/string");
        final Long bEntid = report.getEntidForTempId("b");
        String query = "[:find ?e . :in ?ref :where [?e :foo/ref ?ref]]";
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).bindEntidReference("?ref", stringEntid).run(new ScalarResultHandler() {
            @Override
            public void handleValue(TypedValue value) {
                assertNotNull(value);
                assertEquals(bEntid, value.asEntid());
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void bindingRefKwValueSucceeds() throws InterruptedException {
        Mentat mentat = Mentat.open();
        TxReport report = this.populateWithTypesSchema(mentat).dataReport;
        String refKeyword = ":foo/string";
        final Long bEntid = report.getEntidForTempId("b");
        String query = "[:find ?e . :in ?ref :where [?e :foo/ref ?ref]]";
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).bindKeywordReference("?ref", refKeyword).run(new ScalarResultHandler() {
            @Override
            public void handleValue(TypedValue value) {
                assertNotNull(value);
                assertEquals(bEntid, value.asEntid());
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void bindingKwValueSucceeds() throws InterruptedException {
        Mentat mentat = Mentat.open();
        TxReport report = this.populateWithTypesSchema(mentat).dataReport;
        final Long aEntid = report.getEntidForTempId("a");
        String query = "[:find ?e . :in ?kw :where [?e :foo/keyword ?kw]]";
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).bindKeyword("?kw", ":foo/string").run(new ScalarResultHandler() {
            @Override
            public void handleValue(TypedValue value) {
                assertNotNull(value);
                assertEquals(aEntid, value.asEntid());
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void bindingDateValueSucceeds() throws InterruptedException {
        Mentat mentat = Mentat.open();
        TxReport report = this.populateWithTypesSchema(mentat).dataReport;
        final Long aEntid = report.getEntidForTempId("a");

        // Only "a"'s 2017 instant predates this timestamp (2018-04-16).
        Date date = new Date(1523896758000L);
        String query = "[:find [?e ?d] :in ?now :where [?e :foo/instant ?d] [(< ?d ?now)]]";
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).bind("?now", date).run(new TupleResultHandler() {
            @Override
            public void handleRow(TupleResult row) {
                assertNotNull(row);
                TypedValue value = row.get(0);
                assertNotNull(value);
                assertEquals(aEntid, value.asEntid());
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void bindingStringValueSucceeds() throws InterruptedException {
        Mentat mentat = this.openAndInitializeCitiesStore();
        String query = "[:find ?n . :in ?name :where [(fulltext $ :community/name ?name) [[?e ?n]]]]";
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).bind("?name", "Wallingford").run(new ScalarResultHandler() {
            @Override
            public void handleValue(TypedValue value) {
                assertNotNull(value);
                assertEquals("KOMO Communities - Wallingford", value.asString());
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void bindingUuidValueSucceeds() throws InterruptedException {
        Mentat mentat = Mentat.open();
        TxReport report = this.populateWithTypesSchema(mentat).dataReport;
        final Long aEntid = report.getEntidForTempId("a");
        String query = "[:find ?e . :in ?uuid :where [?e :foo/uuid ?uuid]]";
        UUID uuid = UUID.fromString("550e8400-e29b-41d4-a716-446655440000");
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).bind("?uuid", uuid).run(new ScalarResultHandler() {
            @Override
            public void handleValue(TypedValue value) {
                assertNotNull(value);
                assertEquals(aEntid, value.asEntid());
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void bindingBooleanValueSucceeds() throws InterruptedException {
        Mentat mentat = Mentat.open();
        TxReport report = this.populateWithTypesSchema(mentat).dataReport;
        final Long aEntid = report.getEntidForTempId("a");
        String query = "[:find ?e . :in ?bool :where [?e :foo/boolean ?bool]]";
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).bind("?bool", true).run(new ScalarResultHandler() {
            @Override
            public void handleValue(TypedValue value) {
                assertNotNull(value);
                assertEquals(aEntid, value.asEntid());
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void bindingDoubleValueSucceeds() throws InterruptedException {
        Mentat mentat = Mentat.open();
        TxReport report = this.populateWithTypesSchema(mentat).dataReport;
        final Long aEntid = report.getEntidForTempId("a");
        String query = "[:find ?e . :in ?double :where [?e :foo/double ?double]]";
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).bind("?double", 11.23).run(new ScalarResultHandler() {
            @Override
            public void handleValue(TypedValue value) {
                assertNotNull(value);
                assertEquals(aEntid, value.asEntid());
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void typedValueConvertsToLong() throws InterruptedException {
        Mentat mentat = Mentat.open();
        TxReport report = this.populateWithTypesSchema(mentat).dataReport;
        final Long aEntid = report.getEntidForTempId("a");
        String query = "[:find ?v . :in ?e :where [?e :foo/long ?v]]";
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).bindEntidReference("?e", aEntid).run(new ScalarResultHandler() {
            @Override
            public void handleValue(TypedValue value) {
                assertNotNull(value);
                // Converted twice on purpose: the second call exercises the cached conversion.
                assertEquals(25, value.asLong().longValue());
                assertEquals(25, value.asLong().longValue());
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void typedValueConvertsToRef() throws InterruptedException {
        Mentat mentat = Mentat.open();
        TxReport report = this.populateWithTypesSchema(mentat).dataReport;
        final Long aEntid = report.getEntidForTempId("a");
        String query = "[:find ?e . :where [?e :foo/long 25]]";
        final CountDownLatch expectation = new CountDownLatch(1);
        mentat.query(query).run(new ScalarResultHandler() {
            @Override
            public void handleValue(TypedValue value) {
                assertNotNull(value);
                // Converted twice on purpose: the second call exercises the cached conversion.
                assertEquals(aEntid, value.asEntid());
                assertEquals(aEntid, value.asEntid());
                expectation.countDown();
            }
        });
        expectation.await();
    }

    @Test
    public void typedValueConvertsToKeyword() throws InterruptedException {
        Mentat mentat = Mentat.open();
        TxReport report = this.populateWithTypesSchema(mentat).dataReport;
        final Long aEntid = report.getEntidForTempId("a");
        String query = "[:find ?v . 
:in ?e :where [?e :foo/keyword ?v]]"; final CountDownLatch expectation = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", aEntid).run(new ScalarResultHandler() { @Override public void handleValue(TypedValue value) { assertNotNull(value); assertEquals(":foo/string", value.asKeyword()); assertEquals(":foo/string", value.asKeyword()); expectation.countDown(); } }); expectation.await(); } @Test public void typedValueConvertsToBoolean() throws InterruptedException { Mentat mentat = Mentat.open(); TxReport report = this.populateWithTypesSchema(mentat).dataReport; final Long aEntid = report.getEntidForTempId("a"); String query = "[:find ?v . :in ?e :where [?e :foo/boolean ?v]]"; final CountDownLatch expectation = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", aEntid).run(new ScalarResultHandler() { @Override public void handleValue(TypedValue value) { assertNotNull(value); assertEquals(true, value.asBoolean()); assertEquals(true, value.asBoolean()); expectation.countDown(); } }); expectation.await(); } @Test public void typedValueConvertsToDouble() throws InterruptedException { Mentat mentat = Mentat.open(); TxReport report = this.populateWithTypesSchema(mentat).dataReport; final Long aEntid = report.getEntidForTempId("a"); String query = "[:find ?v . :in ?e :where [?e :foo/double ?v]]"; final CountDownLatch expectation = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", aEntid).run(new ScalarResultHandler() { @Override public void handleValue(TypedValue value) { assertNotNull(value); assertEquals(new Double(11.23), value.asDouble()); assertEquals(new Double(11.23), value.asDouble()); expectation.countDown(); } }); expectation.await(); } @Test public void typedValueConvertsToDate() throws InterruptedException, ParseException { Mentat mentat = Mentat.open(); TxReport report = this.populateWithTypesSchema(mentat).dataReport; final Long aEntid = report.getEntidForTempId("a"); String query = "[:find ?v . 
:in ?e :where [?e :foo/instant ?v]]"; final TimeZone tz = TimeZone.getTimeZone("UTC"); final DateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); format.setTimeZone(tz); format.parse("2017-01-01T11:00:00.000Z"); final Calendar expectedDate = format.getCalendar(); final CountDownLatch expectation = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", aEntid).run(new ScalarResultHandler() { @Override public void handleValue(TypedValue value) { assertNotNull(value); assertEquals(expectedDate.getTime(), value.asDate()); assertEquals(expectedDate.getTime(), value.asDate()); expectation.countDown(); } }); expectation.await(); } @Test public void typedValueConvertsToString() throws InterruptedException { Mentat mentat = Mentat.open(); TxReport report = this.populateWithTypesSchema(mentat).dataReport; final Long aEntid = report.getEntidForTempId("a"); String query = "[:find ?v . :in ?e :where [?e :foo/string ?v]]"; final CountDownLatch expectation = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", aEntid).run(new ScalarResultHandler() { @Override public void handleValue(TypedValue value) { assertNotNull(value); assertEquals("The higher we soar the smaller we appear to those who cannot fly.", value.asString()); assertEquals("The higher we soar the smaller we appear to those who cannot fly.", value.asString()); expectation.countDown(); } }); expectation.await(); } @Test public void typedValueConvertsToUUID() throws InterruptedException { Mentat mentat = Mentat.open(); TxReport report = this.populateWithTypesSchema(mentat).dataReport; final Long aEntid = report.getEntidForTempId("a"); String query = "[:find ?v . 
:in ?e :where [?e :foo/uuid ?v]]"; final UUID expectedUUID = UUID.fromString("550e8400-e29b-41d4-a716-446655440000"); final CountDownLatch expectation = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", aEntid).run(new ScalarResultHandler() { @Override public void handleValue(TypedValue value) { assertNotNull(value); assertEquals(expectedUUID, value.asUUID()); assertEquals(expectedUUID, value.asUUID()); expectation.countDown(); } }); expectation.await(); } @Test public void valueForAttributeOfEntitySucceeds() { Mentat mentat = Mentat.open(); TxReport report = this.populateWithTypesSchema(mentat).dataReport; final Long aEntid = report.getEntidForTempId("a"); TypedValue value = mentat.valueForAttributeOfEntity(":foo/long", aEntid); assertNotNull(value); assertEquals(25, value.asLong().longValue()); } @Test public void entidForAttributeSucceeds() { Mentat mentat = Mentat.open(); this.populateWithTypesSchema(mentat); long entid = mentat.entIdForAttribute(":foo/long"); assertEquals(65540, entid); } @Test public void testInProgressTransact() { Mentat mentat = Mentat.open(); TxReport report = this.populateWithTypesSchema(mentat).dataReport; assertNotNull(report); } @Test public void testInProgressRollback() { Mentat mentat = Mentat.open(); TxReport report = this.populateWithTypesSchema(mentat).dataReport; assertNotNull(report); long aEntid = report.getEntidForTempId("a"); TypedValue preLongValue = mentat.valueForAttributeOfEntity(":foo/long", aEntid); assertEquals(25, preLongValue.asLong().longValue()); InProgress inProgress = mentat.beginTransaction(); report = inProgress.transact("[[:db/add "+ aEntid +" :foo/long 22]]"); assertNotNull(report); inProgress.rollback(); TypedValue postLongValue = mentat.valueForAttributeOfEntity(":foo/long", aEntid); assertEquals(25, postLongValue.asLong().longValue()); } @Test public void testInProgressEntityBuilder() throws InterruptedException { Mentat mentat = Mentat.open(); DBSetupResult reports = 
this.populateWithTypesSchema(mentat); long bEntid = reports.dataReport.getEntidForTempId("b"); final long longEntid = reports.schemaReport.getEntidForTempId("l"); final long stringEntid = reports.schemaReport.getEntidForTempId("s"); // test that the values are as expected String query = "[:find [?b ?i ?u ?l ?d ?s ?k ?r]\n" + " :in ?e\n" + " :where [?e :foo/boolean ?b]\n" + " [?e :foo/instant ?i]\n" + " [?e :foo/uuid ?u]\n" + " [?e :foo/long ?l]\n" + " [?e :foo/double ?d]\n" + " [?e :foo/string ?s]\n" + " [?e :foo/keyword ?k]\n" + " [?e :foo/ref ?r]]"; final CountDownLatch expectation1 = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", bEntid).run(new TupleResultHandler() { @Override public void handleRow(TupleResult row) { assertNotNull(row); assertEquals(false, row.asBool(0)); assertEquals(new Date(1514804400000L), row.asDate(1)); assertEquals(UUID.fromString("4cb3f828-752d-497a-90c9-b1fd516d5644"), row.asUUID(2)); assertEquals(50, row.asLong(3).longValue()); assertEquals(new Double(22.46), row.asDouble(4)); assertEquals("Silence is worse; all truths that are kept silent become poisonous.", row.asString(5)); assertEquals(":foo/string", row.asKeyword(6)); assertEquals(stringEntid, row.asEntid(7).longValue()); expectation1.countDown(); } }); expectation1.await(); InProgressBuilder builder = mentat.entityBuilder(); builder.add(bEntid, ":foo/boolean", true); final Date newDate = new Date(1524743301000L); builder.add(bEntid, ":foo/instant", newDate); final UUID newUUID = UUID.randomUUID(); builder.add(bEntid, ":foo/uuid", newUUID); builder.add(bEntid, ":foo/long", 75); builder.add(bEntid, ":foo/double", 81.3); builder.add(bEntid, ":foo/string", "Become who you are!"); builder.addKeyword(bEntid, ":foo/keyword", ":foo/long"); builder.addRef(bEntid, ":foo/ref", longEntid); builder.commit(); final CountDownLatch expectation2 = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", bEntid).run(new TupleResultHandler() { @Override public void 
handleRow(TupleResult row) { assertNotNull(row); assertEquals(true, row.asBool(0)); System.out.println(row.asDate(1).getTime()); assertEquals(newDate, row.asDate(1)); assertEquals(newUUID, row.asUUID(2)); assertEquals(75, row.asLong(3).longValue()); assertEquals(new Double(81.3), row.asDouble(4)); assertEquals("Become who you are!", row.asString(5)); assertEquals(":foo/long", row.asKeyword(6)); assertEquals(longEntid, row.asEntid(7).longValue()); expectation2.countDown(); } }); expectation2.await(); } @Test public void testEntityBuilderForEntid() throws InterruptedException { Mentat mentat = Mentat.open(); DBSetupResult reports = this.populateWithTypesSchema(mentat); long bEntid = reports.dataReport.getEntidForTempId("b"); final long longEntid = reports.schemaReport.getEntidForTempId("l"); final long stringEntid = reports.schemaReport.getEntidForTempId("s"); // test that the values are as expected String query = "[:find [?b ?i ?u ?l ?d ?s ?k ?r]\n" + " :in ?e\n" + " :where [?e :foo/boolean ?b]\n" + " [?e :foo/instant ?i]\n" + " [?e :foo/uuid ?u]\n" + " [?e :foo/long ?l]\n" + " [?e :foo/double ?d]\n" + " [?e :foo/string ?s]\n" + " [?e :foo/keyword ?k]\n" + " [?e :foo/ref ?r]]"; final CountDownLatch expectation1 = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", bEntid).run(new TupleResultHandler() { @Override public void handleRow(TupleResult row) { assertNotNull(row); assertEquals(false, row.asBool(0)); assertEquals(new Date(1514804400000L), row.asDate(1)); assertEquals(UUID.fromString("4cb3f828-752d-497a-90c9-b1fd516d5644"), row.asUUID(2)); assertEquals(50, row.asLong(3).longValue()); assertEquals(new Double(22.46), row.asDouble(4)); assertEquals("Silence is worse; all truths that are kept silent become poisonous.", row.asString(5)); assertEquals(":foo/string", row.asKeyword(6)); assertEquals(stringEntid, row.asEntid(7).longValue()); expectation1.countDown(); } }); expectation1.await(); EntityBuilder builder = mentat.entityBuilder(bEntid); 
builder.add(":foo/boolean", true); final Date newDate = new Date(1524743301000L); builder.add(":foo/instant", newDate); final UUID newUUID = UUID.randomUUID(); builder.add(":foo/uuid", newUUID); builder.add(":foo/long", 75); builder.add(":foo/double", 81.3); builder.add(":foo/string", "Become who you are!"); builder.addKeyword(":foo/keyword", ":foo/long"); builder.addRef(":foo/ref", longEntid); builder.commit(); final CountDownLatch expectation2 = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", bEntid).run(new TupleResultHandler() { @Override public void handleRow(TupleResult row) { assertNotNull(row); assertEquals(true, row.asBool(0)); System.out.println(row.asDate(1).getTime()); assertEquals(newDate, row.asDate(1)); assertEquals(newUUID, row.asUUID(2)); assertEquals(75, row.asLong(3).longValue()); assertEquals(new Double(81.3), row.asDouble(4)); assertEquals("Become who you are!", row.asString(5)); assertEquals(":foo/long", row.asKeyword(6)); assertEquals(longEntid, row.asEntid(7).longValue()); expectation2.countDown(); } }); expectation2.await(); } @Test public void testEntityBuilderForTempid() throws InterruptedException { Mentat mentat = Mentat.open(); DBSetupResult reports = this.populateWithTypesSchema(mentat); final long longEntid = reports.schemaReport.getEntidForTempId("l"); EntityBuilder builder = mentat.entityBuilder("c"); builder.add(":foo/boolean", true); final Date newDate = new Date(1524743301000L); builder.add(":foo/instant", newDate); final UUID newUUID = UUID.randomUUID(); builder.add(":foo/uuid", newUUID); builder.add(":foo/long", 75); builder.add(":foo/double", 81.3); builder.add(":foo/string", "Become who you are!"); builder.addKeyword(":foo/keyword", ":foo/long"); builder.addRef(":foo/ref", longEntid); TxReport report = builder.commit(); long cEntid = report.getEntidForTempId("c"); // test that the values are as expected String query = "[:find [?b ?i ?u ?l ?d ?s ?k ?r]\n" + " :in ?e\n" + " :where [?e :foo/boolean ?b]\n" + 
" [?e :foo/instant ?i]\n" + " [?e :foo/uuid ?u]\n" + " [?e :foo/long ?l]\n" + " [?e :foo/double ?d]\n" + " [?e :foo/string ?s]\n" + " [?e :foo/keyword ?k]\n" + " [?e :foo/ref ?r]]"; final CountDownLatch expectation = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", cEntid).run(new TupleResultHandler() { @Override public void handleRow(TupleResult row) { assertNotNull(row); assertEquals(true, row.asBool(0)); System.out.println(row.asDate(1).getTime()); assertEquals(newDate, row.asDate(1)); assertEquals(newUUID, row.asUUID(2)); assertEquals(75, row.asLong(3).longValue()); assertEquals(new Double(81.3), row.asDouble(4)); assertEquals("Become who you are!", row.asString(5)); assertEquals(":foo/long", row.asKeyword(6)); assertEquals(longEntid, row.asEntid(7).longValue()); expectation.countDown(); } }); expectation.await(); } @Test public void testInProgressBuilderTransact() throws InterruptedException { Mentat mentat = Mentat.open(); DBSetupResult reports = this.populateWithTypesSchema(mentat); long aEntid = reports.dataReport.getEntidForTempId("a"); long bEntid = reports.dataReport.getEntidForTempId("b"); final long longEntid = reports.schemaReport.getEntidForTempId("l"); InProgressBuilder builder = mentat.entityBuilder(); builder.add(bEntid, ":foo/boolean", true); final Date newDate = new Date(1524743301000L); builder.add(bEntid, ":foo/instant", newDate); final UUID newUUID = UUID.randomUUID(); builder.add(bEntid, ":foo/uuid", newUUID); builder.add(bEntid, ":foo/long", 75); builder.add(bEntid, ":foo/double", 81.3); builder.add(bEntid, ":foo/string", "Become who you are!"); builder.addKeyword(bEntid, ":foo/keyword", ":foo/long"); builder.addRef(bEntid, ":foo/ref", longEntid); InProgressTransactionResult result = builder.transact(); assertNotNull(result); InProgress inProgress = result.getInProgress(); assertNotNull(inProgress); assertNotNull(result.getReport()); inProgress.transact("[[:db/add "+ aEntid +" :foo/long 22]]"); inProgress.commit(); // 
test that the values are as expected String query = "[:find [?b ?i ?u ?l ?d ?s ?k ?r]\n" + " :in ?e\n" + " :where [?e :foo/boolean ?b]\n" + " [?e :foo/instant ?i]\n" + " [?e :foo/uuid ?u]\n" + " [?e :foo/long ?l]\n" + " [?e :foo/double ?d]\n" + " [?e :foo/string ?s]\n" + " [?e :foo/keyword ?k]\n" + " [?e :foo/ref ?r]]"; final CountDownLatch expectation = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", bEntid).run(new TupleResultHandler() { @Override public void handleRow(TupleResult row) { assertNotNull(row); assertEquals(true, row.asBool(0)); System.out.println(row.asDate(1).getTime()); assertEquals(newDate, row.asDate(1)); assertEquals(newUUID, row.asUUID(2)); assertEquals(75, row.asLong(3).longValue()); assertEquals(new Double(81.3), row.asDouble(4)); assertEquals("Become who you are!", row.asString(5)); assertEquals(":foo/long", row.asKeyword(6)); assertEquals(longEntid, row.asEntid(7).longValue()); expectation.countDown(); } }); expectation.await(); TypedValue longValue = mentat.valueForAttributeOfEntity(":foo/long", aEntid); assertEquals(22, longValue.asLong().longValue()); } @Test public void testEntityBuilderTransact() throws InterruptedException { Mentat mentat = Mentat.open(); DBSetupResult reports = this.populateWithTypesSchema(mentat); long aEntid = reports.dataReport.getEntidForTempId("a"); long bEntid = reports.dataReport.getEntidForTempId("b"); final long longEntid = reports.schemaReport.getEntidForTempId("l"); EntityBuilder builder = mentat.entityBuilder(bEntid); builder.add(":foo/boolean", true); final Date newDate = new Date(1524743301000L); builder.add(":foo/instant", newDate); final UUID newUUID = UUID.randomUUID(); builder.add(":foo/uuid", newUUID); builder.add(":foo/long", 75); builder.add(":foo/double", 81.3); builder.add(":foo/string", "Become who you are!"); builder.addKeyword(":foo/keyword", ":foo/long"); builder.addRef(":foo/ref", longEntid); InProgressTransactionResult result = builder.transact(); 
assertNotNull(result); InProgress inProgress = result.getInProgress(); assertNotNull(inProgress); assertNotNull(result.getReport()); inProgress.transact("[[:db/add "+ aEntid +" :foo/long 22]]"); inProgress.commit(); // test that the values are as expected String query = "[:find [?b ?i ?u ?l ?d ?s ?k ?r]\n" + " :in ?e\n" + " :where [?e :foo/boolean ?b]\n" + " [?e :foo/instant ?i]\n" + " [?e :foo/uuid ?u]\n" + " [?e :foo/long ?l]\n" + " [?e :foo/double ?d]\n" + " [?e :foo/string ?s]\n" + " [?e :foo/keyword ?k]\n" + " [?e :foo/ref ?r]]"; final CountDownLatch expectation = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", bEntid).run(new TupleResultHandler() { @Override public void handleRow(TupleResult row) { assertNotNull(row); assertEquals(true, row.asBool(0)); System.out.println(row.asDate(1).getTime()); assertEquals(newDate, row.asDate(1)); assertEquals(newUUID, row.asUUID(2)); assertEquals(75, row.asLong(3).longValue()); assertEquals(new Double(81.3), row.asDouble(4)); assertEquals("Become who you are!", row.asString(5)); assertEquals(":foo/long", row.asKeyword(6)); assertEquals(longEntid, row.asEntid(7).longValue()); expectation.countDown(); } }); expectation.await(); TypedValue longValue = mentat.valueForAttributeOfEntity(":foo/long", aEntid); assertEquals(22, longValue.asLong().longValue()); } @Test public void testEntityBuilderRetract() throws InterruptedException { Mentat mentat = Mentat.open(); DBSetupResult reports = this.populateWithTypesSchema(mentat); long bEntid = reports.dataReport.getEntidForTempId("b"); final long longEntid = reports.schemaReport.getEntidForTempId("l"); final long stringEntid = reports.schemaReport.getEntidForTempId("s"); // test that the values are as expected String query = "[:find [?b ?i ?u ?l ?d ?s ?k ?r]\n" + " :in ?e\n" + " :where [?e :foo/boolean ?b]\n" + " [?e :foo/instant ?i]\n" + " [?e :foo/uuid ?u]\n" + " [?e :foo/long ?l]\n" + " [?e :foo/double ?d]\n" + " [?e :foo/string ?s]\n" + " [?e :foo/keyword 
?k]\n" + " [?e :foo/ref ?r]]"; final CountDownLatch expectation1 = new CountDownLatch(1); final Date previousDate = new Date(1514804400000L); final UUID previousUuid = UUID.fromString("4cb3f828-752d-497a-90c9-b1fd516d5644"); mentat.query(query).bindEntidReference("?e", bEntid).run(new TupleResultHandler() { @Override public void handleRow(TupleResult row) { assertNotNull(row); assertEquals(false, row.asBool(0)); assertEquals(previousDate, row.asDate(1)); assertEquals(previousUuid, row.asUUID(2)); assertEquals(50, row.asLong(3).longValue()); assertEquals(new Double(22.46), row.asDouble(4)); assertEquals("Silence is worse; all truths that are kept silent become poisonous.", row.asString(5)); assertEquals(":foo/string", row.asKeyword(6)); assertEquals(stringEntid, row.asEntid(7).longValue()); expectation1.countDown(); } }); expectation1.await(); EntityBuilder builder = mentat.entityBuilder(bEntid); builder.retract(":foo/boolean", false); builder.retract(":foo/instant", previousDate); builder.retract(":foo/uuid", previousUuid); builder.retract(":foo/long", 50); builder.retract(":foo/double", 22.46); builder.retract(":foo/string", "Silence is worse; all truths that are kept silent become poisonous."); builder.retractKeyword(":foo/keyword", ":foo/string"); builder.retractRef(":foo/ref", stringEntid); builder.commit(); final CountDownLatch expectation2 = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", bEntid).run(new TupleResultHandler() { @Override public void handleRow(TupleResult row) { assertNull(row); expectation2.countDown(); } }); expectation2.await(); } @Test public void testInProgressBuilderRetract() throws InterruptedException { Mentat mentat = Mentat.open(); DBSetupResult reports = this.populateWithTypesSchema(mentat); long bEntid = reports.dataReport.getEntidForTempId("b"); final long longEntid = reports.schemaReport.getEntidForTempId("l"); final long stringEntid = reports.schemaReport.getEntidForTempId("s"); // test that the values are as 
expected String query = "[:find [?b ?i ?u ?l ?d ?s ?k ?r]\n" + " :in ?e\n" + " :where [?e :foo/boolean ?b]\n" + " [?e :foo/instant ?i]\n" + " [?e :foo/uuid ?u]\n" + " [?e :foo/long ?l]\n" + " [?e :foo/double ?d]\n" + " [?e :foo/string ?s]\n" + " [?e :foo/keyword ?k]\n" + " [?e :foo/ref ?r]]"; final CountDownLatch expectation1 = new CountDownLatch(1); final Date previousDate = new Date(1514804400000L); final UUID previousUuid = UUID.fromString("4cb3f828-752d-497a-90c9-b1fd516d5644"); mentat.query(query).bindEntidReference("?e", bEntid).run(new TupleResultHandler() { @Override public void handleRow(TupleResult row) { assertNotNull(row); assertEquals(false, row.asBool(0)); assertEquals(previousDate, row.asDate(1)); assertEquals(previousUuid, row.asUUID(2)); assertEquals(50, row.asLong(3).longValue()); assertEquals(new Double(22.46), row.asDouble(4)); assertEquals("Silence is worse; all truths that are kept silent become poisonous.", row.asString(5)); assertEquals(":foo/string", row.asKeyword(6)); assertEquals(stringEntid, row.asEntid(7).longValue()); expectation1.countDown(); } }); expectation1.await(); InProgressBuilder builder = mentat.entityBuilder(); builder.retract(bEntid, ":foo/boolean", false); builder.retract(bEntid, ":foo/instant", previousDate); builder.retract(bEntid, ":foo/uuid", previousUuid); builder.retract(bEntid, ":foo/long", 50); builder.retract(bEntid, ":foo/double", 22.46); builder.retract(bEntid, ":foo/string", "Silence is worse; all truths that are kept silent become poisonous."); builder.retractKeyword(bEntid, ":foo/keyword", ":foo/string"); builder.retractRef(bEntid, ":foo/ref", stringEntid); builder.commit(); final CountDownLatch expectation2 = new CountDownLatch(1); mentat.query(query).bindEntidReference("?e", bEntid).run(new TupleResultHandler() { @Override public void handleRow(TupleResult row) { assertNull(row); expectation2.countDown(); } }); expectation2.await(); } // @Test // Disabled due to frequent failures. 
public void testCaching() throws InterruptedException { String query = "[:find ?district :where\n" + " [?neighborhood :neighborhood/name \"Beacon Hill\"]\n" + " [?neighborhood :neighborhood/district ?d]\n" + " [?d :district/name ?district]]"; Mentat mentat = openAndInitializeCitiesStore(); final CountDownLatch expectation1 = new CountDownLatch(1); final QueryTimer uncachedTimer = new QueryTimer(); uncachedTimer.start(); mentat.query(query).run(new RelResultHandler() { @Override public void handleRows(RelResult rows) { uncachedTimer.end(); assertNotNull(rows); expectation1.countDown(); } }); expectation1.await(); mentat.cache(":neighborhood/name", CacheDirection.REVERSE); mentat.cache(":neighborhood/district", CacheDirection.FORWARD); final CountDownLatch expectation2 = new CountDownLatch(1); final QueryTimer cachedTimer = new QueryTimer(); cachedTimer.start(); mentat.query(query).run(new RelResultHandler() { @Override public void handleRows(RelResult rows) { cachedTimer.end(); assertNotNull(rows); expectation2.countDown(); } }); expectation2.await(); long timingDifference = uncachedTimer.duration() - cachedTimer.duration(); assertTrue("Cached query is "+ timingDifference +" nanoseconds faster than the uncached query", cachedTimer.duration() < uncachedTimer.duration()); } }