output stringlengths 7–516k | instruction stringclasses 1 value | input stringlengths 6–884k |
---|---|---|
```java
package org.apache.rocketmq.client.consumer.store;
import java.io.File;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import org.apache.rocketmq.client.ClientConfig;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.common.message.MessageQueue;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class LocalFileOffsetStoreTest {
@Mock
private MQClientInstance mQClientFactory;
private String group = "FooBarGroup";
private String topic = "FooBar";
private String brokerName = "DefaultBrokerName";
@Before
public void init() {
System.setProperty("rocketmq.client.localOffsetStoreDir", System.getProperty("java.io.tmpdir") + File.separator + ".rocketmq_offsets");
String clientId = new ClientConfig().buildMQClientId() + "#TestNamespace" + System.currentTimeMillis();
when(mQClientFactory.getClientId()).thenReturn(clientId);
}
@Test
public void testUpdateOffset() throws Exception {
OffsetStore offsetStore = new LocalFileOffsetStore(mQClientFactory, group);
MessageQueue messageQueue = new MessageQueue(topic, brokerName, 1);
offsetStore.updateOffset(messageQueue, 1024, false);
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_MEMORY)).isEqualTo(1024);
offsetStore.updateOffset(messageQueue, 1023, false);
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_MEMORY)).isEqualTo(1023);
offsetStore.updateOffset(messageQueue, 1022, true);
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_MEMORY)).isEqualTo(1023);
}
@Test
public void testReadOffset_FromStore() throws Exception {
OffsetStore offsetStore = new LocalFileOffsetStore(mQClientFactory, group);
MessageQueue messageQueue = new MessageQueue(topic, brokerName, 2);
offsetStore.updateOffset(messageQueue, 1024, false);
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_STORE)).isEqualTo(-1);
offsetStore.persistAll(new HashSet<MessageQueue>(Collections.singletonList(messageQueue)));
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_STORE)).isEqualTo(1024);
}
@Test
public void testCloneOffset() throws Exception {
OffsetStore offsetStore = new LocalFileOffsetStore(mQClientFactory, group);
MessageQueue messageQueue = new MessageQueue(topic, brokerName, 3);
offsetStore.updateOffset(messageQueue, 1024, false);
Map<MessageQueue, Long> cloneOffsetTable = offsetStore.cloneOffsetTable(topic);
assertThat(cloneOffsetTable.size()).isEqualTo(1);
assertThat(cloneOffsetTable.get(messageQueue)).isEqualTo(1024);
}
}
```
|
Please help me generate a test for this class.
|
```java
package org.apache.rocketmq.client.consumer.store;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.client.log.ClientLogger;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.UtilAll;
import org.apache.rocketmq.common.help.FAQUrl;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.remoting.exception.RemotingException;
import org.slf4j.Logger;
/**
* Local storage implementation
*/
public class LocalFileOffsetStore implements OffsetStore {
public final static String LOCAL_OFFSET_STORE_DIR = System.getProperty(
"rocketmq.client.localOffsetStoreDir",
System.getProperty("user.home") + File.separator + ".rocketmq_offsets");
private final static Logger log = ClientLogger.getLog();
private final MQClientInstance mQClientFactory;
private final String groupName;
private final String storePath;
private ConcurrentMap<MessageQueue, AtomicLong> offsetTable =
new ConcurrentHashMap<MessageQueue, AtomicLong>();
public LocalFileOffsetStore(MQClientInstance mQClientFactory, String groupName) {
this.mQClientFactory = mQClientFactory;
this.groupName = groupName;
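// Offsets are persisted at <LOCAL_OFFSET_STORE_DIR>/<clientId>/<groupName>/offsets.json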
this.storePath = LOCAL_OFFSET_STORE_DIR + File.separator +
this.mQClientFactory.getClientId() + File.separator +
this.groupName + File.separator +
"offsets.json";
}
@Override
public void load() throws MQClientException {
OffsetSerializeWrapper offsetSerializeWrapper = this.readLocalOffset();
if (offsetSerializeWrapper != null && offsetSerializeWrapper.getOffsetTable() != null) {
offsetTable.putAll(offsetSerializeWrapper.getOffsetTable());
for (MessageQueue mq : offsetSerializeWrapper.getOffsetTable().keySet()) {
AtomicLong offset = offsetSerializeWrapper.getOffsetTable().get(mq);
log.info("load consumer's offset, {} {} {}",
this.groupName,
mq,
offset.get());
}
}
}
@Override
public void updateOffset(MessageQueue mq, long offset, boolean increaseOnly) {
if (mq != null) {
AtomicLong offsetOld = this.offsetTable.get(mq);
if (null == offsetOld) {
offsetOld = this.offsetTable.putIfAbsent(mq, new AtomicLong(offset));
}
if (null != offsetOld) {
if (increaseOnly) {
MixAll.compareAndIncreaseOnly(offsetOld, offset);
} else {
offsetOld.set(offset);
}
}
}
}
@Override
public long readOffset(final MessageQueue mq, final ReadOffsetType type) {
if (mq != null) {
switch (type) {
case MEMORY_FIRST_THEN_STORE:
case READ_FROM_MEMORY: {
AtomicLong offset = this.offsetTable.get(mq);
if (offset != null) {
return offset.get();
} else if (ReadOffsetType.READ_FROM_MEMORY == type) {
return -1;
}
}
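// Intentional fall-through: MEMORY_FIRST_THEN_STORE drops into READ_FROM_STORE
// when no in-memory offset exists (READ_FROM_MEMORY returns -1 above instead).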
case READ_FROM_STORE: {
OffsetSerializeWrapper offsetSerializeWrapper;
try {
offsetSerializeWrapper = this.readLocalOffset();
} catch (MQClientException e) {
return -1;
}
if (offsetSerializeWrapper != null && offsetSerializeWrapper.getOffsetTable() != null) {
AtomicLong offset = offsetSerializeWrapper.getOffsetTable().get(mq);
if (offset != null) {
this.updateOffset(mq, offset.get(), false);
return offset.get();
}
}
}
default:
break;
}
}
return -1;
}
@Override
public void persistAll(Set<MessageQueue> mqs) {
if (null == mqs || mqs.isEmpty())
return;
OffsetSerializeWrapper offsetSerializeWrapper = new OffsetSerializeWrapper();
for (Map.Entry<MessageQueue, AtomicLong> entry : this.offsetTable.entrySet()) {
if (mqs.contains(entry.getKey())) {
AtomicLong offset = entry.getValue();
offsetSerializeWrapper.getOffsetTable().put(entry.getKey(), offset);
}
}
String jsonString = offsetSerializeWrapper.toJson(true);
if (jsonString != null) {
try {
MixAll.string2File(jsonString, this.storePath);
} catch (IOException e) {
log.error("persistAll consumer offset Exception, " + this.storePath, e);
}
}
}
@Override
public void persist(MessageQueue mq) {
}
@Override
public void removeOffset(MessageQueue mq) {
}
@Override
public void updateConsumeOffsetToBroker(final MessageQueue mq, final long offset, final boolean isOneway)
throws RemotingException, MQBrokerException, InterruptedException, MQClientException {
}
@Override
public Map<MessageQueue, Long> cloneOffsetTable(String topic) {
Map<MessageQueue, Long> cloneOffsetTable = new HashMap<MessageQueue, Long>();
for (Map.Entry<MessageQueue, AtomicLong> entry : this.offsetTable.entrySet()) {
MessageQueue mq = entry.getKey();
if (!UtilAll.isBlank(topic) && !topic.equals(mq.getTopic())) {
continue;
}
cloneOffsetTable.put(mq, entry.getValue().get());
}
return cloneOffsetTable;
}
private OffsetSerializeWrapper readLocalOffset() throws MQClientException {
String content = null;
try {
content = MixAll.file2String(this.storePath);
} catch (IOException e) {
log.warn("Load local offset store file exception", e);
}
if (null == content || content.length() == 0) {
return this.readLocalOffsetBak();
} else {
OffsetSerializeWrapper offsetSerializeWrapper = null;
try {
offsetSerializeWrapper =
OffsetSerializeWrapper.fromJson(content, OffsetSerializeWrapper.class);
} catch (Exception e) {
log.warn("readLocalOffset Exception, and try to correct", e);
return this.readLocalOffsetBak();
}
return offsetSerializeWrapper;
}
}
private OffsetSerializeWrapper readLocalOffsetBak() throws MQClientException {
String content = null;
try {
content = MixAll.file2String(this.storePath + ".bak");
} catch (IOException e) {
log.warn("Load local offset store bak file exception", e);
}
if (content != null && content.length() > 0) {
OffsetSerializeWrapper offsetSerializeWrapper = null;
try {
offsetSerializeWrapper =
OffsetSerializeWrapper.fromJson(content, OffsetSerializeWrapper.class);
} catch (Exception e) {
log.warn("readLocalOffset Exception", e);
throw new MQClientException("readLocalOffset Exception, maybe fastjson version too low"
+ FAQUrl.suggestTodo(FAQUrl.LOAD_JSON_EXCEPTION),
e);
}
return offsetSerializeWrapper;
}
return null;
}
}
```
|
```java
package org.apache.rocketmq.namesrv.processor;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.rocketmq.common.TopicConfig;
import org.apache.rocketmq.common.namesrv.NamesrvConfig;
import org.apache.rocketmq.common.namesrv.RegisterBrokerResult;
import org.apache.rocketmq.common.protocol.RequestCode;
import org.apache.rocketmq.common.protocol.ResponseCode;
import org.apache.rocketmq.common.protocol.body.TopicConfigSerializeWrapper;
import org.apache.rocketmq.common.protocol.header.namesrv.DeleteKVConfigRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.DeleteTopicInNamesrvRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.GetKVConfigRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.GetKVConfigResponseHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.PutKVConfigRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.RegisterBrokerRequestHeader;
import org.apache.rocketmq.common.protocol.route.BrokerData;
import org.apache.rocketmq.common.protocol.route.QueueData;
import org.apache.rocketmq.namesrv.NamesrvController;
import org.apache.rocketmq.namesrv.routeinfo.RouteInfoManager;
import org.apache.rocketmq.remoting.exception.RemotingCommandException;
import org.apache.rocketmq.remoting.netty.NettyServerConfig;
import org.apache.rocketmq.remoting.protocol.RemotingCommand;
import org.assertj.core.util.Maps;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class DefaultRequestProcessorTest {
private DefaultRequestProcessor defaultRequestProcessor;
private NamesrvController namesrvController;
private NamesrvConfig namesrvConfig;
private NettyServerConfig nettyServerConfig;
private RouteInfoManager routeInfoManager;
private Logger logger;
@Before
public void init() throws Exception {
namesrvConfig = new NamesrvConfig();
nettyServerConfig = new NettyServerConfig();
routeInfoManager = new RouteInfoManager();
namesrvController = new NamesrvController(namesrvConfig, nettyServerConfig);
Field field = NamesrvController.class.getDeclaredField("routeInfoManager");
field.setAccessible(true);
field.set(namesrvController, routeInfoManager);
defaultRequestProcessor = new DefaultRequestProcessor(namesrvController);
registerRouteInfoManager();
logger = mock(Logger.class);
when(logger.isInfoEnabled()).thenReturn(false);
setFinalStatic(DefaultRequestProcessor.class.getDeclaredField("log"), logger);
}
@Test
public void testProcessRequest_PutKVConfig() throws RemotingCommandException {
PutKVConfigRequestHeader header = new PutKVConfigRequestHeader();
RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.PUT_KV_CONFIG,
header);
request.addExtField("namespace", "namespace");
request.addExtField("key", "key");
request.addExtField("value", "value");
RemotingCommand response = defaultRequestProcessor.processRequest(null, request);
assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
assertThat(response.getRemark()).isNull();
assertThat(namesrvController.getKvConfigManager().getKVConfig("namespace", "key"))
.isEqualTo("value");
}
@Test
public void testProcessRequest_GetKVConfigReturnNotNull() throws RemotingCommandException {
namesrvController.getKvConfigManager().putKVConfig("namespace", "key", "value");
GetKVConfigRequestHeader header = new GetKVConfigRequestHeader();
RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.GET_KV_CONFIG,
header);
request.addExtField("namespace", "namespace");
request.addExtField("key", "key");
RemotingCommand response = defaultRequestProcessor.processRequest(null, request);
assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
assertThat(response.getRemark()).isNull();
GetKVConfigResponseHeader responseHeader = (GetKVConfigResponseHeader) response
.readCustomHeader();
assertThat(responseHeader.getValue()).isEqualTo("value");
}
@Test
public void testProcessRequest_GetKVConfigReturnNull() throws RemotingCommandException {
GetKVConfigRequestHeader header = new GetKVConfigRequestHeader();
RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.GET_KV_CONFIG,
header);
request.addExtField("namespace", "namespace");
request.addExtField("key", "key");
RemotingCommand response = defaultRequestProcessor.processRequest(null, request);
assertThat(response.getCode()).isEqualTo(ResponseCode.QUERY_NOT_FOUND);
assertThat(response.getRemark()).isEqualTo("No config item, Namespace: namespace Key: key");
GetKVConfigResponseHeader responseHeader = (GetKVConfigResponseHeader) response
.readCustomHeader();
assertThat(responseHeader.getValue()).isNull();
}
@Test
public void testProcessRequest_DeleteKVConfig() throws RemotingCommandException {
namesrvController.getKvConfigManager().putKVConfig("namespace", "key", "value");
DeleteKVConfigRequestHeader header = new DeleteKVConfigRequestHeader();
RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.DELETE_KV_CONFIG,
header);
request.addExtField("namespace", "namespace");
request.addExtField("key", "key");
RemotingCommand response = defaultRequestProcessor.processRequest(null, request);
assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
assertThat(response.getRemark()).isNull();
assertThat(namesrvController.getKvConfigManager().getKVConfig("namespace", "key"))
.isNull();
}
@Test
public void testProcessRequest_RegisterBroker() throws RemotingCommandException,
NoSuchFieldException, IllegalAccessException {
RemotingCommand request = genSampleRegisterCmd(true);
ChannelHandlerContext ctx = mock(ChannelHandlerContext.class);
when(ctx.channel()).thenReturn(null);
RemotingCommand response = defaultRequestProcessor.processRequest(ctx, request);
assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
assertThat(response.getRemark()).isNull();
RouteInfoManager routes = namesrvController.getRouteInfoManager();
Field brokerAddrTable = RouteInfoManager.class.getDeclaredField("brokerAddrTable");
brokerAddrTable.setAccessible(true);
BrokerData broker = new BrokerData();
broker.setBrokerName("broker");
broker.setBrokerAddrs((HashMap) Maps.newHashMap(new Long(2333), "10.10.1.1"));
assertThat((Map) brokerAddrTable.get(routes))
.contains(new HashMap.SimpleEntry("broker", broker));
}
@Test
public void testProcessRequest_RegisterBrokerWithFilterServer() throws RemotingCommandException,
NoSuchFieldException, IllegalAccessException {
RemotingCommand request = genSampleRegisterCmd(true);
// version >= MQVersion.Version.V3_0_11.ordinal() to register with filter server
request.setVersion(100);
ChannelHandlerContext ctx = mock(ChannelHandlerContext.class);
when(ctx.channel()).thenReturn(null);
RemotingCommand response = defaultRequestProcessor.processRequest(ctx, request);
assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
assertThat(response.getRemark()).isNull();
RouteInfoManager routes = namesrvController.getRouteInfoManager();
Field brokerAddrTable = RouteInfoManager.class.getDeclaredField("brokerAddrTable");
brokerAddrTable.setAccessible(true);
BrokerData broker = new BrokerData();
broker.setBrokerName("broker");
broker.setBrokerAddrs((HashMap) Maps.newHashMap(new Long(2333), "10.10.1.1"));
assertThat((Map) brokerAddrTable.get(routes))
.contains(new HashMap.SimpleEntry("broker", broker));
}
@Test
public void testProcessRequest_UnregisterBroker() throws RemotingCommandException, NoSuchFieldException, IllegalAccessException {
ChannelHandlerContext ctx = mock(ChannelHandlerContext.class);
when(ctx.channel()).thenReturn(null);
//Register broker
RemotingCommand regRequest = genSampleRegisterCmd(true);
defaultRequestProcessor.processRequest(ctx, regRequest);
//Unregister broker
RemotingCommand unregRequest = genSampleRegisterCmd(false);
RemotingCommand unregResponse = defaultRequestProcessor.processRequest(ctx, unregRequest);
assertThat(unregResponse.getCode()).isEqualTo(ResponseCode.SUCCESS);
assertThat(unregResponse.getRemark()).isNull();
RouteInfoManager routes = namesrvController.getRouteInfoManager();
Field brokerAddrTable = RouteInfoManager.class.getDeclaredField("brokerAddrTable");
brokerAddrTable.setAccessible(true);
assertThat((Map) brokerAddrTable.get(routes)).isNotEmpty();
}
@Test
public void testProcessRequest_DeleteTopicInNamesrv() throws RemotingCommandException, NoSuchFieldException, IllegalAccessException {
registerRouteInfoManager("127.0.0.2:10911", "default-broker_1", 0, "unit-test_1");
registerRouteInfoManager("127.0.0.3:10911", "default-broker_2", 0, "unit-test_1");
registerRouteInfoManager("127.0.0.4:10911", "default-broker_3", 0, "unit-test_1");
registerRouteInfoManager("127.0.0.5:10911", "default-broker_4", 0, "unit-test_1");
registerRouteInfoManager("127.0.0.5:10911", "default-broker_5", 0, "unit-test_1");
//delete from one broker
{
DeleteTopicInNamesrvRequestHeader header = new DeleteTopicInNamesrvRequestHeader();
RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.DELETE_TOPIC_IN_NAMESRV,
header);
request.addExtField("topic", "unit-test_1");
request.addExtField("brokerAddrs", "127.0.0.2:10911");
RemotingCommand response = defaultRequestProcessor.processRequest(null, request);
assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
assertThat(response.getRemark()).isNull();
final Set<String> brokers = new HashSet<>();
for (QueueData qd : namesrvController.getRouteInfoManager().pickupTopicRouteData("unit-test_1").getQueueDatas()) {
brokers.add(qd.getBrokerName());
}
assertThat(brokers.contains("default-broker_1")).isFalse();
assertThat(brokers.contains("default-broker_2")).isTrue();
assertThat(brokers.contains("default-broker_3")).isTrue();
}
//delete from two brokers
{
DeleteTopicInNamesrvRequestHeader header = new DeleteTopicInNamesrvRequestHeader();
RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.DELETE_TOPIC_IN_NAMESRV,
header);
request.addExtField("topic", "unit-test_1");
request.addExtField("brokerAddrs", "127.0.0.3:10911;127.0.0.4:10911");
RemotingCommand response = defaultRequestProcessor.processRequest(null, request);
assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
assertThat(response.getRemark()).isNull();
final Set<String> brokers = new HashSet<>();
for (QueueData qd : namesrvController.getRouteInfoManager().pickupTopicRouteData("unit-test_1").getQueueDatas()) {
brokers.add(qd.getBrokerName());
}
assertThat(brokers.contains("default-broker_1")).isFalse();
assertThat(brokers.contains("default-broker_2")).isFalse();
assertThat(brokers.contains("default-broker_3")).isFalse();
assertThat(brokers.contains("default-broker_4")).isTrue();
assertThat(brokers.contains("default-broker_5")).isTrue();
}
//delete all
{
DeleteTopicInNamesrvRequestHeader header = new DeleteTopicInNamesrvRequestHeader();
RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.DELETE_TOPIC_IN_NAMESRV,
header);
request.addExtField("topic", "unit-test_1");
RemotingCommand response = defaultRequestProcessor.processRequest(null, request);
assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
assertThat(response.getRemark()).isNull();
assertThat(namesrvController.getRouteInfoManager().pickupTopicRouteData("unit-test_1")).isNull();
}
}
private static RemotingCommand genSampleRegisterCmd(boolean reg) {
RegisterBrokerRequestHeader header = new RegisterBrokerRequestHeader();
header.setBrokerName("broker");
RemotingCommand request = RemotingCommand.createRequestCommand(
reg ? RequestCode.REGISTER_BROKER : RequestCode.UNREGISTER_BROKER, header);
request.addExtField("brokerName", "broker");
request.addExtField("brokerAddr", "10.10.1.1");
request.addExtField("clusterName", "cluster");
request.addExtField("haServerAddr", "10.10.2.1");
request.addExtField("brokerId", "2333");
return request;
}
private static void setFinalStatic(Field field, Object newValue) throws Exception {
field.setAccessible(true);
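// Note: stripping the FINAL modifier through the "modifiers" field works on Java 8,
// but that field is filtered from reflection on newer JDKs (12+), where this helper fails.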
Field modifiersField = Field.class.getDeclaredField("modifiers");
modifiersField.setAccessible(true);
modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL);
field.set(null, newValue);
}
private void registerRouteInfoManager() {
registerRouteInfoManager("127.0.0.1:10911","default-broker", 1234, "unit-test");
}
private void registerRouteInfoManager(String brokerAddr, String brokerName, long brokerId, String topicName) {
TopicConfigSerializeWrapper topicConfigSerializeWrapper = new TopicConfigSerializeWrapper();
ConcurrentHashMap<String, TopicConfig> topicConfigConcurrentHashMap = new ConcurrentHashMap<>();
TopicConfig topicConfig = new TopicConfig();
topicConfig.setWriteQueueNums(8);
topicConfig.setTopicName(topicName);
topicConfig.setPerm(6);
topicConfig.setReadQueueNums(8);
topicConfig.setOrder(false);
topicConfigConcurrentHashMap.put(topicName, topicConfig);
topicConfigSerializeWrapper.setTopicConfigTable(topicConfigConcurrentHashMap);
Channel channel = mock(Channel.class);
RegisterBrokerResult registerBrokerResult = routeInfoManager.registerBroker("default-cluster", brokerAddr, brokerName, brokerId, "127.0.0.1:1001",
0L, 0L, topicConfigSerializeWrapper, new ArrayList<String>(), channel);
}
}
```
|
Please help me generate a test for this class.
|
```java
package org.apache.rocketmq.namesrv.processor;
import io.netty.channel.ChannelHandlerContext;
import java.io.UnsupportedEncodingException;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.rocketmq.common.MQVersion;
import org.apache.rocketmq.common.MQVersion.Version;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.constant.LoggerName;
import org.apache.rocketmq.common.help.FAQUrl;
import org.apache.rocketmq.common.namesrv.NamesrvUtil;
import org.apache.rocketmq.common.namesrv.RegisterBrokerResult;
import org.apache.rocketmq.common.protocol.RequestCode;
import org.apache.rocketmq.common.protocol.ResponseCode;
import org.apache.rocketmq.common.protocol.body.RegisterBrokerBody;
import org.apache.rocketmq.common.protocol.body.TopicConfigSerializeWrapper;
import org.apache.rocketmq.common.protocol.header.GetTopicsByClusterRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.DeleteKVConfigRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.DeleteTopicInNamesrvRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.EnableBrokerRoleSwitchRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.GetKVConfigRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.GetKVConfigResponseHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.GetKVListByNamespaceRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.GetRouteInfoRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.PutKVConfigRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.RegisterBrokerRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.RegisterBrokerResponseHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.UnRegisterBrokerRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.WipeWritePermOfBrokerRequestHeader;
import org.apache.rocketmq.common.protocol.header.namesrv.WipeWritePermOfBrokerResponseHeader;
import org.apache.rocketmq.common.protocol.route.TopicRouteData;
import org.apache.rocketmq.namesrv.NamesrvController;
import org.apache.rocketmq.remoting.common.RemotingHelper;
import org.apache.rocketmq.remoting.exception.RemotingCommandException;
import org.apache.rocketmq.remoting.netty.NettyRequestProcessor;
import org.apache.rocketmq.remoting.protocol.RemotingCommand;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DefaultRequestProcessor implements NettyRequestProcessor {
private static final Logger log = LoggerFactory.getLogger(LoggerName.NAMESRV_LOGGER_NAME);
protected final NamesrvController namesrvController;
public DefaultRequestProcessor(NamesrvController namesrvController) {
this.namesrvController = namesrvController;
}
@Override
public RemotingCommand processRequest(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
if (log.isDebugEnabled()) {
log.debug("receive request, {} {} {}",
request.getCode(),
RemotingHelper.parseChannelRemoteAddr(ctx.channel()),
request);
}
switch (request.getCode()) {
case RequestCode.PUT_KV_CONFIG:
return this.putKVConfig(ctx, request);
case RequestCode.GET_KV_CONFIG:
return this.getKVConfig(ctx, request);
case RequestCode.DELETE_KV_CONFIG:
return this.deleteKVConfig(ctx, request);
case RequestCode.REGISTER_BROKER:
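// Brokers at V3_0_11 or later include a filter-server list in the request body,
// so they take the extended registration path below.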
Version brokerVersion = MQVersion.value2Version(request.getVersion());
if (brokerVersion.ordinal() >= MQVersion.Version.V3_0_11.ordinal()) {
return this.registerBrokerWithFilterServer(ctx, request);
} else {
return this.registerBroker(ctx, request);
}
case RequestCode.UNREGISTER_BROKER:
return this.unregisterBroker(ctx, request);
case RequestCode.GET_ROUTEINTO_BY_TOPIC:
return this.getRouteInfoByTopic(ctx, request);
case RequestCode.GET_BROKER_CLUSTER_INFO:
return this.getBrokerClusterInfo(ctx, request);
case RequestCode.WIPE_WRITE_PERM_OF_BROKER:
return this.wipeWritePermOfBroker(ctx, request);
case RequestCode.GET_ALL_TOPIC_LIST_FROM_NAMESERVER:
return getAllTopicListFromNameserver(ctx, request);
case RequestCode.DELETE_TOPIC_IN_NAMESRV:
return deleteTopicInNamesrv(ctx, request);
case RequestCode.GET_KVLIST_BY_NAMESPACE:
return this.getKVListByNamespace(ctx, request);
case RequestCode.GET_TOPICS_BY_CLUSTER:
return this.getTopicsByCluster(ctx, request);
case RequestCode.GET_SYSTEM_TOPIC_LIST_FROM_NS:
return this.getSystemTopicListFromNs(ctx, request);
case RequestCode.GET_UNIT_TOPIC_LIST:
return this.getUnitTopicList(ctx, request);
case RequestCode.GET_HAS_UNIT_SUB_TOPIC_LIST:
return this.getHasUnitSubTopicList(ctx, request);
case RequestCode.GET_HAS_UNIT_SUB_UNUNIT_TOPIC_LIST:
return this.getHasUnitSubUnUnitTopicList(ctx, request);
case RequestCode.UPDATE_NAMESRV_CONFIG:
return this.updateConfig(ctx, request);
case RequestCode.GET_NAMESRV_CONFIG:
return this.getConfig(ctx, request);
case RequestCode.ENABLE_BROKER_ROLE_SWITCH:
return this.enableBrokerRoleSwitch(ctx, request);
default:
break;
}
return null;
}
@Override
public boolean rejectRequest() {
return false;
}
public RemotingCommand putKVConfig(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
final PutKVConfigRequestHeader requestHeader =
(PutKVConfigRequestHeader) request.decodeCommandCustomHeader(PutKVConfigRequestHeader.class);
this.namesrvController.getKvConfigManager().putKVConfig(
requestHeader.getNamespace(),
requestHeader.getKey(),
requestHeader.getValue()
);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
public RemotingCommand getKVConfig(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(GetKVConfigResponseHeader.class);
final GetKVConfigResponseHeader responseHeader = (GetKVConfigResponseHeader) response.readCustomHeader();
final GetKVConfigRequestHeader requestHeader =
(GetKVConfigRequestHeader) request.decodeCommandCustomHeader(GetKVConfigRequestHeader.class);
String value = this.namesrvController.getKvConfigManager().getKVConfig(
requestHeader.getNamespace(),
requestHeader.getKey()
);
if (value != null) {
responseHeader.setValue(value);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
response.setCode(ResponseCode.QUERY_NOT_FOUND);
response.setRemark("No config item, Namespace: " + requestHeader.getNamespace() + " Key: " + requestHeader.getKey());
return response;
}
public RemotingCommand deleteKVConfig(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
final DeleteKVConfigRequestHeader requestHeader =
(DeleteKVConfigRequestHeader) request.decodeCommandCustomHeader(DeleteKVConfigRequestHeader.class);
this.namesrvController.getKvConfigManager().deleteKVConfig(
requestHeader.getNamespace(),
requestHeader.getKey()
);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
public RemotingCommand registerBrokerWithFilterServer(ChannelHandlerContext ctx, RemotingCommand request)
throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(RegisterBrokerResponseHeader.class);
final RegisterBrokerResponseHeader responseHeader = (RegisterBrokerResponseHeader) response.readCustomHeader();
long start = System.currentTimeMillis();
final RegisterBrokerRequestHeader requestHeader =
(RegisterBrokerRequestHeader) request.decodeCommandCustomHeader(RegisterBrokerRequestHeader.class);
long decodeCost = System.currentTimeMillis() - start;
RegisterBrokerBody registerBrokerBody = new RegisterBrokerBody();
long decodeBodyCost = 0;
if (request.getBody() != null) {
long startDecodeBody = System.currentTimeMillis();
registerBrokerBody = RegisterBrokerBody.decode(request.getBody(), RegisterBrokerBody.class);
decodeBodyCost = System.currentTimeMillis() - startDecodeBody;
} else {
registerBrokerBody.getTopicConfigSerializeWrapper().getDataVersion().setCounter(new AtomicLong(0));
registerBrokerBody.getTopicConfigSerializeWrapper().getDataVersion().setTimestamp(0);
}
long startRegister = System.currentTimeMillis();
RegisterBrokerResult result = this.namesrvController.getRouteInfoManager().registerBroker(
requestHeader.getClusterName(),
requestHeader.getBrokerAddr(),
requestHeader.getBrokerName(),
requestHeader.getBrokerId(),
requestHeader.getHaServerAddr(),
requestHeader.getMaxPhyOffset(),
requestHeader.getTerm(),
registerBrokerBody.getTopicConfigSerializeWrapper(),
registerBrokerBody.getFilterServerList(),
ctx.channel());
long registerCost = System.currentTimeMillis() - startRegister;
responseHeader.setHaServerAddr(result.getHaServerAddr());
responseHeader.setMasterAddr(result.getMasterAddr());
byte[] jsonValue = this.namesrvController.getKvConfigManager().getKVListByNamespace(NamesrvUtil.NAMESPACE_ORDER_TOPIC_CONFIG);
response.setBody(jsonValue);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
log.info("registerBrokerWithFilterServer cost:{},decodeRequest:{},decodeBody:{},registerCost:{}",
System.currentTimeMillis() - start,
decodeCost,
decodeBodyCost,
registerCost);
return response;
}
public RemotingCommand registerBroker(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(RegisterBrokerResponseHeader.class);
final RegisterBrokerResponseHeader responseHeader = (RegisterBrokerResponseHeader) response.readCustomHeader();
final RegisterBrokerRequestHeader requestHeader =
(RegisterBrokerRequestHeader) request.decodeCommandCustomHeader(RegisterBrokerRequestHeader.class);
TopicConfigSerializeWrapper topicConfigWrapper;
if (request.getBody() != null) {
topicConfigWrapper = TopicConfigSerializeWrapper.decode(request.getBody(), TopicConfigSerializeWrapper.class);
} else {
topicConfigWrapper = new TopicConfigSerializeWrapper();
topicConfigWrapper.getDataVersion().setCounter(new AtomicLong(0));
topicConfigWrapper.getDataVersion().setTimestamp(0);
}
RegisterBrokerResult result = this.namesrvController.getRouteInfoManager().registerBroker(
requestHeader.getClusterName(),
requestHeader.getBrokerAddr(),
requestHeader.getBrokerName(),
requestHeader.getBrokerId(),
requestHeader.getHaServerAddr(),
requestHeader.getMaxPhyOffset(),
requestHeader.getTerm(),
topicConfigWrapper,
null,
ctx.channel()
);
responseHeader.setHaServerAddr(result.getHaServerAddr());
responseHeader.setMasterAddr(result.getMasterAddr());
byte[] jsonValue = this.namesrvController.getKvConfigManager().getKVListByNamespace(NamesrvUtil.NAMESPACE_ORDER_TOPIC_CONFIG);
response.setBody(jsonValue);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
public RemotingCommand unregisterBroker(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
final UnRegisterBrokerRequestHeader requestHeader =
(UnRegisterBrokerRequestHeader) request.decodeCommandCustomHeader(UnRegisterBrokerRequestHeader.class);
this.namesrvController.getRouteInfoManager().unregisterBroker(
requestHeader.getClusterName(),
requestHeader.getBrokerAddr(),
requestHeader.getBrokerName(),
requestHeader.getBrokerId());
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
public RemotingCommand getRouteInfoByTopic(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
final GetRouteInfoRequestHeader requestHeader =
(GetRouteInfoRequestHeader) request.decodeCommandCustomHeader(GetRouteInfoRequestHeader.class);
TopicRouteData topicRouteData = this.namesrvController.getRouteInfoManager().pickupTopicRouteData(requestHeader.getTopic());
if (topicRouteData != null) {
if (this.namesrvController.getNamesrvConfig().isOrderMessageEnable()) {
String orderTopicConf =
this.namesrvController.getKvConfigManager().getKVConfig(NamesrvUtil.NAMESPACE_ORDER_TOPIC_CONFIG,
requestHeader.getTopic());
topicRouteData.setOrderTopicConf(orderTopicConf);
}
byte[] content = topicRouteData.encode();
response.setBody(content);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
response.setCode(ResponseCode.TOPIC_NOT_EXIST);
response.setRemark("No topic route info in name server for the topic: " + requestHeader.getTopic()
+ FAQUrl.suggestTodo(FAQUrl.APPLY_TOPIC_URL));
return response;
}
private RemotingCommand getBrokerClusterInfo(ChannelHandlerContext ctx, RemotingCommand request) {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
byte[] content = this.namesrvController.getRouteInfoManager().getAllClusterInfo();
response.setBody(content);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand wipeWritePermOfBroker(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(WipeWritePermOfBrokerResponseHeader.class);
final WipeWritePermOfBrokerResponseHeader responseHeader = (WipeWritePermOfBrokerResponseHeader) response.readCustomHeader();
final WipeWritePermOfBrokerRequestHeader requestHeader =
(WipeWritePermOfBrokerRequestHeader) request.decodeCommandCustomHeader(WipeWritePermOfBrokerRequestHeader.class);
int wipeTopicCnt = this.namesrvController.getRouteInfoManager().wipeWritePermOfBrokerByLock(requestHeader.getBrokerName());
log.info("wipe write perm of broker[{}], client: {}, {}",
requestHeader.getBrokerName(),
RemotingHelper.parseChannelRemoteAddr(ctx.channel()),
wipeTopicCnt);
responseHeader.setWipeTopicCount(wipeTopicCnt);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand getAllTopicListFromNameserver(ChannelHandlerContext ctx, RemotingCommand request) {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
byte[] body = this.namesrvController.getRouteInfoManager().getAllTopicList();
response.setBody(body);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand deleteTopicInNamesrv(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
final DeleteTopicInNamesrvRequestHeader requestHeader =
(DeleteTopicInNamesrvRequestHeader) request.decodeCommandCustomHeader(DeleteTopicInNamesrvRequestHeader.class);
this.namesrvController.getRouteInfoManager().deleteTopic(requestHeader.getTopic(), requestHeader.getBrokerAddrs());
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand getKVListByNamespace(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
final GetKVListByNamespaceRequestHeader requestHeader =
(GetKVListByNamespaceRequestHeader) request.decodeCommandCustomHeader(GetKVListByNamespaceRequestHeader.class);
byte[] jsonValue = this.namesrvController.getKvConfigManager().getKVListByNamespace(
requestHeader.getNamespace());
if (null != jsonValue) {
response.setBody(jsonValue);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
response.setCode(ResponseCode.QUERY_NOT_FOUND);
response.setRemark("No config item, Namespace: " + requestHeader.getNamespace());
return response;
}
private RemotingCommand getTopicsByCluster(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
final GetTopicsByClusterRequestHeader requestHeader =
(GetTopicsByClusterRequestHeader) request.decodeCommandCustomHeader(GetTopicsByClusterRequestHeader.class);
byte[] body = this.namesrvController.getRouteInfoManager().getTopicsByCluster(requestHeader.getCluster());
response.setBody(body);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand getSystemTopicListFromNs(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
byte[] body = this.namesrvController.getRouteInfoManager().getSystemTopicList();
response.setBody(body);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand getUnitTopicList(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
byte[] body = this.namesrvController.getRouteInfoManager().getUnitTopics();
response.setBody(body);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand getHasUnitSubTopicList(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
byte[] body = this.namesrvController.getRouteInfoManager().getHasUnitSubTopicList();
response.setBody(body);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand getHasUnitSubUnUnitTopicList(ChannelHandlerContext ctx, RemotingCommand request)
throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
byte[] body = this.namesrvController.getRouteInfoManager().getHasUnitSubUnUnitTopicList();
response.setBody(body);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand updateConfig(ChannelHandlerContext ctx, RemotingCommand request) {
log.info("updateConfig called by {}", RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
byte[] body = request.getBody();
if (body != null) {
String bodyStr;
try {
bodyStr = new String(body, MixAll.DEFAULT_CHARSET);
} catch (UnsupportedEncodingException e) {
log.error("updateConfig byte array to string error: ", e);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("UnsupportedEncodingException " + e);
return response;
}
if (bodyStr == null) {
log.error("updateConfig get null body!");
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("string2Properties error");
return response;
}
Properties properties = MixAll.string2Properties(bodyStr);
if (properties == null) {
log.error("updateConfig MixAll.string2Properties error {}", bodyStr);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("string2Properties error");
return response;
}
this.namesrvController.getConfiguration().update(properties);
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand getConfig(ChannelHandlerContext ctx, RemotingCommand request) {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
String content = this.namesrvController.getConfiguration().getAllConfigsFormatString();
if (content != null && content.length() > 0) {
try {
response.setBody(content.getBytes(MixAll.DEFAULT_CHARSET));
} catch (UnsupportedEncodingException e) {
log.error("getConfig error, ", e);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("UnsupportedEncodingException " + e);
return response;
}
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand enableBrokerRoleSwitch(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
final EnableBrokerRoleSwitchRequestHeader requestHeader =
(EnableBrokerRoleSwitchRequestHeader) request.decodeCommandCustomHeader(EnableBrokerRoleSwitchRequestHeader.class);
if (this.namesrvController.getHaManager() != null) {
this.namesrvController.getHaManager().enableBrokerRoleSwitch(requestHeader.getClusterName(), requestHeader.getBrokerName());
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
} else {
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("ha manager is disabled");
}
return response;
}
}
```
|
```java
package org.apache.rocketmq.common;
import java.util.concurrent.atomic.AtomicLong;
import org.junit.Assert;
import org.junit.Test;
public class DataVersionTest {
@Test
public void testEquals() {
DataVersion dataVersion = new DataVersion();
DataVersion other = new DataVersion();
other.setTimestamp(dataVersion.getTimestamp());
Assert.assertTrue(dataVersion.equals(other));
}
@Test
public void testEquals_falseWhenCounterDifferent() {
DataVersion dataVersion = new DataVersion();
DataVersion other = new DataVersion();
other.setCounter(new AtomicLong(1L));
other.setTimestamp(dataVersion.getTimestamp());
Assert.assertFalse(dataVersion.equals(other));
}
@Test
public void testEquals_falseWhenCounterDifferent2() {
DataVersion dataVersion = new DataVersion();
DataVersion other = new DataVersion();
other.setCounter(null);
other.setTimestamp(dataVersion.getTimestamp());
Assert.assertFalse(dataVersion.equals(other));
}
@Test
public void testEquals_falseWhenCounterDifferent3() {
DataVersion dataVersion = new DataVersion();
dataVersion.setCounter(null);
DataVersion other = new DataVersion();
other.setTimestamp(dataVersion.getTimestamp());
Assert.assertFalse(dataVersion.equals(other));
}
@Test
public void testEquals_trueWhenCountersBothNull() {
DataVersion dataVersion = new DataVersion();
dataVersion.setCounter(null);
DataVersion other = new DataVersion();
other.setCounter(null);
other.setTimestamp(dataVersion.getTimestamp());
Assert.assertTrue(dataVersion.equals(other));
}
}
```
|
Please help me generate a test for this class.
|
```java
package org.apache.rocketmq.common;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.rocketmq.remoting.protocol.RemotingSerializable;
public class DataVersion extends RemotingSerializable {
private long timestamp = System.currentTimeMillis();
private AtomicLong counter = new AtomicLong(0);
public void assignNewOne(final DataVersion dataVersion) {
this.timestamp = dataVersion.timestamp;
this.counter.set(dataVersion.counter.get());
}
public void nextVersion() {
this.timestamp = System.currentTimeMillis();
this.counter.incrementAndGet();
}
public long getTimestamp() {
return timestamp;
}
public void setTimestamp(long timestamp) {
this.timestamp = timestamp;
}
public AtomicLong getCounter() {
return counter;
}
public void setCounter(AtomicLong counter) {
this.counter = counter;
}
@Override
public boolean equals(final Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
final DataVersion that = (DataVersion) o;
if (timestamp != that.timestamp) {
return false;
}
if (counter != null && that.counter != null) {
return counter.longValue() == that.counter.longValue();
}
return (null == counter) && (null == that.counter);
}
@Override
public int hashCode() {
int result = (int) (timestamp ^ (timestamp >>> 32));
if (null != counter) {
long l = counter.get();
result = 31 * result + (int) (l ^ (l >>> 32));
}
return result;
}
}
```
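A minimal usage sketch of the class above (the `DataVersionDemo` wrapper is hypothetical, added only for illustration): `assignNewOne` copies both fields, so the copy compares equal and hashes identically, consistent with the tests in the output column.

```java
import org.apache.rocketmq.common.DataVersion;

public class DataVersionDemo {
    public static void main(String[] args) {
        DataVersion v = new DataVersion();
        v.nextVersion();                  // refreshes timestamp, counter 0 -> 1
        DataVersion copy = new DataVersion();
        copy.assignNewOne(v);             // copies timestamp and counter value
        // equals() requires matching timestamps, then matching counter values
        // (two null counters also compare equal, as the tests above verify)
        System.out.println(v.equals(copy) && v.hashCode() == copy.hashCode()); // true
    }
}
```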
|
```java
package org.apache.rocketmq.store.index;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.apache.rocketmq.common.UtilAll;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class IndexFileTest {
private final int HASH_SLOT_NUM = 100;
private final int INDEX_NUM = 400;
@Test
public void testPutKey() throws Exception {
IndexFile indexFile = new IndexFile("100", HASH_SLOT_NUM, INDEX_NUM, 0, 0);
for (long i = 0; i < (INDEX_NUM - 1); i++) {
boolean putResult = indexFile.putKey(Long.toString(i), i, System.currentTimeMillis());
assertThat(putResult).isTrue();
}
// put over index file capacity.
boolean putResult = indexFile.putKey(Long.toString(400), 400, System.currentTimeMillis());
assertThat(putResult).isFalse();
indexFile.destroy(0);
File file = new File("100");
UtilAll.deleteFile(file);
}
@Test
public void testSelectPhyOffset() throws Exception {
IndexFile indexFile = new IndexFile("200", HASH_SLOT_NUM, INDEX_NUM, 0, 0);
for (long i = 0; i < (INDEX_NUM - 1); i++) {
boolean putResult = indexFile.putKey(Long.toString(i), i, System.currentTimeMillis());
assertThat(putResult).isTrue();
}
// put over index file capacity.
boolean putResult = indexFile.putKey(Long.toString(400), 400, System.currentTimeMillis());
assertThat(putResult).isFalse();
final List<Long> phyOffsets = new ArrayList<Long>();
indexFile.selectPhyOffset(phyOffsets, "60", 10, 0, Long.MAX_VALUE, true);
assertThat(phyOffsets).isNotEmpty();
assertThat(phyOffsets.size()).isEqualTo(1);
indexFile.destroy(0);
File file = new File("200");
UtilAll.deleteFile(file);
}
}
```
|
Please help me generate a test for this class.
|
```java
package org.apache.rocketmq.store.index;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.util.List;
import org.apache.rocketmq.common.constant.LoggerName;
import org.apache.rocketmq.store.MappedFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class IndexFile {
private static final Logger log = LoggerFactory.getLogger(LoggerName.STORE_LOGGER_NAME);
private static int hashSlotSize = 4;
private static int indexSize = 20;
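// Each index entry is 20 bytes: 4-byte key hash, 8-byte physical offset,
// 4-byte time diff (seconds), 4-byte link to the previous entry in the same slot.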
private static int invalidIndex = 0;
private final int hashSlotNum;
private final int indexNum;
private final MappedFile mappedFile;
private final FileChannel fileChannel;
private final MappedByteBuffer mappedByteBuffer;
private final IndexHeader indexHeader;
public IndexFile(final String fileName, final int hashSlotNum, final int indexNum,
final long endPhyOffset, final long endTimestamp) throws IOException {
int fileTotalSize =
IndexHeader.INDEX_HEADER_SIZE + (hashSlotNum * hashSlotSize) + (indexNum * indexSize);
this.mappedFile = new MappedFile(fileName, fileTotalSize);
this.fileChannel = this.mappedFile.getFileChannel();
this.mappedByteBuffer = this.mappedFile.getMappedByteBuffer();
this.hashSlotNum = hashSlotNum;
this.indexNum = indexNum;
ByteBuffer byteBuffer = this.mappedByteBuffer.slice();
this.indexHeader = new IndexHeader(byteBuffer);
if (endPhyOffset > 0) {
this.indexHeader.setBeginPhyOffset(endPhyOffset);
this.indexHeader.setEndPhyOffset(endPhyOffset);
}
if (endTimestamp > 0) {
this.indexHeader.setBeginTimestamp(endTimestamp);
this.indexHeader.setEndTimestamp(endTimestamp);
}
}
public String getFileName() {
return this.mappedFile.getFileName();
}
public void load() {
this.indexHeader.load();
}
public void flush() {
long beginTime = System.currentTimeMillis();
if (this.mappedFile.hold()) {
this.indexHeader.updateByteBuffer();
this.mappedByteBuffer.force();
this.mappedFile.release();
log.info("flush index file eclipse time(ms) " + (System.currentTimeMillis() - beginTime));
}
}
public boolean isWriteFull() {
return this.indexHeader.getIndexCount() >= this.indexNum;
}
public boolean destroy(final long intervalForcibly) {
return this.mappedFile.destroy(intervalForcibly);
}
public boolean putKey(final String key, final long phyOffset, final long storeTimestamp) {
if (this.indexHeader.getIndexCount() < this.indexNum) {
int keyHash = indexKeyHashMethod(key);
int slotPos = keyHash % this.hashSlotNum;
int absSlotPos = IndexHeader.INDEX_HEADER_SIZE + slotPos * hashSlotSize;
FileLock fileLock = null;
try {
// fileLock = this.fileChannel.lock(absSlotPos, hashSlotSize,
// false);
int slotValue = this.mappedByteBuffer.getInt(absSlotPos);
if (slotValue <= invalidIndex || slotValue > this.indexHeader.getIndexCount()) {
slotValue = invalidIndex;
}
long timeDiff = storeTimestamp - this.indexHeader.getBeginTimestamp();
timeDiff = timeDiff / 1000;
if (this.indexHeader.getBeginTimestamp() <= 0) {
timeDiff = 0;
} else if (timeDiff > Integer.MAX_VALUE) {
timeDiff = Integer.MAX_VALUE;
} else if (timeDiff < 0) {
timeDiff = 0;
}
int absIndexPos =
IndexHeader.INDEX_HEADER_SIZE + this.hashSlotNum * hashSlotSize
+ this.indexHeader.getIndexCount() * indexSize;
this.mappedByteBuffer.putInt(absIndexPos, keyHash);
this.mappedByteBuffer.putLong(absIndexPos + 4, phyOffset);
this.mappedByteBuffer.putInt(absIndexPos + 4 + 8, (int) timeDiff);
this.mappedByteBuffer.putInt(absIndexPos + 4 + 8 + 4, slotValue);
this.mappedByteBuffer.putInt(absSlotPos, this.indexHeader.getIndexCount());
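// The slot header now points at this new entry, which links back to the
// previous head (slotValue), forming a per-slot hash chain.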
if (this.indexHeader.getIndexCount() <= 1) {
this.indexHeader.setBeginPhyOffset(phyOffset);
this.indexHeader.setBeginTimestamp(storeTimestamp);
}
this.indexHeader.incHashSlotCount();
this.indexHeader.incIndexCount();
this.indexHeader.setEndPhyOffset(phyOffset);
this.indexHeader.setEndTimestamp(storeTimestamp);
return true;
} catch (Exception e) {
log.error("putKey exception, Key: " + key + " KeyHashCode: " + key.hashCode(), e);
} finally {
if (fileLock != null) {
try {
fileLock.release();
} catch (IOException e) {
log.error("Failed to release the lock", e);
}
}
}
} else {
log.warn("Over index file capacity: index count = " + this.indexHeader.getIndexCount()
+ "; index max num = " + this.indexNum);
}
return false;
}
public int indexKeyHashMethod(final String key) {
int keyHash = key.hashCode();
int keyHashPositive = Math.abs(keyHash);
if (keyHashPositive < 0)
keyHashPositive = 0;
return keyHashPositive;
}
public long getBeginTimestamp() {
return this.indexHeader.getBeginTimestamp();
}
public long getEndTimestamp() {
return this.indexHeader.getEndTimestamp();
}
public long getEndPhyOffset() {
return this.indexHeader.getEndPhyOffset();
}
public boolean isTimeMatched(final long begin, final long end) {
boolean result = begin < this.indexHeader.getBeginTimestamp() && end > this.indexHeader.getEndTimestamp();
result = result || (begin >= this.indexHeader.getBeginTimestamp() && begin <= this.indexHeader.getEndTimestamp());
result = result || (end >= this.indexHeader.getBeginTimestamp() && end <= this.indexHeader.getEndTimestamp());
return result;
}
public void selectPhyOffset(final List<Long> phyOffsets, final String key, final int maxNum,
final long begin, final long end, boolean lock) {
if (this.mappedFile.hold()) {
int keyHash = indexKeyHashMethod(key);
int slotPos = keyHash % this.hashSlotNum;
int absSlotPos = IndexHeader.INDEX_HEADER_SIZE + slotPos * hashSlotSize;
FileLock fileLock = null;
try {
if (lock) {
// fileLock = this.fileChannel.lock(absSlotPos,
// hashSlotSize, true);
}
int slotValue = this.mappedByteBuffer.getInt(absSlotPos);
// if (fileLock != null) {
// fileLock.release();
// fileLock = null;
// }
if (slotValue <= invalidIndex || slotValue > this.indexHeader.getIndexCount()
|| this.indexHeader.getIndexCount() <= 1) {
} else {
for (int nextIndexToRead = slotValue; ; ) {
if (phyOffsets.size() >= maxNum) {
break;
}
int absIndexPos =
IndexHeader.INDEX_HEADER_SIZE + this.hashSlotNum * hashSlotSize
+ nextIndexToRead * indexSize;
int keyHashRead = this.mappedByteBuffer.getInt(absIndexPos);
long phyOffsetRead = this.mappedByteBuffer.getLong(absIndexPos + 4);
long timeDiff = (long) this.mappedByteBuffer.getInt(absIndexPos + 4 + 8);
int prevIndexRead = this.mappedByteBuffer.getInt(absIndexPos + 4 + 8 + 4);
if (timeDiff < 0) {
break;
}
timeDiff *= 1000L;
long timeRead = this.indexHeader.getBeginTimestamp() + timeDiff;
boolean timeMatched = (timeRead >= begin) && (timeRead <= end);
if (keyHash == keyHashRead && timeMatched) {
phyOffsets.add(phyOffsetRead);
}
if (prevIndexRead <= invalidIndex
|| prevIndexRead > this.indexHeader.getIndexCount()
|| prevIndexRead == nextIndexToRead || timeRead < begin) {
break;
}
nextIndexToRead = prevIndexRead;
}
}
} catch (Exception e) {
log.error("selectPhyOffset exception ", e);
} finally {
if (fileLock != null) {
try {
fileLock.release();
} catch (IOException e) {
log.error("Failed to release the lock", e);
}
}
this.mappedFile.release();
}
}
}
}
```
|
```java
package org.apache.rocketmq.namesrv.ha;
import java.util.Arrays;
import java.util.TreeSet;
import org.apache.rocketmq.common.namesrv.NamesrvConfig;
import org.apache.rocketmq.common.protocol.body.TopicConfigSerializeWrapper;
import org.apache.rocketmq.namesrv.NamesrvController;
import org.apache.rocketmq.remoting.netty.NettyServerConfig;
import org.junit.Assert;
import org.junit.Test;
public class HAManagerTest {
@Test
public void testSelectSlaveId() {
HAManager haManager = new HAManager();
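// From the assertions below, selectSlaveId appears to return the smallest
// broker id >= 1 that is not already in use.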
Assert.assertEquals(2, haManager.selectSlaveId(new TreeSet<Long>(Arrays.asList(0L, 1L))));
Assert.assertEquals(1, haManager.selectSlaveId(new TreeSet<Long>(Arrays.asList(0L, 2L))));
Assert.assertEquals(1, haManager.selectSlaveId(new TreeSet<Long>(Arrays.asList(0L, 2L, 4L))));
Assert.assertEquals(2, haManager.selectSlaveId(new TreeSet<Long>(Arrays.asList(1L))));
Assert.assertEquals(1, haManager.selectSlaveId(new TreeSet<Long>(Arrays.asList(2L))));
}
@Test
public void testSelectMaster() throws Exception {
NettyServerConfig nettyServerConfig = new NettyServerConfig();
NamesrvConfig namesrvConfig = new NamesrvConfig();
NamesrvController nameSrvController = new NamesrvController(namesrvConfig, nettyServerConfig);
HAManager haManager = new HAManager(nameSrvController);
String clusterName = "clusterTest";
String brokerName = "brokerTest";
String brokerAddrMaster = "brokerAddrMaster";
String brokerAddrSlave1 = "brokerAddrSlave1";
String brokerAddrSlave2 = "brokerAddrSlave2";
//no broker
HAManager.RoleChangeInfo roleChangeInfo = haManager.selectNewMaster(clusterName, brokerName);
Assert.assertEquals(null, roleChangeInfo);
//only master
TopicConfigSerializeWrapper topicConfigSerializeWrapperMaster = new TopicConfigSerializeWrapper();
nameSrvController.getRouteInfoManager().registerBroker(clusterName, brokerAddrMaster, brokerName, 0,
brokerAddrMaster, 10000L, 1L, topicConfigSerializeWrapperMaster, null, null);
roleChangeInfo = haManager.selectNewMaster(clusterName, brokerName);
Assert.assertEquals(null, roleChangeInfo);
//1 slave
TopicConfigSerializeWrapper topicConfigSerializeWrapperSlave1 = new TopicConfigSerializeWrapper();
        nameSrvController.getRouteInfoManager().registerBroker(clusterName, brokerAddrSlave1, brokerName, 1,
null, 10000L, 1L, topicConfigSerializeWrapperSlave1, null, null);
roleChangeInfo = haManager.selectNewMaster(clusterName, brokerName);
Assert.assertEquals(brokerAddrSlave1, roleChangeInfo.newMaster.addr);
Assert.assertEquals(brokerAddrMaster, roleChangeInfo.oldMaster.addr);
//2 slave, select the phy offset latest
TopicConfigSerializeWrapper topicConfigSerializeWrapperSlave2 = new TopicConfigSerializeWrapper();
        nameSrvController.getRouteInfoManager().registerBroker(clusterName, brokerAddrSlave2, brokerName, 2,
null, 10001L, 1L, topicConfigSerializeWrapperSlave2, null, null);
roleChangeInfo = haManager.selectNewMaster(clusterName, brokerName);
Assert.assertEquals(brokerAddrSlave2, roleChangeInfo.newMaster.addr);
Assert.assertEquals(brokerAddrMaster, roleChangeInfo.oldMaster.addr);
//no master
nameSrvController.getRouteInfoManager().unregisterBroker(clusterName, brokerAddrMaster, brokerName, 0);
roleChangeInfo = haManager.selectNewMaster(clusterName, brokerName);
Assert.assertEquals(brokerAddrSlave2, roleChangeInfo.newMaster.addr);
        Assert.assertNull(roleChangeInfo.oldMaster);
}
}
```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.namesrv.ha;
import java.util.Iterator;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.ThreadFactoryImpl;
import org.apache.rocketmq.common.constant.LoggerName;
import org.apache.rocketmq.common.protocol.route.BrokerData;
import org.apache.rocketmq.namesrv.NamesrvController;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.SortedSet;
import java.util.TreeSet;
public class HAManager {
private static final Logger log = LoggerFactory.getLogger(LoggerName.NAMESRV_LOGGER_NAME);
private final ScheduledExecutorService haScheduledThread = Executors.newSingleThreadScheduledExecutor(new ThreadFactoryImpl(
"HAScheduledThread"));
private NamesrvController namesrvController;
private Detector detector;
private StateKeeper stateKeeper;
private RoleManager roleManager;
private ConcurrentHashMap<String, Long> brokerEnableSwitch = new ConcurrentHashMap<>();
public HAManager() {
}
public HAManager(NamesrvController namesrvController) {
this.namesrvController = namesrvController;
detector = new Detector(namesrvController);
stateKeeper = new StateKeeper(namesrvController);
roleManager = new RoleManager(namesrvController);
}
public void start() throws Exception {
detector.start();
stateKeeper.start();
roleManager.start();
haScheduledThread.scheduleWithFixedDelay(new Runnable() {
@Override
public void run() {
try {
HAManager.this.doHa();
} catch (Throwable throwable) {
log.error("do ha failed", throwable);
}
}
}, namesrvController.getNamesrvConfig().getDetectIntervalMs(), namesrvController.getNamesrvConfig().getDetectIntervalMs(), TimeUnit.MILLISECONDS);
}
public void shutdown() {
haScheduledThread.shutdown();
detector.shutdown();
stateKeeper.shutdown();
}
private void doHa() throws Exception {
HealthStatus healthStatus = detector.detectLiveStat();
Map<String, Map<String, Boolean>> liveStatus = getLiveBroker(healthStatus);
//role switch
checkAndSwitchRole(liveStatus);
//status update to zk
for (Map.Entry<String, Map<String, Boolean>> entry : liveStatus.entrySet()) {
updateBrokerStatus(entry.getKey(), entry.getValue());
}
}
public boolean changeToSlave(String clusterName, String brokerName, String brokerAddr) {
if (!stateKeeper.isLeader()) {
return true;
}
BrokerData brokerData = namesrvController.getRouteInfoManager().getBrokerData(brokerName);
if (brokerData == null) {
return true;
}
SortedSet<Long> ids = new TreeSet<>(brokerData.getBrokerAddrs().keySet());
long slaveId = selectSlaveId(ids);
log.info("change to slave, broker name:{}, broker addr:{}, broker id:{}", brokerName, brokerAddr, slaveId);
return roleManager.change2Slave(brokerName, brokerAddr, slaveId, true);
}
public long selectSlaveId(SortedSet<Long> ids) {
long slaveId = -1;
if (ids.size() - 1 == ids.last()) {
slaveId = ids.last() + 1;
} else {
long preId = 0;
for (long id : ids) {
if (id == MixAll.MASTER_ID) {
continue;
}
if (id - preId > 1) {
slaveId = preId + 1;
break;
}
preId++;
}
if (slaveId == -1) {
slaveId = ids.last() + 1;
}
}
return slaveId;
}
private void checkAndSwitchRole(Map<String, Map<String, Boolean>> liveStatus) {
for (Map.Entry<String, Map<String, Boolean>> cluster : liveStatus.entrySet()) {
if (!stateKeeper.isLeader()) {
continue;
}
String clusterName = cluster.getKey();
for (Map.Entry<String, Boolean> broker : cluster.getValue().entrySet()) {
if (broker.getValue()) {
continue;
}
String brokerName = broker.getKey();
log.info("this name server detect that the broker is unhealthy, broker name:{}, cluster:{}", brokerName, clusterName);
if (!isInRouteInfoManager(clusterName, brokerName)) {
log.warn("not in route info, not need to change");
continue;
}
//unhealthy in other node
boolean isHealthyInOtherNS = stateKeeper.isHealthyInOtherNS(clusterName, brokerName);
log.info("healthy status in other ns:{}", isHealthyInOtherNS);
if (!isHealthyInOtherNS) {
log.warn("broker is unhealthy in other ns, broker name:{}, cluster:{}", brokerName, clusterName);
}
//select new master
if (!isHealthyInOtherNS && isSwitchRole(clusterName, brokerName)) {
RoleChangeInfo roleChangeInfo = selectNewMaster(clusterName, brokerName);
if (roleChangeInfo == null) {
log.warn("can not get a new master, clusterName:{}, brokerName:{}", clusterName, brokerName);
continue;
}
log.info("nodes would be changed {}", roleChangeInfo);
if (roleChangeInfo.oldMaster == null) {
log.warn("no old master, just change a slave to master");
//slave to new master
if (!roleManager.change2Master(brokerName, roleChangeInfo.newMaster.addr, true)) {
log.error("change slave to master failed, stop. clusterName:{}, brokerName:{}, brokerAddr:{}", clusterName,
brokerName, roleChangeInfo.newMaster.addr);
continue;
}
} else {
//old master to slave
if (!roleManager.change2Slave(brokerName, roleChangeInfo.oldMaster.addr, roleChangeInfo.oldMaster.expectId, false)) {
log.error("change master to slave failed, stop. clusterName:{}, brokerName:{}, brokerAddr:{}", clusterName,
brokerName, roleChangeInfo.oldMaster.addr);
continue;
}
//slave to new master
if (!roleManager.change2Master(brokerName, roleChangeInfo.newMaster.addr, true)) {
log.error("change slave to master failed, stop. clusterName:{}, brokerName:{}, brokerAddr:{}", clusterName,
brokerName, roleChangeInfo.newMaster.addr);
continue;
}
//change new slave id
long slaveId;
BrokerData brokerData = namesrvController.getRouteInfoManager().getBrokerData(brokerName);
if (brokerData != null) {
SortedSet<Long> ids = new TreeSet<>(brokerData.getBrokerAddrs().keySet());
slaveId = selectSlaveId(ids);
} else {
slaveId = roleChangeInfo.newMaster.oldId;
}
if (!roleManager.changeId(brokerName, roleChangeInfo.oldMaster.addr, slaveId, false)) {
log.error("change id failed, stop. clusterName:{}, brokerName:{}, brokerAddr:{}", clusterName,
brokerName, roleChangeInfo.oldMaster.addr);
continue;
}
}
log.info("cluster:{}, broker:{}, change role success", clusterName, brokerName);
//clear old detect info
detector.reset(clusterName, brokerName);
}
}
}
}
private boolean isSwitchRole(String clusterName, String brokerName) {
if (namesrvController.getNamesrvConfig().isRoleAutoSwitchEnable()) {
return true;
}
boolean isSwitch = false;
String key = getClusterBrokerKey(clusterName, brokerName);
if (brokerEnableSwitch.containsKey(key)) {
if (System.currentTimeMillis() - brokerEnableSwitch.get(key) < namesrvController.getNamesrvConfig().getEnableValidityPeriodMs()) {
log.info("broker:{} enable to switch", brokerName);
isSwitch = true;
}
brokerEnableSwitch.remove(key);
}
return isSwitch;
}
public void enableBrokerRoleSwitch(String clusterName, String brokerName) {
brokerEnableSwitch.put(getClusterBrokerKey(clusterName, brokerName), System.currentTimeMillis());
log.info("enable clusterName:{}, brokerName:{} to switch role", clusterName, brokerName);
}
private String getClusterBrokerKey(String clusterName, String brokerName) {
return clusterName + "@" + brokerName;
}
private boolean isInRouteInfoManager(String cluster, String brokerName) {
BrokerData brokerData = namesrvController.getRouteInfoManager().getBrokerData(brokerName);
if (brokerData == null || !cluster.equals(brokerData.getCluster())) {
log.warn("no broker data for broker name:{}, broker data:{}", brokerName, brokerData);
return false;
}
return true;
}
public RoleChangeInfo selectNewMaster(String cluster, String brokerName) {
BrokerData brokerData = namesrvController.getRouteInfoManager().getBrokerData(brokerName);
if (brokerData == null || !cluster.equals(brokerData.getCluster())) {
log.warn("no broker data for broker name:{}, broker data:{}", brokerName, brokerData);
return null;
}
HashMap<Long, String> brokerAddrs = new HashMap<>(brokerData.getBrokerAddrs());
for (Iterator<Map.Entry<Long, String>> it = brokerAddrs.entrySet().iterator(); it.hasNext(); ) {
Map.Entry<Long, String> item = it.next();
if (item.getKey() > namesrvController.getNamesrvConfig().getMaxIdForRoleSwitch()) {
it.remove();
}
}
//no broker
        if (brokerAddrs.isEmpty()) {
log.warn("no broker addrs, for broker name:{}, broker data:{}", brokerName, brokerData);
return null;
}
//only one, and master
if (brokerAddrs.size() == 1 && brokerAddrs.get(MixAll.MASTER_ID) != null) {
log.warn("only on broker, but it is current master");
return null;
}
//slave exist
RoleChangeInfo roleChangeInfo = new RoleChangeInfo();
SortedSet<Long> ids = new TreeSet<>(brokerAddrs.keySet());
if (ids.first() == MixAll.MASTER_ID) {
roleChangeInfo.oldMaster = new RoleInChange(brokerAddrs.get(ids.first()), ids.first(), ids.last() + 1);
}
long newMasterId = pickMaster(brokerAddrs);
if (newMasterId == -1) {
//newMasterId = ids.last();
log.error("do not get master, broker name:{}", brokerName);
return null;
}
roleChangeInfo.newMaster = new RoleInChange(brokerAddrs.get(newMasterId), newMasterId, MixAll.MASTER_ID);
return roleChangeInfo;
}
private long pickMaster(HashMap<Long, String> brokerAddrs) {
long maxOffset = -1;
long brokerId = -1;
for (Map.Entry<Long, String> broker : brokerAddrs.entrySet()) {
if (broker.getKey() == MixAll.MASTER_ID) {
continue;
}
long offset = namesrvController.getRouteInfoManager().getBrokerMaxPhyOffset(broker.getValue());
if (offset > maxOffset) {
brokerId = broker.getKey();
maxOffset = offset;
}
}
log.info("get new master id:{}, maxOffset:{}", brokerId, maxOffset);
return brokerId;
}
private void updateBrokerStatus(String clusterName, Map<String, Boolean> status) {
Set<String> alive = new HashSet<>();
for (Map.Entry<String, Boolean> entry : status.entrySet()) {
if (entry.getValue()) {
alive.add(entry.getKey());
}
}
stateKeeper.updateBrokerStatus(clusterName, alive);
}
private Map<String, Map<String, Boolean>> getLiveBroker(HealthStatus healthStatus) {
Map<String/*cluster name*/, Map<String/*broker name*/, Boolean>> brokerLiveStatus = new HashMap<>(4);
for (Map.Entry<String, Map<String, HealthStatus.NodeHealthStatus>> cluster : healthStatus.getClusterStatus().entrySet()) {
String clusterName = cluster.getKey();
Map<String, HealthStatus.NodeHealthStatus> brokerStatus = cluster.getValue();
for (Map.Entry<String, HealthStatus.NodeHealthStatus> broker : brokerStatus.entrySet()) {
if (!brokerLiveStatus.containsKey(clusterName)) {
Map<String, Boolean> status = new HashMap<>();
status.put(broker.getKey(), broker.getValue().isHealthy());
brokerLiveStatus.put(clusterName, status);
} else {
brokerLiveStatus.get(clusterName).put(broker.getKey(), broker.getValue().isHealthy());
}
}
}
return brokerLiveStatus;
}
class RoleChangeInfo {
RoleInChange newMaster = null;
RoleInChange oldMaster = null;
@Override
public String toString() {
return "RoleChangeInfo{" +
"newMaster=" + newMaster +
", oldMaster=" + oldMaster +
'}';
}
}
class RoleInChange {
String addr;
long oldId;
long expectId;
public RoleInChange(String addr, long oldId, long expectId) {
this.addr = addr;
this.oldId = oldId;
this.expectId = expectId;
}
@Override
public String toString() {
return "RoleInChange{" +
"addr='" + addr + '\'' +
", oldId=" + oldId +
", expectId=" + expectId +
'}';
}
}
}
```
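For reference, `selectSlaveId` fills the smallest gap in the existing broker-id set (id 0 is reserved for the master) and only appends past the largest id when the set is dense. A small sketch of that rule, mirroring the cases in the test above (assumes `HAManager` is on the classpath):
```
import java.util.Arrays;
import java.util.TreeSet;
import org.apache.rocketmq.namesrv.ha.HAManager;

public class SelectSlaveIdSketch {
    public static void main(String[] args) {
        HAManager haManager = new HAManager();
        // dense {0,1}: no gap to fill, so append past the end
        System.out.println(haManager.selectSlaveId(new TreeSet<>(Arrays.asList(0L, 1L))));     // 2
        // gapped {0,2,4}: the first hole after the master id wins
        System.out.println(haManager.selectSlaveId(new TreeSet<>(Arrays.asList(0L, 2L, 4L)))); // 1
    }
}
```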
|
```package org.apache.rocketmq.tools.command.topic;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.rocketmq.srvutil.ServerUtil;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class UpdateTopicPermSubCommandTest {
@Test
public void testExecute() {
UpdateTopicPermSubCommand cmd = new UpdateTopicPermSubCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-b 127.0.0.1:10911", "-c default-cluster", "-t unit-test", "-p 6"};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new PosixParser());
assertThat(commandLine.getOptionValue('b').trim()).isEqualTo("127.0.0.1:10911");
assertThat(commandLine.getOptionValue('c').trim()).isEqualTo("default-cluster");
assertThat(commandLine.getOptionValue('t').trim()).isEqualTo("unit-test");
assertThat(commandLine.getOptionValue('p').trim()).isEqualTo("6");
}
}```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.tools.command.topic;
import java.util.List;
import java.util.Set;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.rocketmq.common.TopicConfig;
import org.apache.rocketmq.common.protocol.route.QueueData;
import org.apache.rocketmq.common.protocol.route.TopicRouteData;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.srvutil.ServerUtil;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.command.CommandUtil;
import org.apache.rocketmq.tools.command.SubCommand;
import org.apache.rocketmq.tools.command.SubCommandException;
public class UpdateTopicPermSubCommand implements SubCommand {
@Override
public String commandName() {
return "updateTopicPerm";
}
@Override
public String commandDesc() {
return "Update topic perm";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("b", "brokerAddr", true, "create topic to which broker");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("c", "clusterName", true, "create topic to which cluster");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("t", "topic", true, "topic name");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("p", "perm", true, "set topic's permission(2|4|6), intro[2:W; 4:R; 6:RW]");
opt.setRequired(true);
options.addOption(opt);
return options;
}
@Override
public void execute(final CommandLine commandLine, final Options options,
RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
try {
defaultMQAdminExt.start();
TopicConfig topicConfig = new TopicConfig();
String topic = commandLine.getOptionValue('t').trim();
TopicRouteData topicRouteData = defaultMQAdminExt.examineTopicRouteInfo(topic);
assert topicRouteData != null;
List<QueueData> queueDatas = topicRouteData.getQueueDatas();
assert queueDatas != null && queueDatas.size() > 0;
QueueData queueData = queueDatas.get(0);
topicConfig.setTopicName(topic);
topicConfig.setWriteQueueNums(queueData.getWriteQueueNums());
topicConfig.setReadQueueNums(queueData.getReadQueueNums());
topicConfig.setPerm(queueData.getPerm());
topicConfig.setTopicSysFlag(queueData.getTopicSynFlag());
//new perm
int perm = Integer.parseInt(commandLine.getOptionValue('p').trim());
int oldPerm = topicConfig.getPerm();
if (perm == oldPerm) {
System.out.printf("new perm equals to the old one!%n");
return;
}
topicConfig.setPerm(perm);
if (commandLine.hasOption('b')) {
String addr = commandLine.getOptionValue('b').trim();
defaultMQAdminExt.createAndUpdateTopicConfig(addr, topicConfig);
System.out.printf("update topic perm from %s to %s in %s success.%n", oldPerm, perm, addr);
System.out.printf("%s%n", topicConfig);
return;
} else if (commandLine.hasOption('c')) {
String clusterName = commandLine.getOptionValue('c').trim();
Set<String> masterSet =
CommandUtil.fetchMasterAddrByClusterName(defaultMQAdminExt, clusterName);
for (String addr : masterSet) {
defaultMQAdminExt.createAndUpdateTopicConfig(addr, topicConfig);
System.out.printf("update topic perm from %s to %s in %s success.%n", oldPerm, perm, addr);
}
return;
}
ServerUtil.printCommandLineHelp("mqadmin " + this.commandName(), options);
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
}
```
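A hedged invocation sketch for the command above, modeled on the `MQAdminStartup` bootstrap used elsewhere in the tools module; the name-server address, cluster, topic, and perm value are placeholders:
```
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.tools.command.MQAdminStartup;

public class UpdateTopicPermExample {
    public static void main(String[] args) {
        // assumption: a name server is reachable at 127.0.0.1:9876
        System.setProperty(MixAll.NAMESRV_ADDR_PROPERTY, "127.0.0.1:9876");
        // set topic "TopicTest" to read-only (perm 4) across cluster "DefaultCluster"
        MQAdminStartup.main(new String[] {"updateTopicPerm", "-c", "DefaultCluster", "-t", "TopicTest", "-p", "4"});
    }
}
```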
|
```package org.apache.rocketmq.client.producer.selector;
import java.util.ArrayList;
import java.util.List;
import org.apache.rocketmq.common.message.Message;
import org.apache.rocketmq.common.message.MessageQueue;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class SelectMessageQueueByHashTest {
private String topic = "FooBar";
@Test
public void testSelect() throws Exception {
SelectMessageQueueByHash selector = new SelectMessageQueueByHash();
Message message = new Message(topic, new byte[] {});
List<MessageQueue> messageQueues = new ArrayList<MessageQueue>();
for (int i = 0; i < 10; i++) {
MessageQueue messageQueue = new MessageQueue(topic, "DefaultBroker", i);
messageQueues.add(messageQueue);
}
String orderId = "123";
String anotherOrderId = "234";
MessageQueue selected = selector.select(messageQueues, message, orderId);
assertThat(selector.select(messageQueues, message, anotherOrderId)).isNotEqualTo(selected);
}
}```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.client.producer.selector;
import java.util.List;
import org.apache.rocketmq.client.producer.MessageQueueSelector;
import org.apache.rocketmq.common.message.Message;
import org.apache.rocketmq.common.message.MessageQueue;
public class SelectMessageQueueByHash implements MessageQueueSelector {
@Override
public MessageQueue select(List<MessageQueue> mqs, Message msg, Object arg) {
int value = arg.hashCode();
if (value < 0) {
value = Math.abs(value);
}
value = value % mqs.size();
return mqs.get(value);
}
}
```
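Because the selector keys purely off `arg.hashCode()`, any stable business key (an order id, say) pins all of that key's messages to one queue. A minimal usage sketch; the producer group, name-server address, and topic are placeholders:
```
import org.apache.rocketmq.client.producer.DefaultMQProducer;
import org.apache.rocketmq.client.producer.SendResult;
import org.apache.rocketmq.client.producer.selector.SelectMessageQueueByHash;
import org.apache.rocketmq.common.message.Message;

public class OrderedSendExample {
    public static void main(String[] args) throws Exception {
        DefaultMQProducer producer = new DefaultMQProducer("ExampleProducerGroup"); // placeholder group
        producer.setNamesrvAddr("127.0.0.1:9876"); // assumption: local name server
        producer.start();
        String orderId = "order-123";
        Message msg = new Message("FooBar", ("payload for " + orderId).getBytes());
        // the same orderId always hashes to the same queue, preserving per-order ordering
        SendResult result = producer.send(msg, new SelectMessageQueueByHash(), orderId);
        System.out.println(result.getSendStatus());
        producer.shutdown();
    }
}
```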
|
```package org.apache.rocketmq.test.client.producer.exception.producer;
import org.apache.log4j.Logger;
import org.apache.rocketmq.test.base.BaseConf;
import org.apache.rocketmq.test.client.rmq.RMQNormalProducer;
import org.apache.rocketmq.test.util.RandomUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static com.google.common.truth.Truth.assertThat;
public class ProducerGroupAndInstanceNameValidityIT extends BaseConf {
private static Logger logger = Logger.getLogger(ProducerGroupAndInstanceNameValidityIT.class);
private String topic = null;
@Before
public void setUp() {
topic = initTopic();
logger.info(String.format("use topic: %s !", topic));
}
@After
public void tearDown() {
super.shutdown();
}
/**
     * @since version 3.4.6
*/
@Test
public void testTwoProducerSameGroupAndInstanceName() {
RMQNormalProducer producer1 = getProducer(nsAddr, topic);
assertThat(producer1.isStartSuccess()).isEqualTo(true);
RMQNormalProducer producer2 = getProducer(nsAddr, topic,
producer1.getProducerGroupName(), producer1.getProducerInstanceName());
assertThat(producer2.isStartSuccess()).isEqualTo(false);
}
/**
     * @since version 3.4.6
*/
@Test
public void testTwoProducerSameGroup() {
RMQNormalProducer producer1 = getProducer(nsAddr, topic);
assertThat(producer1.isStartSuccess()).isEqualTo(true);
RMQNormalProducer producer2 = getProducer(nsAddr, topic,
producer1.getProducerGroupName(), RandomUtils.getStringByUUID());
assertThat(producer2.isStartSuccess()).isEqualTo(true);
}
}
```
|
Please help me generate a test for this class.
|
```package com.xiaojukeji.carrera.producer;
import com.xiaojukeji.carrera.thrift.DelayResult;
public class CancelDelayMessageBuilder {
private CarreraProducer producer = null;
private String topic = null;
private String uniqDelayMsgId = null;
private String tags = null;
public CancelDelayMessageBuilder(CarreraProducer producer) {
this.producer = producer;
}
public CancelDelayMessageBuilder setTopic(String topic) {
this.topic = topic;
return this;
}
public CancelDelayMessageBuilder setUniqDelayMsgId(String uniqDelayMsgId) {
this.uniqDelayMsgId = uniqDelayMsgId;
return this;
}
public String getUniqDelayMsgId() {
return uniqDelayMsgId;
}
public CancelDelayMessageBuilder setTags(String tag) {
this.tags = tag;
return this;
}
public DelayResult send() {
if (this.tags == null)
return this.producer.cancelDelay(this.topic, this.uniqDelayMsgId);
return this.producer.cancelDelay(this.topic, this.uniqDelayMsgId, this.tags);
}
}```
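A short usage sketch of the builder; constructing and starting the `CarreraProducer` is elided since it depends on client configuration, and the topic, tag, and message id are placeholders:
```
import com.xiaojukeji.carrera.producer.CancelDelayMessageBuilder;
import com.xiaojukeji.carrera.producer.CarreraProducer;
import com.xiaojukeji.carrera.thrift.DelayResult;

public class CancelDelayExample {
    // assumes the caller supplies an already-started producer
    static DelayResult cancel(CarreraProducer producer, String uniqDelayMsgId) {
        return new CancelDelayMessageBuilder(producer)
            .setTopic("delay_topic")            // placeholder topic name
            .setUniqDelayMsgId(uniqDelayMsgId)  // id returned when the delay message was sent
            .setTags("TagA")                    // optional; omit to cancel without a tag filter
            .send();
    }
}
```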
|
```package org.apache.rocketmq.test.client.consumer.filter;
import org.apache.log4j.Logger;
import org.apache.rocketmq.client.consumer.MessageSelector;
import org.apache.rocketmq.test.base.BaseConf;
import org.apache.rocketmq.test.client.consumer.broadcast.BaseBroadCastIT;
import org.apache.rocketmq.test.client.consumer.broadcast.normal.NormalMsgTwoSameGroupConsumerIT;
import org.apache.rocketmq.test.client.rmq.RMQBroadCastConsumer;
import org.apache.rocketmq.test.client.rmq.RMQNormalProducer;
import org.apache.rocketmq.test.client.rmq.RMQSqlConsumer;
import org.apache.rocketmq.test.factory.ConsumerFactory;
import org.apache.rocketmq.test.listener.rmq.concurrent.RMQNormalListener;
import org.apache.rocketmq.test.util.VerifyUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import static com.google.common.truth.Truth.assertThat;
public class SqlFilterIT extends BaseConf {
private static Logger logger = Logger.getLogger(SqlFilterIT.class);
private RMQNormalProducer producer = null;
private String topic = null;
@Before
public void setUp() {
topic = initTopic();
logger.info(String.format("use topic: %s;", topic));
producer = getProducer(nsAddr, topic);
}
@After
public void tearDown() {
super.shutdown();
}
@Test
public void testFilterConsumer() throws Exception {
int msgSize = 16;
String group = initConsumerGroup();
MessageSelector selector = MessageSelector.bySql("(TAGS is not null and TAGS in ('TagA', 'TagB'))");
RMQSqlConsumer consumer = ConsumerFactory.getRMQSqlConsumer(nsAddr, group, topic, selector, new RMQNormalListener(group + "_1"));
Thread.sleep(3000);
producer.send("TagA", msgSize);
producer.send("TagB", msgSize);
producer.send("TagC", msgSize);
Assert.assertEquals("Not all sent succeeded", msgSize * 3, producer.getAllUndupMsgBody().size());
consumer.getListener().waitForMessageConsume(msgSize * 2, consumeTime);
assertThat(producer.getAllMsgBody())
.containsAllIn(VerifyUtils.getFilterdMessage(producer.getAllMsgBody(),
consumer.getListener().getAllMsgBody()));
assertThat(consumer.getListener().getAllMsgBody().size()).isEqualTo(msgSize * 2);
}
}
```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.filter;
import org.apache.rocketmq.common.filter.ExpressionType;
import org.apache.rocketmq.filter.expression.Expression;
import org.apache.rocketmq.filter.expression.MQFilterException;
import org.apache.rocketmq.filter.parser.SelectorParser;
/**
* SQL92 Filter, just a wrapper of {@link org.apache.rocketmq.filter.parser.SelectorParser}.
* <p/>
* <p>
 * Do not use this filter directly. Use {@link FilterFactory#get} to select a filter.
* </p>
*/
public class SqlFilter implements FilterSpi {
@Override
public Expression compile(final String expr) throws MQFilterException {
return SelectorParser.parse(expr);
}
@Override
public String ofType() {
return ExpressionType.SQL92;
}
}
```
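As the javadoc advises, callers obtain the filter through the factory rather than instantiating it; a minimal sketch, assuming the `FilterFactory.INSTANCE.get` accessor and stopping at compilation (evaluation needs an `EvaluationContext` bound to a message):
```
import org.apache.rocketmq.common.filter.ExpressionType;
import org.apache.rocketmq.filter.FilterFactory;
import org.apache.rocketmq.filter.FilterSpi;
import org.apache.rocketmq.filter.expression.Expression;

public class SqlFilterExample {
    public static void main(String[] args) throws Exception {
        // the factory maps the SQL92 expression type to this SqlFilter
        FilterSpi filter = FilterFactory.INSTANCE.get(ExpressionType.SQL92);
        Expression expr = filter.compile("TAGS is not null and TAGS in ('TagA', 'TagB')");
        System.out.println(expr); // compiled expression tree, ready to evaluate per message
    }
}
```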
|
```package org.apache.rocketmq.client.common;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class ThreadLocalIndexTest {
@Test
public void testGetAndIncrement() throws Exception {
ThreadLocalIndex localIndex = new ThreadLocalIndex();
int initialVal = localIndex.getAndIncrement();
assertThat(localIndex.getAndIncrement()).isEqualTo(initialVal + 1);
}
}```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.client.common;
import java.util.Random;
public class ThreadLocalIndex {
private final ThreadLocal<Integer> threadLocalIndex = new ThreadLocal<Integer>();
private final Random random = new Random();
public int getAndIncrement() {
Integer index = this.threadLocalIndex.get();
if (null == index) {
index = Math.abs(random.nextInt());
if (index < 0)
index = 0;
this.threadLocalIndex.set(index);
}
index = Math.abs(index + 1);
if (index < 0)
index = 0;
this.threadLocalIndex.set(index);
return index;
}
@Override
public String toString() {
return "ThreadLocalIndex{" +
"threadLocalIndex=" + threadLocalIndex.get() +
'}';
}
}
```
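The per-thread counter lets concurrent senders round-robin over a queue list without contending on one shared atomic; a minimal sketch of the usual index-modulo-size pattern (queue names are placeholders):
```
import java.util.Arrays;
import java.util.List;
import org.apache.rocketmq.client.common.ThreadLocalIndex;

public class RoundRobinSketch {
    private static final ThreadLocalIndex INDEX = new ThreadLocalIndex();

    // each calling thread walks the list independently, one step per call;
    // getAndIncrement never returns a negative value, so the modulo is safe
    static String pick(List<String> queues) {
        int pos = INDEX.getAndIncrement() % queues.size();
        return queues.get(pos);
    }

    public static void main(String[] args) {
        List<String> queues = Arrays.asList("q0", "q1", "q2");
        System.out.println(pick(queues));
        System.out.println(pick(queues)); // the next queue in line for this thread
    }
}
```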
|
```package org.apache.rocketmq.remoting.protocol;
import java.util.HashMap;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class RocketMQSerializableTest {
@Test
public void testRocketMQProtocolEncodeAndDecode_WithoutRemarkWithoutExtFields() {
System.setProperty(RemotingCommand.REMOTING_VERSION_KEY, "2333");
//org.apache.rocketmq.common.protocol.RequestCode.REGISTER_BROKER
int code = 103;
RemotingCommand cmd = RemotingCommand.createRequestCommand(code, new SampleCommandCustomHeader());
cmd.setSerializeTypeCurrentRPC(SerializeType.ROCKETMQ);
byte[] result = RocketMQSerializable.rocketMQProtocolEncode(cmd);
int opaque = cmd.getOpaque();
assertThat(result).hasSize(21);
assertThat(parseToShort(result, 0)).isEqualTo((short) code); //code
assertThat(result[2]).isEqualTo(LanguageCode.JAVA.getCode()); //language
assertThat(parseToShort(result, 3)).isEqualTo((short) 2333); //version
assertThat(parseToInt(result, 9)).isEqualTo(0); //flag
assertThat(parseToInt(result, 13)).isEqualTo(0); //empty remark
assertThat(parseToInt(result, 17)).isEqualTo(0); //empty extFields
RemotingCommand decodedCommand = RocketMQSerializable.rocketMQProtocolDecode(result);
assertThat(decodedCommand.getCode()).isEqualTo(code);
assertThat(decodedCommand.getLanguage()).isEqualTo(LanguageCode.JAVA);
assertThat(decodedCommand.getVersion()).isEqualTo(2333);
assertThat(decodedCommand.getOpaque()).isEqualTo(opaque);
assertThat(decodedCommand.getFlag()).isEqualTo(0);
assertThat(decodedCommand.getRemark()).isNull();
assertThat(decodedCommand.getExtFields()).isNull();
}
@Test
public void testRocketMQProtocolEncodeAndDecode_WithRemarkWithoutExtFields() {
System.setProperty(RemotingCommand.REMOTING_VERSION_KEY, "2333");
//org.apache.rocketmq.common.protocol.RequestCode.REGISTER_BROKER
int code = 103;
RemotingCommand cmd = RemotingCommand.createRequestCommand(code,
new SampleCommandCustomHeader());
cmd.setSerializeTypeCurrentRPC(SerializeType.ROCKETMQ);
cmd.setRemark("Sample Remark");
byte[] result = RocketMQSerializable.rocketMQProtocolEncode(cmd);
int opaque = cmd.getOpaque();
assertThat(result).hasSize(34);
assertThat(parseToShort(result, 0)).isEqualTo((short) code); //code
assertThat(result[2]).isEqualTo(LanguageCode.JAVA.getCode()); //language
assertThat(parseToShort(result, 3)).isEqualTo((short) 2333); //version
assertThat(parseToInt(result, 9)).isEqualTo(0); //flag
assertThat(parseToInt(result, 13)).isEqualTo(13); //remark length
byte[] remarkArray = new byte[13];
System.arraycopy(result, 17, remarkArray, 0, 13);
assertThat(new String(remarkArray)).isEqualTo("Sample Remark");
assertThat(parseToInt(result, 30)).isEqualTo(0); //empty extFields
RemotingCommand decodedCommand = RocketMQSerializable.rocketMQProtocolDecode(result);
assertThat(decodedCommand.getCode()).isEqualTo(code);
assertThat(decodedCommand.getLanguage()).isEqualTo(LanguageCode.JAVA);
assertThat(decodedCommand.getVersion()).isEqualTo(2333);
assertThat(decodedCommand.getOpaque()).isEqualTo(opaque);
assertThat(decodedCommand.getFlag()).isEqualTo(0);
assertThat(decodedCommand.getRemark()).contains("Sample Remark");
assertThat(decodedCommand.getExtFields()).isNull();
}
@Test
public void testRocketMQProtocolEncodeAndDecode_WithoutRemarkWithExtFields() {
System.setProperty(RemotingCommand.REMOTING_VERSION_KEY, "2333");
//org.apache.rocketmq.common.protocol.RequestCode.REGISTER_BROKER
int code = 103;
RemotingCommand cmd = RemotingCommand.createRequestCommand(code,
new SampleCommandCustomHeader());
cmd.setSerializeTypeCurrentRPC(SerializeType.ROCKETMQ);
cmd.addExtField("key", "value");
byte[] result = RocketMQSerializable.rocketMQProtocolEncode(cmd);
int opaque = cmd.getOpaque();
assertThat(result).hasSize(35);
assertThat(parseToShort(result, 0)).isEqualTo((short) code); //code
assertThat(result[2]).isEqualTo(LanguageCode.JAVA.getCode()); //language
assertThat(parseToShort(result, 3)).isEqualTo((short) 2333); //version
assertThat(parseToInt(result, 9)).isEqualTo(0); //flag
assertThat(parseToInt(result, 13)).isEqualTo(0); //empty remark
assertThat(parseToInt(result, 17)).isEqualTo(14); //extFields length
byte[] extFieldsArray = new byte[14];
System.arraycopy(result, 21, extFieldsArray, 0, 14);
HashMap<String, String> extFields = RocketMQSerializable.mapDeserialize(extFieldsArray);
assertThat(extFields).contains(new HashMap.SimpleEntry("key", "value"));
RemotingCommand decodedCommand = RocketMQSerializable.rocketMQProtocolDecode(result);
assertThat(decodedCommand.getCode()).isEqualTo(code);
assertThat(decodedCommand.getLanguage()).isEqualTo(LanguageCode.JAVA);
assertThat(decodedCommand.getVersion()).isEqualTo(2333);
assertThat(decodedCommand.getOpaque()).isEqualTo(opaque);
assertThat(decodedCommand.getFlag()).isEqualTo(0);
assertThat(decodedCommand.getRemark()).isNull();
assertThat(decodedCommand.getExtFields()).contains(new HashMap.SimpleEntry("key", "value"));
}
@Test
public void testIsBlank_NotBlank() {
assertThat(RocketMQSerializable.isBlank("bar")).isFalse();
assertThat(RocketMQSerializable.isBlank(" A ")).isFalse();
}
@Test
public void testIsBlank_Blank() {
assertThat(RocketMQSerializable.isBlank(null)).isTrue();
assertThat(RocketMQSerializable.isBlank("")).isTrue();
assertThat(RocketMQSerializable.isBlank(" ")).isTrue();
}
    private short parseToShort(byte[] array, int index) {
        // mask to unsigned before combining so bytes >= 0x80 cannot corrupt the result
        return (short) (((array[index] & 0xFF) << 8) | (array[index + 1] & 0xFF));
    }
    private int parseToInt(byte[] array, int index) {
        return ((array[index] & 0xFF) << 24) | ((array[index + 1] & 0xFF) << 16)
            | ((array[index + 2] & 0xFF) << 8) | (array[index + 3] & 0xFF);
    }
}```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.remoting.protocol;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
public class RocketMQSerializable {
private static final Charset CHARSET_UTF8 = Charset.forName("UTF-8");
public static byte[] rocketMQProtocolEncode(RemotingCommand cmd) {
// String remark
byte[] remarkBytes = null;
int remarkLen = 0;
if (cmd.getRemark() != null && cmd.getRemark().length() > 0) {
remarkBytes = cmd.getRemark().getBytes(CHARSET_UTF8);
remarkLen = remarkBytes.length;
}
// HashMap<String, String> extFields
byte[] extFieldsBytes = null;
int extLen = 0;
if (cmd.getExtFields() != null && !cmd.getExtFields().isEmpty()) {
extFieldsBytes = mapSerialize(cmd.getExtFields());
extLen = extFieldsBytes.length;
}
int totalLen = calTotalLen(remarkLen, extLen);
ByteBuffer headerBuffer = ByteBuffer.allocate(totalLen);
// int code(~32767)
headerBuffer.putShort((short) cmd.getCode());
// LanguageCode language
headerBuffer.put(cmd.getLanguage().getCode());
// int version(~32767)
headerBuffer.putShort((short) cmd.getVersion());
// int opaque
headerBuffer.putInt(cmd.getOpaque());
// int flag
headerBuffer.putInt(cmd.getFlag());
// String remark
if (remarkBytes != null) {
headerBuffer.putInt(remarkBytes.length);
headerBuffer.put(remarkBytes);
} else {
headerBuffer.putInt(0);
}
// HashMap<String, String> extFields;
if (extFieldsBytes != null) {
headerBuffer.putInt(extFieldsBytes.length);
headerBuffer.put(extFieldsBytes);
} else {
headerBuffer.putInt(0);
}
return headerBuffer.array();
}
public static byte[] mapSerialize(HashMap<String, String> map) {
// keySize+key+valSize+val
if (null == map || map.isEmpty())
return null;
int totalLength = 0;
int kvLength;
Iterator<Map.Entry<String, String>> it = map.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, String> entry = it.next();
if (entry.getKey() != null && entry.getValue() != null) {
kvLength =
// keySize + Key
2 + entry.getKey().getBytes(CHARSET_UTF8).length
// valSize + val
+ 4 + entry.getValue().getBytes(CHARSET_UTF8).length;
totalLength += kvLength;
}
}
ByteBuffer content = ByteBuffer.allocate(totalLength);
byte[] key;
byte[] val;
it = map.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, String> entry = it.next();
if (entry.getKey() != null && entry.getValue() != null) {
key = entry.getKey().getBytes(CHARSET_UTF8);
val = entry.getValue().getBytes(CHARSET_UTF8);
content.putShort((short) key.length);
content.put(key);
content.putInt(val.length);
content.put(val);
}
}
return content.array();
}
private static int calTotalLen(int remark, int ext) {
// int code(~32767)
int length = 2
// LanguageCode language
+ 1
// int version(~32767)
+ 2
// int opaque
+ 4
// int flag
+ 4
// String remark
+ 4 + remark
// HashMap<String, String> extFields
+ 4 + ext;
return length;
}
public static RemotingCommand rocketMQProtocolDecode(final byte[] headerArray) {
RemotingCommand cmd = new RemotingCommand();
ByteBuffer headerBuffer = ByteBuffer.wrap(headerArray);
// int code(~32767)
cmd.setCode(headerBuffer.getShort());
// LanguageCode language
cmd.setLanguage(LanguageCode.valueOf(headerBuffer.get()));
// int version(~32767)
cmd.setVersion(headerBuffer.getShort());
// int opaque
cmd.setOpaque(headerBuffer.getInt());
// int flag
cmd.setFlag(headerBuffer.getInt());
// String remark
int remarkLength = headerBuffer.getInt();
if (remarkLength > 0) {
byte[] remarkContent = new byte[remarkLength];
headerBuffer.get(remarkContent);
cmd.setRemark(new String(remarkContent, CHARSET_UTF8));
}
// HashMap<String, String> extFields
int extFieldsLength = headerBuffer.getInt();
if (extFieldsLength > 0) {
byte[] extFieldsBytes = new byte[extFieldsLength];
headerBuffer.get(extFieldsBytes);
cmd.setExtFields(mapDeserialize(extFieldsBytes));
}
return cmd;
}
public static HashMap<String, String> mapDeserialize(byte[] bytes) {
if (bytes == null || bytes.length <= 0)
return null;
HashMap<String, String> map = new HashMap<String, String>();
ByteBuffer byteBuffer = ByteBuffer.wrap(bytes);
short keySize;
byte[] keyContent;
int valSize;
byte[] valContent;
while (byteBuffer.hasRemaining()) {
keySize = byteBuffer.getShort();
keyContent = new byte[keySize];
byteBuffer.get(keyContent);
valSize = byteBuffer.getInt();
valContent = new byte[valSize];
byteBuffer.get(valContent);
map.put(new String(keyContent, CHARSET_UTF8), new String(valContent, CHARSET_UTF8));
}
return map;
}
public static boolean isBlank(String str) {
int strLen;
if (str == null || (strLen = str.length()) == 0) {
return true;
}
for (int i = 0; i < strLen; i++) {
if (!Character.isWhitespace(str.charAt(i))) {
return false;
}
}
return true;
}
}
```
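The byte counts asserted in the test above fall straight out of `calTotalLen`: the fixed part is 2 (code) + 1 (language) + 2 (version) + 4 (opaque) + 4 (flag) + 4 (remark length) + 4 (extFields length) = 21 bytes, so an empty command encodes to 21 bytes, the 13-byte remark "Sample Remark" brings it to 34, and the "key"/"value" ext map (2 + 3 + 4 + 5 = 14 bytes serialized) to 35. A small worked check:
```
public class HeaderLenCheck {
    // mirrors calTotalLen: fixed fields plus the variable remark/ext payloads
    static int totalLen(int remarkLen, int extLen) {
        return 2 + 1 + 2 + 4 + 4 + (4 + remarkLen) + (4 + extLen);
    }

    public static void main(String[] args) {
        System.out.println(totalLen(0, 0));  // 21: empty remark and extFields
        System.out.println(totalLen(13, 0)); // 34: "Sample Remark" is 13 bytes
        System.out.println(totalLen(0, 14)); // 35: {"key":"value"} serializes to 14 bytes
    }
}
```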
|
```package org.apache.rocketmq.tools.command.topic;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.rocketmq.srvutil.ServerUtil;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class DeleteTopicSubCommandTest {
@Test
public void testExecute() {
DeleteTopicSubCommand cmd = new DeleteTopicSubCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-t unit-test", "-c default-cluster"};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new PosixParser());
assertThat(commandLine.getOptionValue('t').trim()).isEqualTo("unit-test");
assertThat(commandLine.getOptionValue("c").trim()).isEqualTo("default-cluster");
}
@Test
public void testExecuteBroker() {
DeleteTopicSubCommand cmd = new DeleteTopicSubCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-t unit-test", "-b localhost:10911"};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new PosixParser());
assertThat(commandLine.getOptionValue('t').trim()).isEqualTo("unit-test");
assertThat(commandLine.getOptionValue("b").trim()).isEqualTo("localhost:10911");
}
}```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.tools.command.topic;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.remoting.exception.RemotingException;
import org.apache.rocketmq.srvutil.ServerUtil;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.command.CommandUtil;
import org.apache.rocketmq.tools.command.SubCommand;
import org.apache.rocketmq.tools.command.SubCommandException;
public class DeleteTopicSubCommand implements SubCommand {
public static void deleteTopic(final DefaultMQAdminExt adminExt,
final String clusterName,
final String topic
) throws InterruptedException, MQBrokerException, RemotingException, MQClientException {
Set<String> masterSet = CommandUtil.fetchMasterAddrByClusterName(adminExt, clusterName);
adminExt.deleteTopicInBroker(masterSet, topic);
System.out.printf("delete topic [%s] from cluster [%s] success.%n", topic, clusterName);
Set<String> nameServerSet = null;
if (adminExt.getNamesrvAddr() != null) {
String[] ns = adminExt.getNamesrvAddr().trim().split(";");
            nameServerSet = new HashSet<>(Arrays.asList(ns));
}
adminExt.deleteTopicInNameServer(nameServerSet, topic);
System.out.printf("delete topic [%s] from NameServer success.%n", topic);
}
public static void deleteTopicByBroker(final DefaultMQAdminExt adminExt,
final Set<String> brokerAddrs,
final String topic
) throws InterruptedException, MQBrokerException, RemotingException, MQClientException {
adminExt.deleteTopicInBroker(brokerAddrs, topic);
System.out.printf("delete topic [%s] from broker [%s] success.%n", topic, brokerAddrs);
Set<String> nameServerSet = null;
if (adminExt.getNamesrvAddr() != null) {
String[] ns = adminExt.getNamesrvAddr().trim().split(";");
            nameServerSet = new HashSet<>(Arrays.asList(ns));
}
adminExt.deleteTopicInNameServer(nameServerSet, topic, brokerAddrs);
System.out.printf("delete topic [%s] from NameServer success.%n", topic);
}
@Override
public String commandName() {
return "deleteTopic";
}
@Override
public String commandDesc() {
return "Delete topic from broker and NameServer.";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("t", "topic", true, "topic name");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("c", "clusterName", true, "delete topic from which cluster");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("b", "brokerAddr", true, "create topic to which broker");
opt.setRequired(false);
options.addOption(opt);
return options;
}
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt adminExt = new DefaultMQAdminExt(rpcHook);
adminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
try {
String topic = commandLine.getOptionValue('t').trim();
adminExt.start();
if (commandLine.hasOption('c')) {
String clusterName = commandLine.getOptionValue('c').trim();
deleteTopic(adminExt, clusterName, topic);
return;
} else if (commandLine.hasOption("b")) {
String brokerAddrs = commandLine.getOptionValue('b').trim();
String[] brokerArr = brokerAddrs.split(";");
if (brokerArr.length <= 0) {
throw new IllegalArgumentException("not broker");
}
Set<String> brokerSet = new HashSet<>();
Collections.addAll(brokerSet, brokerArr);
deleteTopicByBroker(adminExt, brokerSet, topic);
return;
}
ServerUtil.printCommandLineHelp("mqadmin " + this.commandName(), options);
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
adminExt.shutdown();
}
}
}
```
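A hedged invocation sketch for the command above, again via the `MQAdminStartup` bootstrap; the name-server address, cluster, and topic are placeholders:
```
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.tools.command.MQAdminStartup;

public class DeleteTopicExample {
    public static void main(String[] args) {
        // assumption: a name server is reachable at 127.0.0.1:9876
        System.setProperty(MixAll.NAMESRV_ADDR_PROPERTY, "127.0.0.1:9876");
        // removes "unit-test" from every master in "default-cluster", then from the name servers
        MQAdminStartup.main(new String[] {"deleteTopic", "-c", "default-cluster", "-t", "unit-test"});
    }
}
```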
|
```package org.apache.rocketmq.tools.command.namesrv;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.rocketmq.client.ClientConfig;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.impl.MQClientAPIImpl;
import org.apache.rocketmq.client.impl.MQClientManager;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.remoting.exception.RemotingCommandException;
import org.apache.rocketmq.remoting.exception.RemotingConnectException;
import org.apache.rocketmq.remoting.exception.RemotingSendRequestException;
import org.apache.rocketmq.remoting.exception.RemotingTimeoutException;
import org.apache.rocketmq.srvutil.ServerUtil;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExtImpl;
import org.apache.rocketmq.tools.command.SubCommandException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class WipeWritePermSubCommandTest {
private static DefaultMQAdminExt defaultMQAdminExt;
private static DefaultMQAdminExtImpl defaultMQAdminExtImpl;
private static MQClientInstance mqClientInstance = MQClientManager.getInstance().getAndCreateMQClientInstance(new ClientConfig());
private static MQClientAPIImpl mQClientAPIImpl;
@BeforeClass
public static void init() throws NoSuchFieldException, IllegalAccessException, InterruptedException, RemotingTimeoutException, MQClientException, RemotingSendRequestException, RemotingConnectException, MQBrokerException, RemotingCommandException {
mQClientAPIImpl = mock(MQClientAPIImpl.class);
defaultMQAdminExt = new DefaultMQAdminExt();
defaultMQAdminExtImpl = new DefaultMQAdminExtImpl(defaultMQAdminExt, 1000);
Field field = DefaultMQAdminExtImpl.class.getDeclaredField("mqClientInstance");
field.setAccessible(true);
field.set(defaultMQAdminExtImpl, mqClientInstance);
field = MQClientInstance.class.getDeclaredField("mQClientAPIImpl");
field.setAccessible(true);
field.set(mqClientInstance, mQClientAPIImpl);
field = DefaultMQAdminExt.class.getDeclaredField("defaultMQAdminExtImpl");
field.setAccessible(true);
field.set(defaultMQAdminExt, defaultMQAdminExtImpl);
List<String> result = new ArrayList<>();
result.add("default-name-one");
result.add("default-name-two");
when(mqClientInstance.getMQClientAPIImpl().getNameServerAddressList()).thenReturn(result);
when(mQClientAPIImpl.wipeWritePermOfBroker(anyString(), anyString(), anyLong())).thenReturn(6);
}
@AfterClass
public static void terminate() {
defaultMQAdminExt.shutdown();
}
@Ignore
@Test
public void testExecute() throws SubCommandException {
WipeWritePermSubCommand cmd = new WipeWritePermSubCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-b default-broker"};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new PosixParser());
cmd.execute(commandLine, options, null);
}
}
```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.tools.command.namesrv;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.command.SubCommand;
import org.apache.rocketmq.tools.command.SubCommandException;
public class WipeWritePermSubCommand implements SubCommand {
@Override
public String commandName() {
return "wipeWritePerm";
}
@Override
public String commandDesc() {
return "Wipe write perm of broker in all name server";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("b", "brokerName", true, "broker name");
opt.setRequired(true);
options.addOption(opt);
return options;
}
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
try {
defaultMQAdminExt.start();
String brokerName = commandLine.getOptionValue('b').trim();
List<String> namesrvList = defaultMQAdminExt.getNameServerAddressList();
if (namesrvList != null) {
for (String namesrvAddr : namesrvList) {
try {
int wipeTopicCount = defaultMQAdminExt.wipeWritePermOfBroker(namesrvAddr, brokerName);
System.out.printf("wipe write perm of broker[%s] in name server[%s] OK, %d%n",
brokerName,
namesrvAddr,
wipeTopicCount
);
} catch (Exception e) {
System.out.printf("wipe write perm of broker[%s] in name server[%s] Failed%n",
brokerName,
namesrvAddr
);
e.printStackTrace();
}
}
}
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
}
```
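A hedged invocation sketch for the command above; the name-server address and broker name are placeholders:
```
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.tools.command.MQAdminStartup;

public class WipeWritePermExample {
    public static void main(String[] args) {
        // assumption: a name server is reachable at 127.0.0.1:9876
        System.setProperty(MixAll.NAMESRV_ADDR_PROPERTY, "127.0.0.1:9876");
        // strips the write permission of broker "default-broker" on every known name server
        MQAdminStartup.main(new String[] {"wipeWritePerm", "-b", "default-broker"});
    }
}
```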
|
```package org.apache.rocketmq.tools.command.consumer;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.rocketmq.client.ClientConfig;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.impl.MQClientAPIImpl;
import org.apache.rocketmq.client.impl.MQClientManager;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.common.consumer.ConsumeFromWhere;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.common.protocol.body.Connection;
import org.apache.rocketmq.common.protocol.body.ConsumeStatus;
import org.apache.rocketmq.common.protocol.body.ConsumerConnection;
import org.apache.rocketmq.common.protocol.body.ConsumerRunningInfo;
import org.apache.rocketmq.common.protocol.body.ProcessQueueInfo;
import org.apache.rocketmq.common.protocol.heartbeat.ConsumeType;
import org.apache.rocketmq.common.protocol.heartbeat.MessageModel;
import org.apache.rocketmq.common.protocol.heartbeat.SubscriptionData;
import org.apache.rocketmq.common.protocol.route.BrokerData;
import org.apache.rocketmq.common.protocol.route.QueueData;
import org.apache.rocketmq.common.protocol.route.TopicRouteData;
import org.apache.rocketmq.remoting.exception.RemotingException;
import org.apache.rocketmq.srvutil.ServerUtil;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExtImpl;
import org.apache.rocketmq.tools.command.SubCommandException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class ConsumerStatusSubCommandTest {
private static DefaultMQAdminExt defaultMQAdminExt;
private static DefaultMQAdminExtImpl defaultMQAdminExtImpl;
private static MQClientInstance mqClientInstance = MQClientManager.getInstance().getAndCreateMQClientInstance(new ClientConfig());
private static MQClientAPIImpl mQClientAPIImpl;
@BeforeClass
public static void init() throws NoSuchFieldException, IllegalAccessException, InterruptedException, RemotingException, MQClientException, MQBrokerException {
mQClientAPIImpl = mock(MQClientAPIImpl.class);
defaultMQAdminExt = new DefaultMQAdminExt();
defaultMQAdminExtImpl = new DefaultMQAdminExtImpl(defaultMQAdminExt, 1000);
Field field = DefaultMQAdminExtImpl.class.getDeclaredField("mqClientInstance");
field.setAccessible(true);
field.set(defaultMQAdminExtImpl, mqClientInstance);
field = MQClientInstance.class.getDeclaredField("mQClientAPIImpl");
field.setAccessible(true);
field.set(mqClientInstance, mQClientAPIImpl);
field = DefaultMQAdminExt.class.getDeclaredField("defaultMQAdminExtImpl");
field.setAccessible(true);
field.set(defaultMQAdminExt, defaultMQAdminExtImpl);
TopicRouteData topicRouteData = new TopicRouteData();
List<BrokerData> brokerDatas = new ArrayList<>();
HashMap<Long, String> brokerAddrs = new HashMap<>();
brokerAddrs.put(1234l, "127.0.0.1:10911");
BrokerData brokerData = new BrokerData();
brokerData.setCluster("default-cluster");
brokerData.setBrokerName("default-broker");
brokerData.setBrokerAddrs(brokerAddrs);
brokerDatas.add(brokerData);
topicRouteData.setBrokerDatas(brokerDatas);
topicRouteData.setQueueDatas(new ArrayList<QueueData>());
topicRouteData.setFilterServerTable(new HashMap<String, List<String>>());
when(mQClientAPIImpl.getTopicRouteInfoFromNameServer(anyString(), anyLong())).thenReturn(topicRouteData);
ConsumerConnection consumerConnection = new ConsumerConnection();
consumerConnection.setConsumeType(ConsumeType.CONSUME_PASSIVELY);
consumerConnection.setMessageModel(MessageModel.CLUSTERING);
HashSet<Connection> connections = new HashSet<>();
connections.add(new Connection());
consumerConnection.setConnectionSet(connections);
consumerConnection.setSubscriptionTable(new ConcurrentHashMap<String, SubscriptionData>());
consumerConnection.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_FIRST_OFFSET);
when(mQClientAPIImpl.getConsumerConnectionList(anyString(), anyString(), anyLong())).thenReturn(consumerConnection);
ConsumerRunningInfo consumerRunningInfo = new ConsumerRunningInfo();
consumerRunningInfo.setJstack("test");
consumerRunningInfo.setMqTable(new TreeMap<MessageQueue, ProcessQueueInfo>());
consumerRunningInfo.setStatusTable(new TreeMap<String, ConsumeStatus>());
consumerRunningInfo.setSubscriptionSet(new TreeSet<SubscriptionData>());
when(mQClientAPIImpl.getConsumerRunningInfo(anyString(), anyString(), anyString(), anyBoolean(), anyLong())).thenReturn(consumerRunningInfo);
}
@AfterClass
public static void terminate() {
defaultMQAdminExt.shutdown();
}
@Ignore
@Test
public void testExecute() throws SubCommandException {
ConsumerStatusSubCommand cmd = new ConsumerStatusSubCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-g default-group", "-i cid_one"};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new PosixParser());
cmd.execute(commandLine, options, null);
}
}
```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.tools.command.consumer;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.rocketmq.common.MQVersion;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.common.protocol.body.Connection;
import org.apache.rocketmq.common.protocol.body.ConsumerConnection;
import org.apache.rocketmq.common.protocol.body.ConsumerRunningInfo;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.command.MQAdminStartup;
import org.apache.rocketmq.tools.command.SubCommand;
import org.apache.rocketmq.tools.command.SubCommandException;
public class ConsumerStatusSubCommand implements SubCommand {
public static void main(String[] args) {
System.setProperty(MixAll.NAMESRV_ADDR_PROPERTY, "127.0.0.1:9876");
MQAdminStartup.main(new String[] {new ConsumerStatusSubCommand().commandName(), "-g", "benchmark_consumer"});
}
@Override
public String commandName() {
return "consumerStatus";
}
@Override
public String commandDesc() {
return "Query consumer's internal data structure";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("g", "consumerGroup", true, "consumer group name");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("i", "clientId", true, "The consumer's client id");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("s", "jstack", false, "Run jstack command in the consumer progress");
opt.setRequired(false);
options.addOption(opt);
return options;
}
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
try {
defaultMQAdminExt.start();
String group = commandLine.getOptionValue('g').trim();
ConsumerConnection cc = defaultMQAdminExt.examineConsumerConnectionInfo(group);
boolean jstack = commandLine.hasOption('s');
if (!commandLine.hasOption('i')) {
int i = 1;
long now = System.currentTimeMillis();
final TreeMap<String/* clientId */, ConsumerRunningInfo> criTable = new TreeMap<String, ConsumerRunningInfo>();
for (Connection conn : cc.getConnectionSet()) {
try {
ConsumerRunningInfo consumerRunningInfo =
defaultMQAdminExt.getConsumerRunningInfo(group, conn.getClientId(), jstack);
if (consumerRunningInfo != null) {
criTable.put(conn.getClientId(), consumerRunningInfo);
String filePath = now + "/" + conn.getClientId();
MixAll.string2FileNotSafe(consumerRunningInfo.formatString(), filePath);
System.out.printf("%03d %-40s %-20s %s%n",
i++,
conn.getClientId(),
MQVersion.getVersionDesc(conn.getVersion()),
filePath);
}
} catch (Exception e) {
e.printStackTrace();
}
}
printRebalanceResult(criTable);
if (!criTable.isEmpty()) {
boolean subSame = ConsumerRunningInfo.analyzeSubscription(criTable);
boolean rebalanceOK = subSame && ConsumerRunningInfo.analyzeRebalance(criTable);
if (subSame) {
System.out.printf("%n%nSame subscription in the same group of consumer");
System.out.printf("%n%nRebalance %s%n", rebalanceOK ? "OK" : "Failed");
Iterator<Entry<String, ConsumerRunningInfo>> it = criTable.entrySet().iterator();
while (it.hasNext()) {
Entry<String, ConsumerRunningInfo> next = it.next();
String result =
ConsumerRunningInfo.analyzeProcessQueue(next.getKey(), next.getValue());
if (result.length() > 0) {
System.out.printf("%s", result);
}
}
} else {
System.out.printf("%n%nWARN: Different subscription in the same group of consumer!!!");
}
}
} else {
String clientId = commandLine.getOptionValue('i').trim();
ConsumerRunningInfo consumerRunningInfo =
defaultMQAdminExt.getConsumerRunningInfo(group, clientId, jstack);
if (consumerRunningInfo != null) {
System.out.printf("%s", consumerRunningInfo.formatString());
}
}
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
private void printRebalanceResult(TreeMap<String, ConsumerRunningInfo> criTable) {
if (criTable == null || criTable.isEmpty()) {
System.out.printf("Empty Result: criTable is empty.\n");
return;
}
Map<MessageQueue, String> rbResult = new TreeMap<MessageQueue, String>();
for (String cid : criTable.keySet()) {
for (MessageQueue messageQueue : criTable.get(cid).getMqTable().keySet()) {
rbResult.put(messageQueue, cid);
}
}
String format = "%30s|%20s|%10s| %s\n";
System.out.printf("--------------------------------------------------------------------------------------------------\n");
System.out.printf(format, "Topic","Broker Name", "QueueId", "ConsumerClientId");
System.out.printf("--------------------------------------------------------------------------------------------------\n");
for (Entry<MessageQueue, String> entry : rbResult.entrySet()) {
System.out.printf(format, entry.getKey().getTopic(), entry.getKey().getBrokerName(),
entry.getKey().getQueueId(), entry.getValue());
}
}
}
```
|
```package org.apache.rocketmq.tools.command.connection;
import java.lang.reflect.Field;
import java.util.HashSet;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.rocketmq.client.ClientConfig;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.impl.MQClientAPIImpl;
import org.apache.rocketmq.client.impl.MQClientManager;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.common.protocol.body.Connection;
import org.apache.rocketmq.common.protocol.body.ProducerConnection;
import org.apache.rocketmq.remoting.exception.RemotingConnectException;
import org.apache.rocketmq.remoting.exception.RemotingSendRequestException;
import org.apache.rocketmq.remoting.exception.RemotingTimeoutException;
import org.apache.rocketmq.srvutil.ServerUtil;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExtImpl;
import org.apache.rocketmq.tools.command.SubCommandException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class ProducerConnectionSubCommandTest {
private static DefaultMQAdminExt defaultMQAdminExt;
private static DefaultMQAdminExtImpl defaultMQAdminExtImpl;
private static MQClientInstance mqClientInstance = MQClientManager.getInstance().getAndCreateMQClientInstance(new ClientConfig());
private static MQClientAPIImpl mQClientAPIImpl;
@BeforeClass
public static void init() throws NoSuchFieldException, IllegalAccessException, InterruptedException, RemotingTimeoutException, MQClientException, RemotingSendRequestException, RemotingConnectException, MQBrokerException {
mQClientAPIImpl = mock(MQClientAPIImpl.class);
defaultMQAdminExt = new DefaultMQAdminExt();
defaultMQAdminExtImpl = new DefaultMQAdminExtImpl(defaultMQAdminExt, 1000);
Field field = DefaultMQAdminExtImpl.class.getDeclaredField("mqClientInstance");
field.setAccessible(true);
field.set(defaultMQAdminExtImpl, mqClientInstance);
field = MQClientInstance.class.getDeclaredField("mQClientAPIImpl");
field.setAccessible(true);
field.set(mqClientInstance, mQClientAPIImpl);
field = DefaultMQAdminExt.class.getDeclaredField("defaultMQAdminExtImpl");
field.setAccessible(true);
field.set(defaultMQAdminExt, defaultMQAdminExtImpl);
ProducerConnection producerConnection = new ProducerConnection();
Connection connection = new Connection();
connection.setClientAddr("127.0.0.1:9898");
connection.setClientId("PID_12345");
HashSet<Connection> connectionSet = new HashSet<>();
connectionSet.add(connection);
producerConnection.setConnectionSet(connectionSet);
when(mQClientAPIImpl.getProducerConnectionList(anyString(), anyString(), anyLong())).thenReturn(producerConnection);
}
@AfterClass
public static void terminate() {
defaultMQAdminExt.shutdown();
}
@Ignore
@Test
public void testExecute() throws SubCommandException {
ProducerConnectionSubCommand cmd = new ProducerConnectionSubCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-g default-producer-group", "-t unit-test"};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new PosixParser());
cmd.execute(commandLine, options, null);
}
}
```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.tools.command.connection;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.rocketmq.common.MQVersion;
import org.apache.rocketmq.common.protocol.body.Connection;
import org.apache.rocketmq.common.protocol.body.ProducerConnection;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.command.SubCommand;
import org.apache.rocketmq.tools.command.SubCommandException;
public class ProducerConnectionSubCommand implements SubCommand {
@Override
public String commandName() {
return "producerConnection";
}
@Override
public String commandDesc() {
return "Query producer's socket connection and client version";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("g", "producerGroup", true, "producer group name");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("t", "topic", true, "topic name");
opt.setRequired(true);
options.addOption(opt);
return options;
}
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
try {
defaultMQAdminExt.start();
String group = commandLine.getOptionValue('g').trim();
String topic = commandLine.getOptionValue('t').trim();
ProducerConnection pc = defaultMQAdminExt.examineProducerConnectionInfo(group, topic);
int i = 1;
for (Connection conn : pc.getConnectionSet()) {
System.out.printf("%04d %-32s %-22s %-8s %s%n",
i++,
conn.getClientId(),
conn.getClientAddr(),
conn.getLanguage(),
MQVersion.getVersionDesc(conn.getVersion())
);
}
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
}
```
|
```package org.apache.rocketmq.tools.command.topic;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.rocketmq.srvutil.ServerUtil;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class AllocateMQSubCommandTest {
@Test
public void testExecute() {
AllocateMQSubCommand cmd = new AllocateMQSubCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-t unit-test", "-i 127.0.0.1:10911"};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new PosixParser());
assertThat(commandLine.getOptionValue('t').trim()).isEqualTo("unit-test");
assertThat(commandLine.getOptionValue("i").trim()).isEqualTo("127.0.0.1:10911");
}
}```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.tools.command.topic;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.rocketmq.client.consumer.rebalance.AllocateMessageQueueAveragely;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.common.protocol.route.TopicRouteData;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.remoting.protocol.RemotingSerializable;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.command.SubCommand;
import org.apache.rocketmq.tools.command.SubCommandException;
public class AllocateMQSubCommand implements SubCommand {
@Override
public String commandName() {
return "allocateMQ";
}
@Override
public String commandDesc() {
return "Allocate MQ";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("t", "topic", true, "topic name");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("i", "ipList", true, "ipList");
opt.setRequired(true);
options.addOption(opt);
return options;
}
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt adminExt = new DefaultMQAdminExt(rpcHook);
adminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
try {
adminExt.start();
String topic = commandLine.getOptionValue('t').trim();
String ips = commandLine.getOptionValue('i').trim();
final String[] split = ips.split(",");
final List<String> ipList = new LinkedList<String>();
for (String ip : split) {
ipList.add(ip);
}
final TopicRouteData topicRouteData = adminExt.examineTopicRouteInfo(topic);
final Set<MessageQueue> mqs = MQClientInstance.topicRouteData2TopicSubscribeInfo(topic, topicRouteData);
final AllocateMessageQueueAveragely averagely = new AllocateMessageQueueAveragely();
RebalanceResult rr = new RebalanceResult();
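// Run the averaging strategy once per client IP to preview which queues each
// consumer instance would be assigned; "aa" is only a placeholder group name here.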
for (String i : ipList) {
final List<MessageQueue> mqResult = averagely.allocate("aa", i, new ArrayList<MessageQueue>(mqs), ipList);
rr.getResult().put(i, mqResult);
}
final String json = RemotingSerializable.toJson(rr, false);
System.out.printf("%s%n", json);
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
adminExt.shutdown();
}
}
}
```
|
```package org.apache.rocketmq.tools.command.topic;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.rocketmq.srvutil.ServerUtil;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class TopicStatusSubCommandTest {
@Test
public void testExecute() {
TopicStatusSubCommand cmd = new TopicStatusSubCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-t unit-test"};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new PosixParser());
assertThat(commandLine.getOptionValue('t').trim()).isEqualTo("unit-test");
}
}```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.tools.command.topic;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.rocketmq.common.UtilAll;
import org.apache.rocketmq.common.admin.TopicOffset;
import org.apache.rocketmq.common.admin.TopicStatsTable;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.command.SubCommand;
import org.apache.rocketmq.tools.command.SubCommandException;
public class TopicStatusSubCommand implements SubCommand {
@Override
public String commandName() {
return "topicStatus";
}
@Override
public String commandDesc() {
return "Examine topic Status info";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("t", "topic", true, "topic name");
opt.setRequired(true);
options.addOption(opt);
return options;
}
@Override
public void execute(final CommandLine commandLine, final Options options,
RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
try {
defaultMQAdminExt.start();
String topic = commandLine.getOptionValue('t').trim();
TopicStatsTable topicStatsTable = defaultMQAdminExt.examineTopicStats(topic);
List<MessageQueue> mqList = new LinkedList<MessageQueue>();
mqList.addAll(topicStatsTable.getOffsetTable().keySet());
Collections.sort(mqList);
System.out.printf("%-32s %-4s %-20s %-20s %s%n",
"#Broker Name",
"#QID",
"#Min Offset",
"#Max Offset",
"#Last Updated"
);
for (MessageQueue mq : mqList) {
TopicOffset topicOffset = topicStatsTable.getOffsetTable().get(mq);
String humanTimestamp = "";
if (topicOffset.getLastUpdateTimestamp() > 0) {
humanTimestamp = UtilAll.timeMillisToHumanString2(topicOffset.getLastUpdateTimestamp());
}
System.out.printf("%-32s %-4d %-20d %-20d %s%n",
UtilAll.frontStringAtLeast(mq.getBrokerName(), 32),
mq.getQueueId(),
topicOffset.getMinOffset(),
topicOffset.getMaxOffset(),
humanTimestamp
);
}
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
}
```
|
```package org.apache.rocketmq.common;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class MQVersionTest {
@Test
public void testGetVersionDesc() throws Exception {
String desc = "V3_0_0_SNAPSHOT";
assertThat(MQVersion.getVersionDesc(0)).isEqualTo(desc);
}
@Test
public void testGetVersionDesc_higherVersion() throws Exception {
String desc = "HIGHER_VERSION";
assertThat(MQVersion.getVersionDesc(Integer.MAX_VALUE)).isEqualTo(desc);
}
@Test
public void testValue2Version() throws Exception {
assertThat(MQVersion.value2Version(0)).isEqualTo(MQVersion.Version.V3_0_0_SNAPSHOT);
}
@Test
public void testValue2Version_HigherVersion() throws Exception {
assertThat(MQVersion.value2Version(Integer.MAX_VALUE)).isEqualTo(MQVersion.Version.HIGHER_VERSION);
}
}```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.common;
public class MQVersion {
public static final int CURRENT_VERSION = Version.V4_2_2_SNAPSHOT.ordinal();
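// Version codes are enum ordinals; any code newer than this build knows about
// is clamped to the last constant, HIGHER_VERSION.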
public static String getVersionDesc(int value) {
int length = Version.values().length;
if (value >= length) {
return Version.values()[length - 1].name();
}
return Version.values()[value].name();
}
public static Version value2Version(int value) {
int length = Version.values().length;
if (value >= length) {
return Version.values()[length - 1];
}
return Version.values()[value];
}
public enum Version {
V3_0_0_SNAPSHOT,
V3_0_0_ALPHA1,
V3_0_0_BETA1,
V3_0_0_BETA2,
V3_0_0_BETA3,
V3_0_0_BETA4,
V3_0_0_BETA5,
V3_0_0_BETA6_SNAPSHOT,
V3_0_0_BETA6,
V3_0_0_BETA7_SNAPSHOT,
V3_0_0_BETA7,
V3_0_0_BETA8_SNAPSHOT,
V3_0_0_BETA8,
V3_0_0_BETA9_SNAPSHOT,
V3_0_0_BETA9,
V3_0_0_FINAL,
V3_0_1_SNAPSHOT,
V3_0_1,
V3_0_2_SNAPSHOT,
V3_0_2,
V3_0_3_SNAPSHOT,
V3_0_3,
V3_0_4_SNAPSHOT,
V3_0_4,
V3_0_5_SNAPSHOT,
V3_0_5,
V3_0_6_SNAPSHOT,
V3_0_6,
V3_0_7_SNAPSHOT,
V3_0_7,
V3_0_8_SNAPSHOT,
V3_0_8,
V3_0_9_SNAPSHOT,
V3_0_9,
V3_0_10_SNAPSHOT,
V3_0_10,
V3_0_11_SNAPSHOT,
V3_0_11,
V3_0_12_SNAPSHOT,
V3_0_12,
V3_0_13_SNAPSHOT,
V3_0_13,
V3_0_14_SNAPSHOT,
V3_0_14,
V3_0_15_SNAPSHOT,
V3_0_15,
V3_1_0_SNAPSHOT,
V3_1_0,
V3_1_1_SNAPSHOT,
V3_1_1,
V3_1_2_SNAPSHOT,
V3_1_2,
V3_1_3_SNAPSHOT,
V3_1_3,
V3_1_4_SNAPSHOT,
V3_1_4,
V3_1_5_SNAPSHOT,
V3_1_5,
V3_1_6_SNAPSHOT,
V3_1_6,
V3_1_7_SNAPSHOT,
V3_1_7,
V3_1_8_SNAPSHOT,
V3_1_8,
V3_1_9_SNAPSHOT,
V3_1_9,
V3_2_0_SNAPSHOT,
V3_2_0,
V3_2_1_SNAPSHOT,
V3_2_1,
V3_2_2_SNAPSHOT,
V3_2_2,
V3_2_3_SNAPSHOT,
V3_2_3,
V3_2_4_SNAPSHOT,
V3_2_4,
V3_2_5_SNAPSHOT,
V3_2_5,
V3_2_6_SNAPSHOT,
V3_2_6,
V3_2_7_SNAPSHOT,
V3_2_7,
V3_2_8_SNAPSHOT,
V3_2_8,
V3_2_9_SNAPSHOT,
V3_2_9,
V3_3_1_SNAPSHOT,
V3_3_1,
V3_3_2_SNAPSHOT,
V3_3_2,
V3_3_3_SNAPSHOT,
V3_3_3,
V3_3_4_SNAPSHOT,
V3_3_4,
V3_3_5_SNAPSHOT,
V3_3_5,
V3_3_6_SNAPSHOT,
V3_3_6,
V3_3_7_SNAPSHOT,
V3_3_7,
V3_3_8_SNAPSHOT,
V3_3_8,
V3_3_9_SNAPSHOT,
V3_3_9,
V3_4_1_SNAPSHOT,
V3_4_1,
V3_4_2_SNAPSHOT,
V3_4_2,
V3_4_3_SNAPSHOT,
V3_4_3,
V3_4_4_SNAPSHOT,
V3_4_4,
V3_4_5_SNAPSHOT,
V3_4_5,
V3_4_6_SNAPSHOT,
V3_4_6,
V3_4_7_SNAPSHOT,
V3_4_7,
V3_4_8_SNAPSHOT,
V3_4_8,
V3_4_9_SNAPSHOT,
V3_4_9,
V3_5_1_SNAPSHOT,
V3_5_1,
V3_5_2_SNAPSHOT,
V3_5_2,
V3_5_3_SNAPSHOT,
V3_5_3,
V3_5_4_SNAPSHOT,
V3_5_4,
V3_5_5_SNAPSHOT,
V3_5_5,
V3_5_6_SNAPSHOT,
V3_5_6,
V3_5_7_SNAPSHOT,
V3_5_7,
V3_5_8_SNAPSHOT,
V3_5_8,
V3_5_9_SNAPSHOT,
V3_5_9,
V3_6_1_SNAPSHOT,
V3_6_1,
V3_6_2_SNAPSHOT,
V3_6_2,
V3_6_3_SNAPSHOT,
V3_6_3,
V3_6_4_SNAPSHOT,
V3_6_4,
V3_6_5_SNAPSHOT,
V3_6_5,
V3_6_6_SNAPSHOT,
V3_6_6,
V3_6_7_SNAPSHOT,
V3_6_7,
V3_6_8_SNAPSHOT,
V3_6_8,
V3_6_9_SNAPSHOT,
V3_6_9,
V3_7_1_SNAPSHOT,
V3_7_1,
V3_7_2_SNAPSHOT,
V3_7_2,
V3_7_3_SNAPSHOT,
V3_7_3,
V3_7_4_SNAPSHOT,
V3_7_4,
V3_7_5_SNAPSHOT,
V3_7_5,
V3_7_6_SNAPSHOT,
V3_7_6,
V3_7_7_SNAPSHOT,
V3_7_7,
V3_7_8_SNAPSHOT,
V3_7_8,
V3_7_9_SNAPSHOT,
V3_7_9,
V3_8_1_SNAPSHOT,
V3_8_1,
V3_8_2_SNAPSHOT,
V3_8_2,
V3_8_3_SNAPSHOT,
V3_8_3,
V3_8_4_SNAPSHOT,
V3_8_4,
V3_8_5_SNAPSHOT,
V3_8_5,
V3_8_6_SNAPSHOT,
V3_8_6,
V3_8_7_SNAPSHOT,
V3_8_7,
V3_8_8_SNAPSHOT,
V3_8_8,
V3_8_9_SNAPSHOT,
V3_8_9,
V3_9_1_SNAPSHOT,
V3_9_1,
V3_9_2_SNAPSHOT,
V3_9_2,
V3_9_3_SNAPSHOT,
V3_9_3,
V3_9_4_SNAPSHOT,
V3_9_4,
V3_9_5_SNAPSHOT,
V3_9_5,
V3_9_6_SNAPSHOT,
V3_9_6,
V3_9_7_SNAPSHOT,
V3_9_7,
V3_9_8_SNAPSHOT,
V3_9_8,
V3_9_9_SNAPSHOT,
V3_9_9,
V4_0_0_SNAPSHOT,
V4_0_0,
V4_0_1_SNAPSHOT,
V4_0_1,
V4_0_2_SNAPSHOT,
V4_0_2,
V4_0_3_SNAPSHOT,
V4_0_3,
V4_0_4_SNAPSHOT,
V4_0_4,
V4_0_5_SNAPSHOT,
V4_0_5,
V4_0_6_SNAPSHOT,
V4_0_6,
V4_0_7_SNAPSHOT,
V4_0_7,
V4_0_8_SNAPSHOT,
V4_0_8,
V4_0_9_SNAPSHOT,
V4_0_9,
V4_1_0_SNAPSHOT,
V4_1_0,
V4_1_1_SNAPSHOT,
V4_1_1,
V4_1_2_SNAPSHOT,
V4_1_2,
V4_1_3_SNAPSHOT,
V4_1_3,
V4_1_4_SNAPSHOT,
V4_1_4,
V4_1_5_SNAPSHOT,
V4_1_5,
V4_1_6_SNAPSHOT,
V4_1_6,
V4_1_7_SNAPSHOT,
V4_1_7,
V4_1_8_SNAPSHOT,
V4_1_8,
V4_1_9_SNAPSHOT,
V4_1_9,
V4_2_0_SNAPSHOT,
V4_2_0,
V4_2_1_SNAPSHOT,
V4_2_1,
V4_2_2_SNAPSHOT,
V4_2_2,
V4_2_3_SNAPSHOT,
V4_2_3,
V4_2_4_SNAPSHOT,
V4_2_4,
V4_2_5_SNAPSHOT,
V4_2_5,
V4_2_6_SNAPSHOT,
V4_2_6,
V4_2_7_SNAPSHOT,
V4_2_7,
V4_2_8_SNAPSHOT,
V4_2_8,
V4_2_9_SNAPSHOT,
V4_2_9,
V4_3_0_SNAPSHOT,
V4_3_0,
V4_3_1_SNAPSHOT,
V4_3_1,
V4_3_2_SNAPSHOT,
V4_3_2,
V4_3_3_SNAPSHOT,
V4_3_3,
V4_3_4_SNAPSHOT,
V4_3_4,
V4_3_5_SNAPSHOT,
V4_3_5,
V4_3_6_SNAPSHOT,
V4_3_6,
V4_3_7_SNAPSHOT,
V4_3_7,
V4_3_8_SNAPSHOT,
V4_3_8,
V4_3_9_SNAPSHOT,
V4_3_9,
V4_4_0_SNAPSHOT,
V4_4_0,
V4_4_1_SNAPSHOT,
V4_4_1,
V4_4_2_SNAPSHOT,
V4_4_2,
V4_4_3_SNAPSHOT,
V4_4_3,
V4_4_4_SNAPSHOT,
V4_4_4,
V4_4_5_SNAPSHOT,
V4_4_5,
V4_4_6_SNAPSHOT,
V4_4_6,
V4_4_7_SNAPSHOT,
V4_4_7,
V4_4_8_SNAPSHOT,
V4_4_8,
V4_4_9_SNAPSHOT,
V4_4_9,
V4_5_0_SNAPSHOT,
V4_5_0,
V4_5_1_SNAPSHOT,
V4_5_1,
V4_5_2_SNAPSHOT,
V4_5_2,
V4_5_3_SNAPSHOT,
V4_5_3,
V4_5_4_SNAPSHOT,
V4_5_4,
V4_5_5_SNAPSHOT,
V4_5_5,
V4_5_6_SNAPSHOT,
V4_5_6,
V4_5_7_SNAPSHOT,
V4_5_7,
V4_5_8_SNAPSHOT,
V4_5_8,
V4_5_9_SNAPSHOT,
V4_5_9,
V4_6_0_SNAPSHOT,
V4_6_0,
V4_6_1_SNAPSHOT,
V4_6_1,
V4_6_2_SNAPSHOT,
V4_6_2,
V4_6_3_SNAPSHOT,
V4_6_3,
V4_6_4_SNAPSHOT,
V4_6_4,
V4_6_5_SNAPSHOT,
V4_6_5,
V4_6_6_SNAPSHOT,
V4_6_6,
V4_6_7_SNAPSHOT,
V4_6_7,
V4_6_8_SNAPSHOT,
V4_6_8,
V4_6_9_SNAPSHOT,
V4_6_9,
V4_7_0_SNAPSHOT,
V4_7_0,
V4_7_1_SNAPSHOT,
V4_7_1,
V4_7_2_SNAPSHOT,
V4_7_2,
V4_7_3_SNAPSHOT,
V4_7_3,
V4_7_4_SNAPSHOT,
V4_7_4,
V4_7_5_SNAPSHOT,
V4_7_5,
V4_7_6_SNAPSHOT,
V4_7_6,
V4_7_7_SNAPSHOT,
V4_7_7,
V4_7_8_SNAPSHOT,
V4_7_8,
V4_7_9_SNAPSHOT,
V4_7_9,
V4_8_0_SNAPSHOT,
V4_8_0,
V4_8_1_SNAPSHOT,
V4_8_1,
V4_8_2_SNAPSHOT,
V4_8_2,
V4_8_3_SNAPSHOT,
V4_8_3,
V4_8_4_SNAPSHOT,
V4_8_4,
V4_8_5_SNAPSHOT,
V4_8_5,
V4_8_6_SNAPSHOT,
V4_8_6,
V4_8_7_SNAPSHOT,
V4_8_7,
V4_8_8_SNAPSHOT,
V4_8_8,
V4_8_9_SNAPSHOT,
V4_8_9,
V4_9_0_SNAPSHOT,
V4_9_0,
V4_9_1_SNAPSHOT,
V4_9_1,
V4_9_2_SNAPSHOT,
V4_9_2,
V4_9_3_SNAPSHOT,
V4_9_3,
V4_9_4_SNAPSHOT,
V4_9_4,
V4_9_5_SNAPSHOT,
V4_9_5,
V4_9_6_SNAPSHOT,
V4_9_6,
V4_9_7_SNAPSHOT,
V4_9_7,
V4_9_8_SNAPSHOT,
V4_9_8,
V4_9_9_SNAPSHOT,
V4_9_9,
V5_0_0_SNAPSHOT,
V5_0_0,
V5_0_1_SNAPSHOT,
V5_0_1,
V5_0_2_SNAPSHOT,
V5_0_2,
V5_0_3_SNAPSHOT,
V5_0_3,
V5_0_4_SNAPSHOT,
V5_0_4,
V5_0_5_SNAPSHOT,
V5_0_5,
V5_0_6_SNAPSHOT,
V5_0_6,
V5_0_7_SNAPSHOT,
V5_0_7,
V5_0_8_SNAPSHOT,
V5_0_8,
V5_0_9_SNAPSHOT,
V5_0_9,
V5_1_0_SNAPSHOT,
V5_1_0,
V5_1_1_SNAPSHOT,
V5_1_1,
V5_1_2_SNAPSHOT,
V5_1_2,
V5_1_3_SNAPSHOT,
V5_1_3,
V5_1_4_SNAPSHOT,
V5_1_4,
V5_1_5_SNAPSHOT,
V5_1_5,
V5_1_6_SNAPSHOT,
V5_1_6,
V5_1_7_SNAPSHOT,
V5_1_7,
V5_1_8_SNAPSHOT,
V5_1_8,
V5_1_9_SNAPSHOT,
V5_1_9,
V5_2_0_SNAPSHOT,
V5_2_0,
V5_2_1_SNAPSHOT,
V5_2_1,
V5_2_2_SNAPSHOT,
V5_2_2,
V5_2_3_SNAPSHOT,
V5_2_3,
V5_2_4_SNAPSHOT,
V5_2_4,
V5_2_5_SNAPSHOT,
V5_2_5,
V5_2_6_SNAPSHOT,
V5_2_6,
V5_2_7_SNAPSHOT,
V5_2_7,
V5_2_8_SNAPSHOT,
V5_2_8,
V5_2_9_SNAPSHOT,
V5_2_9,
V5_3_0_SNAPSHOT,
V5_3_0,
V5_3_1_SNAPSHOT,
V5_3_1,
V5_3_2_SNAPSHOT,
V5_3_2,
V5_3_3_SNAPSHOT,
V5_3_3,
V5_3_4_SNAPSHOT,
V5_3_4,
V5_3_5_SNAPSHOT,
V5_3_5,
V5_3_6_SNAPSHOT,
V5_3_6,
V5_3_7_SNAPSHOT,
V5_3_7,
V5_3_8_SNAPSHOT,
V5_3_8,
V5_3_9_SNAPSHOT,
V5_3_9,
V5_4_0_SNAPSHOT,
V5_4_0,
V5_4_1_SNAPSHOT,
V5_4_1,
V5_4_2_SNAPSHOT,
V5_4_2,
V5_4_3_SNAPSHOT,
V5_4_3,
V5_4_4_SNAPSHOT,
V5_4_4,
V5_4_5_SNAPSHOT,
V5_4_5,
V5_4_6_SNAPSHOT,
V5_4_6,
V5_4_7_SNAPSHOT,
V5_4_7,
V5_4_8_SNAPSHOT,
V5_4_8,
V5_4_9_SNAPSHOT,
V5_4_9,
V5_5_0_SNAPSHOT,
V5_5_0,
V5_5_1_SNAPSHOT,
V5_5_1,
V5_5_2_SNAPSHOT,
V5_5_2,
V5_5_3_SNAPSHOT,
V5_5_3,
V5_5_4_SNAPSHOT,
V5_5_4,
V5_5_5_SNAPSHOT,
V5_5_5,
V5_5_6_SNAPSHOT,
V5_5_6,
V5_5_7_SNAPSHOT,
V5_5_7,
V5_5_8_SNAPSHOT,
V5_5_8,
V5_5_9_SNAPSHOT,
V5_5_9,
V5_6_0_SNAPSHOT,
V5_6_0,
V5_6_1_SNAPSHOT,
V5_6_1,
V5_6_2_SNAPSHOT,
V5_6_2,
V5_6_3_SNAPSHOT,
V5_6_3,
V5_6_4_SNAPSHOT,
V5_6_4,
V5_6_5_SNAPSHOT,
V5_6_5,
V5_6_6_SNAPSHOT,
V5_6_6,
V5_6_7_SNAPSHOT,
V5_6_7,
V5_6_8_SNAPSHOT,
V5_6_8,
V5_6_9_SNAPSHOT,
V5_6_9,
V5_7_0_SNAPSHOT,
V5_7_0,
V5_7_1_SNAPSHOT,
V5_7_1,
V5_7_2_SNAPSHOT,
V5_7_2,
V5_7_3_SNAPSHOT,
V5_7_3,
V5_7_4_SNAPSHOT,
V5_7_4,
V5_7_5_SNAPSHOT,
V5_7_5,
V5_7_6_SNAPSHOT,
V5_7_6,
V5_7_7_SNAPSHOT,
V5_7_7,
V5_7_8_SNAPSHOT,
V5_7_8,
V5_7_9_SNAPSHOT,
V5_7_9,
V5_8_0_SNAPSHOT,
V5_8_0,
V5_8_1_SNAPSHOT,
V5_8_1,
V5_8_2_SNAPSHOT,
V5_8_2,
V5_8_3_SNAPSHOT,
V5_8_3,
V5_8_4_SNAPSHOT,
V5_8_4,
V5_8_5_SNAPSHOT,
V5_8_5,
V5_8_6_SNAPSHOT,
V5_8_6,
V5_8_7_SNAPSHOT,
V5_8_7,
V5_8_8_SNAPSHOT,
V5_8_8,
V5_8_9_SNAPSHOT,
V5_8_9,
V5_9_0_SNAPSHOT,
V5_9_0,
V5_9_1_SNAPSHOT,
V5_9_1,
V5_9_2_SNAPSHOT,
V5_9_2,
V5_9_3_SNAPSHOT,
V5_9_3,
V5_9_4_SNAPSHOT,
V5_9_4,
V5_9_5_SNAPSHOT,
V5_9_5,
V5_9_6_SNAPSHOT,
V5_9_6,
V5_9_7_SNAPSHOT,
V5_9_7,
V5_9_8_SNAPSHOT,
V5_9_8,
V5_9_9_SNAPSHOT,
V5_9_9,
HIGHER_VERSION
}
}
```
|
```package io.openmessaging.rocketmq.consumer;
import io.openmessaging.BytesMessage;
import io.openmessaging.Message;
import io.openmessaging.MessageHeader;
import io.openmessaging.MessageListener;
import io.openmessaging.MessagingAccessPoint;
import io.openmessaging.MessagingAccessPointFactory;
import io.openmessaging.OMS;
import io.openmessaging.PushConsumer;
import io.openmessaging.ReceivedMessageContext;
import io.openmessaging.rocketmq.domain.NonStandardKeys;
import java.lang.reflect.Field;
import java.util.Collections;
import org.apache.rocketmq.client.consumer.DefaultMQPushConsumer;
import org.apache.rocketmq.client.consumer.listener.MessageListenerConcurrently;
import org.apache.rocketmq.common.message.MessageExt;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class PushConsumerImplTest {
private PushConsumer consumer;
@Mock
private DefaultMQPushConsumer rocketmqPushConsumer;
@Before
public void init() throws NoSuchFieldException, IllegalAccessException {
final MessagingAccessPoint messagingAccessPoint = MessagingAccessPointFactory
.getMessagingAccessPoint("openmessaging:rocketmq://IP1:9876,IP2:9876/namespace");
consumer = messagingAccessPoint.createPushConsumer(
OMS.newKeyValue().put(NonStandardKeys.CONSUMER_GROUP, "TestGroup"));
Field field = PushConsumerImpl.class.getDeclaredField("rocketmqPushConsumer");
field.setAccessible(true);
DefaultMQPushConsumer innerConsumer = (DefaultMQPushConsumer) field.get(consumer);
field.set(consumer, rocketmqPushConsumer); // replace the real inner consumer with the mock
when(rocketmqPushConsumer.getMessageListener()).thenReturn(innerConsumer.getMessageListener());
messagingAccessPoint.startup();
consumer.startup();
}
@Test
public void testConsumeMessage() {
final byte[] testBody = new byte[] {'a', 'b'};
MessageExt consumedMsg = new MessageExt();
consumedMsg.setMsgId("NewMsgId");
consumedMsg.setBody(testBody);
consumedMsg.putUserProperty(NonStandardKeys.MESSAGE_DESTINATION, "TOPIC");
consumedMsg.setTopic("HELLO_QUEUE");
consumer.attachQueue("HELLO_QUEUE", new MessageListener() {
@Override
public void onMessage(final Message message, final ReceivedMessageContext context) {
assertThat(message.headers().getString(MessageHeader.MESSAGE_ID)).isEqualTo("NewMsgId");
assertThat(((BytesMessage) message).getBody()).isEqualTo(testBody);
context.ack();
}
});
((MessageListenerConcurrently) rocketmqPushConsumer
.getMessageListener()).consumeMessage(Collections.singletonList(consumedMsg), null);
}
}```
|
Please help me generate a test for this class.
|
```package io.openmessaging.rocketmq.consumer;
import io.openmessaging.BytesMessage;
import io.openmessaging.KeyValue;
import io.openmessaging.MessageListener;
import io.openmessaging.OMS;
import io.openmessaging.PropertyKeys;
import io.openmessaging.PushConsumer;
import io.openmessaging.ReceivedMessageContext;
import io.openmessaging.exception.OMSRuntimeException;
import io.openmessaging.rocketmq.config.ClientConfig;
import io.openmessaging.rocketmq.domain.NonStandardKeys;
import io.openmessaging.rocketmq.utils.BeanUtils;
import io.openmessaging.rocketmq.utils.OMSUtil;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.rocketmq.client.consumer.DefaultMQPushConsumer;
import org.apache.rocketmq.client.consumer.listener.ConsumeConcurrentlyContext;
import org.apache.rocketmq.client.consumer.listener.ConsumeConcurrentlyStatus;
import org.apache.rocketmq.client.consumer.listener.MessageListenerConcurrently;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.common.message.MessageExt;
public class PushConsumerImpl implements PushConsumer {
private final DefaultMQPushConsumer rocketmqPushConsumer;
private final KeyValue properties;
private boolean started = false;
private final Map<String, MessageListener> subscribeTable = new ConcurrentHashMap<>();
private final ClientConfig clientConfig;
public PushConsumerImpl(final KeyValue properties) {
this.rocketmqPushConsumer = new DefaultMQPushConsumer();
this.properties = properties;
this.clientConfig = BeanUtils.populate(properties, ClientConfig.class);
String accessPoints = clientConfig.getOmsAccessPoints();
if (accessPoints == null || accessPoints.isEmpty()) {
throw new OMSRuntimeException("-1", "OMS AccessPoints is null or empty.");
}
this.rocketmqPushConsumer.setNamesrvAddr(accessPoints.replace(',', ';'));
String consumerGroup = clientConfig.getRmqConsumerGroup();
if (null == consumerGroup || consumerGroup.isEmpty()) {
throw new OMSRuntimeException("-1", "Consumer Group is necessary for RocketMQ, please set it.");
}
this.rocketmqPushConsumer.setConsumerGroup(consumerGroup);
this.rocketmqPushConsumer.setMaxReconsumeTimes(clientConfig.getRmqMaxRedeliveryTimes());
this.rocketmqPushConsumer.setConsumeTimeout(clientConfig.getRmqMessageConsumeTimeout());
this.rocketmqPushConsumer.setConsumeThreadMax(clientConfig.getRmqMaxConsumeThreadNums());
this.rocketmqPushConsumer.setConsumeThreadMin(clientConfig.getRmqMinConsumeThreadNums());
String consumerId = OMSUtil.buildInstanceName();
this.rocketmqPushConsumer.setInstanceName(consumerId);
properties.put(PropertyKeys.CONSUMER_ID, consumerId);
this.rocketmqPushConsumer.registerMessageListener(new MessageListenerImpl());
}
@Override
public KeyValue properties() {
return properties;
}
@Override
public void resume() {
this.rocketmqPushConsumer.resume();
}
@Override
public void suspend() {
this.rocketmqPushConsumer.suspend();
}
@Override
public boolean isSuspended() {
return this.rocketmqPushConsumer.getDefaultMQPushConsumerImpl().isPause();
}
@Override
public PushConsumer attachQueue(final String queueName, final MessageListener listener) {
this.subscribeTable.put(queueName, listener);
try {
this.rocketmqPushConsumer.subscribe(queueName, "*");
} catch (MQClientException e) {
throw new OMSRuntimeException("-1", String.format("RocketMQ push consumer can't attach to %s.", queueName));
}
return this;
}
@Override
public synchronized void startup() {
if (!started) {
try {
this.rocketmqPushConsumer.start();
} catch (MQClientException e) {
throw new OMSRuntimeException("-1", e);
}
}
this.started = true;
}
@Override
public synchronized void shutdown() {
if (this.started) {
this.rocketmqPushConsumer.shutdown();
}
this.started = false;
}
class MessageListenerImpl implements MessageListenerConcurrently {
@Override
public ConsumeConcurrentlyStatus consumeMessage(List<MessageExt> rmqMsgList,
ConsumeConcurrentlyContext contextRMQ) {
MessageExt rmqMsg = rmqMsgList.get(0);
BytesMessage omsMsg = OMSUtil.msgConvert(rmqMsg);
MessageListener listener = PushConsumerImpl.this.subscribeTable.get(rmqMsg.getTopic());
if (listener == null) {
throw new OMSRuntimeException("-1",
String.format("The topic/queue %s isn't attached to this consumer", rmqMsg.getTopic()));
}
final KeyValue contextProperties = OMS.newKeyValue();
final CountDownLatch sync = new CountDownLatch(1);
contextProperties.put(NonStandardKeys.MESSAGE_CONSUME_STATUS, ConsumeConcurrentlyStatus.RECONSUME_LATER.name());
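// The status defaults to RECONSUME_LATER; ack() releases the latch and records
// CONSUME_SUCCESS (or the status passed via ack(properties)), so an un-acked
// message is redelivered once the consume timeout below expires.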
ReceivedMessageContext context = new ReceivedMessageContext() {
@Override
public KeyValue properties() {
return contextProperties;
}
@Override
public void ack() {
sync.countDown();
contextProperties.put(NonStandardKeys.MESSAGE_CONSUME_STATUS,
ConsumeConcurrentlyStatus.CONSUME_SUCCESS.name());
}
@Override
public void ack(final KeyValue properties) {
sync.countDown();
contextProperties.put(NonStandardKeys.MESSAGE_CONSUME_STATUS,
properties.getString(NonStandardKeys.MESSAGE_CONSUME_STATUS));
}
};
long begin = System.currentTimeMillis();
listener.onMessage(omsMsg, context);
long costs = System.currentTimeMillis() - begin;
long timeoutMills = clientConfig.getRmqMessageConsumeTimeout() * 60 * 1000;
try {
sync.await(Math.max(0, timeoutMills - costs), TimeUnit.MILLISECONDS);
} catch (InterruptedException ignore) {
}
return ConsumeConcurrentlyStatus.valueOf(contextProperties.getString(NonStandardKeys.MESSAGE_CONSUME_STATUS));
}
}
}
```
|
```package org.apache.rocketmq.test.util.data.collect;
public interface DataFilter {
}
```
|
Please help me generate a test for this class.
|
```package com.xiaojukeji.carrera.cproxy.actions.hdfs;
import com.xiaojukeji.carrera.cproxy.consumer.UpstreamJob;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.InetAddress;
import java.util.EnumSet;
import java.util.Random;
import java.util.concurrent.ConcurrentLinkedQueue;
public class DataFile {
private static final Logger LOGGER = LoggerFactory.getLogger(DataFile.class);
protected DataDir dir;
protected String fileName;
protected long lastModified = 0L;
protected long byteSize;
protected String prefix;
protected long pid;
protected ConcurrentLinkedQueue<UpstreamJob> upstreamJobs = new ConcurrentLinkedQueue<>();
protected volatile boolean closed = false;
protected FSDataOutputStream stream;
protected DataFileManager dataFileManager;
protected volatile int closeInvokedCount = 0;
public DataFile(DataDir dir, String prefix, long pid, DataFileManager dataFileManager) {
this.dir = dir;
this.prefix = prefix;
this.pid = pid;
this.fileName = prefix + "-" + getNodeIDFromHostname() + "-" + pid + "." + System.currentTimeMillis();
this.dataFileManager = dataFileManager;
}
public boolean close() {
boolean flag = false;
closeInvokedCount++;
try {
flush();
stream.close();
flag = true;
closed = true;
} catch (Exception e) {
LOGGER.error("Close file error, dir:" + dir + ", file:" + fileName, e);
}
if (flag) {
dir.deleteDataFile(pid);
LOGGER.info("Close file success,file:" + getPath());
} else {
LOGGER.info("Close file failed, then add CloseFailStream,file:" + getPath());
dataFileManager.addCloseFailedStream(this);
}
return flag;
}
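// Unlike close(), directClose() always removes this file from its DataDir,
// even when flushing or closing the stream fails (the failed stream is still
// handed to the DataFileManager for a later retry).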
public boolean directClose() {
boolean flag = false;
closeInvokedCount++;
try {
flush();
stream.close();
flag = true;
closed = true;
} catch (Exception e) {
LOGGER.error("Close file error, dir:" + dir + ", file:" + fileName, e);
}
if (flag) {
LOGGER.info("Close file success,file:" + getPath());
} else {
LOGGER.info("Close file failed, then add CloseFailStream,file:" + getPath());
dataFileManager.addCloseFailedStream(this);
}
dir.deleteDataFile(pid);
return flag;
}
public String getPath() {
if (dir.getPath().endsWith("/")) {
return dir.getPath() + fileName;
} else {
return dir.getPath() + "/" + fileName;
}
}
public void write(byte[] b, int off, int len) throws Exception {
this.lastModified = System.currentTimeMillis();
dir.setLastModified(lastModified);
stream.write(b, off, len);
byteSize = byteSize + len;
}
public void flush() throws IOException {
if (closed) {
LOGGER.warn("DateFile has closed, no need to flush!");
return;
}
if (stream instanceof HdfsDataOutputStream) {
((HdfsDataOutputStream) stream).hsync(EnumSet.of(HdfsDataOutputStream.SyncFlag.UPDATE_LENGTH));
} else {
stream.hsync();
}
}
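// Derive a numeric node id from the local hostname (the digits of its last
// dash-separated token); fall back to a random id below 100 if none is found.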
private String getNodeIDFromHostname() {
try {
String hostName = InetAddress.getLocalHost().getHostName();
String[] split1 = hostName.split("\\.")[0].split("-");
String ret = split1[split1.length - 1].replaceAll("\\D+", "");
if (StringUtils.isEmpty(ret)) {
throw new Exception("empty nodeId");
}
return ret;
} catch (Exception e) {
Random random = new Random();
String ret = "" + random.nextInt(100);
LOGGER.error("exception when get nodeId {}, use randomID {}", e, ret);
return ret;
}
}
public void addUpstreamJob(UpstreamJob job) {
upstreamJobs.add(job);
}
public FSDataOutputStream getOut() {
return stream;
}
public void setOut(FSDataOutputStream out) {
this.stream = out;
}
public String toString() {
return "{file:" + fileName + ",lastModified:" + lastModified + "}";
}
public long getLastModified() {
return lastModified;
}
public DataDir getDir() {
return dir;
}
public String getPrefix() {
return prefix;
}
public Long getPid() {
return pid;
}
public FSDataOutputStream getStream() {
return stream;
}
public void setStream(FSDataOutputStream stream) {
this.stream = stream;
}
public long getByteSize() {
return byteSize;
}
public String getFileName() {
return fileName;
}
public ConcurrentLinkedQueue<UpstreamJob> getUpstreamJobs() {
return upstreamJobs;
}
public int getCloseInvokedCount() {
return closeInvokedCount;
}
}
```
|
```package org.apache.rocketmq.test.client.consumer.broadcast;
import org.apache.log4j.Logger;
import org.apache.rocketmq.test.base.BaseConf;
import org.apache.rocketmq.test.client.rmq.RMQBroadCastConsumer;
import org.apache.rocketmq.test.factory.ConsumerFactory;
import org.apache.rocketmq.test.listener.AbstractListener;
public class BaseBroadCastIT extends BaseConf {
private static Logger logger = Logger.getLogger(BaseBroadCastIT.class);
public static RMQBroadCastConsumer getBroadCastConsumer(String nsAddr, String topic,
String subExpression,
AbstractListener listener) {
String consumerGroup = initConsumerGroup();
return getBroadCastConsumer(nsAddr, consumerGroup, topic, subExpression, listener);
}
public static RMQBroadCastConsumer getBroadCastConsumer(String nsAddr, String consumerGroup,
String topic, String subExpression,
AbstractListener listener) {
RMQBroadCastConsumer consumer = ConsumerFactory.getRMQBroadCastConsumer(nsAddr,
consumerGroup, topic, subExpression, listener);
consumer.setDebug();
mqClients.add(consumer);
logger.info(String.format("consumer[%s] started, topic[%s], subExpression[%s]", consumerGroup,
topic, subExpression));
return consumer;
}
public void printSeparator() {
for (int i = 0; i < 3; i++) {
logger.info(
"<<<<<<<<================================================================================>>>>>>>>");
}
}
}
```
|
Please help me generate a test for this class.
|
```package com.didi.carrera.console.web.controller.bo;
import com.alibaba.fastjson.annotation.JSONField;
import com.didi.carrera.console.dao.dict.ConsumeSubscriptionApiType;
import com.didi.carrera.console.dao.dict.ConsumeSubscriptionBigDataType;
import com.didi.carrera.console.dao.dict.ConsumeSubscriptionConsumeType;
import com.didi.carrera.console.dao.dict.ConsumeSubscriptionMsgPushType;
import com.didi.carrera.console.dao.dict.ConsumeSubscriptionMsgType;
import com.didi.carrera.console.dao.dict.IsEnable;
import com.didi.carrera.console.dao.model.ConsumeSubscription;
import com.didi.carrera.console.web.controller.validator.AnotherFieldEqualsSpecifiedValue;
import com.google.common.collect.Lists;
import com.xiaojukeji.carrera.config.Actions;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.validator.constraints.Range;
import org.springframework.beans.BeanUtils;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import java.util.List;
import java.util.Map;
@AnotherFieldEqualsSpecifiedValue.List({
@AnotherFieldEqualsSpecifiedValue(fieldName = "apiType", fieldValue = "1", dependFieldName = "msgType", message = "msgType must not be empty"),
@AnotherFieldEqualsSpecifiedValue(fieldName = "apiType", fieldValue = "1", dependFieldName = "consumeType", message = "consumeType must not be empty"),
@AnotherFieldEqualsSpecifiedValue(fieldName = "apiType", fieldValue = "1", dependFieldName = "enableOrder", message = "enableOrder must not be empty"),
@AnotherFieldEqualsSpecifiedValue(fieldName = "msgType", fieldValue = "1", dependFieldName = "enableGroovy", message = "enableGroovy must not be empty"),
@AnotherFieldEqualsSpecifiedValue(fieldName = "msgType", fieldValue = "1", dependFieldName = "enableTransit", message = "enableTransit must not be empty"),
@AnotherFieldEqualsSpecifiedValue(fieldName = "enableGroovy", fieldValue = "0", dependFieldName = "groovy", message = "groovy must not be empty"),
@AnotherFieldEqualsSpecifiedValue(fieldName = "enableTransit", fieldValue = "0", dependFieldName = "transit", message = "transit must not be empty"),
@AnotherFieldEqualsSpecifiedValue(fieldName = "enableOrder", fieldValue = "0", dependFieldName = "orderKey", message = "orderKey must not be empty"),
@AnotherFieldEqualsSpecifiedValue(fieldName = "consumeType", fieldValue = "2", dependFieldName = "urls", message = "urls must not be empty"),
@AnotherFieldEqualsSpecifiedValue(fieldName = "consumeType", fieldValue = "2", dependFieldName = "httpMethod", message = "httpMethod must not be empty"),
@AnotherFieldEqualsSpecifiedValue(fieldName = "consumeType", fieldValue = "2", dependFieldName = "pushMaxConcurrency", message = "pushMaxConcurrency must not be empty"),
@AnotherFieldEqualsSpecifiedValue(fieldName = "consumeType", fieldValue = "3", dependFieldName = "bigDataType", message = "bigDataType must not be empty"),
@AnotherFieldEqualsSpecifiedValue(fieldName = "consumeType", fieldValue = "3", dependFieldName = "bigDataConfig", message = "bigDataConfig must not be empty")
})
public class ConsumeSubscriptionBaseBo extends BaseOrderBo {
@NotNull(message = "subId不能为空")
private Long subId;
@NotNull(message = "消费组id不能为空")
private Long groupId;
private String groupName;
@NotNull(message = "topicId不能为空")
private Long topicId;
private String topicName;
@NotNull(message = "消费限流不能为空")
@Min(value = 1, message = "消费限流必须大于0")
private Double maxTps;
@NotNull(message = "是否接收压测流量不能为空")
@Range(min = 0, max = 1, message = "是否接收压测流量只能是0不接收 1接收")
private Byte pressureTraffic;
@NotNull(message = "报警类型不能为空")
@Range(min = 0, max = 1, message = "报警类型只能是继承消费组配置或单独配置")
private Byte alarmType;
private Byte alarmIsEnable = 0;
private Integer alarmMsgLag = 10000;
private Integer alarmDelayTime = 300000;
@NotNull(message = "是否启用lowlevel不能为空")
@Range(min = 1, max = 2, message = "是否启用lowlevel只能是1禁用 2启用")
private Byte apiType;
private Integer consumeTimeout = 1000;
private Integer errorRetryTimes = 3;
private List<Integer> retryIntervals = Lists.newArrayList(50, 100, 150);
@Range(min = 1, max = 3, message = "消息类型 1Json 2Text 3Bytes")
private Byte msgType = ConsumeSubscriptionMsgType.BINARY.getIndex();
@Range(min = 0, max = 1, message = "是否启用Groovy只能是 0启用 1禁用")
private Byte enableGroovy = IsEnable.DISABLE.getIndex();
@Range(min = 0, max = 1, message = "是否启用Transit只能是 0启用 1禁用")
private Byte enableTransit = IsEnable.DISABLE.getIndex();
private String groovy;
private Map<String, String> transit;
@Range(min = 0, max = 1, message = "是否启用顺序消费只能是 0启用 1禁用")
private Byte enableOrder = IsEnable.DISABLE.getIndex();
private String orderKey;
@Range(min = 1, max = 3, message = "消费类型只能是 1SDK 2HTTP 3直写第三方组件")
private Byte consumeType = ConsumeSubscriptionConsumeType.SDK.getIndex();
private List<String> urls;
private Byte httpMethod;
private Map<String, String> httpHeaders;
private Map<String, String> httpQueryParams;
private Byte msgPushType;
private String httpToken;
private Integer pushMaxConcurrency;
private Byte bigDataType;
private String bigDataConfig;
private Map<String, String> extraParams;
private Map<String, String> operationParams;
private Boolean useNonBlockAsync = false;
public Long getSubId() {
return subId;
}
public void setSubId(Long subId) {
this.subId = subId;
}
public Long getGroupId() {
return groupId;
}
public void setGroupId(Long groupId) {
this.groupId = groupId;
}
public String getGroupName() {
return groupName;
}
public void setGroupName(String groupName) {
this.groupName = groupName;
}
public Long getTopicId() {
return topicId;
}
public void setTopicId(Long topicId) {
this.topicId = topicId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public Double getMaxTps() {
return maxTps;
}
public void setMaxTps(Double maxTps) {
this.maxTps = maxTps;
}
public Byte getAlarmType() {
return alarmType;
}
public void setAlarmType(Byte alarmType) {
this.alarmType = alarmType;
}
public Byte getAlarmIsEnable() {
return alarmIsEnable;
}
public void setAlarmIsEnable(Byte alarmIsEnable) {
this.alarmIsEnable = alarmIsEnable;
}
public Integer getAlarmMsgLag() {
return alarmMsgLag;
}
public void setAlarmMsgLag(Integer alarmMsgLag) {
this.alarmMsgLag = alarmMsgLag;
}
public Integer getAlarmDelayTime() {
return alarmDelayTime;
}
public void setAlarmDelayTime(Integer alarmDelayTime) {
this.alarmDelayTime = alarmDelayTime;
}
public Byte getApiType() {
return apiType;
}
public void setApiType(Byte apiType) {
this.apiType = apiType;
}
public Integer getConsumeTimeout() {
return consumeTimeout;
}
public void setConsumeTimeout(Integer consumeTimeout) {
this.consumeTimeout = consumeTimeout;
}
public Integer getErrorRetryTimes() {
return errorRetryTimes;
}
public void setErrorRetryTimes(Integer errorRetryTimes) {
this.errorRetryTimes = errorRetryTimes;
}
public List<Integer> getRetryIntervals() {
return retryIntervals;
}
public void setRetryIntervals(List<Integer> retryIntervals) {
this.retryIntervals = retryIntervals;
}
public Byte getMsgType() {
return msgType;
}
public void setMsgType(Byte msgType) {
this.msgType = msgType;
}
public Byte getEnableGroovy() {
return enableGroovy;
}
public void setEnableGroovy(Byte enableGroovy) {
this.enableGroovy = enableGroovy;
}
public Byte getEnableTransit() {
return enableTransit;
}
public void setEnableTransit(Byte enableTransit) {
this.enableTransit = enableTransit;
}
public String getGroovy() {
return groovy;
}
public void setGroovy(String groovy) {
this.groovy = groovy;
}
public Map<String, String> getTransit() {
return transit;
}
public void setTransit(Map<String, String> transit) {
this.transit = transit;
}
public Byte getEnableOrder() {
return enableOrder;
}
public void setEnableOrder(Byte enableOrder) {
this.enableOrder = enableOrder;
}
public String getOrderKey() {
return orderKey;
}
public void setOrderKey(String orderKey) {
this.orderKey = orderKey;
}
public Byte getConsumeType() {
return consumeType;
}
public void setConsumeType(Byte consumeType) {
this.consumeType = consumeType;
}
public List<String> getUrls() {
return urls;
}
public void setUrls(List<String> urls) {
this.urls = urls;
}
public Byte getHttpMethod() {
return httpMethod;
}
public void setHttpMethod(Byte httpMethod) {
this.httpMethod = httpMethod;
}
public Map<String, String> getHttpHeaders() {
return httpHeaders;
}
public void setHttpHeaders(Map<String, String> httpHeaders) {
this.httpHeaders = httpHeaders;
}
public Map<String, String> getHttpQueryParams() {
return httpQueryParams;
}
public void setHttpQueryParams(Map<String, String> httpQueryParams) {
this.httpQueryParams = httpQueryParams;
}
public Byte getMsgPushType() {
return msgPushType;
}
public void setMsgPushType(Byte msgPushType) {
this.msgPushType = msgPushType;
}
public String getHttpToken() {
return httpToken;
}
public void setHttpToken(String httpToken) {
this.httpToken = httpToken;
}
public Integer getPushMaxConcurrency() {
return pushMaxConcurrency;
}
public void setPushMaxConcurrency(Integer pushMaxConcurrency) {
this.pushMaxConcurrency = pushMaxConcurrency;
}
public Byte getBigDataType() {
return bigDataType;
}
public void setBigDataType(Byte bigDataType) {
this.bigDataType = bigDataType;
}
public String getBigDataConfig() {
return bigDataConfig;
}
public void setBigDataConfig(String bigDataConfig) {
this.bigDataConfig = bigDataConfig;
}
public Map<String, String> getExtraParams() {
return extraParams;
}
public void setExtraParams(Map<String, String> extraParams) {
this.extraParams = extraParams;
}
public Byte getPressureTraffic() {
return pressureTraffic;
}
public void setPressureTraffic(Byte pressureTraffic) {
this.pressureTraffic = pressureTraffic;
}
public Map<String, String> getOperationParams() {
return operationParams;
}
public void setOperationParams(Map<String, String> operationParams) {
this.operationParams = operationParams;
}
@JSONField(serialize = false)
public boolean isUseNonBlockAsync() {
return useNonBlockAsync;
}
public void setUseNonBlockAsync(boolean useNonBlockAsync) {
this.useNonBlockAsync = useNonBlockAsync;
}
@JSONField(serialize = false)
public boolean isModify() {
return subId != null && subId > 0;
}
public ConsumeSubscription buildConsumeSubscription() {
ConsumeSubscription subscription = new ConsumeSubscription();
BeanUtils.copyProperties(this, subscription);
subscription.setId(this.getSubId());
subscription.setSubExtraParams(this.getExtraParams());
subscription.setSubActions(this.buildActions());
subscription.setSubHttpHeaders(this.getHttpHeaders());
subscription.setSubHttpQueryParams(this.getHttpQueryParams());
subscription.setSubRetryIntervals(this.getRetryIntervals());
subscription.setSubTransit(this.getTransit());
subscription.setSubUrls(this.getUrls());
return subscription;
}
private void validate() {
if (msgType != null && (msgType == ConsumeSubscriptionMsgType.TEXT.getIndex() || msgType == ConsumeSubscriptionMsgType.BINARY.getIndex()) && IsEnable.isEnable(enableOrder) && StringUtils.isNotEmpty(orderKey) && !ORDER_BY_QID_KEY.equalsIgnoreCase(orderKey) && !ORDER_BY_MSGKEY_KEY.equalsIgnoreCase(orderKey)) {
throw new RuntimeException("JsonPath顺序消费只能消息格式为Json类型的使用");
}
}
/**
 * Rules: when MsgType = JSON and Transit, GroovyFilter, FormParams2 or QueryParams are
 * in play, the action list starts with the Json action; when MsgType != JSON it starts
 * with Async.
 *
 * @return the ordered list of action names for this subscription
 */
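// Illustrative results (derived from the rules above; assumes a high-level apiType):
//   BINARY msgType, SDK consumer, no transit/groovy -> [ASYNC, PULL_SERVER]
//   JSON msgType, SDK consumer, transit enabled     -> [ASYNC, JSON, TRANSIT, PULL_SERVER]
//   BIG_DATA consumer with bigDataType = HDFS       -> [ASYNC, HDFS]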
public List<String> buildActions() {
validate();
List<String> actionList = Lists.newArrayList();
if (this.getApiType() == ConsumeSubscriptionApiType.LOW_LEVEL.getIndex()) {
actionList.add(Actions.LowLevel);
actionList.add(Actions.PULL_SERVER);
return actionList;
}
if (containsHdfsAction()) {
actionList.add(Actions.ASYNC);
actionList.add(Actions.HDFS);
return actionList;
}
if (containsHbaseAction()) {
actionList.add(Actions.ASYNC);
actionList.add(Actions.HBASE);
return actionList;
}
// head of the action list
if (containsJsonAction()) {
if (actionIsEnableOrderByJson()) {
actionList.add(Actions.JSON);
actionList.add(Actions.ASYNC);
} else {
actionList.add(Actions.ASYNC);
actionList.add(Actions.JSON);
}
} else {
actionList.add(Actions.ASYNC);
}
// middle of the action list
if (containsActionOrderTransitGroovy()) {
if (containsTransitAction()) {
actionList.add(Actions.TRANSIT);
}
if (containsGroovyAction()) {
actionList.add(Actions.GROOVY);
}
} else {
if (containsGroovyAction()) {
actionList.add(Actions.GROOVY);
}
if (containsTransitAction()) {
actionList.add(Actions.TRANSIT);
}
}
// tail of the action list
if (containsRedisAction()) {
actionList.add(Actions.REDIS);
} else if (containsPullServerAction()) {
actionList.add(Actions.PULL_SERVER);
} else if (containsHttpAction()) {
if (MapUtils.isNotEmpty(this.getHttpQueryParams())) {
actionList.add(Actions.QueryParams);
}
if (containsFormParamsAction()) {
actionList.add(Actions.FormParams);
} else if (containsFormParams2Action()) {
actionList.add(Actions.FormParams2);
}
actionList.add(Actions.ASYNC_HTTP);
}
if (containsForceSyncFlag() && containsGroovyAction() && actionList.indexOf(Actions.ASYNC) > -1) {
actionList.remove(Actions.ASYNC);
}
int index;
if (isUseNonBlockAsync() && (index = actionList.indexOf(Actions.ASYNC)) > -1) {
actionList.set(index, Actions.NONBLOCKASYNC);
}
return actionList;
}
private boolean containsForceSyncFlag() {
return MapUtils.isNotEmpty(getExtraParams()) && getExtraParams().containsKey(SUB_FLAG_ACTION_IGNORE_ASYNC) && "true".equalsIgnoreCase(getExtraParams().get(SUB_FLAG_ACTION_IGNORE_ASYNC));
}
public static final String SUB_FLAG_ACTION_REDIS = "ACTION_REDIS";
public static final String SUB_FLAG_ACTION_ORDER_TRANSIT_GROOVY = "ACTION_ORDER_TRANSIT_GROOVY";
public static final String SUB_FLAG_ACTION_FORMPARAMS_HTTP_IGNORE_JSON = "FORMPARAMS_HTTP_IGNORE_JSON";
public static final String SUB_FLAG_ACTION_FORMPARAMS2_HTTP_IGNORE_JSON = "FORMPARAMS2_HTTP_IGNORE_JSON";
public static final String ORDER_BY_QID_KEY = "QID";
public static final String ORDER_BY_MSGKEY_KEY = "KEY";
public static final String SUB_FLAG_EXTREA_PARAMS_MQ_CLUSTER = "SUB_MQCLUSTER";
public static final String SUB_FLAG_ACTION_IGNORE_ASYNC = "forceSync";
private boolean containsFormParams2Action() {
return this.getMsgPushType() != null && this.getMsgPushType() == ConsumeSubscriptionMsgPushType.FORM_PARAMS2.getIndex();
}
private boolean containsFormParamsAction() {
return this.getMsgPushType() != null && this.getMsgPushType() == ConsumeSubscriptionMsgPushType.FORM_PARAMS.getIndex();
}
private boolean containsPullServerAction() {
return this.getConsumeType() != null && this.getConsumeType() == ConsumeSubscriptionConsumeType.SDK.getIndex();
}
private boolean containsHttpAction() {
return this.getConsumeType() != null && this.getConsumeType() == ConsumeSubscriptionConsumeType.HTTP.getIndex();
}
private boolean containsHdfsAction() {
return this.getConsumeType() != null && this.getConsumeType() == ConsumeSubscriptionConsumeType.BIG_DATA.getIndex() && this.getBigDataType() != null && this.getBigDataType() == ConsumeSubscriptionBigDataType.HDFS.getIndex();
}
private boolean containsHbaseAction() {
return this.getConsumeType() != null && this.getConsumeType() == ConsumeSubscriptionConsumeType.BIG_DATA.getIndex() && this.getBigDataType() != null && this.getBigDataType() == ConsumeSubscriptionBigDataType.HBASE.getIndex();
}
private boolean containsActionOrderTransitGroovy() {
return MapUtils.isNotEmpty(this.getExtraParams()) && "true".equalsIgnoreCase(this.getExtraParams().get(SUB_FLAG_ACTION_ORDER_TRANSIT_GROOVY));
}
private boolean containsRedisAction() {
return MapUtils.isNotEmpty(this.getExtraParams()) && "true".equalsIgnoreCase(this.getExtraParams().get(SUB_FLAG_ACTION_REDIS));
}
private boolean containsFormHttpIgnoreJson() {
return MapUtils.isNotEmpty(this.getExtraParams()) && "true".equalsIgnoreCase(this.getExtraParams().get(SUB_FLAG_ACTION_FORMPARAMS_HTTP_IGNORE_JSON));
}
private boolean containsForm2HttpIgnoreJson() {
return MapUtils.isNotEmpty(this.getExtraParams()) && "true".equalsIgnoreCase(this.getExtraParams().get(SUB_FLAG_ACTION_FORMPARAMS2_HTTP_IGNORE_JSON));
}
private boolean formParamsHttpIgnoreJson() {
return containsFormParamsAction() && containsFormHttpIgnoreJson();
}
private boolean formParams2HttpIgnoreJson() {
return containsFormParams2Action() && containsForm2HttpIgnoreJson();
}
private boolean formParamsContainsJson() {
return containsHttpAction() && containsFormParamsAction() && !formParamsHttpIgnoreJson();
}
private boolean formParams2ContainsJson() {
return containsHttpAction() && containsFormParams2Action() && !formParams2HttpIgnoreJson();
}
private boolean containsJsonAction() {
return actionIsEnableOrderByJson() || containsGroovyAction() || containsTransitAction() || containsRedisAction()
|| (
(formParamsContainsJson() || formParams2ContainsJson() || (MapUtils.isNotEmpty(this.getHttpQueryParams())))
&& msgType == ConsumeSubscriptionMsgType.JSON.getIndex()
);
}
private boolean actionIsEnableOrderByJson() {
return IsEnable.isEnable(this.getEnableOrder()) && StringUtils.isNotEmpty(this.getOrderKey()) && !ORDER_BY_QID_KEY.equalsIgnoreCase(orderKey) && !ORDER_BY_MSGKEY_KEY.equalsIgnoreCase(this.getOrderKey());
}
private boolean containsTransitAction() {
return IsEnable.isEnable(this.getEnableTransit()) && MapUtils.isNotEmpty(this.getTransit());
}
private boolean containsGroovyAction() {
return IsEnable.isEnable(this.getEnableGroovy()) && StringUtils.isNotEmpty(this.getGroovy());
}
@Override
public String toString() {
return "ConsumeSubscriptionOrderBo{" +
"subId=" + subId +
", groupId=" + groupId +
", groupName='" + groupName + '\'' +
", topicId=" + topicId +
", topicName='" + topicName + '\'' +
", maxTps=" + maxTps +
", pressureTraffic=" + pressureTraffic +
", alarmType=" + alarmType +
", alarmIsEnable=" + alarmIsEnable +
", alarmMsgLag=" + alarmMsgLag +
", alarmDelayTime=" + alarmDelayTime +
", apiType=" + apiType +
", consumeTimeout=" + consumeTimeout +
", errorRetryTimes=" + errorRetryTimes +
", retryIntervals=" + retryIntervals +
", msgType=" + msgType +
", enableGroovy=" + enableGroovy +
", enableTransit=" + enableTransit +
", groovy='" + groovy + '\'' +
", transit=" + transit +
", enableOrder=" + enableOrder +
", orderKey='" + orderKey + '\'' +
", consumeType=" + consumeType +
", urls=" + urls +
", httpMethod=" + httpMethod +
", httpHeaders=" + httpHeaders +
", httpQueryParams=" + httpQueryParams +
", msgPushType=" + msgPushType +
", httpToken='" + httpToken + '\'' +
", pushMaxConcurrency=" + pushMaxConcurrency +
", bigDataType=" + bigDataType +
", bigDataConfig='" + bigDataConfig + '\'' +
", extraParams=" + extraParams +
", operationParams=" + operationParams +
", useNonBlockAsync=" + useNonBlockAsync +
"} " + super.toString();
}
}```
|
```package io.openmessaging.rocketmq.consumer;
import io.openmessaging.rocketmq.config.ClientConfig;
import io.openmessaging.rocketmq.domain.ConsumeRequest;
import io.openmessaging.rocketmq.domain.NonStandardKeys;
import org.apache.rocketmq.client.consumer.DefaultMQPullConsumer;
import org.apache.rocketmq.common.message.MessageExt;
import org.apache.rocketmq.common.message.MessageQueue;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class LocalMessageCacheTest {
private LocalMessageCache localMessageCache;
@Mock
private DefaultMQPullConsumer rocketmqPullConsumer;
@Mock
private ConsumeRequest consumeRequest;
@Before
public void init() {
ClientConfig clientConfig = new ClientConfig();
clientConfig.setRmqPullMessageBatchNums(512);
clientConfig.setRmqPullMessageCacheCapacity(1024);
localMessageCache = new LocalMessageCache(rocketmqPullConsumer, clientConfig);
}
@Test
public void testNextPullBatchNums() throws Exception {
assertThat(localMessageCache.nextPullBatchNums()).isEqualTo(512);
for (int i = 0; i < 513; i++) {
localMessageCache.submitConsumeRequest(consumeRequest);
}
assertThat(localMessageCache.nextPullBatchNums()).isEqualTo(511);
}
@Test
public void testNextPullOffset() throws Exception {
MessageQueue messageQueue = new MessageQueue();
when(rocketmqPullConsumer.fetchConsumeOffset(any(MessageQueue.class), anyBoolean()))
.thenReturn(123L);
assertThat(localMessageCache.nextPullOffset(messageQueue)).isEqualTo(123L);
}
@Test
public void testUpdatePullOffset() throws Exception {
MessageQueue messageQueue = new MessageQueue();
localMessageCache.updatePullOffset(messageQueue, 124L);
assertThat(localMessageCache.nextPullOffset(messageQueue)).isEqualTo(124L);
}
@Test
public void testSubmitConsumeRequest() throws Exception {
byte[] body = new byte[] {'1', '2', '3'};
MessageExt consumedMsg = new MessageExt();
consumedMsg.setMsgId("NewMsgId");
consumedMsg.setBody(body);
consumedMsg.putUserProperty(NonStandardKeys.MESSAGE_DESTINATION, "TOPIC");
consumedMsg.setTopic("HELLO_QUEUE");
when(consumeRequest.getMessageExt()).thenReturn(consumedMsg);
localMessageCache.submitConsumeRequest(consumeRequest);
assertThat(localMessageCache.poll()).isEqualTo(consumedMsg);
}
}```
|
Please help me generate a test for this class.
|
```package io.openmessaging.rocketmq.consumer;
import io.openmessaging.KeyValue;
import io.openmessaging.PropertyKeys;
import io.openmessaging.ServiceLifecycle;
import io.openmessaging.rocketmq.config.ClientConfig;
import io.openmessaging.rocketmq.domain.ConsumeRequest;
import java.util.Collections;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReadWriteLock;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.rocketmq.client.consumer.DefaultMQPullConsumer;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.impl.consumer.ProcessQueue;
import org.apache.rocketmq.client.log.ClientLogger;
import org.apache.rocketmq.common.ThreadFactoryImpl;
import org.apache.rocketmq.common.message.MessageAccessor;
import org.apache.rocketmq.common.message.MessageExt;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.common.utils.ThreadUtils;
import org.slf4j.Logger;
class LocalMessageCache implements ServiceLifecycle {
private final BlockingQueue<ConsumeRequest> consumeRequestCache;
private final Map<String, ConsumeRequest> consumedRequest;
private final ConcurrentHashMap<MessageQueue, Long> pullOffsetTable;
private final DefaultMQPullConsumer rocketmqPullConsumer;
private final ClientConfig clientConfig;
private final ScheduledExecutorService cleanExpireMsgExecutors;
private final static Logger log = ClientLogger.getLog();
LocalMessageCache(final DefaultMQPullConsumer rocketmqPullConsumer, final ClientConfig clientConfig) {
consumeRequestCache = new LinkedBlockingQueue<>(clientConfig.getRmqPullMessageCacheCapacity());
this.consumedRequest = new ConcurrentHashMap<>();
this.pullOffsetTable = new ConcurrentHashMap<>();
this.rocketmqPullConsumer = rocketmqPullConsumer;
this.clientConfig = clientConfig;
this.cleanExpireMsgExecutors = Executors.newSingleThreadScheduledExecutor(new ThreadFactoryImpl(
"OMS_CleanExpireMsgScheduledThread_"));
}
int nextPullBatchNums() {
return Math.min(clientConfig.getRmqPullMessageBatchNums(), consumeRequestCache.remainingCapacity());
}
long nextPullOffset(MessageQueue remoteQueue) {
if (!pullOffsetTable.containsKey(remoteQueue)) {
try {
pullOffsetTable.putIfAbsent(remoteQueue,
rocketmqPullConsumer.fetchConsumeOffset(remoteQueue, false));
} catch (MQClientException e) {
log.error("A error occurred in fetch consume offset process.", e);
}
}
return pullOffsetTable.get(remoteQueue);
}
void updatePullOffset(MessageQueue remoteQueue, long nextPullOffset) {
pullOffsetTable.put(remoteQueue, nextPullOffset);
}
void submitConsumeRequest(ConsumeRequest consumeRequest) {
try {
consumeRequestCache.put(consumeRequest);
} catch (InterruptedException ignore) {
}
}
MessageExt poll() {
return poll(clientConfig.getOmsOperationTimeout());
}
MessageExt poll(final KeyValue properties) {
int currentPollTimeout = clientConfig.getOmsOperationTimeout();
if (properties.containsKey(PropertyKeys.OPERATION_TIMEOUT)) {
currentPollTimeout = properties.getInt(PropertyKeys.OPERATION_TIMEOUT);
}
return poll(currentPollTimeout);
}
private MessageExt poll(long timeout) {
try {
ConsumeRequest consumeRequest = consumeRequestCache.poll(timeout, TimeUnit.MILLISECONDS);
if (consumeRequest != null) {
MessageExt messageExt = consumeRequest.getMessageExt();
consumeRequest.setStartConsumeTimeMillis(System.currentTimeMillis());
MessageAccessor.setConsumeStartTimeStamp(messageExt, String.valueOf(consumeRequest.getStartConsumeTimeMillis()));
consumedRequest.put(messageExt.getMsgId(), consumeRequest);
return messageExt;
}
} catch (InterruptedException ignore) {
}
return null;
}
void ack(final String messageId) {
ConsumeRequest consumeRequest = consumedRequest.remove(messageId);
if (consumeRequest != null) {
long offset = consumeRequest.getProcessQueue().removeMessage(Collections.singletonList(consumeRequest.getMessageExt()));
try {
rocketmqPullConsumer.updateConsumeOffset(consumeRequest.getMessageQueue(), offset);
} catch (MQClientException e) {
log.error("A error occurred in update consume offset process.", e);
}
}
}
void ack(final MessageQueue messageQueue, final ProcessQueue processQueue, final MessageExt messageExt) {
consumedRequest.remove(messageExt.getMsgId());
long offset = processQueue.removeMessage(Collections.singletonList(messageExt));
try {
rocketmqPullConsumer.updateConsumeOffset(messageQueue, offset);
} catch (MQClientException e) {
log.error("A error occurred in update consume offset process.", e);
}
}
@Override
public void startup() {
this.cleanExpireMsgExecutors.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
cleanExpireMsg();
}
}, clientConfig.getRmqMessageConsumeTimeout(), clientConfig.getRmqMessageConsumeTimeout(), TimeUnit.MINUTES);
}
@Override
public void shutdown() {
ThreadUtils.shutdownGracefully(cleanExpireMsgExecutors, 5000, TimeUnit.MILLISECONDS);
}
private void cleanExpireMsg() {
for (final Map.Entry<MessageQueue, ProcessQueue> next : rocketmqPullConsumer.getDefaultMQPullConsumerImpl()
.getRebalanceImpl().getProcessQueueTable().entrySet()) {
ProcessQueue pq = next.getValue();
MessageQueue mq = next.getKey();
ReadWriteLock lockTreeMap = getLockInProcessQueue(pq);
if (lockTreeMap == null) {
log.error("Gets tree map lock in process queue error, may be has compatibility issue");
return;
}
TreeMap<Long, MessageExt> msgTreeMap = pq.getMsgTreeMap();
int loop = msgTreeMap.size();
for (int i = 0; i < loop; i++) {
MessageExt msg = null;
try {
lockTreeMap.readLock().lockInterruptibly();
try {
if (!msgTreeMap.isEmpty()) {
msg = msgTreeMap.firstEntry().getValue();
if (System.currentTimeMillis() - Long.parseLong(MessageAccessor.getConsumeStartTimeStamp(msg))
> clientConfig.getRmqMessageConsumeTimeout() * 60 * 1000) {
//Expired, ack and remove it.
} else {
break;
}
} else {
break;
}
} finally {
lockTreeMap.readLock().unlock();
}
} catch (InterruptedException e) {
log.error("Gets expired message exception", e);
}
try {
rocketmqPullConsumer.sendMessageBack(msg, 3);
log.info("Send expired msg back. topic={}, msgId={}, storeHost={}, queueId={}, queueOffset={}",
msg.getTopic(), msg.getMsgId(), msg.getStoreHost(), msg.getQueueId(), msg.getQueueOffset());
ack(mq, pq, msg);
} catch (Exception e) {
log.error("Send back expired msg exception", e);
}
}
}
}
private ReadWriteLock getLockInProcessQueue(ProcessQueue pq) {
try {
return (ReadWriteLock) FieldUtils.readDeclaredField(pq, "lockTreeMap", true);
} catch (IllegalAccessException e) {
return null;
}
}
}
```
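For orientation, a minimal wiring sketch of the pull/consume/ack cycle this cache mediates; the consumer setup, queue names, and the elided ConsumeRequest construction are illustrative assumptions, not code from the class above.
```
// Hypothetical usage sketch for LocalMessageCache (same-package access assumed).
DefaultMQPullConsumer pullConsumer = new DefaultMQPullConsumer("FooBarGroup");
ClientConfig clientConfig = new ClientConfig();
LocalMessageCache cache = new LocalMessageCache(pullConsumer, clientConfig);
cache.startup(); // schedules the periodic expired-message sweep
MessageQueue mq = new MessageQueue("FooBar", "DefaultBrokerName", 0);
long pullOffset = cache.nextPullOffset(mq); // first call fetches the offset from the consumer
// ... pull up to cache.nextPullBatchNums() messages starting at pullOffset,
// wrap each (with its ProcessQueue) into a ConsumeRequest, then submit:
// cache.submitConsumeRequest(consumeRequest);
MessageExt msg = cache.poll(); // blocks up to the OMS operation timeout
if (msg != null) {
cache.ack(msg.getMsgId()); // removes it from its ProcessQueue and commits the offset
}
cache.updatePullOffset(mq, pullOffset + 1);
cache.shutdown();
```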
|
```package org.apache.rocketmq.tools.command.offset;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.rocketmq.srvutil.ServerUtil;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class ResetOffsetByTimeOldCommandTest {
@Test
public void testExecute() {
ResetOffsetByTimeOldCommand cmd = new ResetOffsetByTimeOldCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-g default-group", "-t unit-test", "-s 1412131213231", "-f false"};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new PosixParser());
assertThat(commandLine.getOptionValue('g').trim()).isEqualTo("default-group");
assertThat(commandLine.getOptionValue('t').trim()).isEqualTo("unit-test");
assertThat(commandLine.getOptionValue('s').trim()).isEqualTo("1412131213231");
}
}```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.tools.command.offset;
import java.util.Date;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.common.UtilAll;
import org.apache.rocketmq.common.admin.RollbackStats;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.remoting.exception.RemotingException;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.command.SubCommand;
import org.apache.rocketmq.tools.command.SubCommandException;
public class ResetOffsetByTimeOldCommand implements SubCommand {
public static void resetOffset(DefaultMQAdminExt defaultMQAdminExt, String consumerGroup, String topic,
long timestamp, boolean force,
String timeStampStr) throws RemotingException, MQBrokerException, InterruptedException, MQClientException {
List<RollbackStats> rollbackStatsList = defaultMQAdminExt.resetOffsetByTimestampOld(consumerGroup, topic, timestamp, force);
System.out.printf(
"rollback consumer offset by specified consumerGroup[%s], topic[%s], force[%s], timestamp(string)[%s], timestamp(long)[%s]%n",
consumerGroup, topic, force, timeStampStr, timestamp);
System.out.printf("%-20s %-20s %-20s %-20s %-20s %-20s%n",
"#brokerName",
"#queueId",
"#brokerOffset",
"#consumerOffset",
"#timestampOffset",
"#rollbackOffset"
);
for (RollbackStats rollbackStats : rollbackStatsList) {
System.out.printf("%-20s %-20d %-20d %-20d %-20d %-20d%n",
UtilAll.frontStringAtLeast(rollbackStats.getBrokerName(), 32),
rollbackStats.getQueueId(),
rollbackStats.getBrokerOffset(),
rollbackStats.getConsumerOffset(),
rollbackStats.getTimestampOffset(),
rollbackStats.getRollbackOffset()
);
}
}
@Override
public String commandName() {
return "resetOffsetByTimeOld";
}
@Override
public String commandDesc() {
return "Reset consumer offset by timestamp(execute this command required client restart).";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("g", "group", true, "set the consumer group");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("t", "topic", true, "set the topic");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("s", "timestamp", true, "set the timestamp[currentTimeMillis|yyyy-MM-dd#HH:mm:ss:SSS]");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("f", "force", true, "set the force rollback by timestamp switch[true|false]");
opt.setRequired(false);
options.addOption(opt);
return options;
}
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
try {
String consumerGroup = commandLine.getOptionValue("g").trim();
String topic = commandLine.getOptionValue("t").trim();
String timeStampStr = commandLine.getOptionValue("s").trim();
long timestamp = 0;
try {
timestamp = Long.parseLong(timeStampStr);
} catch (NumberFormatException e) {
Date date = UtilAll.parseDate(timeStampStr, UtilAll.YYYY_MM_DD_HH_MM_SS_SSS);
if (date != null) {
timestamp = UtilAll.parseDate(timeStampStr, UtilAll.YYYY_MM_DD_HH_MM_SS_SSS).getTime();
} else {
System.out.printf("specified timestamp invalid.%n");
return;
}
}
boolean force = true;
if (commandLine.hasOption('f')) {
force = Boolean.valueOf(commandLine.getOptionValue("f").trim());
}
defaultMQAdminExt.start();
resetOffset(defaultMQAdminExt, consumerGroup, topic, timestamp, force, timeStampStr);
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
}
```
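One subtlety above: the `-s` option accepts either raw milliseconds or the yyyy-MM-dd#HH:mm:ss:SSS form. Here is a standalone sketch of that parsing step, mirroring execute(); the sample values and exception choice are illustrative.
```
// Sketch of the '-s' timestamp handling (imports: java.util.Date, org.apache.rocketmq.common.UtilAll).
String timeStampStr = "2014-10-01#12:00:13:231"; // or a raw value such as "1412131213231"
long timestamp;
try {
timestamp = Long.parseLong(timeStampStr); // currentTimeMillis form
} catch (NumberFormatException e) {
Date date = UtilAll.parseDate(timeStampStr, UtilAll.YYYY_MM_DD_HH_MM_SS_SSS);
if (date == null) {
throw new IllegalArgumentException("specified timestamp invalid: " + timeStampStr);
}
timestamp = date.getTime();
}
```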
|
```package org.apache.rocketmq.tools.command.consumer;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.rocketmq.client.ClientConfig;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.impl.MQClientAPIImpl;
import org.apache.rocketmq.client.impl.MQClientManager;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.common.admin.ConsumeStats;
import org.apache.rocketmq.common.admin.OffsetWrapper;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.common.protocol.route.BrokerData;
import org.apache.rocketmq.common.protocol.route.QueueData;
import org.apache.rocketmq.common.protocol.route.TopicRouteData;
import org.apache.rocketmq.remoting.exception.RemotingException;
import org.apache.rocketmq.srvutil.ServerUtil;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExtImpl;
import org.apache.rocketmq.tools.command.SubCommandException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class ConsumerProgressSubCommandTest {
private static DefaultMQAdminExt defaultMQAdminExt;
private static DefaultMQAdminExtImpl defaultMQAdminExtImpl;
private static MQClientInstance mqClientInstance = MQClientManager.getInstance().getAndCreateMQClientInstance(new ClientConfig());
private static MQClientAPIImpl mQClientAPIImpl;
@BeforeClass
public static void init() throws NoSuchFieldException, IllegalAccessException, InterruptedException, RemotingException, MQClientException, MQBrokerException {
mQClientAPIImpl = mock(MQClientAPIImpl.class);
defaultMQAdminExt = new DefaultMQAdminExt();
defaultMQAdminExtImpl = new DefaultMQAdminExtImpl(defaultMQAdminExt, 1000);
Field field = DefaultMQAdminExtImpl.class.getDeclaredField("mqClientInstance");
field.setAccessible(true);
field.set(defaultMQAdminExtImpl, mqClientInstance);
field = MQClientInstance.class.getDeclaredField("mQClientAPIImpl");
field.setAccessible(true);
field.set(mqClientInstance, mQClientAPIImpl);
field = DefaultMQAdminExt.class.getDeclaredField("defaultMQAdminExtImpl");
field.setAccessible(true);
field.set(defaultMQAdminExt, defaultMQAdminExtImpl);
TopicRouteData topicRouteData = new TopicRouteData();
List<BrokerData> brokerDatas = new ArrayList<>();
HashMap<Long, String> brokerAddrs = new HashMap<>();
brokerAddrs.put(1234l, "127.0.0.1:10911");
BrokerData brokerData = new BrokerData();
brokerData.setCluster("default-cluster");
brokerData.setBrokerName("default-broker");
brokerData.setBrokerAddrs(brokerAddrs);
brokerDatas.add(brokerData);
topicRouteData.setBrokerDatas(brokerDatas);
topicRouteData.setQueueDatas(new ArrayList<QueueData>());
topicRouteData.setFilterServerTable(new HashMap<String, List<String>>());
when(mQClientAPIImpl.getTopicRouteInfoFromNameServer(anyString(), anyLong())).thenReturn(topicRouteData);
ConsumeStats consumeStats = new ConsumeStats();
consumeStats.setConsumeTps(1234);
MessageQueue messageQueue = new MessageQueue();
OffsetWrapper offsetWrapper = new OffsetWrapper();
HashMap<MessageQueue, OffsetWrapper> stats = new HashMap<>();
stats.put(messageQueue, offsetWrapper);
consumeStats.setOffsetTable(stats);
when(mQClientAPIImpl.getConsumeStats(anyString(), anyString(), anyString(), anyLong())).thenReturn(consumeStats);
}
@AfterClass
public static void terminate() {
defaultMQAdminExt.shutdown();
}
@Ignore
@Test
public void testExecute() throws SubCommandException {
ConsumerProgressSubCommand cmd = new ConsumerProgressSubCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-g default-group"};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new PosixParser());
cmd.execute(commandLine, options, null);
}
}
```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.tools.command.consumer;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.rocketmq.client.log.ClientLogger;
import org.apache.rocketmq.common.MQVersion;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.UtilAll;
import org.apache.rocketmq.common.admin.ConsumeStats;
import org.apache.rocketmq.common.admin.OffsetWrapper;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.common.protocol.body.Connection;
import org.apache.rocketmq.common.protocol.body.ConsumerConnection;
import org.apache.rocketmq.common.protocol.body.ConsumerRunningInfo;
import org.apache.rocketmq.common.protocol.body.TopicList;
import org.apache.rocketmq.common.protocol.heartbeat.ConsumeType;
import org.apache.rocketmq.common.protocol.heartbeat.MessageModel;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.command.SubCommand;
import org.apache.rocketmq.tools.command.SubCommandException;
import org.slf4j.Logger;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
public class ConsumerProgressSubCommand implements SubCommand {
private final Logger log = ClientLogger.getLog();
@Override
public String commandName() {
return "consumerProgress";
}
@Override
public String commandDesc() {
return "Query consumers's progress, speed";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("g", "groupName", true, "consumer group name");
opt.setRequired(false);
options.addOption(opt);
Option opt1 = new Option("t", "topic", true, "topic name");
opt1.setRequired(false);
options.addOption(opt1);
return options;
}
private Map<MessageQueue, String> getMessageQueueAllocationResult(DefaultMQAdminExt defaultMQAdminExt,
String groupName) {
Map<MessageQueue, String> results = new HashMap<>();
try {
ConsumerConnection consumerConnection = defaultMQAdminExt.examineConsumerConnectionInfo(groupName);
for (Connection connection : consumerConnection.getConnectionSet()) {
String clientId = connection.getClientId();
ConsumerRunningInfo consumerRunningInfo = defaultMQAdminExt.getConsumerRunningInfo(groupName, clientId,
false);
for (MessageQueue messageQueue : consumerRunningInfo.getMqTable().keySet()) {
results.put(messageQueue, clientId.split("@")[0]);
}
}
} catch (Exception ignore) {
}
return results;
}
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
try {
defaultMQAdminExt.start();
if (commandLine.hasOption('g')) {
String consumerGroup = commandLine.getOptionValue('g').trim();
ConsumeStats consumeStats;
if (commandLine.hasOption('t')) {
String topic = commandLine.getOptionValue('t').trim();
consumeStats = defaultMQAdminExt.examineConsumeStats(consumerGroup, topic);
} else {
consumeStats = defaultMQAdminExt.examineConsumeStats(consumerGroup);
}
List<MessageQueue> mqList = new LinkedList<MessageQueue>();
mqList.addAll(consumeStats.getOffsetTable().keySet());
Collections.sort(mqList);
Map<MessageQueue, String> messageQueueAllocationResult = getMessageQueueAllocationResult(defaultMQAdminExt, consumerGroup);
System.out.printf("%-32s %-32s %-4s %-20s %-20s %-20s %-20s %s%n",
"#Topic",
"#Broker Name",
"#QID",
"#Broker Offset",
"#Consumer Offset",
"#Client IP",
"#Diff",
"#LastTime");
long diffTotal = 0L;
for (MessageQueue mq : mqList) {
OffsetWrapper offsetWrapper = consumeStats.getOffsetTable().get(mq);
long diff = offsetWrapper.getBrokerOffset() - offsetWrapper.getConsumerOffset();
diffTotal += diff;
String lastTime = "";
try {
lastTime = UtilAll.formatDate(new Date(offsetWrapper.getLastTimestamp()), UtilAll.YYYY_MM_DD_HH_MM_SS);
} catch (Exception e) {
}
String clientIP = messageQueueAllocationResult.get(mq);
System.out.printf("%-32s %-32s %-4d %-20d %-20d %-20s %-20d %s%n",
UtilAll.frontStringAtLeast(mq.getTopic(), 32),
UtilAll.frontStringAtLeast(mq.getBrokerName(), 32),
mq.getQueueId(),
offsetWrapper.getBrokerOffset(),
offsetWrapper.getConsumerOffset(),
null != clientIP ? clientIP : "NA",
diff,
lastTime
);
}
System.out.printf("%n");
System.out.printf("Consume TPS: %s%n", consumeStats.getConsumeTps());
System.out.printf("Diff Total: %d%n", diffTotal);
} else {
System.out.printf("%-32s %-6s %-24s %-5s %-14s %-7s %s%n",
"#Group",
"#Count",
"#Version",
"#Type",
"#Model",
"#TPS",
"#Diff Total"
);
TopicList topicList = defaultMQAdminExt.fetchAllTopicList();
for (String topic : topicList.getTopicList()) {
if (topic.startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX)) {
String consumerGroup = topic.substring(MixAll.RETRY_GROUP_TOPIC_PREFIX.length());
try {
ConsumeStats consumeStats = null;
try {
consumeStats = defaultMQAdminExt.examineConsumeStats(consumerGroup);
} catch (Exception e) {
log.warn("examineConsumeStats exception, " + consumerGroup, e);
}
ConsumerConnection cc = null;
try {
cc = defaultMQAdminExt.examineConsumerConnectionInfo(consumerGroup);
} catch (Exception e) {
log.warn("examineConsumerConnectionInfo exception, " + consumerGroup, e);
}
GroupConsumeInfo groupConsumeInfo = new GroupConsumeInfo();
groupConsumeInfo.setGroup(consumerGroup);
if (consumeStats != null) {
groupConsumeInfo.setConsumeTps((int) consumeStats.getConsumeTps());
groupConsumeInfo.setDiffTotal(consumeStats.computeTotalDiff());
}
if (cc != null) {
groupConsumeInfo.setCount(cc.getConnectionSet().size());
groupConsumeInfo.setMessageModel(cc.getMessageModel());
groupConsumeInfo.setConsumeType(cc.getConsumeType());
groupConsumeInfo.setVersion(cc.computeMinVersion());
}
System.out.printf("%-32s %-6d %-24s %-5s %-14s %-7d %d%n",
UtilAll.frontStringAtLeast(groupConsumeInfo.getGroup(), 32),
groupConsumeInfo.getCount(),
groupConsumeInfo.getCount() > 0 ? groupConsumeInfo.versionDesc() : "OFFLINE",
groupConsumeInfo.consumeTypeDesc(),
groupConsumeInfo.messageModelDesc(),
groupConsumeInfo.getConsumeTps(),
groupConsumeInfo.getDiffTotal()
);
} catch (Exception e) {
log.warn("examineConsumeStats or examineConsumerConnectionInfo exception, " + consumerGroup, e);
}
}
}
}
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
}
class GroupConsumeInfo implements Comparable<GroupConsumeInfo> {
private String group;
private int version;
private int count;
private ConsumeType consumeType;
private MessageModel messageModel;
private int consumeTps;
private long diffTotal;
public String getGroup() {
return group;
}
public void setGroup(String group) {
this.group = group;
}
public String consumeTypeDesc() {
if (this.count != 0) {
return this.getConsumeType() == ConsumeType.CONSUME_ACTIVELY ? "PULL" : "PUSH";
}
return "";
}
public ConsumeType getConsumeType() {
return consumeType;
}
public void setConsumeType(ConsumeType consumeType) {
this.consumeType = consumeType;
}
public String messageModelDesc() {
if (this.count != 0 && this.getConsumeType() == ConsumeType.CONSUME_PASSIVELY) {
return this.getMessageModel().toString();
}
return "";
}
public MessageModel getMessageModel() {
return messageModel;
}
public void setMessageModel(MessageModel messageModel) {
this.messageModel = messageModel;
}
public String versionDesc() {
if (this.count != 0) {
return MQVersion.getVersionDesc(this.version);
}
return "";
}
public int getCount() {
return count;
}
public void setCount(int count) {
this.count = count;
}
public long getDiffTotal() {
return diffTotal;
}
public void setDiffTotal(long diffTotal) {
this.diffTotal = diffTotal;
}
@Override
public int compareTo(GroupConsumeInfo o) {
if (this.count != o.count) {
return o.count - this.count;
}
return (int) (o.diffTotal - diffTotal);
}
public int getConsumeTps() {
return consumeTps;
}
public void setConsumeTps(int consumeTps) {
this.consumeTps = consumeTps;
}
public int getVersion() {
return version;
}
public void setVersion(int version) {
this.version = version;
}
}
```
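Worth noting: GroupConsumeInfo orders groups by connection count descending, then by diff total descending, and the long-to-int cast in compareTo can overflow for very large lags. A small same-package sketch of the ordering with made-up values:
```
// Illustrative only (imports: java.util.ArrayList, java.util.Collections, java.util.List).
List<GroupConsumeInfo> groups = new ArrayList<GroupConsumeInfo>();
GroupConsumeInfo a = new GroupConsumeInfo();
a.setGroup("group-a"); a.setCount(2); a.setDiffTotal(10);
GroupConsumeInfo b = new GroupConsumeInfo();
b.setGroup("group-b"); b.setCount(5); b.setDiffTotal(1);
GroupConsumeInfo c = new GroupConsumeInfo();
c.setGroup("group-c"); c.setCount(5); c.setDiffTotal(7);
groups.add(a); groups.add(b); groups.add(c);
Collections.sort(groups); // expected order: group-c, group-b, group-a
```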
|
```package org.apache.rocketmq.tools.monitor;
import java.util.Properties;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.common.protocol.body.ConsumeStatus;
import org.apache.rocketmq.common.protocol.body.ConsumerRunningInfo;
import org.apache.rocketmq.common.protocol.body.ProcessQueueInfo;
import org.apache.rocketmq.common.protocol.heartbeat.SubscriptionData;
import org.apache.rocketmq.common.protocol.topic.OffsetMovedEvent;
import org.junit.Before;
import org.junit.Test;
import static org.mockito.Mockito.mock;
public class DefaultMonitorListenerTest {
private DefaultMonitorListener defaultMonitorListener;
@Before
public void init() {
defaultMonitorListener = mock(DefaultMonitorListener.class);
}
@Test
public void testBeginRound() {
defaultMonitorListener.beginRound();
}
@Test
public void testReportUndoneMsgs() {
UndoneMsgs undoneMsgs = new UndoneMsgs();
undoneMsgs.setConsumerGroup("default-group");
undoneMsgs.setTopic("unit-test");
undoneMsgs.setUndoneMsgsDelayTimeMills(30000);
undoneMsgs.setUndoneMsgsSingleMQ(1);
undoneMsgs.setUndoneMsgsTotal(100);
defaultMonitorListener.reportUndoneMsgs(undoneMsgs);
}
@Test
public void testReportFailedMsgs() {
FailedMsgs failedMsgs = new FailedMsgs();
failedMsgs.setTopic("unit-test");
failedMsgs.setConsumerGroup("default-consumer");
failedMsgs.setFailedMsgsTotalRecently(2);
defaultMonitorListener.reportFailedMsgs(failedMsgs);
}
@Test
public void testReportDeleteMsgsEvent() {
DeleteMsgsEvent deleteMsgsEvent = new DeleteMsgsEvent();
deleteMsgsEvent.setEventTimestamp(System.currentTimeMillis());
deleteMsgsEvent.setOffsetMovedEvent(new OffsetMovedEvent());
defaultMonitorListener.reportDeleteMsgsEvent(deleteMsgsEvent);
}
@Test
public void testReportConsumerRunningInfo() {
TreeMap<String, ConsumerRunningInfo> criTable = new TreeMap<>();
ConsumerRunningInfo consumerRunningInfo = new ConsumerRunningInfo();
consumerRunningInfo.setSubscriptionSet(new TreeSet<SubscriptionData>());
consumerRunningInfo.setStatusTable(new TreeMap<String, ConsumeStatus>());
consumerRunningInfo.setSubscriptionSet(new TreeSet<SubscriptionData>());
consumerRunningInfo.setMqTable(new TreeMap<MessageQueue, ProcessQueueInfo>());
consumerRunningInfo.setProperties(new Properties());
criTable.put("test", consumerRunningInfo);
defaultMonitorListener.reportConsumerRunningInfo(criTable);
}
}```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.tools.monitor;
import java.util.Iterator;
import java.util.Map.Entry;
import java.util.TreeMap;
import org.apache.rocketmq.client.log.ClientLogger;
import org.apache.rocketmq.common.protocol.body.ConsumerRunningInfo;
import org.slf4j.Logger;
public class DefaultMonitorListener implements MonitorListener {
private final static String LOG_PREFIX = "[MONITOR] ";
private final static String LOG_NOTIFY = LOG_PREFIX + " [NOTIFY] ";
private final Logger log = ClientLogger.getLog();
public DefaultMonitorListener() {
}
@Override
public void beginRound() {
log.info(LOG_PREFIX + "=========================================beginRound");
}
@Override
public void reportUndoneMsgs(UndoneMsgs undoneMsgs) {
log.info(String.format(LOG_PREFIX + "reportUndoneMsgs: %s", undoneMsgs));
}
@Override
public void reportFailedMsgs(FailedMsgs failedMsgs) {
log.info(String.format(LOG_PREFIX + "reportFailedMsgs: %s", failedMsgs));
}
@Override
public void reportDeleteMsgsEvent(DeleteMsgsEvent deleteMsgsEvent) {
log.info(String.format(LOG_PREFIX + "reportDeleteMsgsEvent: %s", deleteMsgsEvent));
}
@Override
public void reportConsumerRunningInfo(TreeMap<String, ConsumerRunningInfo> criTable) {
{
boolean result = ConsumerRunningInfo.analyzeSubscription(criTable);
if (!result) {
log.info(String.format(LOG_NOTIFY
+ "reportConsumerRunningInfo: ConsumerGroup: %s, Subscription different", criTable
.firstEntry().getValue().getProperties().getProperty("consumerGroup")));
}
}
{
Iterator<Entry<String, ConsumerRunningInfo>> it = criTable.entrySet().iterator();
while (it.hasNext()) {
Entry<String, ConsumerRunningInfo> next = it.next();
String result = ConsumerRunningInfo.analyzeProcessQueue(next.getKey(), next.getValue());
if (!result.isEmpty()) {
log.info(String.format(LOG_NOTIFY
+ "reportConsumerRunningInfo: ConsumerGroup: %s, ClientId: %s, %s",
criTable.firstEntry().getValue().getProperties().getProperty("consumerGroup"),
next.getKey(),
result));
}
}
}
}
@Override
public void endRound() {
log.info(LOG_PREFIX + "=========================================endRound");
}
}
```
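Note that the test above only calls methods on a Mockito mock, so none of this logging logic actually runs. A hedged sketch of driving the real listener follows; only the simple log-only report methods are shown, since reportConsumerRunningInfo expects fully populated runtime properties.
```
// Safe-to-run sketch: these methods only format and log via ClientLogger.
DefaultMonitorListener listener = new DefaultMonitorListener();
listener.beginRound();
UndoneMsgs undoneMsgs = new UndoneMsgs();
undoneMsgs.setConsumerGroup("default-group");
undoneMsgs.setTopic("unit-test");
undoneMsgs.setUndoneMsgsTotal(100);
listener.reportUndoneMsgs(undoneMsgs);
listener.endRound();
```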
|
```package org.apache.rocketmq.test.delay;
import org.apache.rocketmq.test.base.BaseConf;
public class DelayConf extends BaseConf {
protected static final int[] DELAY_LEVEL = {
1, 5, 10, 30, 1 * 60, 5 * 60, 10 * 60,
30 * 60, 1 * 3600, 2 * 3600, 6 * 3600, 12 * 3600, 1 * 24 * 3600};
}
```
|
Please help me generate a test for this class.
|
```package com.xiaojukeji.carrera.config.v4.pproxy;
import com.xiaojukeji.carrera.config.ConfigurationValidator;
import org.apache.commons.lang3.StringUtils;
public class DelayConfiguration implements ConfigurationValidator {
private int innerTopicNum;
private String chronosInnerTopicPrefix;
public int getInnerTopicNum() {
return innerTopicNum;
}
public void setInnerTopicNum(int innerTopicNum) {
this.innerTopicNum = innerTopicNum;
}
public String getChronosInnerTopicPrefix() {
return chronosInnerTopicPrefix;
}
public void setChronosInnerTopicPrefix(String chronosInnerTopicPrefix) {
this.chronosInnerTopicPrefix = chronosInnerTopicPrefix;
}
@Override
public String toString() {
return "DelayConfiguration{" +
"innerTopicNum=" + innerTopicNum +
", chronosInnerTopicPrefix='" + chronosInnerTopicPrefix + '\'' +
'}';
}
@Override
public boolean validate() {
return innerTopicNum > 0 && StringUtils.isNotEmpty(chronosInnerTopicPrefix);
}
}```
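No test output accompanies this entry, so here is a minimal sketch of one, assuming same-package placement and the JUnit/AssertJ style used throughout this document; the class name is hypothetical. validate() should pass only with a positive innerTopicNum and a non-empty prefix.
```
package com.xiaojukeji.carrera.config.v4.pproxy;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class DelayConfigurationTest {
@Test
public void testValidate() {
DelayConfiguration configuration = new DelayConfiguration();
configuration.setInnerTopicNum(8);
configuration.setChronosInnerTopicPrefix("chronos_inner");
assertThat(configuration.validate()).isTrue();
configuration.setInnerTopicNum(0);
assertThat(configuration.validate()).isFalse();
configuration.setInnerTopicNum(8);
configuration.setChronosInnerTopicPrefix("");
assertThat(configuration.validate()).isFalse();
}
}
```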
|
```package org.apache.rocketmq.common.protocol;
import org.apache.rocketmq.common.protocol.body.ConsumeStatus;
import org.apache.rocketmq.remoting.protocol.RemotingSerializable;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.within;
public class ConsumeStatusTest {
@Test
public void testFromJson() throws Exception {
ConsumeStatus cs = new ConsumeStatus();
cs.setConsumeFailedTPS(10);
cs.setPullRT(100);
cs.setPullTPS(1000);
String json = RemotingSerializable.toJson(cs, true);
ConsumeStatus fromJson = RemotingSerializable.fromJson(json, ConsumeStatus.class);
assertThat(fromJson.getPullRT()).isCloseTo(cs.getPullRT(), within(0.0001));
assertThat(fromJson.getPullTPS()).isCloseTo(cs.getPullTPS(), within(0.0001));
assertThat(fromJson.getConsumeFailedTPS()).isCloseTo(cs.getConsumeFailedTPS(), within(0.0001));
}
}
```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.common.protocol.body;
public class ConsumeStatus {
private double pullRT;
private double pullTPS;
private double consumeRT;
private double consumeOKTPS;
private double consumeFailedTPS;
private long consumeFailedMsgs;
public double getPullRT() {
return pullRT;
}
public void setPullRT(double pullRT) {
this.pullRT = pullRT;
}
public double getPullTPS() {
return pullTPS;
}
public void setPullTPS(double pullTPS) {
this.pullTPS = pullTPS;
}
public double getConsumeRT() {
return consumeRT;
}
public void setConsumeRT(double consumeRT) {
this.consumeRT = consumeRT;
}
public double getConsumeOKTPS() {
return consumeOKTPS;
}
public void setConsumeOKTPS(double consumeOKTPS) {
this.consumeOKTPS = consumeOKTPS;
}
public double getConsumeFailedTPS() {
return consumeFailedTPS;
}
public void setConsumeFailedTPS(double consumeFailedTPS) {
this.consumeFailedTPS = consumeFailedTPS;
}
public long getConsumeFailedMsgs() {
return consumeFailedMsgs;
}
public void setConsumeFailedMsgs(long consumeFailedMsgs) {
this.consumeFailedMsgs = consumeFailedMsgs;
}
}
```
|
```package org.apache.rocketmq.tools.command.broker;
import java.lang.reflect.Field;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.rocketmq.client.ClientConfig;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.impl.MQClientAPIImpl;
import org.apache.rocketmq.client.impl.MQClientManager;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.remoting.exception.RemotingConnectException;
import org.apache.rocketmq.remoting.exception.RemotingSendRequestException;
import org.apache.rocketmq.remoting.exception.RemotingTimeoutException;
import org.apache.rocketmq.srvutil.ServerUtil;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExtImpl;
import org.apache.rocketmq.tools.command.SubCommandException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class CleanExpiredCQSubCommandTest {
private static DefaultMQAdminExt defaultMQAdminExt;
private static DefaultMQAdminExtImpl defaultMQAdminExtImpl;
private static MQClientInstance mqClientInstance = MQClientManager.getInstance().getAndCreateMQClientInstance(new ClientConfig());
private static MQClientAPIImpl mQClientAPIImpl;
@BeforeClass
public static void init() throws NoSuchFieldException, IllegalAccessException, InterruptedException, RemotingTimeoutException, MQClientException, RemotingSendRequestException, RemotingConnectException, MQBrokerException {
mQClientAPIImpl = mock(MQClientAPIImpl.class);
defaultMQAdminExt = new DefaultMQAdminExt();
defaultMQAdminExtImpl = new DefaultMQAdminExtImpl(defaultMQAdminExt, 1000);
Field field = DefaultMQAdminExtImpl.class.getDeclaredField("mqClientInstance");
field.setAccessible(true);
field.set(defaultMQAdminExtImpl, mqClientInstance);
field = MQClientInstance.class.getDeclaredField("mQClientAPIImpl");
field.setAccessible(true);
field.set(mqClientInstance, mQClientAPIImpl);
field = DefaultMQAdminExt.class.getDeclaredField("defaultMQAdminExtImpl");
field.setAccessible(true);
field.set(defaultMQAdminExt, defaultMQAdminExtImpl);
when(mQClientAPIImpl.cleanExpiredConsumeQueue(anyString(), anyLong())).thenReturn(true);
}
@AfterClass
public static void terminate() {
defaultMQAdminExt.shutdown();
}
@Ignore
@Test
public void testExecute() throws SubCommandException {
CleanExpiredCQSubCommand cmd = new CleanExpiredCQSubCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-b 127.0.0.1:10911", "-c default-cluster"};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new PosixParser());
cmd.execute(commandLine, options, null);
}
}
```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.tools.command.broker;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.command.SubCommand;
import org.apache.rocketmq.tools.command.SubCommandException;
public class CleanExpiredCQSubCommand implements SubCommand {
@Override
public String commandName() {
return "cleanExpiredCQ";
}
@Override
public String commandDesc() {
return "Clean expired ConsumeQueue on broker.";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("b", "brokerAddr", true, "Broker address");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("c", "cluster", true, "clustername");
opt.setRequired(false);
options.addOption(opt);
return options;
}
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
try {
boolean result = false;
defaultMQAdminExt.start();
if (commandLine.hasOption('b')) {
String addr = commandLine.getOptionValue('b').trim();
result = defaultMQAdminExt.cleanExpiredConsumerQueueByAddr(addr);
} else {
String cluster = commandLine.getOptionValue('c');
if (null != cluster)
cluster = cluster.trim();
result = defaultMQAdminExt.cleanExpiredConsumerQueue(cluster);
}
System.out.printf(result ? "success" : "false");
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
}
```
|
```package org.apache.rocketmq.client.consumer.store;
import java.io.File;
import java.util.Collections;
import java.util.HashSet;
import org.apache.rocketmq.client.ClientConfig;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.impl.FindBrokerResult;
import org.apache.rocketmq.client.impl.MQClientAPIImpl;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.common.protocol.header.QueryConsumerOffsetRequestHeader;
import org.apache.rocketmq.common.protocol.header.UpdateConsumerOffsetRequestHeader;
import org.apache.rocketmq.remoting.exception.RemotingException;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class RemoteBrokerOffsetStoreTest {
@Mock
private MQClientInstance mQClientFactory;
@Mock
private MQClientAPIImpl mqClientAPI;
private String group = "FooBarGroup";
private String topic = "FooBar";
private String brokerName = "DefaultBrokerName";
@Before
public void init() {
System.setProperty("rocketmq.client.localOffsetStoreDir", System.getProperty("java.io.tmpdir") + ".rocketmq_offsets");
String clientId = new ClientConfig().buildMQClientId() + "#TestNamespace" + System.currentTimeMillis();
when(mQClientFactory.getClientId()).thenReturn(clientId);
when(mQClientFactory.findBrokerAddressInAdmin(brokerName)).thenReturn(new FindBrokerResult("127.0.0.1", false));
when(mQClientFactory.getMQClientAPIImpl()).thenReturn(mqClientAPI);
}
@Test
public void testUpdateOffset() throws Exception {
OffsetStore offsetStore = new RemoteBrokerOffsetStore(mQClientFactory, group);
MessageQueue messageQueue = new MessageQueue(topic, brokerName, 1);
offsetStore.updateOffset(messageQueue, 1024, false);
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_MEMORY)).isEqualTo(1024);
offsetStore.updateOffset(messageQueue, 1023, false);
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_MEMORY)).isEqualTo(1023);
offsetStore.updateOffset(messageQueue, 1022, true);
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_MEMORY)).isEqualTo(1023);
}
@Test
public void testReadOffset_WithException() throws Exception {
OffsetStore offsetStore = new RemoteBrokerOffsetStore(mQClientFactory, group);
MessageQueue messageQueue = new MessageQueue(topic, brokerName, 2);
offsetStore.updateOffset(messageQueue, 1024, false);
doThrow(new MQBrokerException(-1, ""))
.when(mqClientAPI).queryConsumerOffset(anyString(), any(QueryConsumerOffsetRequestHeader.class), anyLong());
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_STORE)).isEqualTo(-1);
doThrow(new RemotingException("", null))
.when(mqClientAPI).queryConsumerOffset(anyString(), any(QueryConsumerOffsetRequestHeader.class), anyLong());
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_STORE)).isEqualTo(-2);
}
@Test
public void testReadOffset_Success() throws Exception {
OffsetStore offsetStore = new RemoteBrokerOffsetStore(mQClientFactory, group);
final MessageQueue messageQueue = new MessageQueue(topic, brokerName, 3);
doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock mock) throws Throwable {
UpdateConsumerOffsetRequestHeader updateRequestHeader = mock.getArgument(1);
when(mqClientAPI.queryConsumerOffset(anyString(), any(QueryConsumerOffsetRequestHeader.class), anyLong())).thenReturn(updateRequestHeader.getCommitOffset());
return null;
}
}).when(mqClientAPI).updateConsumerOffsetOneway(any(String.class), any(UpdateConsumerOffsetRequestHeader.class), any(Long.class));
offsetStore.updateOffset(messageQueue, 1024, false);
offsetStore.persist(messageQueue);
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_STORE)).isEqualTo(1024);
offsetStore.updateOffset(messageQueue, 1023, false);
offsetStore.persist(messageQueue);
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_STORE)).isEqualTo(1023);
offsetStore.updateOffset(messageQueue, 1022, true);
offsetStore.persist(messageQueue);
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_STORE)).isEqualTo(1023);
offsetStore.updateOffset(messageQueue, 1025, false);
offsetStore.persistAll(new HashSet<MessageQueue>(Collections.singletonList(messageQueue)));
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_STORE)).isEqualTo(1025);
}
@Test
public void testRemoveOffset() throws Exception {
OffsetStore offsetStore = new RemoteBrokerOffsetStore(mQClientFactory, group);
final MessageQueue messageQueue = new MessageQueue(topic, brokerName, 4);
offsetStore.updateOffset(messageQueue, 1024, false);
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_MEMORY)).isEqualTo(1024);
offsetStore.removeOffset(messageQueue);
assertThat(offsetStore.readOffset(messageQueue, ReadOffsetType.READ_FROM_MEMORY)).isEqualTo(-1);
}
}```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.client.consumer.store;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.impl.FindBrokerResult;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.client.log.ClientLogger;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.UtilAll;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.common.protocol.header.QueryConsumerOffsetRequestHeader;
import org.apache.rocketmq.common.protocol.header.UpdateConsumerOffsetRequestHeader;
import org.apache.rocketmq.remoting.exception.RemotingException;
import org.slf4j.Logger;
/**
* Remote storage implementation
*/
public class RemoteBrokerOffsetStore implements OffsetStore {
private final static Logger log = ClientLogger.getLog();
private final MQClientInstance mQClientFactory;
private final String groupName;
private ConcurrentMap<MessageQueue, AtomicLong> offsetTable =
new ConcurrentHashMap<MessageQueue, AtomicLong>();
public RemoteBrokerOffsetStore(MQClientInstance mQClientFactory, String groupName) {
this.mQClientFactory = mQClientFactory;
this.groupName = groupName;
}
@Override
public void load() {
}
@Override
public void updateOffset(MessageQueue mq, long offset, boolean increaseOnly) {
if (mq != null) {
AtomicLong offsetOld = this.offsetTable.get(mq);
if (null == offsetOld) {
offsetOld = this.offsetTable.putIfAbsent(mq, new AtomicLong(offset));
}
if (null != offsetOld) {
if (increaseOnly) {
MixAll.compareAndIncreaseOnly(offsetOld, offset);
} else {
offsetOld.set(offset);
}
}
}
}
@Override
public long readOffset(final MessageQueue mq, final ReadOffsetType type) {
if (mq != null) {
switch (type) {
case MEMORY_FIRST_THEN_STORE:
case READ_FROM_MEMORY: {
AtomicLong offset = this.offsetTable.get(mq);
if (offset != null) {
return offset.get();
} else if (ReadOffsetType.READ_FROM_MEMORY == type) {
return -1;
}
}
case READ_FROM_STORE: {
try {
long brokerOffset = this.fetchConsumeOffsetFromBroker(mq);
AtomicLong offset = new AtomicLong(brokerOffset);
this.updateOffset(mq, offset.get(), false);
return brokerOffset;
}
// No offset in broker
catch (MQBrokerException e) {
return -1;
}
//Other exceptions
catch (Exception e) {
log.warn("fetchConsumeOffsetFromBroker exception, " + mq, e);
return -2;
}
}
default:
break;
}
}
return -1;
}
@Override
public void persistAll(Set<MessageQueue> mqs) {
if (null == mqs || mqs.isEmpty())
return;
final HashSet<MessageQueue> unusedMQ = new HashSet<MessageQueue>();
if (!mqs.isEmpty()) {
for (Map.Entry<MessageQueue, AtomicLong> entry : this.offsetTable.entrySet()) {
MessageQueue mq = entry.getKey();
AtomicLong offset = entry.getValue();
if (offset != null) {
if (mqs.contains(mq)) {
try {
this.updateConsumeOffsetToBroker(mq, offset.get());
log.info("[persistAll] Group: {} ClientId: {} updateConsumeOffsetToBroker {} {}",
this.groupName,
this.mQClientFactory.getClientId(),
mq,
offset.get());
} catch (Exception e) {
log.error("updateConsumeOffsetToBroker exception, " + mq.toString(), e);
}
} else {
unusedMQ.add(mq);
}
}
}
}
if (!unusedMQ.isEmpty()) {
for (MessageQueue mq : unusedMQ) {
this.offsetTable.remove(mq);
log.info("remove unused mq, {}, {}", mq, this.groupName);
}
}
}
@Override
public void persist(MessageQueue mq) {
AtomicLong offset = this.offsetTable.get(mq);
if (offset != null) {
try {
this.updateConsumeOffsetToBroker(mq, offset.get());
log.info("[persist] Group: {} ClientId: {} updateConsumeOffsetToBroker {} {}",
this.groupName,
this.mQClientFactory.getClientId(),
mq,
offset.get());
} catch (Exception e) {
log.error("updateConsumeOffsetToBroker exception, " + mq.toString(), e);
}
}
}
public void removeOffset(MessageQueue mq) {
if (mq != null) {
this.offsetTable.remove(mq);
log.info("remove unnecessary messageQueue offset. group={}, mq={}, offsetTableSize={}", this.groupName, mq,
offsetTable.size());
}
}
@Override
public Map<MessageQueue, Long> cloneOffsetTable(String topic) {
Map<MessageQueue, Long> cloneOffsetTable = new HashMap<MessageQueue, Long>();
for (Map.Entry<MessageQueue, AtomicLong> entry : this.offsetTable.entrySet()) {
MessageQueue mq = entry.getKey();
if (!UtilAll.isBlank(topic) && !topic.equals(mq.getTopic())) {
continue;
}
cloneOffsetTable.put(mq, entry.getValue().get());
}
return cloneOffsetTable;
}
/**
* Update the consumer offset one-way (fire-and-forget). Once the master is off,
* the update goes to a slave; this still needs to be optimized.
*/
private void updateConsumeOffsetToBroker(MessageQueue mq, long offset) throws RemotingException,
MQBrokerException, InterruptedException, MQClientException {
updateConsumeOffsetToBroker(mq, offset, true);
}
/**
* Update the consumer offset synchronously, or one-way when isOneway is set. Once the
* master is off, the update goes to a slave; this still needs to be optimized.
*/
@Override
public void updateConsumeOffsetToBroker(MessageQueue mq, long offset, boolean isOneway) throws RemotingException,
MQBrokerException, InterruptedException, MQClientException {
FindBrokerResult findBrokerResult = this.mQClientFactory.findBrokerAddressInAdmin(mq.getBrokerName());
if (null == findBrokerResult) {
this.mQClientFactory.updateTopicRouteInfoFromNameServer(mq.getTopic());
findBrokerResult = this.mQClientFactory.findBrokerAddressInAdmin(mq.getBrokerName());
}
if (findBrokerResult != null) {
UpdateConsumerOffsetRequestHeader requestHeader = new UpdateConsumerOffsetRequestHeader();
requestHeader.setTopic(mq.getTopic());
requestHeader.setConsumerGroup(this.groupName);
requestHeader.setQueueId(mq.getQueueId());
requestHeader.setCommitOffset(offset);
if (isOneway) {
this.mQClientFactory.getMQClientAPIImpl().updateConsumerOffsetOneway(
findBrokerResult.getBrokerAddr(), requestHeader, 1000 * 5);
} else {
this.mQClientFactory.getMQClientAPIImpl().updateConsumerOffset(
findBrokerResult.getBrokerAddr(), requestHeader, 1000 * 5);
}
} else {
throw new MQClientException("The broker[" + mq.getBrokerName() + "] not exist", null);
}
}
private long fetchConsumeOffsetFromBroker(MessageQueue mq) throws RemotingException, MQBrokerException,
InterruptedException, MQClientException {
FindBrokerResult findBrokerResult = this.mQClientFactory.findBrokerAddressInAdmin(mq.getBrokerName());
if (null == findBrokerResult) {
this.mQClientFactory.updateTopicRouteInfoFromNameServer(mq.getTopic());
findBrokerResult = this.mQClientFactory.findBrokerAddressInAdmin(mq.getBrokerName());
}
if (findBrokerResult != null) {
QueryConsumerOffsetRequestHeader requestHeader = new QueryConsumerOffsetRequestHeader();
requestHeader.setTopic(mq.getTopic());
requestHeader.setConsumerGroup(this.groupName);
requestHeader.setQueueId(mq.getQueueId());
return this.mQClientFactory.getMQClientAPIImpl().queryConsumerOffset(
findBrokerResult.getBrokerAddr(), requestHeader, 1000 * 5);
} else {
throw new MQClientException("The broker[" + mq.getBrokerName() + "] not exist", null);
}
}
}
```
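The switch in readOffset relies on a deliberate fall-through: MEMORY_FIRST_THEN_STORE shares the READ_FROM_MEMORY case and, when nothing is cached, continues into READ_FROM_STORE. A same-package sketch of the resulting contract; mQClientFactory is assumed to be a prepared MQClientInstance as in the test above.
```
RemoteBrokerOffsetStore store = new RemoteBrokerOffsetStore(mQClientFactory, "FooBarGroup");
MessageQueue mq = new MessageQueue("FooBar", "DefaultBrokerName", 0);
long fromMemory = store.readOffset(mq, ReadOffsetType.READ_FROM_MEMORY); // -1: nothing cached yet
long hybrid = store.readOffset(mq, ReadOffsetType.MEMORY_FIRST_THEN_STORE); // cache miss falls through to the broker query
long fromStore = store.readOffset(mq, ReadOffsetType.READ_FROM_STORE); // broker value; -1 if the broker has no offset, -2 on other errors
```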
|
```package org.apache.rocketmq.client.impl.consumer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.rocketmq.common.message.MessageExt;
import org.apache.rocketmq.common.protocol.body.ProcessQueueInfo;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.junit.MockitoJUnitRunner;
import static org.assertj.core.api.Assertions.assertThat;
@RunWith(MockitoJUnitRunner.class)
public class ProcessQueueTest {
@Test
public void testCachedMessageCount() {
ProcessQueue pq = new ProcessQueue();
pq.putMessage(createMessageList());
assertThat(pq.getMsgCount().get()).isEqualTo(100);
pq.takeMessags(10);
pq.commit();
assertThat(pq.getMsgCount().get()).isEqualTo(90);
pq.removeMessage(Collections.singletonList(pq.getMsgTreeMap().lastEntry().getValue()));
assertThat(pq.getMsgCount().get()).isEqualTo(89);
}
@Test
public void testCachedMessageSize() {
ProcessQueue pq = new ProcessQueue();
pq.putMessage(createMessageList());
assertThat(pq.getMsgSize().get()).isEqualTo(100 * 123);
pq.takeMessags(10);
pq.commit();
assertThat(pq.getMsgSize().get()).isEqualTo(90 * 123);
pq.removeMessage(Collections.singletonList(pq.getMsgTreeMap().lastEntry().getValue()));
assertThat(pq.getMsgSize().get()).isEqualTo(89 * 123);
}
@Test
public void testFillProcessQueueInfo() {
ProcessQueue pq = new ProcessQueue();
pq.putMessage(createMessageList(102400));
ProcessQueueInfo processQueueInfo = new ProcessQueueInfo();
pq.fillProcessQueueInfo(processQueueInfo);
assertThat(processQueueInfo.getCachedMsgSizeInMiB()).isEqualTo(12);
pq.takeMessags(10000);
pq.commit();
pq.fillProcessQueueInfo(processQueueInfo);
assertThat(processQueueInfo.getCachedMsgSizeInMiB()).isEqualTo(10);
pq.takeMessags(10000);
pq.commit();
pq.fillProcessQueueInfo(processQueueInfo);
assertThat(processQueueInfo.getCachedMsgSizeInMiB()).isEqualTo(9);
pq.takeMessags(80000);
pq.commit();
pq.fillProcessQueueInfo(processQueueInfo);
assertThat(processQueueInfo.getCachedMsgSizeInMiB()).isEqualTo(0);
}
private List<MessageExt> createMessageList() {
return createMessageList(100);
}
private List<MessageExt> createMessageList(int count) {
List<MessageExt> messageExtList = new ArrayList<MessageExt>();
for (int i = 0; i < count; i++) {
MessageExt messageExt = new MessageExt();
messageExt.setQueueOffset(i);
messageExt.setBody(new byte[123]);
messageExtList.add(messageExt);
}
return messageExtList;
}
}```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.client.impl.consumer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.rocketmq.client.consumer.DefaultMQPushConsumer;
import org.apache.rocketmq.client.log.ClientLogger;
import org.apache.rocketmq.common.message.MessageAccessor;
import org.apache.rocketmq.common.message.MessageConst;
import org.apache.rocketmq.common.message.MessageExt;
import org.apache.rocketmq.common.protocol.body.ProcessQueueInfo;
import org.slf4j.Logger;
/**
* Queue consumption snapshot
*/
public class ProcessQueue {
public final static long REBALANCE_LOCK_MAX_LIVE_TIME =
Long.parseLong(System.getProperty("rocketmq.client.rebalance.lockMaxLiveTime", "30000"));
public final static long REBALANCE_LOCK_INTERVAL = Long.parseLong(System.getProperty("rocketmq.client.rebalance.lockInterval", "20000"));
private final static long PULL_MAX_IDLE_TIME = Long.parseLong(System.getProperty("rocketmq.client.pull.pullMaxIdleTime", "120000"));
private final Logger log = ClientLogger.getLog();
private final ReadWriteLock lockTreeMap = new ReentrantReadWriteLock();
private final TreeMap<Long, MessageExt> msgTreeMap = new TreeMap<Long, MessageExt>();
private final AtomicLong msgCount = new AtomicLong();
private final AtomicLong msgSize = new AtomicLong();
private final Lock lockConsume = new ReentrantLock();
/**
* A subset of msgTreeMap, will only be used when orderly consume
*/
private final TreeMap<Long, MessageExt> consumingMsgOrderlyTreeMap = new TreeMap<Long, MessageExt>();
private final AtomicLong tryUnlockTimes = new AtomicLong(0);
private volatile long queueOffsetMax = 0L;
private volatile boolean dropped = false;
private volatile long lastPullTimestamp = System.currentTimeMillis();
private volatile long lastConsumeTimestamp = System.currentTimeMillis();
private volatile boolean locked = false;
private volatile long lastLockTimestamp = System.currentTimeMillis();
private volatile boolean consuming = false;
private volatile long msgAccCnt = 0;
public boolean isLockExpired() {
return (System.currentTimeMillis() - this.lastLockTimestamp) > REBALANCE_LOCK_MAX_LIVE_TIME;
}
public boolean isPullExpired() {
return (System.currentTimeMillis() - this.lastPullTimestamp) > PULL_MAX_IDLE_TIME;
}
/**
* @param pushConsumer
*/
public void cleanExpiredMsg(DefaultMQPushConsumer pushConsumer) {
if (pushConsumer.getDefaultMQPushConsumerImpl().isConsumeOrderly()) {
return;
}
int loop = msgTreeMap.size() < 16 ? msgTreeMap.size() : 16;
for (int i = 0; i < loop; i++) {
MessageExt msg = null;
try {
this.lockTreeMap.readLock().lockInterruptibly();
try {
if (!msgTreeMap.isEmpty() && System.currentTimeMillis() - Long.parseLong(MessageAccessor.getConsumeStartTimeStamp(msgTreeMap.firstEntry().getValue())) > pushConsumer.getConsumeTimeout() * 60 * 1000) {
msg = msgTreeMap.firstEntry().getValue();
} else {
break;
}
} finally {
this.lockTreeMap.readLock().unlock();
}
} catch (InterruptedException e) {
log.error("getExpiredMsg exception", e);
}
try {
pushConsumer.sendMessageBack(msg, 3);
log.info("send expire msg back. topic={}, msgId={}, storeHost={}, queueId={}, queueOffset={}", msg.getTopic(), msg.getMsgId(), msg.getStoreHost(), msg.getQueueId(), msg.getQueueOffset());
try {
this.lockTreeMap.writeLock().lockInterruptibly();
try {
if (!msgTreeMap.isEmpty() && msg.getQueueOffset() == msgTreeMap.firstKey()) {
try {
removeMessage(Collections.singletonList(msg));
} catch (Exception e) {
log.error("send expired msg exception", e);
}
}
} finally {
this.lockTreeMap.writeLock().unlock();
}
} catch (InterruptedException e) {
log.error("getExpiredMsg exception", e);
}
} catch (Exception e) {
log.error("send expired msg exception", e);
}
}
}
public boolean putMessage(final List<MessageExt> msgs) {
boolean dispatchToConsume = false;
try {
this.lockTreeMap.writeLock().lockInterruptibly();
try {
int validMsgCnt = 0;
for (MessageExt msg : msgs) {
MessageExt old = msgTreeMap.put(msg.getQueueOffset(), msg);
if (null == old) {
validMsgCnt++;
this.queueOffsetMax = msg.getQueueOffset();
msgSize.addAndGet(msg.getBody().length);
}
}
msgCount.addAndGet(validMsgCnt);
if (!msgTreeMap.isEmpty() && !this.consuming) {
dispatchToConsume = true;
this.consuming = true;
}
if (!msgs.isEmpty()) {
MessageExt messageExt = msgs.get(msgs.size() - 1);
String property = messageExt.getProperty(MessageConst.PROPERTY_MAX_OFFSET);
if (property != null) {
long accTotal = Long.parseLong(property) - messageExt.getQueueOffset();
if (accTotal > 0) {
this.msgAccCnt = accTotal;
}
}
}
} finally {
this.lockTreeMap.writeLock().unlock();
}
} catch (InterruptedException e) {
log.error("putMessage exception", e);
}
return dispatchToConsume;
}
public long getMaxSpan() {
try {
this.lockTreeMap.readLock().lockInterruptibly();
try {
if (!this.msgTreeMap.isEmpty()) {
return this.msgTreeMap.lastKey() - this.msgTreeMap.firstKey();
}
} finally {
this.lockTreeMap.readLock().unlock();
}
} catch (InterruptedException e) {
log.error("getMaxSpan exception", e);
}
return 0;
}
public long removeMessage(final List<MessageExt> msgs) {
long result = -1;
final long now = System.currentTimeMillis();
try {
this.lockTreeMap.writeLock().lockInterruptibly();
this.lastConsumeTimestamp = now;
try {
if (!msgTreeMap.isEmpty()) {
result = this.queueOffsetMax + 1;
int removedCnt = 0;
for (MessageExt msg : msgs) {
MessageExt prev = msgTreeMap.remove(msg.getQueueOffset());
if (prev != null) {
removedCnt--;
msgSize.addAndGet(0 - msg.getBody().length);
}
}
msgCount.addAndGet(removedCnt);
if (!msgTreeMap.isEmpty()) {
result = msgTreeMap.firstKey();
}
}
} finally {
this.lockTreeMap.writeLock().unlock();
}
} catch (Throwable t) {
log.error("removeMessage exception", t);
}
return result;
}
public TreeMap<Long, MessageExt> getMsgTreeMap() {
return msgTreeMap;
}
public AtomicLong getMsgCount() {
return msgCount;
}
public AtomicLong getMsgSize() {
return msgSize;
}
public boolean isDropped() {
return dropped;
}
public void setDropped(boolean dropped) {
this.dropped = dropped;
}
public boolean isLocked() {
return locked;
}
public void setLocked(boolean locked) {
this.locked = locked;
}
public void rollback() {
try {
this.lockTreeMap.writeLock().lockInterruptibly();
try {
this.msgTreeMap.putAll(this.consumingMsgOrderlyTreeMap);
this.consumingMsgOrderlyTreeMap.clear();
} finally {
this.lockTreeMap.writeLock().unlock();
}
} catch (InterruptedException e) {
log.error("rollback exception", e);
}
}
public long commit() {
try {
this.lockTreeMap.writeLock().lockInterruptibly();
try {
Long offset = this.consumingMsgOrderlyTreeMap.lastKey();
msgCount.addAndGet(0 - this.consumingMsgOrderlyTreeMap.size());
for (MessageExt msg : this.consumingMsgOrderlyTreeMap.values()) {
msgSize.addAndGet(0 - msg.getBody().length);
}
this.consumingMsgOrderlyTreeMap.clear();
if (offset != null) {
return offset + 1;
}
} finally {
this.lockTreeMap.writeLock().unlock();
}
} catch (InterruptedException e) {
log.error("commit exception", e);
}
return -1;
}
public void makeMessageToCosumeAgain(List<MessageExt> msgs) {
try {
this.lockTreeMap.writeLock().lockInterruptibly();
try {
for (MessageExt msg : msgs) {
this.consumingMsgOrderlyTreeMap.remove(msg.getQueueOffset());
this.msgTreeMap.put(msg.getQueueOffset(), msg);
}
} finally {
this.lockTreeMap.writeLock().unlock();
}
} catch (InterruptedException e) {
log.error("makeMessageToCosumeAgain exception", e);
}
}
public List<MessageExt> takeMessags(final int batchSize) {
List<MessageExt> result = new ArrayList<MessageExt>(batchSize);
final long now = System.currentTimeMillis();
try {
this.lockTreeMap.writeLock().lockInterruptibly();
this.lastConsumeTimestamp = now;
try {
if (!this.msgTreeMap.isEmpty()) {
for (int i = 0; i < batchSize; i++) {
Map.Entry<Long, MessageExt> entry = this.msgTreeMap.pollFirstEntry();
if (entry != null) {
result.add(entry.getValue());
consumingMsgOrderlyTreeMap.put(entry.getKey(), entry.getValue());
} else {
break;
}
}
}
if (result.isEmpty()) {
consuming = false;
}
} finally {
this.lockTreeMap.writeLock().unlock();
}
} catch (InterruptedException e) {
log.error("take Messages exception", e);
}
return result;
}
public boolean hasTempMessage() {
try {
this.lockTreeMap.readLock().lockInterruptibly();
try {
return !this.msgTreeMap.isEmpty();
} finally {
this.lockTreeMap.readLock().unlock();
}
} catch (InterruptedException e) {
}
return true;
}
public void clear() {
try {
this.lockTreeMap.writeLock().lockInterruptibly();
try {
this.msgTreeMap.clear();
this.consumingMsgOrderlyTreeMap.clear();
this.msgCount.set(0);
this.msgSize.set(0);
this.queueOffsetMax = 0L;
} finally {
this.lockTreeMap.writeLock().unlock();
}
} catch (InterruptedException e) {
log.error("rollback exception", e);
}
}
public long getLastLockTimestamp() {
return lastLockTimestamp;
}
public void setLastLockTimestamp(long lastLockTimestamp) {
this.lastLockTimestamp = lastLockTimestamp;
}
public Lock getLockConsume() {
return lockConsume;
}
public long getLastPullTimestamp() {
return lastPullTimestamp;
}
public void setLastPullTimestamp(long lastPullTimestamp) {
this.lastPullTimestamp = lastPullTimestamp;
}
public long getMsgAccCnt() {
return msgAccCnt;
}
public void setMsgAccCnt(long msgAccCnt) {
this.msgAccCnt = msgAccCnt;
}
public long getTryUnlockTimes() {
return this.tryUnlockTimes.get();
}
public void incTryUnlockTimes() {
this.tryUnlockTimes.incrementAndGet();
}
public void fillProcessQueueInfo(final ProcessQueueInfo info) {
try {
this.lockTreeMap.readLock().lockInterruptibly();
if (!this.msgTreeMap.isEmpty()) {
info.setCachedMsgMinOffset(this.msgTreeMap.firstKey());
info.setCachedMsgMaxOffset(this.msgTreeMap.lastKey());
info.setCachedMsgCount(this.msgTreeMap.size());
info.setCachedMsgSizeInMiB((int) (this.msgSize.get() / (1024 * 1024)));
}
if (!this.consumingMsgOrderlyTreeMap.isEmpty()) {
info.setTransactionMsgMinOffset(this.consumingMsgOrderlyTreeMap.firstKey());
info.setTransactionMsgMaxOffset(this.consumingMsgOrderlyTreeMap.lastKey());
info.setTransactionMsgCount(this.consumingMsgOrderlyTreeMap.size());
}
info.setLocked(this.locked);
info.setTryUnlockTimes(this.tryUnlockTimes.get());
info.setLastLockTimestamp(this.lastLockTimestamp);
info.setDroped(this.dropped);
info.setLastPullTimestamp(this.lastPullTimestamp);
info.setLastConsumeTimestamp(this.lastConsumeTimestamp);
} catch (Exception e) {
} finally {
this.lockTreeMap.readLock().unlock();
}
}
public long getLastConsumeTimestamp() {
return lastConsumeTimestamp;
}
public void setLastConsumeTimestamp(long lastConsumeTimestamp) {
this.lastConsumeTimestamp = lastConsumeTimestamp;
}
}
```
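The bookkeeping contract above is easy to exercise directly: putMessage drives msgCount/msgSize and flips the consuming flag, while removeMessage returns the smallest offset still held by the queue (or queueOffsetMax + 1 once it is drained). Below is a minimal sketch, assuming the rocketmq-client and rocketmq-common jars are on the classpath; the class name, topic, offsets, and body size are illustrative, and the asserts need the -ea JVM flag:
```
import java.util.Arrays;
import java.util.Collections;
import org.apache.rocketmq.client.impl.consumer.ProcessQueue;
import org.apache.rocketmq.common.message.MessageExt;

public class ProcessQueueSketch {
    private static MessageExt msg(long offset) {
        MessageExt m = new MessageExt();
        m.setTopic("FooBar");
        m.setQueueOffset(offset);
        m.setBody(new byte[16]); // 16-byte body so the msgSize accounting is visible
        return m;
    }

    public static void main(String[] args) {
        ProcessQueue pq = new ProcessQueue();
        // The first put on an idle queue flips `consuming` and requests a dispatch.
        boolean dispatchToConsume = pq.putMessage(Arrays.asList(msg(100), msg(101)));
        assert dispatchToConsume;
        assert pq.getMsgCount().get() == 2;
        assert pq.getMsgSize().get() == 32;
        // Removal is keyed on queue offset; the result is the new minimum in-flight offset.
        long nextOffset = pq.removeMessage(Collections.singletonList(msg(100)));
        assert nextOffset == 101;
        assert pq.getMsgCount().get() == 1;
    }
}
```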
|
```package org.apache.rocketmq.tools.command.namesrv;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.rocketmq.client.ClientConfig;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.impl.MQClientAPIImpl;
import org.apache.rocketmq.client.impl.MQClientManager;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.remoting.exception.RemotingConnectException;
import org.apache.rocketmq.remoting.exception.RemotingSendRequestException;
import org.apache.rocketmq.remoting.exception.RemotingTimeoutException;
import org.apache.rocketmq.srvutil.ServerUtil;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExtImpl;
import org.apache.rocketmq.tools.command.SubCommandException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.ArgumentMatchers;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class GetNamesrvConfigCommandTest {
private static DefaultMQAdminExt defaultMQAdminExt;
private static DefaultMQAdminExtImpl defaultMQAdminExtImpl;
private static MQClientInstance mqClientInstance = MQClientManager.getInstance().getAndCreateMQClientInstance(new ClientConfig());
private static MQClientAPIImpl mQClientAPIImpl;
@BeforeClass
public static void init() throws NoSuchFieldException, IllegalAccessException, InterruptedException, RemotingTimeoutException, MQClientException, RemotingSendRequestException, RemotingConnectException, MQBrokerException, UnsupportedEncodingException {
mQClientAPIImpl = mock(MQClientAPIImpl.class);
defaultMQAdminExt = new DefaultMQAdminExt();
defaultMQAdminExtImpl = new DefaultMQAdminExtImpl(defaultMQAdminExt, 1000);
Field field = DefaultMQAdminExtImpl.class.getDeclaredField("mqClientInstance");
field.setAccessible(true);
field.set(defaultMQAdminExtImpl, mqClientInstance);
field = MQClientInstance.class.getDeclaredField("mQClientAPIImpl");
field.setAccessible(true);
field.set(mqClientInstance, mQClientAPIImpl);
field = DefaultMQAdminExt.class.getDeclaredField("defaultMQAdminExtImpl");
field.setAccessible(true);
field.set(defaultMQAdminExt, defaultMQAdminExtImpl);
Map<String, Properties> propertiesMap = new HashMap<>();
when(mQClientAPIImpl.getNameServerConfig(ArgumentMatchers.<String>anyList(), anyLong())).thenReturn(propertiesMap);
}
@AfterClass
public static void terminate() {
defaultMQAdminExt.shutdown();
}
@Test
public void testExecute() throws SubCommandException {
GetNamesrvConfigCommand cmd = new GetNamesrvConfigCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new PosixParser());
cmd.execute(commandLine, options, null);
}
}
```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.tools.command.namesrv;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.command.SubCommand;
import org.apache.rocketmq.tools.command.SubCommandException;
public class GetNamesrvConfigCommand implements SubCommand {
@Override
public String commandName() {
return "getNamesrvConfig";
}
@Override
public String commandDesc() {
return "Get configs of name server.";
}
@Override
public Options buildCommandlineOptions(final Options options) {
return options;
}
@Override
public void execute(final CommandLine commandLine, final Options options,
final RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
try {
// servers
String servers = commandLine.getOptionValue('n');
List<String> serverList = null;
if (servers != null && servers.length() > 0) {
String[] serverArray = servers.trim().split(";");
if (serverArray.length > 0) {
serverList = Arrays.asList(serverArray);
}
}
defaultMQAdminExt.start();
Map<String, Properties> nameServerConfigs = defaultMQAdminExt.getNameServerConfig(serverList);
for (String server : nameServerConfigs.keySet()) {
System.out.printf("============%s============\n",
server);
for (Object key : nameServerConfigs.get(server).keySet()) {
System.out.printf("%-50s= %s\n", key, nameServerConfigs.get(server).get(key));
}
}
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
}```
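The only input execute() reads besides the RPC hook is the shared -n option; the splitting logic below mirrors the code above verbatim so its behavior can be checked in isolation. The addresses are placeholders, and passing a null list makes getNameServerConfig fall back to the name servers already configured on the admin client:
```
import java.util.Arrays;
import java.util.List;

public class NamesrvOptionSketch {
    public static void main(String[] args) {
        String servers = "192.168.0.1:9876;192.168.0.2:9876"; // value passed via -n
        List<String> serverList = null;
        if (servers != null && servers.length() > 0) {
            String[] serverArray = servers.trim().split(";");
            if (serverArray.length > 0) {
                serverList = Arrays.asList(serverArray);
            }
        }
        // Prints: [192.168.0.1:9876, 192.168.0.2:9876]
        System.out.println(serverList);
    }
}
```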
|
```package org.apache.rocketmq.remoting.netty;
import java.util.concurrent.Semaphore;
import org.apache.rocketmq.remoting.InvokeCallback;
import org.apache.rocketmq.remoting.common.SemaphoreReleaseOnlyOnce;
import org.apache.rocketmq.remoting.protocol.RemotingCommand;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class NettyRemotingAbstractTest {
@Spy
private NettyRemotingAbstract remotingAbstract = new NettyRemotingClient(new NettyClientConfig());
@Test
public void testProcessResponseCommand() throws InterruptedException {
final Semaphore semaphore = new Semaphore(0);
ResponseFuture responseFuture = new ResponseFuture(1, 3000, new InvokeCallback() {
@Override
public void operationComplete(final ResponseFuture responseFuture) {
assertThat(semaphore.availablePermits()).isEqualTo(0);
}
}, new SemaphoreReleaseOnlyOnce(semaphore));
remotingAbstract.responseTable.putIfAbsent(1, responseFuture);
RemotingCommand response = RemotingCommand.createResponseCommand(0, "Foo");
response.setOpaque(1);
remotingAbstract.processResponseCommand(null, response);
// Acquire the release permit after call back
semaphore.acquire(1);
assertThat(semaphore.availablePermits()).isEqualTo(0);
}
@Test
public void testProcessResponseCommand_NullCallBack() throws InterruptedException {
final Semaphore semaphore = new Semaphore(0);
ResponseFuture responseFuture = new ResponseFuture(1, 3000, null,
new SemaphoreReleaseOnlyOnce(semaphore));
remotingAbstract.responseTable.putIfAbsent(1, responseFuture);
RemotingCommand response = RemotingCommand.createResponseCommand(0, "Foo");
response.setOpaque(1);
remotingAbstract.processResponseCommand(null, response);
assertThat(semaphore.availablePermits()).isEqualTo(1);
}
@Test
public void testProcessResponseCommand_RunCallBackInCurrentThread() throws InterruptedException {
final Semaphore semaphore = new Semaphore(0);
ResponseFuture responseFuture = new ResponseFuture(1, 3000, new InvokeCallback() {
@Override
public void operationComplete(final ResponseFuture responseFuture) {
assertThat(semaphore.availablePermits()).isEqualTo(0);
}
}, new SemaphoreReleaseOnlyOnce(semaphore));
remotingAbstract.responseTable.putIfAbsent(1, responseFuture);
when(remotingAbstract.getCallbackExecutor()).thenReturn(null);
RemotingCommand response = RemotingCommand.createResponseCommand(0, "Foo");
response.setOpaque(1);
remotingAbstract.processResponseCommand(null, response);
// Acquire the release permit after call back finished in current thread
semaphore.acquire(1);
assertThat(semaphore.availablePermits()).isEqualTo(0);
}
}```
|
Please help me generate a test for this class.
|
```package org.apache.rocketmq.remoting.netty;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslHandler;
import java.net.SocketAddress;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import org.apache.rocketmq.remoting.ChannelEventListener;
import org.apache.rocketmq.remoting.InvokeCallback;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.remoting.common.Pair;
import org.apache.rocketmq.remoting.common.RemotingHelper;
import org.apache.rocketmq.remoting.common.SemaphoreReleaseOnlyOnce;
import org.apache.rocketmq.remoting.common.ServiceThread;
import org.apache.rocketmq.remoting.exception.RemotingSendRequestException;
import org.apache.rocketmq.remoting.exception.RemotingTimeoutException;
import org.apache.rocketmq.remoting.exception.RemotingTooMuchRequestException;
import org.apache.rocketmq.remoting.protocol.RemotingCommand;
import org.apache.rocketmq.remoting.protocol.RemotingSysResponseCode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class NettyRemotingAbstract {
/**
* Remoting logger instance.
*/
private static final Logger log = LoggerFactory.getLogger(RemotingHelper.ROCKETMQ_REMOTING);
/**
* Semaphore to limit maximum number of on-going one-way requests, which protects system memory footprint.
*/
protected final Semaphore semaphoreOneway;
/**
* Semaphore to limit maximum number of on-going asynchronous requests, which protects system memory footprint.
*/
protected final Semaphore semaphoreAsync;
/**
* This map caches all on-going requests.
*/
protected final ConcurrentMap<Integer /* opaque */, ResponseFuture> responseTable =
new ConcurrentHashMap<Integer, ResponseFuture>(256);
/**
* This container holds all processors per request code; for each incoming request, we look up the
* corresponding processor in this map to handle the request.
*/
protected final HashMap<Integer/* request code */, Pair<NettyRequestProcessor, ExecutorService>> processorTable =
new HashMap<Integer, Pair<NettyRequestProcessor, ExecutorService>>(64);
/**
* Executor to feed netty events to user defined {@link ChannelEventListener}.
*/
protected final NettyEventExecutor nettyEventExecutor = new NettyEventExecutor();
/**
* The default request processor to use in case there is no exact match in {@link #processorTable} per request code.
*/
protected Pair<NettyRequestProcessor, ExecutorService> defaultRequestProcessor;
/**
* SSL context via which to create {@link SslHandler}.
*/
protected SslContext sslContext;
/**
* Constructor, specifying capacity of one-way and asynchronous semaphores.
*
* @param permitsOneway Number of permits for one-way requests.
* @param permitsAsync Number of permits for asynchronous requests.
*/
public NettyRemotingAbstract(final int permitsOneway, final int permitsAsync) {
this.semaphoreOneway = new Semaphore(permitsOneway, true);
this.semaphoreAsync = new Semaphore(permitsAsync, true);
}
/**
* Custom channel event listener.
*
* @return custom channel event listener if defined; null otherwise.
*/
public abstract ChannelEventListener getChannelEventListener();
/**
* Put a netty event to the executor.
*
* @param event Netty event instance.
*/
public void putNettyEvent(final NettyEvent event) {
this.nettyEventExecutor.putNettyEvent(event);
}
/**
* Entry of incoming command processing.
*
* <p>
* <strong>Note:</strong>
* The incoming remoting command may be
* <ul>
* <li>An inquiry request from a remote peer component;</li>
* <li>A response to a previous request issued by this very participant.</li>
* </ul>
* </p>
*
* @param ctx Channel handler context.
* @param msg incoming remoting command.
* @throws Exception if there was an error while processing the incoming command.
*/
public void processMessageReceived(ChannelHandlerContext ctx, RemotingCommand msg) throws Exception {
final RemotingCommand cmd = msg;
if (cmd != null) {
switch (cmd.getType()) {
case REQUEST_COMMAND:
processRequestCommand(ctx, cmd);
break;
case RESPONSE_COMMAND:
processResponseCommand(ctx, cmd);
break;
default:
break;
}
}
}
/**
* Process incoming request command issued by remote peer.
*
* @param ctx channel handler context.
* @param cmd request command.
*/
public void processRequestCommand(final ChannelHandlerContext ctx, final RemotingCommand cmd) {
final Pair<NettyRequestProcessor, ExecutorService> matched = this.processorTable.get(cmd.getCode());
final Pair<NettyRequestProcessor, ExecutorService> pair = null == matched ? this.defaultRequestProcessor : matched;
final int opaque = cmd.getOpaque();
if (pair != null) {
Runnable run = new Runnable() {
@Override
public void run() {
try {
RPCHook rpcHook = NettyRemotingAbstract.this.getRPCHook();
if (rpcHook != null) {
rpcHook.doBeforeRequest(RemotingHelper.parseChannelRemoteAddr(ctx.channel()), cmd);
}
final RemotingCommand response = pair.getObject1().processRequest(ctx, cmd);
if (rpcHook != null) {
rpcHook.doAfterResponse(RemotingHelper.parseChannelRemoteAddr(ctx.channel()), cmd, response);
}
if (!cmd.isOnewayRPC()) {
if (response != null) {
response.setOpaque(opaque);
response.markResponseType();
try {
ctx.writeAndFlush(response);
} catch (Throwable e) {
log.error("process request over, but response failed", e);
log.error(cmd.toString());
log.error(response.toString());
}
} else {
}
}
} catch (Throwable e) {
log.error("process request exception", e);
log.error(cmd.toString());
if (!cmd.isOnewayRPC()) {
final RemotingCommand response = RemotingCommand.createResponseCommand(RemotingSysResponseCode.SYSTEM_ERROR,
RemotingHelper.exceptionSimpleDesc(e));
response.setOpaque(opaque);
ctx.writeAndFlush(response);
}
}
}
};
if (pair.getObject1().rejectRequest()) {
final RemotingCommand response = RemotingCommand.createResponseCommand(RemotingSysResponseCode.SYSTEM_BUSY,
"[REJECTREQUEST]system busy, start flow control for a while");
response.setOpaque(opaque);
ctx.writeAndFlush(response);
return;
}
try {
final RequestTask requestTask = new RequestTask(run, ctx.channel(), cmd);
pair.getObject2().submit(requestTask);
} catch (RejectedExecutionException e) {
if ((System.currentTimeMillis() % 10000) == 0) {
log.warn(RemotingHelper.parseChannelRemoteAddr(ctx.channel())
+ ", too many requests and system thread pool busy, RejectedExecutionException "
+ pair.getObject2().toString()
+ " request code: " + cmd.getCode());
}
if (!cmd.isOnewayRPC()) {
final RemotingCommand response = RemotingCommand.createResponseCommand(RemotingSysResponseCode.SYSTEM_BUSY,
"[OVERLOAD]system busy, start flow control for a while");
response.setOpaque(opaque);
ctx.writeAndFlush(response);
}
}
} else {
String error = " request type " + cmd.getCode() + " not supported";
final RemotingCommand response =
RemotingCommand.createResponseCommand(RemotingSysResponseCode.REQUEST_CODE_NOT_SUPPORTED, error);
response.setOpaque(opaque);
ctx.writeAndFlush(response);
log.error(RemotingHelper.parseChannelRemoteAddr(ctx.channel()) + error);
}
}
/**
* Process response from remote peer to the previous issued requests.
*
* @param ctx channel handler context.
* @param cmd response command instance.
*/
public void processResponseCommand(ChannelHandlerContext ctx, RemotingCommand cmd) {
final int opaque = cmd.getOpaque();
final ResponseFuture responseFuture = responseTable.get(opaque);
if (responseFuture != null) {
responseFuture.setResponseCommand(cmd);
responseTable.remove(opaque);
if (responseFuture.getInvokeCallback() != null) {
executeInvokeCallback(responseFuture);
} else {
responseFuture.putResponse(cmd);
responseFuture.release();
}
} else {
log.warn("receive response, but not matched any request, " + RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
log.warn(cmd.toString());
}
}
/**
* Execute callback in callback executor. If callback executor is null, run directly in current thread
*/
private void executeInvokeCallback(final ResponseFuture responseFuture) {
boolean runInThisThread = false;
ExecutorService executor = this.getCallbackExecutor();
if (executor != null) {
try {
executor.submit(new Runnable() {
@Override
public void run() {
try {
responseFuture.executeInvokeCallback();
} catch (Throwable e) {
log.warn("execute callback in executor exception, and callback throw", e);
} finally {
responseFuture.release();
}
}
});
} catch (Exception e) {
runInThisThread = true;
log.warn("execute callback in executor exception, maybe executor busy", e);
}
} else {
runInThisThread = true;
}
if (runInThisThread) {
try {
responseFuture.executeInvokeCallback();
} catch (Throwable e) {
log.warn("executeInvokeCallback Exception", e);
} finally {
responseFuture.release();
}
}
}
/**
* Custom RPC hook.
*
* @return RPC hook if specified; null otherwise.
*/
public abstract RPCHook getRPCHook();
/**
* This method specifies thread pool to use while invoking callback methods.
*
* @return Dedicated thread pool instance if specified; or null if the callback is supposed to be executed in the
* netty event-loop thread.
*/
public abstract ExecutorService getCallbackExecutor();
/**
* <p>
* This method is invoked periodically to scan for and expire stale requests.
* </p>
*/
public void scanResponseTable() {
final List<ResponseFuture> rfList = new LinkedList<ResponseFuture>();
Iterator<Entry<Integer, ResponseFuture>> it = this.responseTable.entrySet().iterator();
while (it.hasNext()) {
Entry<Integer, ResponseFuture> next = it.next();
ResponseFuture rep = next.getValue();
if ((rep.getBeginTimestamp() + rep.getTimeoutMillis() + 1000) <= System.currentTimeMillis()) {
rep.release();
it.remove();
rfList.add(rep);
log.warn("remove timeout request, " + rep);
}
}
for (ResponseFuture rf : rfList) {
try {
executeInvokeCallback(rf);
} catch (Throwable e) {
log.warn("scanResponseTable, operationComplete Exception", e);
}
}
}
public RemotingCommand invokeSyncImpl(final Channel channel, final RemotingCommand request,
final long timeoutMillis)
throws InterruptedException, RemotingSendRequestException, RemotingTimeoutException {
final int opaque = request.getOpaque();
try {
final ResponseFuture responseFuture = new ResponseFuture(opaque, timeoutMillis, null, null);
this.responseTable.put(opaque, responseFuture);
final SocketAddress addr = channel.remoteAddress();
channel.writeAndFlush(request).addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture f) throws Exception {
if (f.isSuccess()) {
responseFuture.setSendRequestOK(true);
return;
} else {
responseFuture.setSendRequestOK(false);
}
responseTable.remove(opaque);
responseFuture.setCause(f.cause());
responseFuture.putResponse(null);
log.warn("send a request command to channel <" + addr + "> failed.");
}
});
RemotingCommand responseCommand = responseFuture.waitResponse(timeoutMillis);
if (null == responseCommand) {
if (responseFuture.isSendRequestOK()) {
throw new RemotingTimeoutException(RemotingHelper.parseSocketAddressAddr(addr), timeoutMillis,
responseFuture.getCause());
} else {
throw new RemotingSendRequestException(RemotingHelper.parseSocketAddressAddr(addr), responseFuture.getCause());
}
}
return responseCommand;
} finally {
this.responseTable.remove(opaque);
}
}
public void invokeAsyncImpl(final Channel channel, final RemotingCommand request, final long timeoutMillis,
final InvokeCallback invokeCallback)
throws InterruptedException, RemotingTooMuchRequestException, RemotingTimeoutException, RemotingSendRequestException {
final int opaque = request.getOpaque();
boolean acquired = this.semaphoreAsync.tryAcquire(timeoutMillis, TimeUnit.MILLISECONDS);
if (acquired) {
final SemaphoreReleaseOnlyOnce once = new SemaphoreReleaseOnlyOnce(this.semaphoreAsync);
final ResponseFuture responseFuture = new ResponseFuture(opaque, timeoutMillis, invokeCallback, once);
this.responseTable.put(opaque, responseFuture);
try {
channel.writeAndFlush(request).addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture f) throws Exception {
if (f.isSuccess()) {
responseFuture.setSendRequestOK(true);
return;
} else {
responseFuture.setSendRequestOK(false);
}
responseFuture.putResponse(null);
responseTable.remove(opaque);
try {
executeInvokeCallback(responseFuture);
} catch (Throwable e) {
log.warn("excute callback in writeAndFlush addListener, and callback throw", e);
} finally {
responseFuture.release();
}
log.warn("send a request command to channel <{}> failed.", RemotingHelper.parseChannelRemoteAddr(channel));
}
});
} catch (Exception e) {
responseFuture.release();
log.warn("send a request command to channel <" + RemotingHelper.parseChannelRemoteAddr(channel) + "> Exception", e);
throw new RemotingSendRequestException(RemotingHelper.parseChannelRemoteAddr(channel), e);
}
} else {
if (timeoutMillis <= 0) {
throw new RemotingTooMuchRequestException("invokeAsyncImpl invoke too fast");
} else {
String info =
String.format("invokeAsyncImpl tryAcquire semaphore timeout, %dms, waiting thread nums: %d semaphoreAsyncValue: %d",
timeoutMillis,
this.semaphoreAsync.getQueueLength(),
this.semaphoreAsync.availablePermits()
);
log.warn(info);
throw new RemotingTimeoutException(info);
}
}
}
public void invokeOnewayImpl(final Channel channel, final RemotingCommand request, final long timeoutMillis)
throws InterruptedException, RemotingTooMuchRequestException, RemotingTimeoutException, RemotingSendRequestException {
request.markOnewayRPC();
boolean acquired = this.semaphoreOneway.tryAcquire(timeoutMillis, TimeUnit.MILLISECONDS);
if (acquired) {
final SemaphoreReleaseOnlyOnce once = new SemaphoreReleaseOnlyOnce(this.semaphoreOneway);
try {
channel.writeAndFlush(request).addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture f) throws Exception {
once.release();
if (!f.isSuccess()) {
log.warn("send a request command to channel <" + channel.remoteAddress() + "> failed.");
}
}
});
} catch (Exception e) {
once.release();
log.warn("write send a request command to channel <" + channel.remoteAddress() + "> failed.");
throw new RemotingSendRequestException(RemotingHelper.parseChannelRemoteAddr(channel), e);
}
} else {
if (timeoutMillis <= 0) {
throw new RemotingTooMuchRequestException("invokeOnewayImpl invoke too fast");
} else {
String info = String.format(
"invokeOnewayImpl tryAcquire semaphore timeout, %dms, waiting thread nums: %d semaphoreAsyncValue: %d",
timeoutMillis,
this.semaphoreOneway.getQueueLength(),
this.semaphoreOneway.availablePermits()
);
log.warn(info);
throw new RemotingTimeoutException(info);
}
}
}
class NettyEventExecutor extends ServiceThread {
private final LinkedBlockingQueue<NettyEvent> eventQueue = new LinkedBlockingQueue<NettyEvent>();
private final int maxSize = 10000;
public void putNettyEvent(final NettyEvent event) {
if (this.eventQueue.size() <= maxSize) {
this.eventQueue.add(event);
} else {
log.warn("event queue size[{}] enough, so drop this event {}", this.eventQueue.size(), event.toString());
}
}
@Override
public void run() {
log.info(this.getServiceName() + " service started");
final ChannelEventListener listener = NettyRemotingAbstract.this.getChannelEventListener();
while (!this.isStopped()) {
try {
NettyEvent event = this.eventQueue.poll(3000, TimeUnit.MILLISECONDS);
if (event != null && listener != null) {
switch (event.getType()) {
case IDLE:
listener.onChannelIdle(event.getRemoteAddr(), event.getChannel());
break;
case CLOSE:
listener.onChannelClose(event.getRemoteAddr(), event.getChannel());
break;
case CONNECT:
listener.onChannelConnect(event.getRemoteAddr(), event.getChannel());
break;
case EXCEPTION:
listener.onChannelException(event.getRemoteAddr(), event.getChannel());
break;
default:
break;
}
}
} catch (Exception e) {
log.warn(this.getServiceName() + " service has exception. ", e);
}
}
log.info(this.getServiceName() + " service end");
}
@Override
public String getServiceName() {
return NettyEventExecutor.class.getSimpleName();
}
}
}
```
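The expiry rule in scanResponseTable() (an entry is stale once beginTimestamp + timeoutMillis + 1000 ms has passed, after which it is released, removed, and its callback executed) can be verified with the same in-package setup the test above uses for access to the protected responseTable. A hedged sketch; the class name is illustrative, and the 1 ms timeout is chosen only so a short sleep crosses the threshold:
```
package org.apache.rocketmq.remoting.netty;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.rocketmq.remoting.InvokeCallback;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;

public class ScanResponseTableSketchTest {
    @Test
    public void scanResponseTableExpiresStaleFutures() throws InterruptedException {
        NettyRemotingAbstract remoting = new NettyRemotingClient(new NettyClientConfig());
        final CountDownLatch callbackFired = new CountDownLatch(1);
        // timeoutMillis = 1, so beginTimestamp + 1 + 1000 lies in the past after the sleep below.
        ResponseFuture future = new ResponseFuture(1, 1, new InvokeCallback() {
            @Override
            public void operationComplete(ResponseFuture responseFuture) {
                callbackFired.countDown();
            }
        }, null);
        remoting.responseTable.put(1, future);
        Thread.sleep(1100);
        remoting.scanResponseTable(); // removes the stale entry and runs its callback
        assertThat(callbackFired.await(3, TimeUnit.SECONDS)).isTrue();
        assertThat(remoting.responseTable.get(1)).isNull();
    }
}
```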
|
```package io.dronefleet.mavlink;
import io.dronefleet.mavlink.minimal.Heartbeat;
import io.dronefleet.mavlink.minimal.MavAutopilot;
import io.dronefleet.mavlink.minimal.MavState;
import io.dronefleet.mavlink.minimal.MavType;
import io.dronefleet.mavlink.util.UnmodifiableMapBuilder;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.util.Collections;
import static org.junit.Assert.assertEquals;
public class MavlinkConnectionTest {
private PipedInputStream in;
private PipedOutputStream out;
private MavlinkConnection source;
@Before
public void setUp() throws IOException {
in = new PipedInputStream();
out = new PipedOutputStream();
source = MavlinkConnection.create(
new PipedInputStream(out),
new PipedOutputStream(in));
}
@Test(timeout = 500L)
public void itUsesDefaultDialectByDefault() throws IOException {
MavlinkDialect dialect = new AbstractMavlinkDialect(
"testdialect",
Collections.emptyList(),
new UnmodifiableMapBuilder<Integer, Class>()
.put(0, TestMessage.class)
.build());
MavlinkConnection target = MavlinkConnection.builder(in, out)
.defaultDialect(dialect)
.build();
Object expected = TestMessage.builder()
.text("Test")
.build();
source.send1(0, 0, expected);
Object actual = target.next().getPayload();
assertEquals(expected, actual);
}
@Test(timeout = 500L)
public void itUsesCommonDialectAsFallback() throws IOException {
MavlinkDialect dialect = new AbstractMavlinkDialect(
"testdialect",
Collections.emptyList(),
new UnmodifiableMapBuilder<Integer, Class>()
.put(0, TestMessage.class)
.build());
MavlinkConnection target = MavlinkConnection.builder(in, out)
.defaultDialect(dialect)
.build();
Object expected = Heartbeat.builder()
.autopilot(MavAutopilot.MAV_AUTOPILOT_GENERIC)
.type(MavType.MAV_TYPE_GENERIC)
.systemStatus(MavState.MAV_STATE_UNINIT)
.baseMode()
.mavlinkVersion(3)
.build();
source.send1(0, 0, expected);
Object actual = target.next().getPayload();
assertEquals(expected, actual);
}
@Test(timeout = 500L)
public void defaultDialectDoesNotPreventHeartbeatFromConfiguringDialect() throws IOException {
MavlinkDialect defaultDialect = new AbstractMavlinkDialect(
"testdialect",
Collections.emptyList(),
new UnmodifiableMapBuilder<Integer, Class>()
.put(0, TestMessage.class)
.build());
MavlinkDialect expected = new AbstractMavlinkDialect(
"expecteddialect",
Collections.emptyList(),
Collections.emptyMap());
MavlinkConnection target = MavlinkConnection.builder(in, out)
.dialect(MavAutopilot.MAV_AUTOPILOT_GENERIC, expected)
.defaultDialect(defaultDialect)
.build();
source.send1(0, 0, Heartbeat.builder()
.autopilot(MavAutopilot.MAV_AUTOPILOT_GENERIC)
.type(MavType.MAV_TYPE_GENERIC)
.systemStatus(MavState.MAV_STATE_UNINIT)
.baseMode()
.mavlinkVersion(3)
.build());
target.next();
MavlinkDialect actual = target.getDialect(0);
assertEquals(expected, actual);
}
}```
|
Please help me generate a test for this class.
|
```package io.dronefleet.mavlink;
import io.dronefleet.mavlink.annotations.MavlinkMessageInfo;
import io.dronefleet.mavlink.ardupilotmega.ArdupilotmegaDialect;
import io.dronefleet.mavlink.asluav.AsluavDialect;
import io.dronefleet.mavlink.autoquad.AutoquadDialect;
import io.dronefleet.mavlink.common.CommonDialect;
import io.dronefleet.mavlink.minimal.MavAutopilot;
import io.dronefleet.mavlink.minimal.Heartbeat;
import io.dronefleet.mavlink.paparazzi.PaparazziDialect;
import io.dronefleet.mavlink.protocol.MavlinkPacket;
import io.dronefleet.mavlink.protocol.MavlinkPacketReader;
import io.dronefleet.mavlink.serialization.payload.MavlinkPayloadDeserializer;
import io.dronefleet.mavlink.serialization.payload.MavlinkPayloadSerializer;
import io.dronefleet.mavlink.serialization.payload.reflection.ReflectionPayloadDeserializer;
import io.dronefleet.mavlink.serialization.payload.reflection.ReflectionPayloadSerializer;
import io.dronefleet.mavlink.slugs.SlugsDialect;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* <p>Represents a Mavlink connection. This class is responsible for mid-to-low-level function of Mavlink communication.
* A {@code MavlinkConnection} is responsible for the following:</p>
* <ul>
* <li>Serialization of Mavlink messages.</li>
* <li>Tracking and resolving dialects of systems that are available through the connection.</li>
* <li>CRC validation of packets.</li>
* </ul>
*/
public class MavlinkConnection {
/**
* Builds MavlinkConnection instances.
*/
public static final class Builder {
private final InputStream in;
private final OutputStream out;
private final Map<MavAutopilot, MavlinkDialect> dialects;
private MavlinkDialect defaultDialect;
private Builder(InputStream in, OutputStream out) {
this.in = in;
this.out = out;
dialects = new HashMap<>();
dialect(MavAutopilot.MAV_AUTOPILOT_GENERIC, new CommonDialect())
.dialect(MavAutopilot.MAV_AUTOPILOT_AEROB, new CommonDialect())
.dialect(MavAutopilot.MAV_AUTOPILOT_AIRRAILS, new CommonDialect())
.dialect(MavAutopilot.MAV_AUTOPILOT_UDB, new CommonDialect())
.dialect(MavAutopilot.MAV_AUTOPILOT_SMARTAP, new CommonDialect())
.dialect(MavAutopilot.MAV_AUTOPILOT_SMACCMPILOT, new CommonDialect())
.dialect(MavAutopilot.MAV_AUTOPILOT_OPENPILOT, new CommonDialect())
.dialect(MavAutopilot.MAV_AUTOPILOT_FP, new CommonDialect())
.dialect(MavAutopilot.MAV_AUTOPILOT_ARDUPILOTMEGA, new ArdupilotmegaDialect())
.dialect(MavAutopilot.MAV_AUTOPILOT_PX4, new ArdupilotmegaDialect())
.dialect(MavAutopilot.MAV_AUTOPILOT_AUTOQUAD, new AutoquadDialect())
.dialect(MavAutopilot.MAV_AUTOPILOT_ASLUAV, new AsluavDialect())
.dialect(MavAutopilot.MAV_AUTOPILOT_SLUGS, new SlugsDialect())
.dialect(MavAutopilot.MAV_AUTOPILOT_PPZ, new PaparazziDialect());
defaultDialect = COMMON_DIALECT;
}
/**
* Adds a dialect entry to this builder. The added dialect will then become supported
* by the built connection.
*
* @param autopilot The autopilot to associate the dialect with.
* @param dialect The dialect to associate.
* @return This builder.
*/
public Builder dialect(MavAutopilot autopilot, MavlinkDialect dialect) {
dialects.put(autopilot, dialect);
return this;
}
/**
* Sets the default dialect to be used by the built connection. The default dialect
* will be assumed for systems which did not yet send a
* {@link io.dronefleet.mavlink.minimal.Heartbeat heartbeat}.
*
* @param dialect The default dialect to use.
* @return This builder.
*/
public Builder defaultDialect(MavlinkDialect dialect) {
defaultDialect = dialect;
return this;
}
/**
* Builds a ready to use connection instance.
*/
public MavlinkConnection build() {
return new MavlinkConnection(
new MavlinkPacketReader(in),
out,
dialects,
defaultDialect,
new ReflectionPayloadDeserializer(),
new ReflectionPayloadSerializer()
);
}
}
/**
* The default dialect for systems which have not yet been associated
* with a specific dialect.
*/
private static final MavlinkDialect COMMON_DIALECT = new CommonDialect();
/**
* Creates a new builder for the specified input/output streams.
*
* @param in The input stream to read messages from.
* @param out The output stream to write messages to.
* @return A builder instance for the specified settings.
*/
public static Builder builder(InputStream in, OutputStream out) {
return new Builder(in, out);
}
/**
* Creates a default connection instance. The result of calling this method
* is equivalent to calling {@code builder(in,out).build()}.
*
* @param in The input stream to read messages from.
* @param out The output stream to write messages to.
* @return A connection instance for the specified streams.
*/
public static MavlinkConnection create(InputStream in, OutputStream out) {
return builder(in, out).build();
}
/**
* A mapping of system IDs and their dialects. Entries are added to this map
* when heartbeats are received.
*/
private final Map<Integer, MavlinkDialect> systemDialects;
/**
* The current send sequence of this connection.
*/
private int sequence;
/**
* The reader that this connection reads messages from.
*/
private final MavlinkPacketReader reader;
/**
* The output stream that this connection writes messages to.
*/
private final OutputStream out;
/**
* A mapping of autopilot types and their associated dialects. This is used
* in order to calculate the dialect of a system when a heartbeat is
* received.
*/
private final Map<MavAutopilot, MavlinkDialect> dialects;
/**
* The default dialect to use before a system announces its dialect with a
* heartbeat.
*/
private final MavlinkDialect defaultDialect;
/**
* The payload deserializer that this connection uses in order to deserialize
* message payloads.
*/
private final MavlinkPayloadDeserializer deserializer;
/**
* The serializer that this connection uses in order to serialize message payloads.
*/
private final MavlinkPayloadSerializer serializer;
/**
* Locks calls to {@link #next()} to ensure no concurrent reads occur.
*/
private final Lock readLock;
/**
* Locks write calls to ensure no concurrent writes.
*/
private final Lock writeLock;
MavlinkConnection(
MavlinkPacketReader reader,
OutputStream out,
Map<MavAutopilot, MavlinkDialect> dialects,
MavlinkDialect defaultDialect,
MavlinkPayloadDeserializer deserializer,
MavlinkPayloadSerializer serializer) {
this.reader = reader;
this.out = out;
this.dialects = dialects;
this.defaultDialect = defaultDialect;
this.deserializer = deserializer;
this.serializer = serializer;
systemDialects = new HashMap<>();
readLock = new ReentrantLock();
writeLock = new ReentrantLock();
}
/**
* <p>Reads a single message from this connection. This method drops a message and attempts to read the next
* one under either of the following conditions:</p>
* <p>
* <ul>
* <li>The currently configured dialect for the origin system does not support the received message.</li>
* <li>The received message failed to pass CRC validation.</li>
* </ul>
* <p>
* <p>When a heartbeat is read, this method resolves the dialect of the originating system by using the
* dialect map that was specified when this connection was constructed. The resolved dialect will then be used
* when evaluating the next messages received from that system.</p>
* <p>
* <p>When receiving messages from an origin whose dialect is unknown or unsupported -- such as before receiving
* a heartbeat, or when the heartbeat's autopilot is unrecognized -- this method defaults to using the
* {@link io.dronefleet.mavlink.common.CommonDialect common} dialect.</p>
*
* @return The next supported and valid Mavlink message.
* @throws EOFException When the stream ends.
* @throws IOException If there has been an error reading from the stream.
*/
public MavlinkMessage next() throws IOException {
readLock.lock();
try {
MavlinkPacket packet;
while ((packet = reader.next()) != null) {
Class<?> messageType = getMessageType(packet, Arrays.asList(
systemDialects.getOrDefault(packet.getSystemId(), defaultDialect),
COMMON_DIALECT));
if (messageType != null) {
Object payload = deserializer.deserialize(packet.getPayload(), messageType);
if (payload instanceof Heartbeat) {
Heartbeat heartbeat = (Heartbeat) payload;
if (dialects.containsKey(heartbeat.autopilot().entry())) {
systemDialects.put(packet.getSystemId(), dialects.get(heartbeat.autopilot().entry()));
}
}
if (packet.isMavlink2()) {
//noinspection unchecked
return new Mavlink2Message(packet, payload);
} else {
//noinspection unchecked
return new MavlinkMessage(packet, payload);
}
} else {
reader.drop();
}
}
throw new EOFException("End of stream");
} finally {
readLock.unlock();
}
}
/**
* Sends a Mavlink 1 message using the specified settings.
*
* @param systemId The system ID that originated this message.
* @param componentId The component ID that originated this message.
* @param payload The payload to send.
* @throws IOException if an I/O error occurs.
*/
public void send1(int systemId, int componentId, Object payload) throws IOException {
MavlinkMessageInfo messageInfo = payload.getClass()
.getAnnotation(MavlinkMessageInfo.class);
byte[] serializedPayload = serializer.serialize(payload);
writeLock.lock();
try {
send(MavlinkPacket.createMavlink1Packet(
sequence++,
systemId,
componentId,
messageInfo.id(),
messageInfo.crc(),
serializedPayload));
} finally {
writeLock.unlock();
}
}
/**
* Sends an unsigned Mavlink 2 message using the specified settings.
*
* @param systemId The system ID that originated this message.
* @param componentId The component ID that originated this message.
* @param payload The payload to send.
* @throws IOException if an I/O error occurs.
*/
public void send2(int systemId, int componentId, Object payload) throws IOException {
MavlinkMessageInfo messageInfo = payload.getClass()
.getAnnotation(MavlinkMessageInfo.class);
byte[] serializedPayload = serializer.serialize(payload);
writeLock.lock();
try {
send(MavlinkPacket.createUnsignedMavlink2Packet(
sequence++,
systemId,
componentId,
messageInfo.id(),
messageInfo.crc(),
serializedPayload));
} finally {
writeLock.unlock();
}
}
/**
* Sends a signed Mavlink 2 message using the specified settings.
*
* @param systemId The system ID that originated this message.
* @param componentId The component ID that originated this message.
* @param payload The payload to send.
* @param linkId The link ID to use when signing.
* @param timestamp The timestamp to use when signing.
* @param secretKey The secret key to use when signing.
* @throws IOException if an I/O error occurs.
*/
public void send2(int systemId, int componentId, Object payload, int linkId,
long timestamp, byte[] secretKey) throws IOException {
MavlinkMessageInfo messageInfo = payload.getClass()
.getAnnotation(MavlinkMessageInfo.class);
byte[] serializedPayload = serializer.serialize(payload);
writeLock.lock();
try {
send(MavlinkPacket.createSignedMavlink2Packet(
sequence++,
systemId,
componentId,
messageInfo.id(),
messageInfo.crc(),
serializedPayload,
linkId,
timestamp,
secretKey));
} finally {
writeLock.unlock();
}
}
/**
* Resolves the dialect of the specified system by its ID. This method relies on that
* a heartbeat has previously been received for the system for which the dialect is
* requested.
*
* @param systemId The ID of the system for which dialect resolution is requested.
* @return The dialect of the system of the specified ID, or {@code null} if a
* heartbeat has not yet been received for that system or if there is no
* dialect configured for that system's autopilot.
*/
public MavlinkDialect getDialect(int systemId) {
return systemDialects.get(systemId);
}
/**
* Sends the specified packet directly to the stream.
*
* @param packet The packet to send.
*/
private void send(MavlinkPacket packet) throws IOException {
out.write(packet.getRawBytes());
out.flush();
}
/**
* @param packet The packet for which to resolve the message type.
* @param dialects The list of dialects to use in order to resolve the message type. The list
* order specifies which dialects will take priority in resolving the
* type of the message.
* @return The message type according to the specified dialects, or {@code null}
* if the packet does not represent a message in any of the specified dialects.
*/
private Class<?> getMessageType(MavlinkPacket packet, List<MavlinkDialect> dialects) {
for (MavlinkDialect dialect : dialects) {
Class<?> messageType = getMessageType(packet, dialect);
if (messageType != null) {
return messageType;
}
}
return null;
}
/**
* @param packet The packet for which to resolve the message type.
* @param dialect The dialect to use in order to resolve the message type.
* @return The message type according to the specified dialect, or {@code null}
* if the packet does not represent a message of the specified dialect.
*/
private Class<?> getMessageType(MavlinkPacket packet, MavlinkDialect dialect) {
if (dialect.supports(packet.getMessageId())) {
Class<?> messageType = dialect.resolve(packet.getMessageId());
MavlinkMessageInfo messageInfo = messageType.getAnnotation(MavlinkMessageInfo.class);
if (packet.validateCrc(messageInfo.crc())) {
return messageType;
}
}
return null;
}
}
```
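A hedged end-to-end sketch of the API above, mirroring the piped-stream wiring from the test: one connection emits an unsigned MAVLink 2 heartbeat with send2, the peer reads it with next and thereby learns system 1's dialect. The class name and system/component IDs are illustrative; a heartbeat is small enough to fit the pipe buffer, so a single thread suffices:
```
import io.dronefleet.mavlink.MavlinkConnection;
import io.dronefleet.mavlink.MavlinkMessage;
import io.dronefleet.mavlink.minimal.Heartbeat;
import io.dronefleet.mavlink.minimal.MavAutopilot;
import io.dronefleet.mavlink.minimal.MavState;
import io.dronefleet.mavlink.minimal.MavType;
import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class MavlinkRoundTripSketch {
    public static void main(String[] args) throws IOException {
        PipedInputStream receiverIn = new PipedInputStream();
        PipedOutputStream receiverOut = new PipedOutputStream();
        MavlinkConnection sender = MavlinkConnection.create(
                new PipedInputStream(receiverOut), new PipedOutputStream(receiverIn));
        MavlinkConnection receiver = MavlinkConnection.create(receiverIn, receiverOut);
        sender.send2(1, 1, Heartbeat.builder()
                .type(MavType.MAV_TYPE_GENERIC)
                .autopilot(MavAutopilot.MAV_AUTOPILOT_GENERIC)
                .systemStatus(MavState.MAV_STATE_ACTIVE)
                .baseMode()
                .mavlinkVersion(3)
                .build());
        MavlinkMessage message = receiver.next(); // blocks until the packet arrives
        System.out.println(message.getPayload());
        // The heartbeat also taught the receiver system 1's dialect (CommonDialect here).
        System.out.println(receiver.getDialect(1));
    }
}
```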
|
```package io.dronefleet.mavlink.protocol;
import org.junit.Test;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import static org.junit.Assert.assertEquals;
public class MavlinkPacketReaderTest {
@Test
public void itCanReadMaximumSizedMavlink2Packet() throws IOException {
MavlinkPacket expected = MavlinkPacket.createSignedMavlink2Packet(
0, 0, 0, 0, 0, new byte[255], 0, 0, new byte[32]);
InputStream in = new ByteArrayInputStream(expected.getRawBytes());
MavlinkPacketReader reader = new MavlinkPacketReader(in);
MavlinkPacket actual = reader.next();
assertEquals(expected, actual);
}
}```
|
Please help me generate a test for this class.
|
```package io.dronefleet.mavlink.protocol;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
/**
* <p>
* Reads Mavlink protocol packets from an {@link InputStream}.
* <p>
* The packets read by this class are not CRC checked, and may not be valid. Users of this class
* should {@link MavlinkPacket#validateCrc(int) validate packet CRC}s and {@link #drop() drop}
* packets which do not pass validation. Doing so ensures that no invalid packets are processed,
* and that valid packets are not skipped in favor of invalid packets.
*/
public class MavlinkPacketReader {
private final MavlinkFrameReader in;
/**
* Constructs a new reader for the specified {@link InputStream}
*
* @param in The input stream to read from.
*/
public MavlinkPacketReader(InputStream in) {
this.in = new MavlinkFrameReader(in);
}
/**
* Reads the next packet from the stream.
*
* @throws IOException if an IO error occurs.
* @throws EOFException if reached the end of stream.
*/
public MavlinkPacket next() throws IOException {
while (in.next()) {
byte[] frame = in.frame();
switch (frame[0] & 0xff) {
case MavlinkPacket.MAGIC_V1:
return MavlinkPacket.fromV1Bytes(frame);
case MavlinkPacket.MAGIC_V2:
return MavlinkPacket.fromV2Bytes(frame);
}
// The frame did not begin with a magic marker that we understand.
in.drop();
}
throw new EOFException("End of stream");
}
/**
* Drops the last read packet, returning its bytes to the stream skipping the first byte.
*
* @throws IOException if an IO error occurs.
*/
public void drop() throws IOException {
in.drop();
}
}
```
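A hedged sketch of the validate-then-drop loop the javadoc above prescribes, for the simple case of expecting one message type. The method name is illustrative, and crcExtra stands for the message definition's CRC_EXTRA byte, which callers such as MavlinkConnection obtain from the dialect's MavlinkMessageInfo annotation:
```
import io.dronefleet.mavlink.protocol.MavlinkPacket;
import io.dronefleet.mavlink.protocol.MavlinkPacketReader;
import java.io.IOException;

public class ValidatingReadLoopSketch {
    /**
     * Returns the next packet that passes CRC validation, dropping invalid frames
     * so a valid packet hiding inside a corrupt frame is not skipped.
     */
    public static MavlinkPacket nextValid(MavlinkPacketReader reader, int crcExtra)
            throws IOException {
        while (true) {
            MavlinkPacket packet = reader.next(); // throws EOFException at end of stream
            if (packet.validateCrc(crcExtra)) {
                return packet;
            }
            // Return the frame's bytes to the stream minus its first byte, then retry.
            reader.drop();
        }
    }
}
```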
|
```package com.linkedin.camus.schemaregistry;
import java.util.Arrays;
import java.util.Collection;
import java.util.Properties;
import org.apache.avro.Schema;
import org.apache.avro.repo.InMemoryRepository;
import org.apache.avro.repo.server.RepositoryServer;
import org.junit.After;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
@RunWith(value = Parameterized.class)
public class TestAvroRestSchemaRegistry extends TestSchemaRegistries {
private RepositoryServer server;
public static final Schema SCHEMA1 =
new Schema.Parser()
.parse("{\"type\":\"record\",\"name\":\"DummyLog2\",\"namespace\":\"com.linkedin.camus.example.records\",\"doc\":\"Logs for really important stuff.\",\"fields\":[{\"name\":\"id\",\"type\":\"long\"},{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"muchoStuff\",\"type\":{\"type\":\"map\",\"values\":\"string\"}}]}");
public static final Schema SCHEMA2 =
new Schema.Parser()
.parse("{\"type\":\"record\",\"name\":\"DummyLog2\",\"namespace\":\"com.linkedin.camus.example.records\",\"doc\":\"Logs for really important stuff.\",\"fields\":[{\"name\":\"id\",\"type\":\"long\"},{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"muchoStuff\",\"type\":{\"type\":\"map\",\"values\":\"string\"}}]}");
public TestAvroRestSchemaRegistry(SchemaRegistry<String> registry) {
super(registry);
}
@Override
public Object getSchema1() {
return SCHEMA1;
}
@Override
public Object getSchema2() {
return SCHEMA2;
}
@Before
public void doSetup() throws Exception {
Properties props = new Properties();
props.put("avro.repo.class", InMemoryRepository.class.getName());
props.put("jetty.host", "localhost");
props.put("jetty.port", "8123");
server = new RepositoryServer(props);
server.start();
}
@After
public void doTearDown() throws Exception {
server.stop();
}
@Parameters
public static Collection data() {
Properties props = new Properties();
props.put(AvroRestSchemaRegistry.ETL_SCHEMA_REGISTRY_URL, "http://localhost:8123/schema-repo/");
SchemaRegistry<Schema> avroSchemaRegistry = new AvroRestSchemaRegistry();
avroSchemaRegistry.init(props);
Object[][] data = new Object[][] { { avroSchemaRegistry } };
return Arrays.asList(data);
}
}
```
|
Please help me generate a test for this class.
|
```package com.linkedin.camus.schemaregistry;
import java.util.Properties;
import org.apache.avro.Schema;
import org.apache.avro.repo.SchemaEntry;
import org.apache.avro.repo.SchemaValidationException;
import org.apache.avro.repo.Subject;
import org.apache.avro.repo.client.RESTRepositoryClient;
/**
* An implementation of SchemaRegistry that uses Avro's schema registry to
* manage Avro schemas.
*/
public class AvroRestSchemaRegistry implements SchemaRegistry<Schema> {
private RESTRepositoryClient client;
public static final String ETL_SCHEMA_REGISTRY_URL = "etl.schema.registry.url";
@Override
public void init(Properties props) {
client = new RESTRepositoryClient(props.getProperty(ETL_SCHEMA_REGISTRY_URL));
}
@Override
public String register(String topic, Schema schema) {
Subject subject = client.lookup(topic);
if (subject == null) {
subject = client.register(topic, "org.apache.avro.repo.Validator");
}
try {
return subject.register(schema.toString()).getId();
} catch (SchemaValidationException e) {
throw new SchemaRegistryException(e);
}
}
@Override
public Schema getSchemaByID(String topic, String id) {
Subject subject = client.lookup(topic);
if (subject == null) {
throw new SchemaNotFoundException("Schema not found for " + topic);
}
SchemaEntry entry = subject.lookupById(id);
if (entry == null)
throw new SchemaNotFoundException("Schema not found for " + topic + " " + id);
return Schema.parse(entry.getSchema());
}
@Override
public SchemaDetails<Schema> getLatestSchemaByTopic(String topicName) {
Subject subject = client.lookup(topicName);
if (subject == null) {
throw new SchemaNotFoundException("Schema not found for " + topicName);
}
SchemaEntry entry = subject.latest();
if (entry == null)
throw new SchemaNotFoundException("Schema not found for " + topicName);
return new SchemaDetails<Schema>(topicName, entry.getId(), Schema.parse(entry.getSchema()));
}
}
```
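A hedged usage sketch of the registry above, mirroring the endpoint the test's @Parameters method points at. The URL, topic name, and record schema are placeholders, and a schema-repo server must actually be listening for the calls to succeed:
```
import java.util.Properties;
import org.apache.avro.Schema;
import com.linkedin.camus.schemaregistry.AvroRestSchemaRegistry;
import com.linkedin.camus.schemaregistry.SchemaRegistry;

public class AvroRestSchemaRegistrySketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        // Placeholder endpoint; a schema-repo server must be listening here.
        props.put(AvroRestSchemaRegistry.ETL_SCHEMA_REGISTRY_URL,
                "http://localhost:8123/schema-repo/");
        SchemaRegistry<Schema> registry = new AvroRestSchemaRegistry();
        registry.init(props);
        Schema schema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"DummyLog\",\"fields\":"
                        + "[{\"name\":\"id\",\"type\":\"long\"}]}");
        // register() creates the subject on first use, then stores the schema.
        String id = registry.register("dummy_topic", schema);
        // Unknown topics or ids surface as SchemaNotFoundException rather than null.
        Schema fetched = registry.getSchemaByID("dummy_topic", id);
        System.out.println(id + " -> " + fetched.getFullName());
    }
}
```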
|
```package com.linkedin.camus.etl.kafka.mapred;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.List;
import java.util.Collections;
import kafka.common.ErrorMapping;
import kafka.javaapi.PartitionMetadata;
import kafka.javaapi.TopicMetadata;
import kafka.javaapi.TopicMetadataRequest;
import kafka.javaapi.TopicMetadataResponse;
import kafka.javaapi.consumer.SimpleConsumer;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.log4j.Logger;
import org.easymock.EasyMock;
import org.apache.hadoop.conf.Configuration;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertEquals;
public class EtlInputFormatTest {
private static final String DUMMY_VALUE = "dummy:1234";
@Test
public void testEmptyWhitelistBlacklistEntries() {
Configuration conf = new Configuration();
conf.set(EtlInputFormat.KAFKA_WHITELIST_TOPIC, ",TopicA,TopicB,,TopicC,");
conf.set(EtlInputFormat.KAFKA_BLACKLIST_TOPIC, ",TopicD,TopicE,,,,,TopicF,");
String[] whitelistTopics = EtlInputFormat.getKafkaWhitelistTopic(conf);
Assert.assertEquals(Arrays.asList("TopicA", "TopicB", "TopicC"), Arrays.asList(whitelistTopics));
String[] blacklistTopics = EtlInputFormat.getKafkaBlacklistTopic(conf);
Assert.assertEquals(Arrays.asList("TopicD", "TopicE", "TopicF"), Arrays.asList(blacklistTopics));
}
@Test
public void testWithOneRetry() {
List<Object> mocks = new ArrayList<Object>();
Configuration configuration = EasyMock.createMock(Configuration.class);
mocks.add(configuration);
EasyMock.expect(configuration.get(EasyMock.anyString())).andReturn(DUMMY_VALUE).anyTimes();
JobContext jobContext = EasyMock.createMock(JobContext.class);
mocks.add(jobContext);
EasyMock.expect(jobContext.getConfiguration()).andReturn(configuration).anyTimes();
List<TopicMetadata> topicMetadatas = new ArrayList<TopicMetadata>();
TopicMetadataResponse topicMetadataResponse = EasyMock.createMock(TopicMetadataResponse.class);
mocks.add(topicMetadataResponse);
EasyMock.expect(topicMetadataResponse.topicsMetadata()).andReturn(topicMetadatas);
SimpleConsumer simpleConsumer = EasyMock.createMock(SimpleConsumer.class);
mocks.add(simpleConsumer);
EasyMock.expect(simpleConsumer.clientId()).andReturn(DUMMY_VALUE).times(2);
EasyMock.expect(simpleConsumer.send((TopicMetadataRequest) EasyMock.anyObject())).andThrow(
new RuntimeException("No TopicMD"));
EasyMock.expect(simpleConsumer.send((TopicMetadataRequest) EasyMock.anyObject())).andReturn(topicMetadataResponse);
simpleConsumer.close();
EasyMock.expectLastCall().andVoid().anyTimes();
EasyMock.replay(mocks.toArray());
EtlInputFormat inputFormat = new EtlInputFormatForUnitTest();
EtlInputFormatForUnitTest.consumerType = EtlInputFormatForUnitTest.ConsumerType.MOCK;
EtlInputFormatForUnitTest.consumer = simpleConsumer;
List<TopicMetadata> actualTopicMetadatas = inputFormat.getKafkaMetadata(jobContext, new ArrayList<String>());
EasyMock.verify(mocks.toArray());
assertEquals(topicMetadatas, actualTopicMetadatas);
}
@Test(expected = RuntimeException.class)
public void testWithThreeRetries() {
List<Object> mocks = new ArrayList<Object>();
Configuration configuration = EasyMock.createMock(Configuration.class);
mocks.add(configuration);
EasyMock.expect(configuration.get(EasyMock.anyString())).andReturn(DUMMY_VALUE).anyTimes();
JobContext jobContext = EasyMock.createMock(JobContext.class);
mocks.add(jobContext);
EasyMock.expect(jobContext.getConfiguration()).andReturn(configuration).anyTimes();
SimpleConsumer simpleConsumer = EasyMock.createMock(SimpleConsumer.class);
mocks.add(simpleConsumer);
EasyMock.expect(simpleConsumer.clientId()).andReturn(DUMMY_VALUE)
.times(EtlInputFormat.NUM_TRIES_TOPIC_METADATA + 1);
Exception ex = new RuntimeException("No TopicMeta");
EasyMock.expect(simpleConsumer.send((TopicMetadataRequest) EasyMock.anyObject())).andThrow(ex)
.times(EtlInputFormat.NUM_TRIES_TOPIC_METADATA);
simpleConsumer.close();
EasyMock.expectLastCall().andVoid().anyTimes();
EasyMock.replay(mocks.toArray());
EtlInputFormat inputFormat = new EtlInputFormatForUnitTest();
EtlInputFormatForUnitTest.consumerType = EtlInputFormatForUnitTest.ConsumerType.MOCK;
EtlInputFormatForUnitTest.consumer = simpleConsumer;
inputFormat.getKafkaMetadata(jobContext, new ArrayList<String>());
EasyMock.verify(mocks.toArray());
}
/**
* Test only refreshing the partitionMetadata when the error code is LeaderNotAvailable.
* @throws Exception
*/
@Test
public void testRefreshPartitionMetadataOnLeaderNotAvailable() throws Exception {
JobContext dummyContext = null;
//A partitionMetadata with errorCode LeaderNotAvailable
PartitionMetadata partitionMetadata1 = createMock(PartitionMetadata.class);
expect(partitionMetadata1.errorCode()).andReturn(ErrorMapping.LeaderNotAvailableCode());
expect(partitionMetadata1.partitionId()).andReturn(0);
replay(partitionMetadata1);
//A partitionMetadata with errorCode not LeaderNotAvailable
PartitionMetadata partitionMetadata2 = createMock(PartitionMetadata.class);
expect(partitionMetadata2.errorCode()).andReturn(ErrorMapping.InvalidMessageCode());
expect(partitionMetadata2.partitionId()).andReturn(0);
replay(partitionMetadata2);
PartitionMetadata mockedReturnedPartitionMetadata = createMock(PartitionMetadata.class);
expect(mockedReturnedPartitionMetadata.errorCode()).andReturn(ErrorMapping.NoError());
expect(mockedReturnedPartitionMetadata.partitionId()).andReturn(0);
replay(mockedReturnedPartitionMetadata);
TopicMetadata mockedTopicMetadata = createMock(TopicMetadata.class);
expect(mockedTopicMetadata.topic()).andReturn("testTopic");
expect(mockedTopicMetadata.partitionsMetadata()).andReturn(
Collections.singletonList(mockedReturnedPartitionMetadata));
replay(mockedTopicMetadata);
EtlInputFormat etlInputFormat =
createMock(EtlInputFormat.class,
EtlInputFormat.class.getMethod("getKafkaMetadata", new Class[] { JobContext.class, List.class }));
EasyMock.expect(etlInputFormat.getKafkaMetadata(dummyContext, Collections.singletonList("testTopic"))).andReturn(
Collections.singletonList(mockedTopicMetadata));
etlInputFormat.setLogger(Logger.getLogger(getClass()));
replay(etlInputFormat);
// For partitionMetadata2, it will not refresh because the error code is not LeaderNotAvailable.
assertEquals(partitionMetadata2, etlInputFormat.refreshPartitionMetadataOnLeaderNotAvailable(partitionMetadata2,
mockedTopicMetadata, dummyContext, EtlInputFormat.NUM_TRIES_PARTITION_METADATA));
// For partitionMetadata1, it will refresh because the error code is LeaderNotAvailable.
assertEquals(mockedReturnedPartitionMetadata, etlInputFormat.refreshPartitionMetadataOnLeaderNotAvailable(
partitionMetadata1, mockedTopicMetadata, dummyContext, EtlInputFormat.NUM_TRIES_PARTITION_METADATA));
}
/**
* Test that refreshing the partitionMetadata is retried the configured number of times while the error code stays LeaderNotAvailable.
* @throws Exception
*/
@Test
public void testRefreshPartitionMetadataWithThreeRetries() throws Exception {
JobContext dummyContext = null;
//A partitionMetadata with errorCode LeaderNotAvailable
PartitionMetadata partitionMetadata = createMock(PartitionMetadata.class);
expect(partitionMetadata.errorCode()).andReturn(ErrorMapping.LeaderNotAvailableCode()).times(EtlInputFormat.NUM_TRIES_PARTITION_METADATA * 2);
expect(partitionMetadata.partitionId()).andReturn(0).times(EtlInputFormat.NUM_TRIES_PARTITION_METADATA * 2);
replay(partitionMetadata);
TopicMetadata mockedTopicMetadata = createMock(TopicMetadata.class);
expect(mockedTopicMetadata.topic()).andReturn("testTopic").times(EtlInputFormat.NUM_TRIES_PARTITION_METADATA);
expect(mockedTopicMetadata.partitionsMetadata()).andReturn(Collections.singletonList(partitionMetadata)).times(
EtlInputFormat.NUM_TRIES_PARTITION_METADATA);
replay(mockedTopicMetadata);
EtlInputFormat etlInputFormat =
createMock(EtlInputFormat.class,
EtlInputFormat.class.getMethod("getKafkaMetadata", new Class[] { JobContext.class, List.class }));
EasyMock.expect(etlInputFormat.getKafkaMetadata(dummyContext, Collections.singletonList("testTopic"))).andReturn(
Collections.singletonList(mockedTopicMetadata)).times(EtlInputFormat.NUM_TRIES_PARTITION_METADATA);
etlInputFormat.setLogger(Logger.getLogger(getClass()));
replay(etlInputFormat);
etlInputFormat.refreshPartitionMetadataOnLeaderNotAvailable(partitionMetadata, mockedTopicMetadata, dummyContext,
EtlInputFormat.NUM_TRIES_PARTITION_METADATA);
verify(mockedTopicMetadata);
verify(etlInputFormat);
}
@After
public void after() {
EtlInputFormatForUnitTest.consumerType = EtlInputFormatForUnitTest.ConsumerType.REGULAR;
}
}
```
|
Please help me generate a test for this class.
|
```package com.linkedin.camus.etl.kafka.mapred;
import com.google.common.base.Strings;
import com.linkedin.camus.coders.CamusWrapper;
import com.linkedin.camus.coders.MessageDecoder;
import com.linkedin.camus.etl.kafka.CamusJob;
import com.linkedin.camus.etl.kafka.coders.KafkaAvroMessageDecoder;
import com.linkedin.camus.etl.kafka.coders.MessageDecoderFactory;
import com.linkedin.camus.etl.kafka.common.EmailClient;
import com.linkedin.camus.etl.kafka.common.EtlKey;
import com.linkedin.camus.etl.kafka.common.EtlRequest;
import com.linkedin.camus.etl.kafka.common.LeaderInfo;
import com.linkedin.camus.workallocater.CamusRequest;
import com.linkedin.camus.workallocater.WorkAllocator;
import java.io.IOException;
import java.net.URI;
import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Pattern;
import kafka.api.PartitionOffsetRequestInfo;
import kafka.common.ErrorMapping;
import kafka.common.TopicAndPartition;
import kafka.javaapi.OffsetRequest;
import kafka.javaapi.OffsetResponse;
import kafka.javaapi.PartitionMetadata;
import kafka.javaapi.TopicMetadata;
import kafka.javaapi.TopicMetadataRequest;
import kafka.javaapi.consumer.SimpleConsumer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.log4j.Logger;
/**
* Input format for a Kafka pull job.
*/
public class EtlInputFormat extends InputFormat<EtlKey, CamusWrapper> {
public static final String KAFKA_BLACKLIST_TOPIC = "kafka.blacklist.topics";
public static final String KAFKA_WHITELIST_TOPIC = "kafka.whitelist.topics";
public static final String KAFKA_MOVE_TO_LAST_OFFSET_LIST = "kafka.move.to.last.offset.list";
public static final String KAFKA_MOVE_TO_EARLIEST_OFFSET = "kafka.move.to.earliest.offset";
public static final String KAFKA_CLIENT_BUFFER_SIZE = "kafka.client.buffer.size";
public static final String KAFKA_CLIENT_SO_TIMEOUT = "kafka.client.so.timeout";
public static final String KAFKA_MAX_PULL_HRS = "kafka.max.pull.hrs";
public static final String KAFKA_MAX_PULL_MINUTES_PER_TASK = "kafka.max.pull.minutes.per.task";
public static final String KAFKA_MAX_HISTORICAL_DAYS = "kafka.max.historical.days";
public static final String CAMUS_MESSAGE_DECODER_CLASS = "camus.message.decoder.class";
public static final String ETL_IGNORE_SCHEMA_ERRORS = "etl.ignore.schema.errors";
public static final String ETL_AUDIT_IGNORE_SERVICE_TOPIC_LIST = "etl.audit.ignore.service.topic.list";
public static final String CAMUS_WORK_ALLOCATOR_CLASS = "camus.work.allocator.class";
public static final String CAMUS_WORK_ALLOCATOR_DEFAULT = "com.linkedin.camus.workallocater.BaseAllocator";
private static final int BACKOFF_UNIT_MILLISECONDS = 1000;
public static final int NUM_TRIES_PARTITION_METADATA = 3;
public static final int NUM_TRIES_FETCH_FROM_LEADER = 3;
public static final int NUM_TRIES_TOPIC_METADATA = 3;
public static boolean reportJobFailureDueToOffsetOutOfRange = false;
public static boolean reportJobFailureUnableToGetOffsetFromKafka = false;
public static boolean reportJobFailureDueToLeaderNotAvailable = false;
private static Logger log = null;
public EtlInputFormat() {
if (log == null)
log = Logger.getLogger(getClass());
}
public static void setLogger(Logger log) {
EtlInputFormat.log = log;
}
@Override
public RecordReader<EtlKey, CamusWrapper> createRecordReader(InputSplit split, TaskAttemptContext context)
throws IOException, InterruptedException {
return new EtlRecordReader(this, split, context);
}
/**
* Gets the metadata from Kafka
*
* @param context
* @param metaRequestTopics specify the list of topics to get topicMetadata. The empty list means
* get the TopicsMetadata for all topics.
* @return the list of TopicMetadata
*/
public List<TopicMetadata> getKafkaMetadata(JobContext context, List<String> metaRequestTopics) {
CamusJob.startTiming("kafkaSetupTime");
String brokerString = CamusJob.getKafkaBrokers(context);
if (brokerString.isEmpty())
throw new InvalidParameterException("kafka.brokers must contain at least one node");
List<String> brokers = Arrays.asList(brokerString.split("\\s*,\\s*"));
Collections.shuffle(brokers);
boolean fetchMetaDataSucceeded = false;
int i = 0;
List<TopicMetadata> topicMetadataList = null;
Exception savedException = null;
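// Try the brokers in random order; for each broker, attempt the metadata request up to
// NUM_TRIES_TOPIC_METADATA times before moving on to the next one.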
while (i < brokers.size() && !fetchMetaDataSucceeded) {
SimpleConsumer consumer = createBrokerConsumer(context, brokers.get(i));
log.info(String.format("Fetching metadata from broker %s with client id %s for %d topic(s) %s", brokers.get(i),
consumer.clientId(), metaRequestTopics.size(), metaRequestTopics));
try {
for (int iter = 0; iter < NUM_TRIES_TOPIC_METADATA; iter++) {
try {
topicMetadataList = consumer.send(new TopicMetadataRequest(metaRequestTopics)).topicsMetadata();
fetchMetaDataSucceeded = true;
break;
} catch (Exception e) {
savedException = e;
log.warn(String.format(
"Fetching topic metadata with client id %s for topics [%s] from broker [%s] failed, iter[%s]",
consumer.clientId(), metaRequestTopics, brokers.get(i), iter), e);
try {
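// Randomized linear backoff: wait up to (iter + 1) seconds before the next attempt.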
Thread.sleep((long) (Math.random() * (iter + 1) * 1000));
} catch (InterruptedException ex) {
log.warn("Caught InterruptedException: " + ex);
}
}
}
} finally {
consumer.close();
i++;
}
}
if (!fetchMetaDataSucceeded) {
throw new RuntimeException("Failed to obtain metadata!", savedException);
}
CamusJob.stopTiming("kafkaSetupTime");
return topicMetadataList;
}
private SimpleConsumer createBrokerConsumer(JobContext context, String broker) {
if (!broker.matches(".+:\\d+"))
throw new InvalidParameterException("The kakfa broker " + broker + " must follow address:port pattern");
String[] hostPort = broker.split(":");
return createSimpleConsumer(context, hostPort[0], Integer.valueOf(hostPort[1]));
}
public SimpleConsumer createSimpleConsumer(JobContext context, String host, int port) {
SimpleConsumer consumer =
new SimpleConsumer(host, port, CamusJob.getKafkaTimeoutValue(context), CamusJob.getKafkaBufferSize(context),
CamusJob.getKafkaClientName(context));
return consumer;
}
/**
* Gets the latest offsets and create the requests as needed
*
* @param context
* @param offsetRequestInfo
* @return
*/
public ArrayList<CamusRequest> fetchLatestOffsetAndCreateEtlRequests(JobContext context,
HashMap<LeaderInfo, ArrayList<TopicAndPartition>> offsetRequestInfo) {
ArrayList<CamusRequest> finalRequests = new ArrayList<CamusRequest>();
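// For each partition leader, batch a single latest-offset and a single earliest-offset request
// covering all of the topic-partitions that leader owns.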
for (LeaderInfo leader : offsetRequestInfo.keySet()) {
SimpleConsumer consumer = createSimpleConsumer(context, leader.getUri().getHost(), leader.getUri().getPort());
// Latest Offset
PartitionOffsetRequestInfo partitionLatestOffsetRequestInfo =
new PartitionOffsetRequestInfo(kafka.api.OffsetRequest.LatestTime(), 1);
// Earliest Offset
PartitionOffsetRequestInfo partitionEarliestOffsetRequestInfo =
new PartitionOffsetRequestInfo(kafka.api.OffsetRequest.EarliestTime(), 1);
Map<TopicAndPartition, PartitionOffsetRequestInfo> latestOffsetInfo =
new HashMap<TopicAndPartition, PartitionOffsetRequestInfo>();
Map<TopicAndPartition, PartitionOffsetRequestInfo> earliestOffsetInfo =
new HashMap<TopicAndPartition, PartitionOffsetRequestInfo>();
ArrayList<TopicAndPartition> topicAndPartitions = offsetRequestInfo.get(leader);
for (TopicAndPartition topicAndPartition : topicAndPartitions) {
latestOffsetInfo.put(topicAndPartition, partitionLatestOffsetRequestInfo);
earliestOffsetInfo.put(topicAndPartition, partitionEarliestOffsetRequestInfo);
}
OffsetResponse latestOffsetResponse = getLatestOffsetResponse(consumer, latestOffsetInfo, context);
OffsetResponse earliestOffsetResponse = null;
if (latestOffsetResponse != null) {
earliestOffsetResponse = getLatestOffsetResponse(consumer, earliestOffsetInfo, context);
}
consumer.close();
if (earliestOffsetResponse == null) {
log.warn(generateLogWarnForSkippedTopics(earliestOffsetInfo, consumer));
reportJobFailureUnableToGetOffsetFromKafka = true;
continue;
}
for (TopicAndPartition topicAndPartition : topicAndPartitions) {
long latestOffset = latestOffsetResponse.offsets(topicAndPartition.topic(), topicAndPartition.partition())[0];
long earliestOffset =
earliestOffsetResponse.offsets(topicAndPartition.topic(), topicAndPartition.partition())[0];
//TODO: factor out kafka specific request functionality
CamusRequest etlRequest =
new EtlRequest(context, topicAndPartition.topic(), Integer.toString(leader.getLeaderId()),
topicAndPartition.partition(), leader.getUri());
etlRequest.setLatestOffset(latestOffset);
etlRequest.setEarliestOffset(earliestOffset);
finalRequests.add(etlRequest);
}
}
return finalRequests;
}
protected OffsetResponse getLatestOffsetResponse(SimpleConsumer consumer,
Map<TopicAndPartition, PartitionOffsetRequestInfo> offsetInfo, JobContext context) {
for (int i = 0; i < NUM_TRIES_FETCH_FROM_LEADER; i++) {
try {
OffsetResponse offsetResponse =
consumer.getOffsetsBefore(new OffsetRequest(offsetInfo, kafka.api.OffsetRequest.CurrentVersion(), CamusJob
.getKafkaClientName(context)));
if (offsetResponse.hasError()) {
throw new RuntimeException("offsetReponse has error.");
}
return offsetResponse;
} catch (Exception e) {
log.warn("Fetching offset from leader " + consumer.host() + ":" + consumer.port() + " has failed " + (i + 1)
+ " time(s). Reason: " + e.getMessage() + " " + (NUM_TRIES_FETCH_FROM_LEADER - i - 1) + " retries left.");
if (i < NUM_TRIES_FETCH_FROM_LEADER - 1) {
try {
Thread.sleep((long) (Math.random() * (i + 1) * 1000));
} catch (InterruptedException e1) {
log.error("Caught interrupted exception between retries of getting latest offsets. " + e1.getMessage());
}
}
}
}
return null;
}
private String generateLogWarnForSkippedTopics(Map<TopicAndPartition, PartitionOffsetRequestInfo> offsetInfo,
SimpleConsumer consumer) {
StringBuilder sb = new StringBuilder();
sb.append("The following topics will be skipped due to failure in fetching latest offsets from leader "
+ consumer.host() + ":" + consumer.port());
for (TopicAndPartition topicAndPartition : offsetInfo.keySet()) {
sb.append(" " + topicAndPartition.topic());
}
return sb.toString();
}
public String createTopicRegEx(HashSet<String> topicsSet) {
String regex = "";
StringBuilder stringbuilder = new StringBuilder();
for (String whiteList : topicsSet) {
stringbuilder.append(whiteList);
stringbuilder.append("|");
}
regex = "(" + stringbuilder.substring(0, stringbuilder.length() - 1) + ")";
Pattern.compile(regex);
return regex;
}
public List<TopicMetadata> filterWhitelistTopics(List<TopicMetadata> topicMetadataList,
HashSet<String> whiteListTopics) {
ArrayList<TopicMetadata> filteredTopics = new ArrayList<TopicMetadata>();
String regex = createTopicRegEx(whiteListTopics);
for (TopicMetadata topicMetadata : topicMetadataList) {
if (Pattern.matches(regex, topicMetadata.topic())) {
filteredTopics.add(topicMetadata);
} else {
log.info("Discarding topic : " + topicMetadata.topic());
}
}
return filteredTopics;
}
@Override
public List<InputSplit> getSplits(JobContext context) throws IOException, InterruptedException {
CamusJob.startTiming("getSplits");
ArrayList<CamusRequest> finalRequests;
HashMap<LeaderInfo, ArrayList<TopicAndPartition>> offsetRequestInfo =
new HashMap<LeaderInfo, ArrayList<TopicAndPartition>>();
try {
// Get Metadata for all topics
List<TopicMetadata> topicMetadataList = getKafkaMetadata(context, new ArrayList<String>());
// Filter any white list topics
HashSet<String> whiteListTopics = new HashSet<String>(Arrays.asList(getKafkaWhitelistTopic(context)));
if (!whiteListTopics.isEmpty()) {
topicMetadataList = filterWhitelistTopics(topicMetadataList, whiteListTopics);
}
// Filter all blacklist topics
HashSet<String> blackListTopics = new HashSet<String>(Arrays.asList(getKafkaBlacklistTopic(context)));
String regex = "";
if (!blackListTopics.isEmpty()) {
regex = createTopicRegEx(blackListTopics);
}
for (TopicMetadata topicMetadata : topicMetadataList) {
if (Pattern.matches(regex, topicMetadata.topic())) {
log.info("Discarding topic (blacklisted): " + topicMetadata.topic());
} else if (!createMessageDecoder(context, topicMetadata.topic())) {
log.info("Discarding topic (Decoder generation failed) : " + topicMetadata.topic());
} else if (topicMetadata.errorCode() != ErrorMapping.NoError()) {
log.info("Skipping the creation of ETL request for Whole Topic : " + topicMetadata.topic() + " Exception : "
+ ErrorMapping.exceptionFor(topicMetadata.errorCode()));
} else {
for (PartitionMetadata partitionMetadata : topicMetadata.partitionsMetadata()) {
// We only care about LeaderNotAvailableCode error on partitionMetadata level
// Error codes such as ReplicaNotAvailableCode should not stop us.
partitionMetadata =
this.refreshPartitionMetadataOnLeaderNotAvailable(partitionMetadata, topicMetadata, context,
NUM_TRIES_PARTITION_METADATA);
if (partitionMetadata.errorCode() == ErrorMapping.LeaderNotAvailableCode()) {
log.info("Skipping the creation of ETL request for Topic : " + topicMetadata.topic()
+ " and Partition : " + partitionMetadata.partitionId() + " Exception : "
+ ErrorMapping.exceptionFor(partitionMetadata.errorCode()));
reportJobFailureDueToLeaderNotAvailable = true;
} else {
if (partitionMetadata.errorCode() != ErrorMapping.NoError()) {
log.warn("Receiving non-fatal error code, Continuing the creation of ETL request for Topic : "
+ topicMetadata.topic() + " and Partition : " + partitionMetadata.partitionId() + " Exception : "
+ ErrorMapping.exceptionFor(partitionMetadata.errorCode()));
}
LeaderInfo leader =
new LeaderInfo(new URI("tcp://" + partitionMetadata.leader().getConnectionString()),
partitionMetadata.leader().id());
if (offsetRequestInfo.containsKey(leader)) {
ArrayList<TopicAndPartition> topicAndPartitions = offsetRequestInfo.get(leader);
topicAndPartitions.add(new TopicAndPartition(topicMetadata.topic(), partitionMetadata.partitionId()));
offsetRequestInfo.put(leader, topicAndPartitions);
} else {
ArrayList<TopicAndPartition> topicAndPartitions = new ArrayList<TopicAndPartition>();
topicAndPartitions.add(new TopicAndPartition(topicMetadata.topic(), partitionMetadata.partitionId()));
offsetRequestInfo.put(leader, topicAndPartitions);
}
}
}
}
}
} catch (Exception e) {
log.error("Unable to pull requests from Kafka brokers. Exiting the program", e);
throw new IOException("Unable to pull requests from Kafka brokers.", e);
}
// Get the latest offsets and generate the EtlRequests
finalRequests = fetchLatestOffsetAndCreateEtlRequests(context, offsetRequestInfo);
Collections.sort(finalRequests, new Comparator<CamusRequest>() {
@Override
public int compare(CamusRequest r1, CamusRequest r2) {
return r1.getTopic().compareTo(r2.getTopic());
}
});
writeRequests(finalRequests, context);
Map<CamusRequest, EtlKey> offsetKeys = getPreviousOffsets(FileInputFormat.getInputPaths(context), context);
Set<String> moveLatest = getMoveToLatestTopicsSet(context);
String camusRequestEmailMessage = "";
for (CamusRequest request : finalRequests) {
if (moveLatest.contains(request.getTopic()) || moveLatest.contains("all")) {
log.info("Moving to latest for topic: " + request.getTopic());
//TODO: factor out kafka specific request functionality
EtlKey oldKey = offsetKeys.get(request);
EtlKey newKey =
new EtlKey(request.getTopic(), ((EtlRequest) request).getLeaderId(), request.getPartition(), 0,
request.getLastOffset());
if (oldKey != null)
newKey.setMessageSize(oldKey.getMessageSize());
offsetKeys.put(request, newKey);
}
EtlKey key = offsetKeys.get(request);
if (key != null) {
request.setOffset(key.getOffset());
request.setAvgMsgSize(key.getMessageSize());
}
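// Sanity-check the persisted offset against the [earliest, latest] range reported by Kafka;
// out-of-range offsets are either reset to earliest or cause the topic to be skipped below.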
if (request.getEarliestOffset() > request.getOffset() || request.getOffset() > request.getLastOffset()) {
if (request.getEarliestOffset() > request.getOffset()) {
log.error("The earliest offset was found to be more than the current offset: " + request);
} else {
log.error("The current offset was found to be more than the latest offset: " + request);
}
boolean move_to_earliest_offset = context.getConfiguration().getBoolean(KAFKA_MOVE_TO_EARLIEST_OFFSET, false);
boolean offsetUnset = request.getOffset() == EtlRequest.DEFAULT_OFFSET;
log.info("move_to_earliest: " + move_to_earliest_offset + " offset_unset: " + offsetUnset);
// When the offset is unset, this is a new topic/partition, so we also need to consume from the earliest offset
if (move_to_earliest_offset || offsetUnset) {
log.error("Moving to the earliest offset available");
request.setOffset(request.getEarliestOffset());
offsetKeys.put(
request,
//TODO: factor out kafka specific request functionality
new EtlKey(request.getTopic(), ((EtlRequest) request).getLeaderId(), request.getPartition(), 0, request
.getOffset()));
} else {
log.error("Offset range from kafka metadata is outside the previously persisted offset, " + request + "\n" +
" Topic " + request.getTopic() + " will be skipped.\n" +
" Please check whether kafka cluster configuration is correct." +
" You can also specify config parameter: " + KAFKA_MOVE_TO_EARLIEST_OFFSET +
" to start processing from earliest kafka metadata offset.");
reportJobFailureDueToOffsetOutOfRange = true;
}
} else if (3 * (request.getOffset() - request.getEarliestOffset())
< request.getLastOffset() - request.getOffset()) {
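// The current offset sits in the bottom quarter of the retained range (the remaining backlog is
// more than three times the headroom above the earliest offset), so warn that Camus may be falling behind.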
camusRequestEmailMessage +=
"The current offset is too close to the earliest offset, Camus might be falling behind: "
+ request + "\n";
}
log.info(request);
}
if(!Strings.isNullOrEmpty(camusRequestEmailMessage)) {
EmailClient.sendEmail(camusRequestEmailMessage);
}
writePrevious(offsetKeys.values(), context);
CamusJob.stopTiming("getSplits");
CamusJob.startTiming("hadoop");
CamusJob.setTime("hadoop_start");
WorkAllocator allocator = getWorkAllocator(context);
Properties props = new Properties();
props.putAll(context.getConfiguration().getValByRegex(".*"));
allocator.init(props);
return allocator.allocateWork(finalRequests, context);
}
private Set<String> getMoveToLatestTopicsSet(JobContext context) {
Set<String> topics = new HashSet<String>();
String[] arr = getMoveToLatestTopics(context);
if (arr != null) {
for (String topic : arr) {
topics.add(topic);
}
}
return topics;
}
private boolean createMessageDecoder(JobContext context, String topic) {
try {
MessageDecoderFactory.createMessageDecoder(context, topic);
return true;
} catch (Exception e) {
log.error("failed to create decoder", e);
return false;
}
}
private void writePrevious(Collection<EtlKey> missedKeys, JobContext context) throws IOException {
FileSystem fs = FileSystem.get(context.getConfiguration());
Path output = FileOutputFormat.getOutputPath(context);
if (!fs.exists(output)) {
fs.mkdirs(output);
}
output = new Path(output, EtlMultiOutputFormat.OFFSET_PREFIX + "-previous");
SequenceFile.Writer writer =
SequenceFile.createWriter(fs, context.getConfiguration(), output, EtlKey.class, NullWritable.class);
for (EtlKey key : missedKeys) {
writer.append(key, NullWritable.get());
}
writer.close();
}
protected void writeRequests(List<CamusRequest> requests, JobContext context) throws IOException {
FileSystem fs = FileSystem.get(context.getConfiguration());
Path output = FileOutputFormat.getOutputPath(context);
if (!fs.exists(output)) {
fs.mkdirs(output);
}
output = new Path(output, EtlMultiOutputFormat.REQUESTS_FILE);
SequenceFile.Writer writer =
SequenceFile.createWriter(fs, context.getConfiguration(), output, EtlRequest.class, NullWritable.class);
for (CamusRequest r : requests) {
//TODO: factor out kafka specific request functionality
writer.append(r, NullWritable.get());
}
writer.close();
}
private Map<CamusRequest, EtlKey> getPreviousOffsets(Path[] inputs, JobContext context) throws IOException {
Map<CamusRequest, EtlKey> offsetKeysMap = new HashMap<CamusRequest, EtlKey>();
for (Path input : inputs) {
FileSystem fs = input.getFileSystem(context.getConfiguration());
for (FileStatus f : fs.listStatus(input, new OffsetFileFilter())) {
log.info("previous offset file:" + f.getPath().toString());
SequenceFile.Reader reader = new SequenceFile.Reader(fs, f.getPath(), context.getConfiguration());
EtlKey key = new EtlKey();
while (reader.next(key, NullWritable.get())) {
//TODO: factor out kafka specific request functionality
CamusRequest request = new EtlRequest(context, key.getTopic(), key.getLeaderId(), key.getPartition());
if (offsetKeysMap.containsKey(request)) {
EtlKey oldKey = offsetKeysMap.get(request);
if (oldKey.getOffset() < key.getOffset()) {
offsetKeysMap.put(request, key);
}
} else {
offsetKeysMap.put(request, key);
}
key = new EtlKey();
}
reader.close();
}
}
return offsetKeysMap;
}
public PartitionMetadata refreshPartitionMetadataOnLeaderNotAvailable(PartitionMetadata partitionMetadata,
TopicMetadata topicMetadata, JobContext context, int numTries) throws InterruptedException {
int tryCounter = 0;
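// Re-fetch the topic metadata and look up this partition again; keep retrying with randomized
// backoff while the partition still reports LeaderNotAvailable.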
while (tryCounter < numTries && partitionMetadata.errorCode() == ErrorMapping.LeaderNotAvailableCode()) {
log.info("Retry to referesh the topicMetadata on LeaderNotAvailable...");
List<TopicMetadata> topicMetadataList =
this.getKafkaMetadata(context, Collections.singletonList(topicMetadata.topic()));
if (topicMetadataList == null || topicMetadataList.size() == 0) {
log.warn("The topicMetadataList for topic " + topicMetadata.topic() + " is empty.");
} else {
topicMetadata = topicMetadataList.get(0);
boolean partitionFound = false;
for (PartitionMetadata metadataPerPartition : topicMetadata.partitionsMetadata()) {
if (metadataPerPartition.partitionId() == partitionMetadata.partitionId()) {
partitionFound = true;
if (metadataPerPartition.errorCode() != ErrorMapping.LeaderNotAvailableCode()) {
return metadataPerPartition;
} else { //retry again.
if (tryCounter < numTries - 1) {
Thread.sleep((long) (Math.random() * (tryCounter + 1) * BACKOFF_UNIT_MILLISECONDS));
}
break;
}
}
}
if (!partitionFound) {
log.error("No matching partition found in the topicMetadata for Partition: "
+ partitionMetadata.partitionId());
}
}
tryCounter++;
}
return partitionMetadata;
}
public static void setWorkAllocator(JobContext job, Class<WorkAllocator> val) {
job.getConfiguration().setClass(CAMUS_WORK_ALLOCATOR_CLASS, val, WorkAllocator.class);
}
public static WorkAllocator getWorkAllocator(JobContext job) {
try {
return (WorkAllocator) job.getConfiguration()
.getClass(CAMUS_WORK_ALLOCATOR_CLASS, Class.forName(CAMUS_WORK_ALLOCATOR_DEFAULT)).newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public static void setMoveToLatestTopics(JobContext job, String val) {
job.getConfiguration().set(KAFKA_MOVE_TO_LAST_OFFSET_LIST, val);
}
public static String[] getMoveToLatestTopics(JobContext job) {
return job.getConfiguration().getStrings(KAFKA_MOVE_TO_LAST_OFFSET_LIST);
}
public static void setKafkaClientBufferSize(JobContext job, int val) {
job.getConfiguration().setInt(KAFKA_CLIENT_BUFFER_SIZE, val);
}
public static int getKafkaClientBufferSize(JobContext job) {
return job.getConfiguration().getInt(KAFKA_CLIENT_BUFFER_SIZE, 2 * 1024 * 1024);
}
public static void setKafkaClientTimeout(JobContext job, int val) {
job.getConfiguration().setInt(KAFKA_CLIENT_SO_TIMEOUT, val);
}
public static int getKafkaClientTimeout(JobContext job) {
return job.getConfiguration().getInt(KAFKA_CLIENT_SO_TIMEOUT, 60000);
}
public static void setKafkaMaxPullHrs(JobContext job, int val) {
job.getConfiguration().setInt(KAFKA_MAX_PULL_HRS, val);
}
public static int getKafkaMaxPullHrs(JobContext job) {
return job.getConfiguration().getInt(KAFKA_MAX_PULL_HRS, -1);
}
public static void setKafkaMaxPullMinutesPerTask(JobContext job, int val) {
job.getConfiguration().setInt(KAFKA_MAX_PULL_MINUTES_PER_TASK, val);
}
public static int getKafkaMaxPullMinutesPerTask(JobContext job) {
return job.getConfiguration().getInt(KAFKA_MAX_PULL_MINUTES_PER_TASK, -1);
}
public static void setKafkaMaxHistoricalDays(JobContext job, int val) {
job.getConfiguration().setInt(KAFKA_MAX_HISTORICAL_DAYS, val);
}
public static int getKafkaMaxHistoricalDays(JobContext job) {
return job.getConfiguration().getInt(KAFKA_MAX_HISTORICAL_DAYS, -1);
}
public static void setKafkaBlacklistTopic(JobContext job, String val) {
job.getConfiguration().set(KAFKA_BLACKLIST_TOPIC, val);
}
public static String[] getKafkaBlacklistTopic(JobContext job) {
return getKafkaBlacklistTopic(job.getConfiguration());
}
public static String[] getKafkaBlacklistTopic(Configuration conf) {
final String blacklistStr = conf.get(KAFKA_BLACKLIST_TOPIC);
if (blacklistStr != null && !blacklistStr.isEmpty()) {
return conf.getStrings(KAFKA_BLACKLIST_TOPIC);
} else {
return new String[] {};
}
}
public static void setKafkaWhitelistTopic(JobContext job, String val) {
job.getConfiguration().set(KAFKA_WHITELIST_TOPIC, val);
}
public static String[] getKafkaWhitelistTopic(JobContext job) {
return getKafkaWhitelistTopic(job.getConfiguration());
}
public static String[] getKafkaWhitelistTopic(Configuration conf) {
final String whitelistStr = conf.get(KAFKA_WHITELIST_TOPIC);
if (whitelistStr != null && !whitelistStr.isEmpty()) {
return conf.getStrings(KAFKA_WHITELIST_TOPIC);
} else {
return new String[] {};
}
}
public static void setEtlIgnoreSchemaErrors(JobContext job, boolean val) {
job.getConfiguration().setBoolean(ETL_IGNORE_SCHEMA_ERRORS, val);
}
public static boolean getEtlIgnoreSchemaErrors(JobContext job) {
return job.getConfiguration().getBoolean(ETL_IGNORE_SCHEMA_ERRORS, false);
}
public static void setEtlAuditIgnoreServiceTopicList(JobContext job, String topics) {
job.getConfiguration().set(ETL_AUDIT_IGNORE_SERVICE_TOPIC_LIST, topics);
}
public static String[] getEtlAuditIgnoreServiceTopicList(JobContext job) {
return job.getConfiguration().getStrings(ETL_AUDIT_IGNORE_SERVICE_TOPIC_LIST, "");
}
public static void setMessageDecoderClass(JobContext job, Class<MessageDecoder> cls) {
job.getConfiguration().setClass(CAMUS_MESSAGE_DECODER_CLASS, cls, MessageDecoder.class);
}
public static Class<MessageDecoder> getMessageDecoderClass(JobContext job) {
return (Class<MessageDecoder>) job.getConfiguration().getClass(CAMUS_MESSAGE_DECODER_CLASS,
KafkaAvroMessageDecoder.class);
}
public static Class<MessageDecoder> getMessageDecoderClass(JobContext job, String topicName) {
Class<MessageDecoder> topicDecoder =
(Class<MessageDecoder>) job.getConfiguration().getClass(CAMUS_MESSAGE_DECODER_CLASS + "." + topicName, null);
return topicDecoder == null ? getMessageDecoderClass(job) : topicDecoder;
}
private class OffsetFileFilter implements PathFilter {
@Override
public boolean accept(Path arg0) {
return arg0.getName().startsWith(EtlMultiOutputFormat.OFFSET_PREFIX);
}
}
}
```
|
```package com.linkedin.camus.etl.kafka.mapred;
import java.io.IOException;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.easymock.EasyMock;
import com.linkedin.camus.coders.CamusWrapper;
import com.linkedin.camus.coders.MessageDecoder;
import com.linkedin.camus.etl.kafka.coders.MessageDecoderFactory;
import com.linkedin.camus.schemaregistry.SchemaNotFoundException;
public class EtlRecordReaderForUnitTest extends EtlRecordReader {
public static enum DecoderType {
REGULAR,
SCHEMA_NOT_FOUND_30_PERCENT,
OTHER_30_PERCENT;
}
public static DecoderType decoderType = DecoderType.REGULAR;
public EtlRecordReaderForUnitTest(EtlInputFormatForUnitTest etlInputFormatForUnitTest, InputSplit split,
TaskAttemptContext context) throws IOException, InterruptedException {
super(etlInputFormatForUnitTest, split, context);
}
@Override
protected MessageDecoder createDecoder(String topic) {
switch (decoderType) {
case REGULAR:
return MessageDecoderFactory.createMessageDecoder(context, topic);
case SCHEMA_NOT_FOUND_30_PERCENT:
return createMockDecoder30PercentSchemaNotFound();
case OTHER_30_PERCENT:
return createMockDecoder30PercentOther();
default:
throw new RuntimeException("decoder type undefined");
}
}
public static MessageDecoder createMockDecoder30PercentSchemaNotFound() {
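// Nice mock whose first 3 decode() calls throw SchemaNotFoundException and whose next 7 succeed,
// simulating a roughly 30% schema-not-found rate.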
MessageDecoder mockDecoder = EasyMock.createNiceMock(MessageDecoder.class);
EasyMock.expect(mockDecoder.decode(EasyMock.anyObject())).andThrow(new SchemaNotFoundException()).times(3);
EasyMock.expect(mockDecoder.decode(EasyMock.anyObject())).andReturn(new CamusWrapper<String>("dummy")).times(7);
EasyMock.replay(mockDecoder);
return mockDecoder;
}
public static MessageDecoder createMockDecoder30PercentOther() {
MessageDecoder mockDecoder = EasyMock.createNiceMock(MessageDecoder.class);
EasyMock.expect(mockDecoder.decode(EasyMock.anyObject())).andThrow(new RuntimeException()).times(3);
EasyMock.expect(mockDecoder.decode(EasyMock.anyObject())).andReturn(new CamusWrapper<String>("dummy")).times(7);
EasyMock.replay(mockDecoder);
return mockDecoder;
}
public static void reset() {
decoderType = DecoderType.REGULAR;
}
}
```
|
Please help me generate a test for this class.
|
```package com.linkedin.camus.etl.kafka.mapred;
import com.linkedin.camus.coders.CamusWrapper;
import com.linkedin.camus.coders.Message;
import com.linkedin.camus.coders.MessageDecoder;
import com.linkedin.camus.etl.kafka.CamusJob;
import com.linkedin.camus.etl.kafka.coders.MessageDecoderFactory;
import com.linkedin.camus.etl.kafka.common.EtlKey;
import com.linkedin.camus.etl.kafka.common.EtlRequest;
import com.linkedin.camus.etl.kafka.common.ExceptionWritable;
import com.linkedin.camus.etl.kafka.common.KafkaReader;
import com.linkedin.camus.schemaregistry.SchemaNotFoundException;
import java.io.IOException;
import java.util.HashSet;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.log4j.Logger;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.joda.time.format.PeriodFormatter;
import org.joda.time.format.PeriodFormatterBuilder;
public class EtlRecordReader extends RecordReader<EtlKey, CamusWrapper> {
private static final String PRINT_MAX_DECODER_EXCEPTIONS = "max.decoder.exceptions.to.print";
private static final String DEFAULT_SERVER = "server";
private static final String DEFAULT_SERVICE = "service";
private static final int RECORDS_TO_READ_AFTER_TIMEOUT = 5;
public static enum KAFKA_MSG {
DECODE_SUCCESSFUL,
SKIPPED_SCHEMA_NOT_FOUND,
SKIPPED_OTHER
};
protected TaskAttemptContext context;
private EtlInputFormat inputFormat;
private Mapper<EtlKey, Writable, EtlKey, Writable>.Context mapperContext;
private KafkaReader reader;
private long totalBytes;
private long readBytes = 0;
private int numRecordsReadForCurrentPartition = 0;
private long bytesReadForCurrentPartition = 0;
private boolean skipSchemaErrors = false;
private MessageDecoder decoder;
private final BytesWritable msgValue = new BytesWritable();
private final BytesWritable msgKey = new BytesWritable();
private final EtlKey key = new EtlKey();
private CamusWrapper value;
private int maxPullHours = 0;
private int exceptionCount = 0;
private long maxPullTime = 0;
private long endTimeStamp = 0;
private long curTimeStamp = 0;
private long startTime = 0;
private HashSet<String> ignoreServerServiceList = null;
private PeriodFormatter periodFormatter = null;
private String statusMsg = "";
EtlSplit split;
private static Logger log = Logger.getLogger(EtlRecordReader.class);
/**
* Record reader to fetch directly from Kafka
*
* @param split
* @throws IOException
* @throws InterruptedException
*/
public EtlRecordReader(EtlInputFormat inputFormat, InputSplit split, TaskAttemptContext context) throws IOException,
InterruptedException {
this.inputFormat = inputFormat;
initialize(split, context);
}
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
// For class path debugging
log.info("classpath: " + System.getProperty("java.class.path"));
ClassLoader loader = EtlRecordReader.class.getClassLoader();
log.info("PWD: " + System.getProperty("user.dir"));
log.info("classloader: " + loader.getClass());
log.info("org.apache.avro.Schema: " + loader.getResource("org/apache/avro/Schema.class"));
this.split = (EtlSplit) split;
this.context = context;
if (context instanceof Mapper.Context) {
mapperContext = (Mapper.Context) context;
}
this.skipSchemaErrors = EtlInputFormat.getEtlIgnoreSchemaErrors(context);
if (EtlInputFormat.getKafkaMaxPullHrs(context) != -1) {
this.maxPullHours = EtlInputFormat.getKafkaMaxPullHrs(context);
} else {
this.endTimeStamp = Long.MAX_VALUE;
}
if (EtlInputFormat.getKafkaMaxPullMinutesPerTask(context) != -1) {
this.startTime = System.currentTimeMillis();
this.maxPullTime =
new DateTime(this.startTime).plusMinutes(EtlInputFormat.getKafkaMaxPullMinutesPerTask(context)).getMillis();
} else {
this.maxPullTime = Long.MAX_VALUE;
}
ignoreServerServiceList = new HashSet<String>();
for (String ignoreServerServiceTopic : EtlInputFormat.getEtlAuditIgnoreServiceTopicList(context)) {
ignoreServerServiceList.add(ignoreServerServiceTopic);
}
this.totalBytes = this.split.getLength();
this.periodFormatter =
new PeriodFormatterBuilder().appendMinutes().appendSuffix("m").appendSeconds().appendSuffix("s").toFormatter();
}
@Override
public synchronized void close() throws IOException {
if (reader != null) {
reader.close();
}
}
private CamusWrapper getWrappedRecord(Message message) throws IOException {
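// Decode the raw message, incrementing the matching counter; decode failures are swallowed
// (returning null) when schema errors are ignored, and rethrown as IOException otherwise.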
CamusWrapper r = null;
try {
r = decoder.decode(message);
mapperContext.getCounter(KAFKA_MSG.DECODE_SUCCESSFUL).increment(1);
} catch (SchemaNotFoundException e) {
mapperContext.getCounter(KAFKA_MSG.SKIPPED_SCHEMA_NOT_FOUND).increment(1);
if (!skipSchemaErrors) {
throw new IOException(e);
}
} catch (Exception e) {
mapperContext.getCounter(KAFKA_MSG.SKIPPED_OTHER).increment(1);
if (!skipSchemaErrors) {
throw new IOException(e);
}
}
return r;
}
private static byte[] getBytes(BytesWritable val) {
byte[] buffer = val.getBytes();
/*
* FIXME: remove the following part once the below jira is fixed
* https://issues.apache.org/jira/browse/HADOOP-6298
*/
long len = val.getLength();
byte[] bytes = buffer;
if (len < buffer.length) {
bytes = new byte[(int) len];
System.arraycopy(buffer, 0, bytes, 0, (int) len);
}
return bytes;
}
@Override
public float getProgress() throws IOException {
if (getPos() == 0) {
return 0f;
}
if (getPos() >= totalBytes) {
return 1f;
}
return (float) ((double) getPos() / totalBytes);
}
private long getPos() throws IOException {
return readBytes;
}
@Override
public EtlKey getCurrentKey() throws IOException, InterruptedException {
return key;
}
@Override
public CamusWrapper getCurrentValue() throws IOException, InterruptedException {
return value;
}
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
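// If the per-task pull deadline has passed and we have read at least a few records from the
// current partition, stop pulling, report the partition as not fully pulled, and drop the reader.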
if (System.currentTimeMillis() > maxPullTime
&& this.numRecordsReadForCurrentPartition >= RECORDS_TO_READ_AFTER_TIMEOUT) {
String maxMsg = "at " + new DateTime(curTimeStamp).toString();
log.info("Kafka pull time limit reached");
statusMsg += " max read " + maxMsg;
context.setStatus(statusMsg);
log.info(key.getTopic() + " max read " + maxMsg);
mapperContext.getCounter("total", "request-time(ms)").increment(reader.getFetchTime());
closeReader();
String topicNotFullyPulledMsg =
String.format("Topic %s:%d not fully pulled, max task time reached %s, pulled %d records", key.getTopic(),
key.getPartition(), maxMsg, this.numRecordsReadForCurrentPartition);
mapperContext.write(key, new ExceptionWritable(topicNotFullyPulledMsg));
log.warn(topicNotFullyPulledMsg);
String timeSpentOnPartition =
this.periodFormatter.print(new Duration(this.startTime, System.currentTimeMillis()).toPeriod());
String timeSpentOnTopicMsg =
String.format("Time spent on topic %s:%d = %s", key.getTopic(), key.getPartition(), timeSpentOnPartition);
mapperContext.write(key, new ExceptionWritable(timeSpentOnTopicMsg));
log.info(timeSpentOnTopicMsg);
reader = null;
}
while (true) {
try {
if (reader == null || !reader.hasNext()) {
if (this.numRecordsReadForCurrentPartition != 0) {
String timeSpentOnPartition =
this.periodFormatter.print(new Duration(this.startTime, System.currentTimeMillis()).toPeriod());
log.info("Time spent on this partition = " + timeSpentOnPartition);
log.info("Num of records read for this partition = " + this.numRecordsReadForCurrentPartition);
log.info("Bytes read for this partition = " + this.bytesReadForCurrentPartition);
log.info("Actual avg size for this partition = " + this.bytesReadForCurrentPartition
/ this.numRecordsReadForCurrentPartition);
}
EtlRequest request = (EtlRequest) split.popRequest();
if (request == null) {
return false;
}
// Reset start time, num of records read and bytes read
this.startTime = System.currentTimeMillis();
this.numRecordsReadForCurrentPartition = 0;
this.bytesReadForCurrentPartition = 0;
if (maxPullHours > 0) {
endTimeStamp = 0;
}
key.set(request.getTopic(), request.getLeaderId(), request.getPartition(), request.getOffset(),
request.getOffset(), 0);
value = null;
log.info("\n\ntopic:" + request.getTopic() + " partition:" + request.getPartition() + " beginOffset:"
+ request.getOffset() + " estimatedLastOffset:" + request.getLastOffset());
statusMsg += statusMsg.length() > 0 ? "; " : "";
statusMsg += request.getTopic() + ":" + request.getLeaderId() + ":" + request.getPartition();
context.setStatus(statusMsg);
if (reader != null) {
closeReader();
}
reader =
new KafkaReader(inputFormat, context, request, CamusJob.getKafkaTimeoutValue(mapperContext),
CamusJob.getKafkaBufferSize(mapperContext));
decoder = createDecoder(request.getTopic());
}
int count = 0;
Message message;
while ((message = reader.getNext(key)) != null) {
readBytes += key.getMessageSize();
count++;
this.numRecordsReadForCurrentPartition++;
this.bytesReadForCurrentPartition += key.getMessageSize();
context.progress();
mapperContext.getCounter("total", "data-read").increment(message.getPayload().length);
mapperContext.getCounter("total", "event-count").increment(1);
message.validate();
long tempTime = System.currentTimeMillis();
CamusWrapper wrapper;
try {
wrapper = getWrappedRecord(message);
if (wrapper == null) {
throw new RuntimeException("null record");
}
} catch (Exception e) {
if (exceptionCount < getMaximumDecoderExceptionsToPrint(context)) {
mapperContext.write(key, new ExceptionWritable(e));
log.info(e.getMessage());
exceptionCount++;
} else if (exceptionCount == getMaximumDecoderExceptionsToPrint(context)) {
log.info("The same exception has occured for more than " + getMaximumDecoderExceptionsToPrint(context)
+ " records. All further exceptions will not be printed");
}
if (System.currentTimeMillis() > maxPullTime) {
exceptionCount = 0;
break;
}
continue;
}
curTimeStamp = wrapper.getTimestamp();
try {
key.setTime(curTimeStamp);
key.addAllPartitionMap(wrapper.getPartitionMap());
setServerService();
} catch (Exception e) {
mapperContext.write(key, new ExceptionWritable(e));
continue;
}
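// The first decoded record for this partition establishes the end of the max-pull-hours window;
// any later record past that window stops the pull for this partition.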
if (endTimeStamp == 0) {
DateTime time = new DateTime(curTimeStamp);
statusMsg += " begin read at " + time.toString();
context.setStatus(statusMsg);
log.info(key.getTopic() + " begin read at " + time.toString());
endTimeStamp = (time.plusHours(this.maxPullHours)).getMillis();
} else if (curTimeStamp > endTimeStamp) {
String maxMsg = "at " + new DateTime(curTimeStamp).toString();
log.info("Kafka Max history hours reached");
mapperContext.write(
key,
new ExceptionWritable(String.format(
"Topic not fully pulled, max task time reached %s, pulled %d records", maxMsg,
this.numRecordsReadForCurrentPartition)));
statusMsg += " max read " + maxMsg;
context.setStatus(statusMsg);
log.info(key.getTopic() + " max read " + maxMsg);
mapperContext.getCounter("total", "request-time(ms)").increment(reader.getFetchTime());
closeReader();
}
long secondTime = System.currentTimeMillis();
value = wrapper;
long decodeTime = ((secondTime - tempTime));
mapperContext.getCounter("total", "decode-time(ms)").increment(decodeTime);
if (reader != null) {
mapperContext.getCounter("total", "request-time(ms)").increment(reader.getFetchTime());
}
return true;
}
log.info("Records read : " + count);
count = 0;
reader = null;
} catch (Throwable t) {
Exception e = new Exception(t.getLocalizedMessage(), t);
e.setStackTrace(t.getStackTrace());
mapperContext.write(key, new ExceptionWritable(e));
reader = null;
continue;
}
}
}
protected MessageDecoder createDecoder(String topic) {
return MessageDecoderFactory.createMessageDecoder(context, topic);
}
private void closeReader() throws IOException {
if (reader != null) {
try {
reader.close();
} catch (Exception e) {
// not much to do here but skip the task
} finally {
reader = null;
}
}
}
public void setServerService() {
if (ignoreServerServiceList.contains(key.getTopic()) || ignoreServerServiceList.contains("all")) {
key.setServer(DEFAULT_SERVER);
key.setService(DEFAULT_SERVICE);
}
}
public static int getMaximumDecoderExceptionsToPrint(JobContext job) {
return job.getConfiguration().getInt(PRINT_MAX_DECODER_EXCEPTIONS, 10);
}
}
```
|
```package com.linkedin.camus.etl.kafka.coders;
import com.linkedin.camus.etl.kafka.common.EtlKey;
import com.linkedin.camus.etl.kafka.partitioner.DefaultPartitioner;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.junit.Test;
import java.io.IOException;
import static org.junit.Assert.assertTrue;
public class TestDefaultPartitioner {
@Test
public void testGeneratePartitionPath() throws IOException {
// generatePartitionedPath() should take a timestamp and return a formatted string by default
Configuration testConfiguration = new Configuration();
Job testJob = new Job(new Configuration());
DefaultPartitioner testPartitioner = new DefaultPartitioner();
testPartitioner.setConf(testConfiguration);
String actualResult = testPartitioner.generatePartitionedPath(testJob, "testTopic", "1406777693000");
String expectedResult = "testTopic/hourly/2014/07/30/20";
assertTrue(actualResult.equals(expectedResult));
actualResult =
testPartitioner.generateFileName(testJob, "testTopic", "testBrokerId", 123, 100, 500, "1406777693000");
expectedResult = "testTopic.testBrokerId.123.100.500.1406777693000";
assertTrue(actualResult.equals(expectedResult));
}
@Test
public void testEncodedPartition() throws IOException {
EtlKey testEtlKey = new EtlKey();
testEtlKey.setTime(1400549463000L);
Configuration testConfiguration = new Configuration();
Job testJob = new Job(new Configuration());
DefaultPartitioner testPartitioner = new DefaultPartitioner();
testPartitioner.setConf(testConfiguration);
String actualResult = testPartitioner.encodePartition(testJob, testEtlKey);
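// 1400549463000 ms floored to the default 60-minute partition boundary is 1400547600000 ms.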
String expectedResult = "1400547600000";
assertTrue(actualResult.equals(expectedResult));
}
}
```
|
Please help me generate a test for this class.
|
```package com.linkedin.camus.etl.kafka.partitioner;
import com.linkedin.camus.etl.IEtlKey;
import com.linkedin.camus.etl.Partitioner;
import com.linkedin.camus.etl.kafka.common.DateUtils;
import com.linkedin.camus.etl.kafka.mapred.EtlMultiOutputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormatter;
/**
* Partitions incoming data into hourly partitions, generates pathnames of the form:
* {@code etl.destination.path/topic-name/hourly/YYYY/MM/dd/HH}.
*
* The following configurations are supported:
* <ul>
* <li>{@code etl.destination.path} - top-level data output directory, required</li>
* <li>{@code etl.destination.path.topic.sub.dir} - sub-dir to create under topic dir, defaults to {@code hourly}</li>
* <li>{@code etl.default.timezone} - timezone of the events, defaults to {@code America/Los_Angeles}</li>
* <li>{@code etl.output.file.time.partition.mins} - partitions size in minutes, defaults to {@code 60}</li>
* </ul>
*/
public class DefaultPartitioner extends Partitioner {
protected static final String OUTPUT_DATE_FORMAT = "YYYY/MM/dd/HH";
//protected DateTimeZone outputDateTimeZone = null;
protected DateTimeFormatter outputDateFormatter = null;
@Override
public String encodePartition(JobContext context, IEtlKey key) {
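// Bucket the record's timestamp into a fixed-size window (etl.output.file.time.partition.mins,
// 60 by default) and return the window's start time in epoch milliseconds.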
long outfilePartitionMs = EtlMultiOutputFormat.getEtlOutputFileTimePartitionMins(context) * 60000L;
return "" + DateUtils.getPartition(outfilePartitionMs, key.getTime(), outputDateFormatter.getZone());
}
@Override
public String generatePartitionedPath(JobContext context, String topic, String encodedPartition) {
StringBuilder sb = new StringBuilder();
sb.append(topic).append("/");
sb.append(EtlMultiOutputFormat.getDestPathTopicSubDir(context)).append("/");
DateTime bucket = new DateTime(Long.valueOf(encodedPartition));
sb.append(bucket.toString(outputDateFormatter));
return sb.toString();
}
@Override
public String generateFileName(JobContext context, String topic, String brokerId, int partitionId, int count,
long offset, String encodedPartition) {
StringBuilder sb = new StringBuilder();
sb.append(topic);
sb.append(".").append(brokerId);
sb.append(".").append(partitionId);
sb.append(".").append(count);
sb.append(".").append(offset);
sb.append(".").append(encodedPartition);
return sb.toString();
}
@Override
public String getWorkingFileName(JobContext context, String topic, String brokerId, int partitionId,
String encodedPartition) {
StringBuilder sb = new StringBuilder();
sb.append("data.").append(topic.replaceAll("\\.", "_"));
sb.append(".").append(brokerId);
sb.append(".").append(partitionId);
sb.append(".").append(encodedPartition);
return sb.toString();
}
@Override
public void setConf(Configuration conf) {
if (conf != null) {
outputDateFormatter =
DateUtils.getDateTimeFormatter(OUTPUT_DATE_FORMAT,
DateTimeZone.forID(conf.get(EtlMultiOutputFormat.ETL_DEFAULT_TIMEZONE, "America/Los_Angeles")));
}
super.setConf(conf);
}
}
```
|
```package com.linkedin.camus.etl.kafka.coders;
import com.linkedin.camus.coders.CamusWrapper;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import java.util.Properties;
public class TestJsonStringMessageDecoder {
@Test
public void testDecodeUnixMilliseconds() {
// Test that the decoder extracts unix_milliseconds
// It should take and return milliseconds
long expectedTimestamp = 1406947271534L;
Properties testProperties = new Properties();
testProperties.setProperty("camus.message.timestamp.format", "unix_milliseconds");
JsonStringMessageDecoder testDecoder = new JsonStringMessageDecoder();
testDecoder.init(testProperties, "testTopic");
String payload = "{\"timestamp\": " + expectedTimestamp + ", \"myData\": \"myValue\"}";
byte[] bytePayload = payload.getBytes();
CamusWrapper actualResult = testDecoder.decode(new TestMessage().setPayload(bytePayload));
long actualTimestamp = actualResult.getTimestamp();
assertEquals(expectedTimestamp, actualTimestamp);
}
@Test
public void testDecodeUnixSeconds() {
// Test that the decoder extracts unix_seconds
// It should receive seconds and return milliseconds
long testTimestamp = 140694727L;
long expectedTimestamp = 140694727000L;
Properties testProperties = new Properties();
testProperties.setProperty("camus.message.timestamp.format", "unix_seconds");
JsonStringMessageDecoder testDecoder = new JsonStringMessageDecoder();
testDecoder.init(testProperties, "testTopic");
String payload = "{\"timestamp\": " + testTimestamp + ", \"myData\": \"myValue\"}";
byte[] bytePayload = payload.getBytes();
CamusWrapper actualResult = testDecoder.decode(new TestMessage().setPayload(bytePayload));
long actualTimestamp = actualResult.getTimestamp();
assertEquals(expectedTimestamp, actualTimestamp);
}
@Test
public void testDecodeWithTimestampFormat() {
// Test that we can specify a date and a pattern and
// get back unix timestamp milliseconds
String testFormat = "yyyy-MM-dd HH:mm:ss Z";
String testTimestamp = "2014-02-01 01:15:27 UTC";
long expectedTimestamp = 1391217327000L;
Properties testProperties = new Properties();
testProperties.setProperty("camus.message.timestamp.format", testFormat);
JsonStringMessageDecoder testDecoder = new JsonStringMessageDecoder();
testDecoder.init(testProperties, "testTopic");
String payload = "{\"timestamp\": \"" + testTimestamp + "\", \"myData\": \"myValue\"}";
byte[] bytePayload = payload.getBytes();
CamusWrapper actualResult = testDecoder.decode(new TestMessage().setPayload(bytePayload));
long actualTimestamp = actualResult.getTimestamp();
assertEquals(expectedTimestamp, actualTimestamp);
}
@Test
public void testDecodeWithIsoFormat() {
// Test that when no format is specified then both
// ISO 8601 format: 1994-11-05T08:15:30-05:00
// and 1994-11-05T13:15:30Z are accepted
String testTimestamp1 = "1994-11-05T08:15:30-05:00";
String testTimestamp2 = "1994-11-05T13:15:30Z";
long expectedTimestamp = 784041330000L;
Properties testProperties = new Properties();
JsonStringMessageDecoder testDecoder = new JsonStringMessageDecoder();
testDecoder.init(testProperties, "testTopic");
String payload = "{\"timestamp\": \"" + testTimestamp1 + "\", \"myData\": \"myValue\"}";
byte[] bytePayload = payload.getBytes();
CamusWrapper actualResult = testDecoder.decode(new TestMessage().setPayload(bytePayload));
long actualTimestamp = actualResult.getTimestamp();
assertEquals(expectedTimestamp, actualTimestamp);
payload = "{\"timestamp\": \"" + testTimestamp2 + "\", \"myData\": \"myValue\"}";
bytePayload = payload.getBytes();
actualResult = testDecoder.decode(new TestMessage().setPayload(bytePayload));
actualTimestamp = actualResult.getTimestamp();
assertEquals(expectedTimestamp, actualTimestamp);
}
@Test(expected = RuntimeException.class)
public void testBadJsonInput() {
byte[] bytePayload = "{\"key: value}".getBytes();
JsonStringMessageDecoder testDecoder = new JsonStringMessageDecoder();
testDecoder.decode(new TestMessage().setPayload(bytePayload));
}
}
```
|
Please help me generate a test for this class.
|
```package com.linkedin.camus.etl.kafka.coders;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.linkedin.camus.coders.CamusWrapper;
import com.linkedin.camus.coders.Message;
import com.linkedin.camus.coders.MessageDecoder;
import org.apache.log4j.Logger;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import java.io.UnsupportedEncodingException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Properties;
/**
* MessageDecoder class that will convert the payload into a JSON object,
* look for the camus.message.timestamp.field, convert that timestamp to
* a unix epoch long using camus.message.timestamp.format, and then set the CamusWrapper's
* timestamp property to the record's timestamp. If the JSON does not have
* a timestamp or if the timestamp could not be parsed properly, then
* System.currentTimeMillis() will be used.
* <p/>
* camus.message.timestamp.format will be used with SimpleDateFormat. If your
* camus.message.timestamp.field is stored in JSON as a unix epoch timestamp,
* you should set camus.message.timestamp.format to 'unix_seconds' (if your
* timestamp units are seconds) or 'unix_milliseconds' (if your timestamp units
* are milliseconds).
* <p/>
* This MessageDecoder returns a CamusWrapper that works with String payloads,
* since JSON data is always a String.
*/
public class JsonStringMessageDecoder extends MessageDecoder<Message, String> {
private static final org.apache.log4j.Logger log = Logger.getLogger(JsonStringMessageDecoder.class);
// Property for format of timestamp in JSON timestamp field.
public static final String CAMUS_MESSAGE_TIMESTAMP_FORMAT = "camus.message.timestamp.format";
public static final String DEFAULT_TIMESTAMP_FORMAT = "[dd/MMM/yyyy:HH:mm:ss Z]";
// Property for the JSON field name of the timestamp.
public static final String CAMUS_MESSAGE_TIMESTAMP_FIELD = "camus.message.timestamp.field";
public static final String DEFAULT_TIMESTAMP_FIELD = "timestamp";
JsonParser jsonParser = new JsonParser();
DateTimeFormatter dateTimeParser = ISODateTimeFormat.dateTimeParser();
private String timestampFormat;
private String timestampField;
@Override
public void init(Properties props, String topicName) {
this.props = props;
this.topicName = topicName;
timestampFormat = props.getProperty(CAMUS_MESSAGE_TIMESTAMP_FORMAT, DEFAULT_TIMESTAMP_FORMAT);
timestampField = props.getProperty(CAMUS_MESSAGE_TIMESTAMP_FIELD, DEFAULT_TIMESTAMP_FIELD);
}
@Override
public CamusWrapper<String> decode(Message message) {
long timestamp = 0;
String payloadString;
JsonObject jsonObject;
try {
payloadString = new String(message.getPayload(), "UTF-8");
} catch (UnsupportedEncodingException e) {
log.error("Unable to load UTF-8 encoding, falling back to system default", e);
payloadString = new String(message.getPayload());
}
// Parse the payload into a JsonObject.
try {
jsonObject = jsonParser.parse(payloadString.trim()).getAsJsonObject();
} catch (RuntimeException e) {
log.error("Caught exception while parsing JSON string '" + payloadString + "'.");
throw new RuntimeException(e);
}
// Attempt to read and parse the timestamp element into a long.
if (jsonObject.has(timestampField)) {
// If timestampFormat is 'unix_seconds',
// then the timestamp only needs to be converted to milliseconds.
// Also support 'unix' for backwards compatibility.
if (timestampFormat.equals("unix_seconds") || timestampFormat.equals("unix")) {
timestamp = jsonObject.get(timestampField).getAsLong();
// This timestamp is in seconds, convert it to milliseconds.
timestamp = timestamp * 1000L;
}
// Else if this timestamp is already in milliseconds,
// just save it as is.
else if (timestampFormat.equals("unix_milliseconds")) {
timestamp = jsonObject.get(timestampField).getAsLong();
}
// Else if timestampFormat is 'ISO-8601', parse that
else if (timestampFormat.equals("ISO-8601")) {
String timestampString = jsonObject.get(timestampField).getAsString();
try {
timestamp = new DateTime(timestampString).getMillis();
} catch (IllegalArgumentException e) {
log.error("Could not parse timestamp '" + timestampString + "' as ISO-8601 while decoding JSON message.");
}
}
// Otherwise parse the timestamp as a string in timestampFormat.
else {
String timestampString = jsonObject.get(timestampField).getAsString();
try {
timestamp = dateTimeParser.parseDateTime(timestampString).getMillis();
} catch (IllegalArgumentException e) {
try {
timestamp = new SimpleDateFormat(timestampFormat).parse(timestampString).getTime();
} catch (ParseException pe) {
log.error("Could not parse timestamp '" + timestampString + "' while decoding JSON message.");
}
} catch (Exception ee) {
log.error("Could not parse timestamp '" + timestampString + "' while decoding JSON message.");
}
}
}
// If timestamp wasn't set in the above block,
// then set it to current time.
if (timestamp == 0) {
log.warn("Couldn't find or parse timestamp field '" + timestampField
+ "' in JSON message, defaulting to current time.");
timestamp = System.currentTimeMillis();
}
return new CamusWrapper<String>(payloadString, timestamp);
}
}
```
|
```package com.linkedin.camus.etl.kafka.common;
import org.joda.time.format.DateTimeFormatter;
import org.junit.Assert;
import org.junit.Test;
public class DateUtilsTest {
@Test(expected = IllegalArgumentException.class)
public void testGetPSTFormatterBadString() {
DateUtils.getDateTimeFormatter("qwerty");
}
@Test
public void testGetPSTFormatterShortString() {
DateTimeFormatter actualResult = DateUtils.getDateTimeFormatter("yyyy-MM-dd");
Assert.assertEquals("2004-05-03", actualResult.print(1083628800000L));
}
@Test
public void testGetPartition() {
long actualResult = DateUtils.getPartition(500000L, 1083628800000L);
Assert.assertEquals(1083628500000L, actualResult);
}
@Test(expected = ArithmeticException.class)
public void testGetPartitionWithZeroGranularity() {
DateUtils.getPartition(0L, 1083628800000L);
}
@Test
public void testGetPartitionWithZeroTimestamp() {
long actualResult = DateUtils.getPartition(500000L, 0L);
Assert.assertEquals(0L, actualResult);
}
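// Hedged addition (not in the original suite): exercises the time-zone-aware overload.
// With a fixed +01:00 zone and one-hour granularity, flooring happens in local time
// and the result is converted back to UTC.
@Test
public void testGetPartitionWithTimeZone() {
long actualResult = DateUtils.getPartition(3600000L, 1083629900000L, org.joda.time.DateTimeZone.forOffsetHours(1));
Assert.assertEquals(1083628800000L, actualResult);
}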
}
```
|
Please help me generate a test for this class.
|
```package com.linkedin.camus.etl.kafka.common;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
public class DateUtils {
public static DateTimeZone PST = DateTimeZone.forID("America/Los_Angeles");
public static DateTimeFormatter MINUTE_FORMATTER = getDateTimeFormatter("YYYY-MM-dd-HH-mm");
public static DateTimeFormatter getDateTimeFormatter(String str) {
return getDateTimeFormatter(str, PST);
}
public static DateTimeFormatter getDateTimeFormatter(String str, DateTimeZone timeZone) {
return DateTimeFormat.forPattern(str).withZone(timeZone);
}
public static long getPartition(long timeGranularityMs, long timestamp) {
return (timestamp / timeGranularityMs) * timeGranularityMs;
}
public static long getPartition(long timeGranularityMs, long timestamp, DateTimeZone outputDateTimeZone) {
long adjustedTimeStamp = outputDateTimeZone.convertUTCToLocal(timestamp);
long partitionedTime = (adjustedTimeStamp / timeGranularityMs) * timeGranularityMs;
return outputDateTimeZone.convertLocalToUTC(partitionedTime, false);
}
public static DateTime getMidnight() {
DateTime time = new DateTime(PST);
return new DateTime(time.getYear(), time.getMonthOfYear(), time.getDayOfMonth(), 0, 0, 0, 0, PST);
}
}
```
|
```package com.linkedin.camus.etl.kafka.partitioner;
import com.linkedin.camus.etl.kafka.common.EtlKey;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Test;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.*;
public class BaseTimeBasedPartitionerTest {
private static final DateTimeZone DATE_TIME_ZONE = DateTimeZone.forID("Europe/Amsterdam");
private BaseTimeBasedPartitioner underTest = new BiHourlyPartitioner();
@Test
public void testEncodePartition() throws Exception {
long time = new DateTime(2014, 1, 1, 3, 0, 0, 0, DATE_TIME_ZONE).getMillis();
String partition = underTest.encodePartition(null, etlKeyWithTime(time));
assertEquals("1388538000000", partition);
}
@Test
public void testGeneratePartitionedPath() throws Exception {
String path = underTest.generatePartitionedPath(null, "tpc", "1388538000000");
assertEquals("tpc/bi-hourly/year=2014/month=janvier/day=01/hour=2", path);
}
@Test
public void testGenerateFileName() throws Exception {
String fileName = underTest.generateFileName(null, "tpc", "brk1", 1, 2, 45330016, "1388538000000");
assertEquals("tpc.brk1.1.2.45330016.1388538000000", fileName);
}
@Test
public void testGetWorkingFileName() throws Exception {
String workingFileName = underTest.getWorkingFileName(null, "tpc", "brk1", 1, "1388538000000");
assertEquals("data.tpc.brk1.1.1388538000000", workingFileName);
}
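// Hedged addition (not in the original suite): two timestamps inside the same
// bi-hourly window should encode to the same partition value.
@Test
public void testEncodePartitionSameBucket() throws Exception {
long t1 = new DateTime(2014, 1, 1, 2, 0, 0, 0, DATE_TIME_ZONE).getMillis();
long t2 = new DateTime(2014, 1, 1, 3, 59, 59, 999, DATE_TIME_ZONE).getMillis();
assertEquals(underTest.encodePartition(null, etlKeyWithTime(t1)),
underTest.encodePartition(null, etlKeyWithTime(t2)));
}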
private EtlKey etlKeyWithTime(long time) {
EtlKey etlKey = new EtlKey();
etlKey.setTime(time);
return etlKey;
}
private static class BiHourlyPartitioner extends BaseTimeBasedPartitioner {
public BiHourlyPartitioner() {
init(TimeUnit.HOURS.toMillis(2), "'bi-hourly'/'year='YYYY/'month='MMMM/'day='dd/'hour='H", Locale.FRENCH, DATE_TIME_ZONE);
}
}
}```
|
Please help me generate a test for this class.
|
```package com.linkedin.camus.etl.kafka.partitioner;
import com.linkedin.camus.etl.IEtlKey;
import com.linkedin.camus.etl.Partitioner;
import com.linkedin.camus.etl.kafka.common.DateUtils;
import org.apache.hadoop.mapreduce.JobContext;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormatter;
import java.util.Locale;
/**
* Base class for time based partitioners.
* Can be configured via {@link #init(long, String, java.util.Locale, org.joda.time.DateTimeZone)}.
*/
abstract public class BaseTimeBasedPartitioner extends Partitioner {
public static final String DEFAULT_TIME_ZONE = "America/Los_Angeles";
/** Size of a partition in milliseconds.*/
private long outfilePartitionMillis = 0;
private DateTimeFormatter outputDirFormatter;
/**
* Initialize the partitioner.
* This method must be invoked once, before any other method.
* @param outfilePartitionMillis duration of a partition, e.g. {@code 3,600,000} for hour partitions
* @param destSubTopicPathFormat format of output sub-dir to be created under topic directory,
* typically something like {@code "'hourly'/YYYY/MM/dd/HH"}.
* For formatting rules see {@link org.joda.time.format.DateTimeFormat}.
* @param locale locale to use for formatting of path
* @param outputTimeZone time zone to use for date calculations
*/
protected void init(long outfilePartitionMillis, String destSubTopicPathFormat, Locale locale, DateTimeZone outputTimeZone) {
this.outfilePartitionMillis = outfilePartitionMillis;
this.outputDirFormatter = DateUtils.getDateTimeFormatter(destSubTopicPathFormat, outputTimeZone).withLocale(locale);
}
@Override
public String encodePartition(JobContext context, IEtlKey key) {
return Long.toString(DateUtils.getPartition(outfilePartitionMillis, key.getTime(), outputDirFormatter.getZone()));
}
@Override
public String generatePartitionedPath(JobContext context, String topic, String encodedPartition) {
DateTime bucket = new DateTime(Long.valueOf(encodedPartition));
return topic + "/" + bucket.toString(outputDirFormatter);
}
@Override
public String generateFileName(JobContext context, String topic, String brokerId, int partitionId, int count,
long offset, String encodedPartition) {
return topic + "." + brokerId + "." + partitionId + "." + count + "." + offset + "." + encodedPartition;
}
@Override
public String getWorkingFileName(JobContext context, String topic, String brokerId, int partitionId,
String encodedPartition) {
return "data." + topic.replace('.', '_') + "." + brokerId + "." + partitionId + "." + encodedPartition;
}
}
```
|
```package com.linkedin.camus.sweeper;
import java.util.List;
import java.util.Properties;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.junit.Test;
import com.linkedin.camus.sweeper.utils.DateUtils;
import static org.junit.Assert.*;
public class CamusSingleFolderSweeperPlannerTest extends EasyMockSupport {
@Test
public void testCreateSweeperJobProps() throws Exception {
FileSystem mockedFs = createMock(FileSystem.class);
Path inputDir = new Path("inputDir");
Path outputDir = new Path("outputDir");
DateUtils dUtils = new DateUtils(new Properties());
DateTime currentHour = dUtils.getCurrentHour();
DateTimeFormatter hourFormatter = dUtils.getDateTimeFormatter("YYYY/MM/dd/HH");
String hour = currentHour.minusHours(1).toString(hourFormatter);
Path inputDirWithHour = new Path(inputDir, hour);
Path outputDirWithHour = new Path(outputDir, hour);
// inputDir should exist, but outputDirWithHour shouldn't.
EasyMock.expect(mockedFs.exists(inputDir)).andReturn(true).once();
EasyMock.expect(mockedFs.exists(outputDirWithHour)).andReturn(false).once();
FileStatus mockedFileStatus = createMock(FileStatus.class);
FileStatus[] fileStatuses = { mockedFileStatus };
EasyMock.expect(mockedFs.globStatus((Path) EasyMock.anyObject())).andReturn(fileStatuses).once();
EasyMock.expect(mockedFileStatus.getPath()).andReturn(inputDirWithHour).anyTimes();
ContentSummary mockedContentSummary = createMock(ContentSummary.class);
long dataSize = 100;
EasyMock.expect(mockedContentSummary.getLength()).andReturn(dataSize).once();
EasyMock.expect(mockedFs.getContentSummary(inputDirWithHour)).andReturn(mockedContentSummary).once();
replayAll();
String topic = "testTopic";
List<Properties> jobPropsList =
new CamusSingleFolderSweeperPlanner().setPropertiesLogger(new Properties(), Logger.getLogger("testLogger"))
.createSweeperJobProps(topic, inputDir, outputDir, mockedFs);
assertEquals(1, jobPropsList.size());
Properties jobProps = jobPropsList.get(0);
String topicAndHour = topic + ":" + hour;
assertEquals(topic, jobProps.getProperty("topic"));
assertEquals(topicAndHour, jobProps.getProperty(CamusSingleFolderSweeper.TOPIC_AND_HOUR));
assertEquals(inputDirWithHour.toString(), jobProps.getProperty(CamusSingleFolderSweeper.INPUT_PATHS));
assertEquals(outputDirWithHour.toString(), jobProps.getProperty(CamusSingleFolderSweeper.DEST_PATH));
}
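// Hedged addition (not in the original suite): when the input directory does not
// exist, the planner should skip the topic and return no job properties.
@Test
public void testCreateSweeperJobPropsMissingInputDir() throws Exception {
FileSystem mockedFs = createMock(FileSystem.class);
Path inputDir = new Path("missingInputDir");
EasyMock.expect(mockedFs.exists(inputDir)).andReturn(false).once();
replayAll();
List<Properties> jobPropsList =
new CamusSingleFolderSweeperPlanner().setPropertiesLogger(new Properties(), Logger.getLogger("testLogger"))
.createSweeperJobProps("testTopic", inputDir, new Path("outputDir"), mockedFs);
assertEquals(0, jobPropsList.size());
}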
}
```
|
Please help me generate a test for this class.
|
```package com.linkedin.camus.sweeper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import com.linkedin.camus.sweeper.utils.DateUtils;
public class CamusSingleFolderSweeperPlanner extends CamusSweeperPlanner {
private static final String CAMUS_SINGLE_FOLDER_SWEEPER_TIMEBASED = "camus.single.folder.sweeper.timebased";
private static final String DEFAULT_CAMUS_SINGLE_FOLDER_SWEEPER_TIMEBASED = Boolean.TRUE.toString();
private static final String CAMUS_SINGLE_FOLDER_SWEEPER_FOLDER_STRUCTURE =
"camus.single.folder.sweeper.folder.structure";
private static final String DEFAULT_CAMUS_SINGLE_FOLDER_SWEEPER_DEFAULT_FOLDER_STRUCTURE = "*/*/*/*";
private static final String CAMUS_SINGLE_FOLDER_SWEEPER_TIME_FORMAT = "camus.single.folder.sweeper.time.format";
private static final String DEFAULT_CAMUS_SINGLE_FOLDER_SWEEPER_TIME_FORMAT = "YYYY/MM/dd/HH";
private static final String CAMUS_SINGLE_FOLDER_SWEEPER_MAX_HOURS_AGO = "camus.single.folder.sweeper.max.hours.ago";
private static final String DEFAULT_CAMUS_SINGLE_FOLDER_SWEEPER_MAX_HOURS_AGO = "1";
private static final String CAMUS_SINGLE_FOLDER_SWEEPER_MIN_HOURS_AGO = "camus.single.folder.sweeper.min.hours.ago";
private static final String DEFAULT_CAMUS_SINGLE_FOLDER_SWEEPER_MIN_HOURS_AGO = "1";
private static final Logger LOG = Logger.getLogger(CamusSingleFolderSweeperPlanner.class);
private DateTimeFormatter timeFormatter;
private DateUtils dUtils;
@Override
public CamusSweeperPlanner setPropertiesLogger(Properties props, Logger log) {
dUtils = new DateUtils(props);
timeFormatter =
dUtils.getDateTimeFormatter(props.getProperty(CAMUS_SINGLE_FOLDER_SWEEPER_TIME_FORMAT,
DEFAULT_CAMUS_SINGLE_FOLDER_SWEEPER_TIME_FORMAT));
return super.setPropertiesLogger(props, log);
}
private DateTime getFolderHour(Path datePath, Path inputDir) {
String datePathStr = datePath.toString();
String inputDirStr = inputDir.toString();
String dateStr = datePathStr.substring(datePathStr.indexOf(inputDirStr) + inputDirStr.length());
return timeFormatter.parseDateTime(dateStr.replaceAll("^/", ""));
}
@Override
public List<Properties> createSweeperJobProps(String topic, Path inputDir, Path outputDir, FileSystem fs)
throws IOException {
return createSweeperJobProps(topic, inputDir, outputDir, fs, new CamusSweeperMetrics());
}
/**
* Create hourly compaction properties for a topic.
* If a topic has multiple hourly folders that need to be deduped, there will be multiple jobs for this topic.
*/
@Override
protected List<Properties> createSweeperJobProps(String topic, Path inputDir, Path outputDir, FileSystem fs,
CamusSweeperMetrics metrics) throws IOException {
LOG.info("creating hourly sweeper job props: topic=" + topic + ", inputDir=" + inputDir + ", outputDir="
+ outputDir);
List<Properties> jobPropsList = new ArrayList<Properties>();
if (!fs.exists(inputDir)) {
LOG.warn("inputdir " + inputDir + " does not exist. Skipping topic " + topic);
return jobPropsList;
}
if (Boolean.valueOf(this.props.getProperty(CAMUS_SINGLE_FOLDER_SWEEPER_TIMEBASED,
DEFAULT_CAMUS_SINGLE_FOLDER_SWEEPER_TIMEBASED))) {
// Time-based sweeper. Each input folder is inputDir + folderStructure
LOG.info("Time-based sweeper");
String folderStructure =
props.getProperty(CAMUS_SINGLE_FOLDER_SWEEPER_FOLDER_STRUCTURE,
DEFAULT_CAMUS_SINGLE_FOLDER_SWEEPER_DEFAULT_FOLDER_STRUCTURE);
LOG.info("Sweeper folder structure: " + folderStructure);
for (FileStatus f : fs.globStatus(new Path(inputDir, folderStructure))) {
DateTime folderHour = getFolderHour(f.getPath(), inputDir);
if (shouldProcessHour(folderHour, topic)) {
Properties jobProps = createJobProps(topic, f.getPath(), folderHour, outputDir, fs, metrics);
if (jobProps != null) {
jobPropsList.add(jobProps);
}
}
}
} else {
// Non-time-based sweeper. Each input folder is simply inputDir
LOG.info("Non-time-based sweeper");
Properties jobProps = createJobProps(topic, inputDir, null, outputDir, fs, metrics);
if (jobProps != null) {
jobPropsList.add(jobProps);
}
}
return jobPropsList;
}
private Properties createJobProps(String topic, Path folder, DateTime folderHour, Path outputDir, FileSystem fs,
CamusSweeperMetrics metrics) throws IOException {
Properties jobProps = new Properties();
jobProps.putAll(props);
jobProps.put("topic", topic);
if (folderHour != null) {
jobProps.setProperty(CamusSingleFolderSweeper.FOLDER_HOUR, Long.toString(folderHour.getMillis()));
}
String topicAndHour = topic + ":" + (folderHour != null ? folderHour.toString(timeFormatter) : "");
jobProps.put(CamusSingleFolderSweeper.TOPIC_AND_HOUR, topicAndHour);
long dataSize = fs.getContentSummary(folder).getLength();
metrics.recordDataSizeByTopic(topicAndHour, dataSize);
metrics.addToTotalDataSize(dataSize);
List<Path> sourcePaths = new ArrayList<Path>();
sourcePaths.add(folder);
Path destPath = (folderHour != null ? new Path(outputDir, folderHour.toString(timeFormatter)) : outputDir);
jobProps.put(CamusSingleFolderSweeper.INPUT_PATHS, pathListToCommaSeperated(sourcePaths));
jobProps.put(CamusSingleFolderSweeper.DEST_PATH, destPath.toString());
if (!fs.exists(destPath)) {
LOG.info(topic + " dest dir " + destPath.toString() + " doesn't exist. Processing.");
return jobProps;
} else if (forceReprocess()) {
LOG.info(topic + " dest dir " + destPath.toString() + " exists, but force reprocess set to true. Reprocessing.");
return jobProps;
} else if (sourceDirHasOutliers(fs, sourcePaths, destPath)) {
LOG.info("found outliers for topic " + topic + ". Will add outliers to " + destPath.toString());
this.outlierProperties.add(jobProps);
return null;
} else {
LOG.info(topic + " dest dir " + destPath.toString() + " already exists. Skipping.");
return null;
}
}
private boolean forceReprocess() {
return Boolean.valueOf(this.props.getProperty("camus.sweeper.always.reprocess", Boolean.FALSE.toString()));
}
private boolean sourceDirHasOutliers(FileSystem fs, List<Path> sourcePaths, Path destPath) throws IOException {
long destinationModTime = CamusSingleFolderSweeper.getDestinationModTime(fs, destPath.toString());
for (Path source : sourcePaths) {
for (FileStatus status : fs.globStatus(new Path(source, "*"), new HiddenFilter())) {
if (status.getModificationTime() > destinationModTime) {
return true;
}
}
}
return false;
}
protected boolean shouldProcessHour(DateTime folderHour, String topic) {
DateTime currentHour = dUtils.getCurrentHour();
DateTime maxHoursAgo =
currentHour.minusHours(Integer.parseInt(props.getProperty(CAMUS_SINGLE_FOLDER_SWEEPER_MAX_HOURS_AGO,
DEFAULT_CAMUS_SINGLE_FOLDER_SWEEPER_MAX_HOURS_AGO)));
DateTime minHoursAgo =
currentHour.minusHours(Integer.parseInt(props.getProperty(CAMUS_SINGLE_FOLDER_SWEEPER_MIN_HOURS_AGO,
DEFAULT_CAMUS_SINGLE_FOLDER_SWEEPER_MIN_HOURS_AGO)));
return (folderHour.isAfter(maxHoursAgo) || folderHour.isEqual(maxHoursAgo))
&& (folderHour.isBefore(minHoursAgo) || folderHour.isEqual(minHoursAgo));
}
}
```
|
```package com.linkedin.camus.etl.kafka.common;
import java.util.Properties;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import org.apache.hadoop.conf.Configuration;
import org.easymock.EasyMock;
import com.linkedin.camus.etl.kafka.CamusJobTestWithMock;
public class EtlCountsForUnitTest extends EtlCounts {
public enum ProducerType {
REGULAR,
SEND_THROWS_EXCEPTION,
SEND_SUCCEED_THIRD_TIME;
}
public static ProducerType producerType = ProducerType.REGULAR;
public EtlCountsForUnitTest(EtlCounts other) {
super(other.getTopic(), other.getGranularity(), other.getStartTime());
this.counts = other.getCounts();
}
@Override
protected String getMonitoringEventClass(Configuration conf) {
return "com.linkedin.camus.etl.kafka.coders.MonitoringEventForUnitTest";
}
@Override
protected Producer createProducer(Properties props) {
switch (producerType) {
case REGULAR:
return new Producer(new ProducerConfig(props));
case SEND_THROWS_EXCEPTION:
return mockProducerSendThrowsException();
case SEND_SUCCEED_THIRD_TIME:
return mockProducerThirdSendSucceed();
default:
throw new RuntimeException("producer type not found");
}
}
private Producer mockProducerSendThrowsException() {
Producer mockProducer = EasyMock.createMock(Producer.class);
mockProducer.send((KeyedMessage) EasyMock.anyObject());
EasyMock.expectLastCall().andThrow(new RuntimeException("dummyException")).anyTimes();
mockProducer.close();
EasyMock.expectLastCall().anyTimes();
EasyMock.replay(mockProducer);
return mockProducer;
}
private Producer mockProducerThirdSendSucceed() {
Producer mockProducer = EasyMock.createMock(Producer.class);
mockProducer.send((KeyedMessage) EasyMock.anyObject());
EasyMock.expectLastCall().andThrow(new RuntimeException("dummyException")).times(2);
mockProducer.send((KeyedMessage) EasyMock.anyObject());
EasyMock.expectLastCall().times(1);
mockProducer.close();
EasyMock.expectLastCall().anyTimes();
EasyMock.replay(mockProducer);
return mockProducer;
}
public static void reset() {
producerType = ProducerType.REGULAR;
}
}
```
|
Please help me generate a test for this class.
|
```package com.linkedin.camus.etl.kafka.common;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import java.util.Map.Entry;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.IndexedRecord;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import com.linkedin.camus.coders.MessageEncoder;
import com.linkedin.camus.etl.kafka.CamusJob;
@JsonIgnoreProperties({ "trackingCount", "lastKey", "eventCount", "RANDOM" })
public class EtlCounts {
private static Logger log = Logger.getLogger(EtlCounts.class);
private static final String TOPIC = "topic";
private static final String GRANULARITY = "granularity";
private static final String COUNTS = "counts";
private static final String START_TIME = "startTime";
private static final String END_TIME = "endTime";
private static final String FIRST_TIMESTAMP = "firstTimestamp";
private static final String LAST_TIMESTAMP = "lastTimestamp";
private static final String ERROR_COUNT = "errorCount";
private static final String MONITORING_EVENT_CLASS = "monitoring.event.class";
public static final int NUM_TRIES_PUBLISH_COUNTS = 3;
private String topic;
private long startTime;
private long granularity;
private long errorCount;
private long endTime;
private long lastTimestamp;
private long firstTimestamp = Long.MAX_VALUE;
protected HashMap<String, Source> counts;
private transient EtlKey lastKey;
private transient int eventCount = 0;
private transient static final Random RANDOM = new Random();
public EtlCounts() {
}
public EtlCounts(String topic, long granularity, long currentTime) {
this.topic = topic;
this.granularity = granularity;
this.startTime = currentTime;
this.counts = new HashMap<String, Source>();
}
public EtlCounts(String topic, long granularity) {
this(topic, granularity, System.currentTimeMillis());
}
public EtlCounts(EtlCounts other) {
this(other.topic, other.granularity, other.startTime);
this.counts = other.counts;
}
public HashMap<String, Source> getCounts() {
return counts;
}
public long getEndTime() {
return endTime;
}
public long getErrorCount() {
return errorCount;
}
public long getFirstTimestamp() {
return firstTimestamp;
}
public long getGranularity() {
return granularity;
}
public long getLastTimestamp() {
return lastTimestamp;
}
public long getStartTime() {
return startTime;
}
public String getTopic() {
return topic;
}
public void setCounts(HashMap<String, Source> counts) {
this.counts = counts;
}
public void setEndTime(long endTime) {
this.endTime = endTime;
}
public void setErrorCount(long errorCount) {
this.errorCount = errorCount;
}
public void setFirstTimestamp(long firstTimestamp) {
this.firstTimestamp = firstTimestamp;
}
public void setGranularity(long granularity) {
this.granularity = granularity;
}
public void setLastTimestamp(long lastTimestamp) {
this.lastTimestamp = lastTimestamp;
}
public void setStartTime(long startTime) {
this.startTime = startTime;
}
public void setTopic(String topic) {
this.topic = topic;
}
public int getEventCount() {
return eventCount;
}
public EtlKey getLastKey() {
return lastKey;
}
public void setEventCount(int eventCount) {
this.eventCount = eventCount;
}
public void setLastKey(EtlKey lastKey) {
this.lastKey = lastKey;
}
public void incrementMonitorCount(EtlKey key) {
long monitorPartition = DateUtils.getPartition(granularity, key.getTime());
Source source = new Source(key.getServer(), key.getService(), monitorPartition);
if (counts.containsKey(source.toString())) {
Source countSource = counts.get(source.toString());
countSource.setCount(countSource.getCount() + 1);
counts.put(countSource.toString(), countSource);
} else {
source.setCount(1);
counts.put(source.toString(), source);
}
if (key.getTime() > lastTimestamp) {
lastTimestamp = key.getTime();
}
if (key.getTime() < firstTimestamp) {
firstTimestamp = key.getTime();
}
lastKey = new EtlKey(key);
eventCount++;
}
public void writeCountsToMap(ArrayList<Map<String, Object>> allCountObject, FileSystem fs, Path path)
throws IOException {
Map<String, Object> countFile = new HashMap<String, Object>();
countFile.put(TOPIC, topic);
countFile.put(GRANULARITY, granularity);
countFile.put(COUNTS, counts);
countFile.put(START_TIME, startTime);
countFile.put(END_TIME, endTime);
countFile.put(FIRST_TIMESTAMP, firstTimestamp);
countFile.put(LAST_TIMESTAMP, lastTimestamp);
countFile.put(ERROR_COUNT, errorCount);
allCountObject.add(countFile);
}
public void postTrackingCountToKafka(Configuration conf, String tier, String brokerList) {
MessageEncoder<IndexedRecord, byte[]> encoder;
AbstractMonitoringEvent monitoringDetails;
try {
encoder =
(MessageEncoder<IndexedRecord, byte[]>) Class.forName(conf.get(CamusJob.CAMUS_MESSAGE_ENCODER_CLASS))
.newInstance();
Properties props = new Properties();
for (Entry<String, String> entry : conf) {
props.put(entry.getKey(), entry.getValue());
}
encoder.init(props, "TrackingMonitoringEvent");
monitoringDetails =
(AbstractMonitoringEvent) Class.forName(getMonitoringEventClass(conf))
.getDeclaredConstructor(Configuration.class).newInstance(conf);
} catch (Exception e1) {
throw new RuntimeException(e1);
}
ArrayList<byte[]> monitorSet = new ArrayList<byte[]>();
int counts = 0;
for (Map.Entry<String, Source> singleCount : this.getCounts().entrySet()) {
Source countEntry = singleCount.getValue();
GenericRecord monitoringRecord =
monitoringDetails.createMonitoringEventRecord(countEntry, topic, granularity, tier);
byte[] message = encoder.toBytes((IndexedRecord) monitoringRecord);
monitorSet.add(message);
if (monitorSet.size() >= 2000) {
counts += monitorSet.size();
produceCount(brokerList, monitorSet);
monitorSet.clear();
}
}
if (monitorSet.size() > 0) {
counts += monitorSet.size();
produceCount(brokerList, monitorSet);
}
log.info(topic + " sent " + counts + " counts");
}
protected String getMonitoringEventClass(Configuration conf) {
return conf.get(MONITORING_EVENT_CLASS);
}
private void produceCount(String brokerList, ArrayList<byte[]> monitorSet) {
// Configure the async producer.
Properties props = new Properties();
props.put("metadata.broker.list", brokerList);
props.put("producer.type", "async");
props.put("request.required.acks", "1");
props.put("request.timeout.ms", "30000");
log.debug("Broker list: " + brokerList);
Producer producer = null;
try {
producer = createProducer(props);
for (byte[] message : monitorSet) {
for (int i = 0; i < NUM_TRIES_PUBLISH_COUNTS; i++) {
try {
KeyedMessage keyedMessage = new KeyedMessage("TrackingMonitoringEvent", message);
producer.send(keyedMessage);
break;
} catch (Exception e) {
log.error("Publishing count for topic " + topic + " to " + brokerList.toString() + " has failed " + (i + 1)
+ " times. " + (NUM_TRIES_PUBLISH_COUNTS - i - 1) + " more attempts will be made.");
if (i == NUM_TRIES_PUBLISH_COUNTS - 1) {
throw new RuntimeException(e.getMessage() + ": " + "Have retried maximum (" + NUM_TRIES_PUBLISH_COUNTS
+ ") times.");
}
try {
Thread.sleep((long) (Math.random() * (i + 1) * 1000));
} catch (InterruptedException e1) {
log.error("Caught interrupted exception between retries of publishing counts to Kafka. "
+ e1.getMessage());
}
}
}
}
} catch (Exception e) {
throw new RuntimeException("failed to publish counts to kafka: " + e.getMessage(), e);
} finally {
if (producer != null) {
producer.close();
}
}
}
protected Producer createProducer(Properties props) {
return new Producer(new ProducerConfig(props));
}
}
```
|
``````
|
Please help me generate a test for this class.
|
```package com.linkedin.camus.etl.kafka.mapred;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.log4j.Logger;
import org.codehaus.jackson.map.ObjectMapper;
import com.linkedin.camus.etl.RecordWriterProvider;
import com.linkedin.camus.etl.kafka.common.EtlCounts;
import com.linkedin.camus.etl.kafka.common.EtlKey;
public class EtlMultiOutputCommitter extends FileOutputCommitter {
private Pattern workingFileMetadataPattern;
private HashMap<String, EtlCounts> counts = new HashMap<String, EtlCounts>();
private HashMap<String, EtlKey> offsets = new HashMap<String, EtlKey>();
private HashMap<String, Long> eventCounts = new HashMap<String, Long>();
private TaskAttemptContext context;
private final RecordWriterProvider recordWriterProvider;
private Logger log;
private void mkdirs(FileSystem fs, Path path) throws IOException {
if (!fs.exists(path.getParent())) {
mkdirs(fs, path.getParent());
}
fs.mkdirs(path);
}
public void addCounts(EtlKey key) throws IOException {
String workingFileName = EtlMultiOutputFormat.getWorkingFileName(context, key);
if (!counts.containsKey(workingFileName))
counts.put(workingFileName,
new EtlCounts(key.getTopic(), EtlMultiOutputFormat.getMonitorTimeGranularityMs(context)));
counts.get(workingFileName).incrementMonitorCount(key);
addOffset(key);
}
public void addOffset(EtlKey key) {
String topicPart = key.getTopic() + "-" + key.getLeaderId() + "-" + key.getPartition();
EtlKey offsetKey = new EtlKey(key);
if (offsets.containsKey(topicPart)) {
long totalSize = offsets.get(topicPart).getTotalMessageSize() + key.getMessageSize();
long avgSize = totalSize / (eventCounts.get(topicPart) + 1);
offsetKey.setMessageSize(avgSize);
offsetKey.setTotalMessageSize(totalSize);
} else {
eventCounts.put(topicPart, 0L);
}
eventCounts.put(topicPart, eventCounts.get(topicPart) + 1);
offsets.put(topicPart, offsetKey);
}
public EtlMultiOutputCommitter(Path outputPath, TaskAttemptContext context, Logger log) throws IOException {
super(outputPath, context);
this.context = context;
try {
//recordWriterProvider = EtlMultiOutputFormat.getRecordWriterProviderClass(context).newInstance();
Class<RecordWriterProvider> rwp = EtlMultiOutputFormat.getRecordWriterProviderClass(context);
Constructor<RecordWriterProvider> crwp = rwp.getConstructor(TaskAttemptContext.class);
recordWriterProvider = crwp.newInstance(context);
} catch (Exception e) {
throw new IllegalStateException(e);
}
workingFileMetadataPattern = Pattern.compile(
"data\\.([^\\.]+)\\.([\\d_]+)\\.(\\d+)\\.([^\\.]+)-m-\\d+" + recordWriterProvider.getFilenameExtension());
this.log = log;
}
@Override
public void commitTask(TaskAttemptContext context) throws IOException {
ArrayList<Map<String, Object>> allCountObject = new ArrayList<Map<String, Object>>();
FileSystem fs = FileSystem.get(context.getConfiguration());
if (EtlMultiOutputFormat.isRunMoveData(context)) {
Path workPath = super.getWorkPath();
log.info("work path: " + workPath);
Path baseOutDir = EtlMultiOutputFormat.getDestinationPath(context);
log.info("Destination base path: " + baseOutDir);
for (FileStatus f : fs.listStatus(workPath)) {
String file = f.getPath().getName();
log.info("work file: " + file);
if (file.startsWith("data")) {
String workingFileName = file.substring(0, file.lastIndexOf("-m"));
EtlCounts count = counts.get(workingFileName);
count.setEndTime(System.currentTimeMillis());
String partitionedFile =
getPartitionedPath(context, file, count.getEventCount(), count.getLastKey().getOffset());
Path dest = new Path(baseOutDir, partitionedFile);
if (!fs.exists(dest.getParent())) {
mkdirs(fs, dest.getParent());
}
commitFile(context, f.getPath(), dest);
log.info("Moved file from: " + f.getPath() + " to: " + dest);
if (EtlMultiOutputFormat.isRunTrackingPost(context)) {
count.writeCountsToMap(allCountObject, fs, new Path(workPath, EtlMultiOutputFormat.COUNTS_PREFIX + "."
+ dest.getName().replace(recordWriterProvider.getFilenameExtension(), "")));
}
}
}
if (EtlMultiOutputFormat.isRunTrackingPost(context)) {
Path tempPath = new Path(workPath, "counts." + context.getConfiguration().get("mapred.task.id"));
OutputStream outputStream = new BufferedOutputStream(fs.create(tempPath));
ObjectMapper mapper = new ObjectMapper();
log.info("Writing counts to : " + tempPath.toString());
long time = System.currentTimeMillis();
mapper.writeValue(outputStream, allCountObject);
log.debug("Time taken : " + (System.currentTimeMillis() - time) / 1000);
}
} else {
log.info("Not moving run data.");
}
SequenceFile.Writer offsetWriter = SequenceFile.createWriter(fs, context.getConfiguration(),
new Path(super.getWorkPath(),
EtlMultiOutputFormat.getUniqueFile(context, EtlMultiOutputFormat.OFFSET_PREFIX, "")),
EtlKey.class, NullWritable.class);
for (String s : offsets.keySet()) {
log.info("Avg record size for " + offsets.get(s).getTopic() + ":" + offsets.get(s).getPartition() + " = "
+ offsets.get(s).getMessageSize());
offsetWriter.append(offsets.get(s), NullWritable.get());
}
offsetWriter.close();
super.commitTask(context);
}
protected void commitFile(JobContext job, Path source, Path target) throws IOException {
log.info(String.format("Moving %s to %s", source, target));
if (!FileSystem.get(job.getConfiguration()).rename(source, target)) {
log.error(String.format("Failed to move from %s to %s", source, target));
throw new IOException(String.format("Failed to move from %s to %s", source, target));
}
}
public String getPartitionedPath(JobContext context, String file, int count, long offset) throws IOException {
Matcher m = workingFileMetadataPattern.matcher(file);
if (!m.find()) {
throw new IOException("Could not extract metadata from working filename '" + file + "'");
}
String topic = m.group(1);
String leaderId = m.group(2);
String partition = m.group(3);
String encodedPartition = m.group(4);
String partitionedPath =
EtlMultiOutputFormat.getPartitioner(context, topic).generatePartitionedPath(context, topic, encodedPartition);
partitionedPath += "/" + EtlMultiOutputFormat.getPartitioner(context, topic).generateFileName(context, topic,
leaderId, Integer.parseInt(partition), count, offset, encodedPartition);
return partitionedPath + recordWriterProvider.getFilenameExtension();
}
}
```
|
```package mltk.predictor.evaluation;
import mltk.util.MathUtils;
import org.junit.Assert;
import org.junit.Test;
public class ErrorTest {
@Test
public void testLabel() {
double[] preds = {1, 0, 1, 0};
double[] targets = {1, 0, 0, 1};
Error metric = new Error();
Assert.assertEquals(0.5, metric.eval(preds, targets), MathUtils.EPSILON);
}
@Test
public void testProbability() {
double[] preds = {2, -1.5, 0.3, 5};
double[] targets = {1, 0, 0, 1};
Error metric = new Error();
Assert.assertEquals(0.25, metric.eval(preds, targets), MathUtils.EPSILON);
}
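// Hedged addition (not in the original suite): a raw score of exactly 0 falls on
// the decision boundary and is treated as class 0 by Error.eval.
@Test
public void testZeroScoreBoundary() {
double[] preds = {0};
double[] targets = {0};
Error metric = new Error();
Assert.assertEquals(0.0, metric.eval(preds, targets), MathUtils.EPSILON);
}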
}
```
|
Please help me generate a test for this class.
|
```package mltk.predictor.evaluation;
import mltk.core.Instances;
/**
* Class for evaluating error rate.
*
* @author Yin Lou
*
*/
public class Error extends SimpleMetric {
/**
* Constructor.
*/
public Error() {
super(false);
}
@Override
public double eval(double[] preds, double[] targets) {
double error = 0;
for (int i = 0; i < preds.length; i++) {
// Handles both probability and predicted label
double cls = preds[i] <= 0 ? 0 : 1;
if (cls != targets[i]) {
error++;
}
}
return error / preds.length;
}
@Override
public double eval(double[] preds, Instances instances) {
double error = 0;
for (int i = 0; i < preds.length; i++) {
// Handles both probability and predicted label
double cls = preds[i] <= 0 ? 0 : 1;
if (cls != instances.get(i).getTarget()) {
error++;
}
}
return error / preds.length;
}
}
```
|
```package mltk.predictor.tree.ensemble.brt;
import org.junit.Assert;
import org.junit.Test;
import mltk.predictor.tree.RegressionTreeLearner;
import mltk.predictor.tree.TreeLearner;
import mltk.util.MathUtils;
public class BRTUtilsTest {
@Test
public void testParseRegressionTreeLearner1() {
String baseLearner = "rt:l:100";
TreeLearner treeLearner = null;
treeLearner = BRTUtils.parseTreeLearner(baseLearner);
Assert.assertTrue(treeLearner instanceof RegressionTreeLearner);
RegressionTreeLearner rtLearner = (RegressionTreeLearner) treeLearner;
Assert.assertEquals(RegressionTreeLearner.Mode.NUM_LEAVES_LIMITED, rtLearner.getConstructionMode());
Assert.assertEquals(100, rtLearner.getMaxNumLeaves());
}
@Test
public void testParseRegressionTreeLearner2() {
String baseLearner = "rt:d:5";
TreeLearner treeLearner = null;
treeLearner = BRTUtils.parseTreeLearner(baseLearner);
Assert.assertTrue(treeLearner instanceof RegressionTreeLearner);
RegressionTreeLearner rtLearner = (RegressionTreeLearner) treeLearner;
Assert.assertEquals(RegressionTreeLearner.Mode.DEPTH_LIMITED, rtLearner.getConstructionMode());
Assert.assertEquals(5, rtLearner.getMaxDepth());
}
@Test
public void testParseRegressionTreeLearner3() {
String baseLearner = "rt:a:0.01";
TreeLearner treeLearner = null;
treeLearner = BRTUtils.parseTreeLearner(baseLearner);
Assert.assertTrue(treeLearner instanceof RegressionTreeLearner);
RegressionTreeLearner rtLearner = (RegressionTreeLearner) treeLearner;
Assert.assertEquals(RegressionTreeLearner.Mode.ALPHA_LIMITED, rtLearner.getConstructionMode());
Assert.assertEquals(0.01, rtLearner.getAlpha(), MathUtils.EPSILON);
}
@Test
public void testParseRegressionTreeLearner4() {
String baseLearner = "rt:s:50";
TreeLearner treeLearner = null;
treeLearner = BRTUtils.parseTreeLearner(baseLearner);
Assert.assertTrue(treeLearner instanceof RegressionTreeLearner);
RegressionTreeLearner rtLearner = (RegressionTreeLearner) treeLearner;
Assert.assertEquals(RegressionTreeLearner.Mode.MIN_LEAF_SIZE_LIMITED, rtLearner.getConstructionMode());
Assert.assertEquals(50, rtLearner.getMinLeafSize());
}
@Test
public void testParseRobustRegressionTreeLearner1() {
String baseLearner = "rrt:l:100";
TreeLearner treeLearner = null;
treeLearner = BRTUtils.parseTreeLearner(baseLearner);
Assert.assertTrue(treeLearner instanceof RegressionTreeLearner);
RegressionTreeLearner rtLearner = (RegressionTreeLearner) treeLearner;
Assert.assertEquals(RegressionTreeLearner.Mode.NUM_LEAVES_LIMITED, rtLearner.getConstructionMode());
Assert.assertEquals(100, rtLearner.getMaxNumLeaves());
}
@Test
public void testParseRobustRegressionTreeLearner2() {
String baseLearner = "rrt:d:5";
TreeLearner treeLearner = null;
treeLearner = BRTUtils.parseTreeLearner(baseLearner);
Assert.assertTrue(treeLearner instanceof RegressionTreeLearner);
RegressionTreeLearner rtLearner = (RegressionTreeLearner) treeLearner;
Assert.assertEquals(RegressionTreeLearner.Mode.DEPTH_LIMITED, rtLearner.getConstructionMode());
Assert.assertEquals(5, rtLearner.getMaxDepth());
}
@Test
public void testParseRobustRegressionTreeLearner3() {
String baseLearner = "rrt:a:0.01";
TreeLearner treeLearner = null;
treeLearner = BRTUtils.parseTreeLearner(baseLearner);
Assert.assertTrue(treeLearner instanceof RegressionTreeLearner);
RegressionTreeLearner rtLearner = (RegressionTreeLearner) treeLearner;
Assert.assertEquals(RegressionTreeLearner.Mode.ALPHA_LIMITED, rtLearner.getConstructionMode());
Assert.assertEquals(0.01, rtLearner.getAlpha(), MathUtils.EPSILON);
}
@Test
public void testParseRobustRegressionTreeLearner4() {
String baseLearner = "rrt:s:50";
TreeLearner treeLearner = null;
treeLearner = BRTUtils.parseTreeLearner(baseLearner);
Assert.assertTrue(treeLearner instanceof RegressionTreeLearner);
RegressionTreeLearner rtLearner = (RegressionTreeLearner) treeLearner;
Assert.assertEquals(RegressionTreeLearner.Mode.MIN_LEAF_SIZE_LIMITED, rtLearner.getConstructionMode());
Assert.assertEquals(50, rtLearner.getMinLeafSize());
}
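// Hedged addition (not in the original suite): parseTreeLearner requires exactly
// three colon-separated fields, so a malformed specification should be rejected.
@Test(expected = IllegalArgumentException.class)
public void testParseTreeLearnerInvalidFormat() {
BRTUtils.parseTreeLearner("rt:l");
}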
}
```
|
Please help me generate a test for this class.
|
```package mltk.predictor.tree.ensemble.brt;
import mltk.predictor.tree.TreeLearner;
import mltk.predictor.tree.DecisionTableLearner;
import mltk.predictor.tree.RegressionTreeLearner;
class BRTUtils {
public static TreeLearner parseTreeLearner(String baseLearner) {
String[] data = baseLearner.split(":");
if (data.length != 3) {
throw new IllegalArgumentException();
}
TreeLearner rtLearner = null;
switch(data[0]) {
case "rt":
rtLearner = new RegressionTreeLearner();
break;
case "rrt":
rtLearner = new RobustRegressionTreeLearner();
break;
case "dt":
rtLearner = new DecisionTableLearner();
break;
case "rdt":
rtLearner = new RobustDecisionTableLearner();
break;
default:
System.err.println("Unknown regression tree learner: " + data[0]);
throw new IllegalArgumentException();
}
rtLearner.setParameters(data[1] + ":" + data[2]);
return rtLearner;
}
}
```
|
```package mltk.core.io;
import org.junit.Assert;
import org.junit.Test;
import mltk.core.Instance;
public class InstancesReaderTest {
@Test
public void testDenseFormat() {
String[] data = {"0.0", "1.5", "?", "3"};
Instance instance = InstancesReader.parseDenseInstance(data, 3);
Assert.assertTrue(instance.isMissing(2));
}
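// Hedged addition (not in the original suite; assumes Instance.getTarget() reports
// the parsed class value): with classIndex = -1 the whole row becomes the feature
// vector and the target stays NaN.
@Test
public void testDenseFormatWithoutTarget() {
String[] data = {"0.0", "1.5", "?", "3"};
Instance instance = InstancesReader.parseDenseInstance(data, -1);
Assert.assertTrue(Double.isNaN(instance.getTarget()));
}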
}
```
|
Please help me generate a test for this class.
|
```package mltk.core.io;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;
import mltk.core.Attribute;
import mltk.core.Instance;
import mltk.core.Instances;
import mltk.core.NominalAttribute;
import mltk.core.NumericalAttribute;
import mltk.util.MathUtils;
import mltk.util.tuple.Pair;
/**
* Class for reading instances.
*
* @author Yin Lou
*
*/
public class InstancesReader {
/**
* Reads a set of instances from attribute file and data file. Attribute file can be null. Default delimiter is
* whitespace.
*
* @param attFile the attribute file.
* @param dataFile the data file.
* @return a set of instances.
* @throws IOException
*/
public static Instances read(String attFile, String dataFile) throws IOException {
return read(attFile, dataFile, "\\s+");
}
/**
* Reads a set of instances from attribute file and data file. Attribute file can be null.
*
* @param attFile the attribute file.
* @param dataFile the data file.
* @param delimiter the delimiter.
* @return a set of instances.
* @throws IOException
*/
public static Instances read(String attFile, String dataFile, String delimiter) throws IOException {
if (attFile != null) {
Pair<List<Attribute>, Attribute> pair = AttributesReader.read(attFile);
int classIndex = -1;
if (pair.v2 != null) {
classIndex = pair.v2.getIndex();
pair.v2.setIndex(-1);
}
List<Attribute> attributes = pair.v1;
Instances instances = new Instances(attributes, pair.v2);
int totalLength = instances.dimension();
if (classIndex != -1) {
totalLength++;
}
BufferedReader br = new BufferedReader(new FileReader(dataFile), 65535);
for (;;) {
String line = br.readLine();
if (line == null) {
break;
}
String[] data = line.split(delimiter);
Instance instance = null;
if (data.length >= 2 && data[1].indexOf(':') >= 0) {
// Sparse instance
instance = parseSparseInstance(data);
} else if (data.length == totalLength) {
// Dense instance
instance = parseDenseInstance(data, classIndex);
} else {
System.err.println("Processed as dense vector but the number of attributes provided in the attribute file does not match with the number of columns in this row");
}
if (instance != null) {
instances.add(instance);
}
}
br.close();
// Process skipped features
for (int i = attributes.size() - 1; i >= 0; i--) {
if (attributes.get(i).getIndex() < 0) {
attributes.remove(i);
}
}
return instances;
} else {
List<Attribute> attributes = new ArrayList<>();
Instances instances = new Instances(attributes);
int totalLength = -1;
TreeSet<Integer> attrSet = new TreeSet<>();
BufferedReader br = new BufferedReader(new FileReader(dataFile), 65535);
for (;;) {
String line = br.readLine();
if (line == null) {
break;
}
String[] data = line.split(delimiter);
Instance instance = null;
if (data.length >= 2 && data[1].indexOf(':') >= 0) {
// Sparse instance
instance = parseSparseInstance(data, attrSet);
} else {
// Dense instance
if (totalLength == -1) {
totalLength = data.length;
} else if (data.length == totalLength) {
instance = parseDenseInstance(data, -1);
}
}
if (instance != null) {
instances.add(instance);
}
}
br.close();
if (totalLength == -1) {
for (Integer attIndex : attrSet) {
Attribute att = new NumericalAttribute("f" + attIndex);
att.setIndex(attIndex);
attributes.add(att);
}
} else {
for (int j = 0; j < totalLength; j++) {
Attribute att = new NumericalAttribute("f" + j);
att.setIndex(j);
attributes.add(att);
}
}
assignTargetAttribute(instances);
return instances;
}
}
/**
* Reads a set of dense instances from data file. Default delimiter is whitespace.
*
* @param file the data file.
* @param targetIndex the index of the target attribute, -1 if no target attribute.
* @return a set of dense instances.
* @throws IOException
*/
public static Instances read(String file, int targetIndex) throws IOException {
return read(file, targetIndex, "\\s+");
}
/**
* Reads a set of dense instances from data file.
*
* @param file the data file.
* @param targetIndex the index of the target attribute, -1 if no target attribute.
* @param delimiter the delimiter.
* @return a set of dense instances.
* @throws IOException
*/
public static Instances read(String file, int targetIndex, String delimiter) throws IOException {
BufferedReader br = new BufferedReader(new FileReader(file), 65535);
List<Attribute> attributes = new ArrayList<>();
Instances instances = new Instances(attributes);
for (;;) {
String line = br.readLine();
if (line == null) {
break;
}
String[] data = line.split(delimiter);
Instance instance = parseDenseInstance(data, targetIndex);
instances.add(instance);
}
br.close();
int numAttributes = instances.get(0).getValues().length;
for (int i = 0; i < numAttributes; i++) {
Attribute att = new NumericalAttribute("f" + i);
att.setIndex(i);
attributes.add(att);
}
if (targetIndex >= 0) {
assignTargetAttribute(instances);
}
return instances;
}
/**
* Parses a dense instance from strings.
*
* @param data the string array.
* @param classIndex the class index.
* @return a dense instance from strings.
*/
static Instance parseDenseInstance(String[] data, int classIndex) {
double classValue = Double.NaN;
if (classIndex < 0) {
double[] vector = new double[data.length];
for (int i = 0; i < data.length; i++) {
vector[i] = parseDouble(data[i]);
}
return new Instance(vector, classValue);
} else {
double[] vector = new double[data.length - 1];
for (int i = 0; i < data.length; i++) {
double value = parseDouble(data[i]);
if (i < classIndex) {
vector[i] = value;
} else if (i > classIndex) {
vector[i - 1] = value;
} else {
classValue = value;
}
}
return new Instance(vector, classValue);
}
}
/**
* Parses a sparse instance from strings.
*
* @param data the string array.
* @param attrSet the attributes set.
* @return a sparse instance from strings.
*/
private static Instance parseSparseInstance(String[] data, TreeSet<Integer> attrSet) {
double targetValue = Double.parseDouble(data[0]);
int[] indices = new int[data.length - 1];
double[] values = new double[data.length - 1];
for (int i = 0; i < indices.length; i++) {
String[] pair = data[i + 1].split(":");
indices[i] = Integer.parseInt(pair[0]);
values[i] = Double.parseDouble(pair[1]);
attrSet.add(indices[i]);
}
return new Instance(indices, values, targetValue);
}
/**
* Parses a sparse instance from strings.
*
* @param data the string array.
* @return a sparse instance from strings.
*/
private static Instance parseSparseInstance(String[] data) {
double classValue = Double.parseDouble(data[0]);
int[] indices = new int[data.length - 1];
double[] values = new double[data.length - 1];
for (int i = 0; i < indices.length; i++) {
String[] pair = data[i + 1].split(":");
indices[i] = Integer.parseInt(pair[0]);
values[i] = Double.parseDouble(pair[1]);
}
return new Instance(indices, values, classValue);
}
/**
* Assigns target attribute for a dataset.
*
* @param instances the data set.
*/
private static void assignTargetAttribute(Instances instances) {
boolean isInteger = true;
for (Instance instance : instances) {
if (!MathUtils.isInteger(instance.getTarget())) {
isInteger = false;
break;
}
}
if (isInteger) {
TreeSet<Integer> set = new TreeSet<>();
for (Instance instance : instances) {
double target = instance.getTarget();
set.add((int) target);
}
String[] states = new String[set.size()];
int i = 0;
for (Integer v : set) {
states[i++] = v.toString();
}
instances.setTargetAttribute(new NominalAttribute("target", states));
} else {
instances.setTargetAttribute(new NumericalAttribute("target"));
}
}
/**
* Parses double value from a string. Missing value is supported.
*
* @param s the string to parse.
* @return double value.
*/
private static double parseDouble(String s) {
if (s.equals("?")) {
return Double.NaN;
} else {
return Double.parseDouble(s);
}
}
}
```
|
```package mltk.predictor.evaluation;
import mltk.util.MathUtils;
import org.junit.Assert;
import org.junit.Test;
public class LogisticLossTest {
@Test
public void test() {
double[] preds = {5, -5, -3, 3};
double[] targets = {1, 0, 0, 1};
LogisticLoss metric = new LogisticLoss();
Assert.assertEquals(0.02765135, metric.eval(preds, targets), MathUtils.EPSILON);
}
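// Hedged addition (not in the original suite; assumes OptimUtils averages the
// per-example losses, which the existing expected value implies): a zero raw
// score yields log(2) loss per example.
@Test
public void testZeroScore() {
double[] preds = {0, 0};
double[] targets = {1, 0};
LogisticLoss metric = new LogisticLoss();
Assert.assertEquals(Math.log(2), metric.eval(preds, targets), MathUtils.EPSILON);
}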
}
```
|
Please help me generate a test for this class.
|
```package mltk.predictor.evaluation;
import mltk.core.Instances;
import mltk.util.OptimUtils;
/**
* Class for evaluating logistic loss.
*
* @author Yin Lou
*
*/
public class LogisticLoss extends SimpleMetric {
/**
* Constructor.
*/
public LogisticLoss() {
super(false);
}
@Override
public double eval(double[] preds, double[] targets) {
return OptimUtils.computeLogisticLoss(preds, targets);
}
@Override
public double eval(double[] preds, Instances instances) {
double logisticLoss = 0;
for (int i = 0; i < preds.length; i++) {
logisticLoss += OptimUtils.computeLogisticLoss(preds[i], instances.get(i).getTarget());
}
logisticLoss /= preds.length;
return logisticLoss;
}
}
```
|
```package mltk.predictor.evaluation;
import org.junit.Assert;
import org.junit.Test;
public class MetricFactoryTest {
@Test
public void test() {
Assert.assertEquals(AUC.class, MetricFactory.getMetric("AUC").getClass());
Assert.assertEquals(Error.class, MetricFactory.getMetric("Error").getClass());
Assert.assertEquals(LogisticLoss.class, MetricFactory.getMetric("LogisticLoss").getClass());
Assert.assertEquals(LogLoss.class, MetricFactory.getMetric("LogLoss").getClass());
Assert.assertEquals(LogLoss.class, MetricFactory.getMetric("LogLoss:True").getClass());
Assert.assertEquals(true, ((LogLoss) MetricFactory.getMetric("LogLoss:True")).isRawScore());
Assert.assertEquals(MAE.class, MetricFactory.getMetric("MAE").getClass());
Assert.assertEquals(RMSE.class, MetricFactory.getMetric("RMSE").getClass());
}
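// Hedged addition (not in the original suite): an unrecognized single-token metric
// name should be rejected with an IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testUnknownMetric() {
MetricFactory.getMetric("unknown");
}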
}
```
|
Please help me generate a test for this class.
|
```package mltk.predictor.evaluation;
import java.util.HashMap;
import java.util.Map;
/**
* Factory class for creating metrics.
*
* @author Yin Lou
*
*/
public class MetricFactory {
private static Map<String, Metric> map;
static {
map = new HashMap<>();
map.put("auc", new AUC());
map.put("error", new Error());
map.put("logisticloss", new LogisticLoss());
map.put("logloss", new LogLoss(false));
map.put("logloss_t", new LogLoss(true));
map.put("mae", new MAE());
map.put("rmse", new RMSE());
}
/**
* Returns the metric.
*
* @param str the metric string.
* @return the metric.
*/
public static Metric getMetric(String str) {
String[] data = str.toLowerCase().split(":");
String name = data[0];
if (data.length == 1) {
if (!map.containsKey(name)) {
throw new IllegalArgumentException("Unrecognized metric name: " + name);
} else {
return map.get(name);
}
} else {
if (name.equals("logloss")) {
if (data[1].startsWith("t")) {
return map.get("logloss_t");
} else {
return map.get(name);
}
} else if (map.containsKey(name)) {
return map.get(name);
}
}
return null;
}
}
```
|
```package mltk.util;
import org.junit.Assert;
import org.junit.Test;
public class ArrayUtilsTest {
@Test
public void testParseDoubleArray() {
String str = "[1.1, 2.2, 3.3, 4.4]";
double[] a = {1.1, 2.2, 3.3, 4.4};
Assert.assertArrayEquals(a, ArrayUtils.parseDoubleArray(str), MathUtils.EPSILON);
}
@Test
public void testParseIntArray() {
String str = "[1, 2, 3, 4]";
int[] a = {1, 2, 3, 4};
Assert.assertArrayEquals(a, ArrayUtils.parseIntArray(str));
}
@Test
public void testIsConstant() {
int[] a = {1, 1, 1};
int[] b = {2, 1, 1};
Assert.assertTrue(ArrayUtils.isConstant(a, 0, a.length, 1));
Assert.assertFalse(ArrayUtils.isConstant(b, 0, b.length, 1));
Assert.assertTrue(ArrayUtils.isConstant(b, 1, b.length, 1));
double[] c = {0.1, 0.1, 0.1, 0.1};
double[] d = {0.2, 0.1, 0.1, 0.1};
Assert.assertTrue(ArrayUtils.isConstant(c, 0, c.length, 0.1));
Assert.assertFalse(ArrayUtils.isConstant(d, 0, d.length, 0.1));
Assert.assertTrue(ArrayUtils.isConstant(d, 1, d.length, 0.1));
}
@Test
public void testGetMedian() {
double[] a = {0.7, 0.4, 0.3, 0.2, 0.5, 0.6, 0.1};
Assert.assertEquals(0.4, ArrayUtils.getMedian(a), MathUtils.EPSILON);
}
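// Hedged addition (not in the original suite): round-trips a long array through its
// string form, mirroring the int and double cases above.
@Test
public void testParseLongArray() {
String str = "[1, 2, 3, 4]";
long[] a = {1, 2, 3, 4};
Assert.assertArrayEquals(a, ArrayUtils.parseLongArray(str));
}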
}
```
|
Please help me generate a test for this class.
|
```package mltk.util;
import java.util.Arrays;
import java.util.List;
/**
* Class for utility functions for arrays.
*
* @author Yin Lou
*
*/
public class ArrayUtils {
/**
* Converts an integer list to int array.
*
* @param list the list.
* @return an int array.
*/
public static int[] toIntArray(List<Integer> list) {
int[] a = new int[list.size()];
for (int i = 0; i < list.size(); i++) {
a[i] = list.get(i);
}
return a;
}
/**
* Converts a double list to double array.
*
* @param list the list.
* @return a double array.
*/
public static double[] toDoubleArray(List<Double> list) {
double[] a = new double[list.size()];
for (int i = 0; i < list.size(); i++) {
a[i] = list.get(i);
}
return a;
}
/**
* Converts a double array to an int array.
*
* @param a the double array.
* @return an int array.
*/
public static int[] toIntArray(double[] a) {
int[] b = new int[a.length];
for (int i = 0; i < a.length; i++) {
b[i] = (int) a[i];
}
return b;
}
/**
* Returns a string representation of the contents of the specified sub-array.
*
* @param a the array.
* @param start the starting index (inclusive).
* @param end the ending index (exclusive).
* @return a string representation of the contents of the specified sub-array.
*/
public static String toString(double[] a, int start, int end) {
StringBuilder sb = new StringBuilder();
sb.append("[").append(a[start]);
for (int i = start + 1; i < end; i++) {
sb.append(", ").append(a[i]);
}
sb.append("]");
return sb.toString();
}
/**
* Parses a double array from a string (default delimiter: ",").
*
* @param str the string representation of a double array.
* @return a double array.
*/
public static double[] parseDoubleArray(String str) {
return parseDoubleArray(str, ",");
}
/**
* Parses a double array from a string.
*
* @param str the string representation of a double array.
* @param delimiter the delimiter.
* @return a double array.
*/
public static double[] parseDoubleArray(String str, String delimiter) {
if (str == null || str.equalsIgnoreCase("null")) {
return null;
}
String[] data = str.substring(1, str.length() - 1).split(delimiter);
double[] a = new double[data.length];
for (int i = 0; i < a.length; i++) {
a[i] = Double.parseDouble(data[i].trim());
}
return a;
}
/**
* Parses an int array from a string (default delimiter: ",").
*
* @param str the string representation of an int array.
* @return an int array.
*/
public static int[] parseIntArray(String str) {
return parseIntArray(str, ",");
}
/**
* Parses an int array from a string.
*
* @param str the string representation of an int array.
* @param delimiter the delimiter.
* @return an int array.
*/
public static int[] parseIntArray(String str, String delimiter) {
if (str == null || str.equalsIgnoreCase("null")) {
return null;
}
String[] data = str.substring(1, str.length() - 1).split(delimiter);
int[] a = new int[data.length];
for (int i = 0; i < a.length; i++) {
a[i] = Integer.parseInt(data[i].trim());
}
return a;
}
/**
* Parses a long array from a string (default delimiter: ",").
*
* @param str the string representation of a long array.
* @return a long array.
*/
public static long[] parseLongArray(String str) {
return parseLongArray(str, ",");
}
/**
* Parses a long array from a string.
*
* @param str the string representation of a long array.
* @param delimiter the delimiter.
* @return a long array.
*/
public static long[] parseLongArray(String str, String delimiter) {
if (str == null || str.equalsIgnoreCase("null")) {
return null;
}
String[] data = str.substring(1, str.length() - 1).split(delimiter);
long[] a = new long[data.length];
for (int i = 0; i < a.length; i++) {
a[i] = Long.parseLong(data[i].trim());
}
return a;
}
/**
* Returns {@code true} if the specified range of an array is constant c.
*
* @param a the array.
* @param begin the index of first element (inclusive).
* @param end the index of last element (exclusive).
* @param c the constant to test.
* @return {@code true} if the specified range of an array is constant c.
*/
public static boolean isConstant(double[] a, int begin, int end, double c) {
for (int i = begin; i < end; i++) {
if (!MathUtils.equals(a[i], c)) {
return false;
}
}
return true;
}
/**
* Returns {@code true} if the specified range of an array is constant c.
*
* @param a the array.
* @param begin the index of first element (inclusive).
* @param end the index of last element (exclusive).
* @param c the constant to test.
* @return {@code true} if the specified range of an array is constant c.
*/
public static boolean isConstant(int[] a, int begin, int end, int c) {
for (int i = begin; i < end; i++) {
if (a[i] != c) {
return false;
}
}
return true;
}
/**
* Returns {@code true} if the specified range of an array is constant c.
*
* @param a the array.
* @param begin the index of first element (inclusive).
* @param end the index of last element (exclusive).
* @param c the constant to test.
* @return {@code true} if the specified range of an array is constant c.
*/
public static boolean isConstant(byte[] a, int begin, int end, byte c) {
for (int i = begin; i < end; i++) {
if (a[i] != c) {
return false;
}
}
return true;
}
/**
* Returns the median of an array.
*
* @param a the array.
* @return the median of an array.
*/
public static double getMedian(double[] a) {
if (a.length == 0) {
return 0;
}
double[] ary = Arrays.copyOf(a, a.length);
Arrays.sort(ary);
int mid = ary.length / 2;
if (ary.length % 2 == 1) {
return ary[mid];
} else {
return (ary[mid - 1] + ary[mid]) / 2;
}
}
/**
* Returns the index of the search key if it is contained in the array, otherwise returns the insertion point.
*
* @param a the array.
* @param key the search key.
* @return the index of the search key if it is contained in the array, otherwise returns the insertion point.
*/
public static int findInsertionPoint(double[] a, double key) {
int idx = Arrays.binarySearch(a, key);
if (idx < 0) {
idx = -idx - 1;
}
return idx;
}
}
```
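A small self-contained sketch (class name is mine, not part of MLTK) showing the round trip between `toString` and `parseDoubleArray`, and the even-length median behavior:
```package mltk.util;
import java.util.Arrays;
// Hypothetical demo class, not part of MLTK.
public class ArrayUtilsDemo {
    public static void main(String[] args) {
        double[] a = {1.1, 2.2, 3.3, 4.4};
        // toString produces "[1.1, 2.2, 3.3, 4.4]", which parseDoubleArray accepts back.
        String s = ArrayUtils.toString(a, 0, a.length);
        double[] b = ArrayUtils.parseDoubleArray(s);
        System.out.println(Arrays.equals(a, b)); // true
        // For even-length arrays the median is the mean of the two middle values.
        System.out.println(ArrayUtils.getMedian(a)); // (2.2 + 3.3) / 2 = 2.75
    }
}
```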
|
```package mltk.predictor.tree.ensemble.brt;
import org.junit.Assert;
import org.junit.Test;
import mltk.core.Instances;
import mltk.core.InstancesTestHelper;
import mltk.predictor.evaluation.Evaluator;
import mltk.predictor.evaluation.MetricFactory;
import mltk.predictor.tree.TreeLearner;
public class LogitBoostLearnerTest {
@Test
public void testLogitBoostLearner() {
TreeLearner treeLearner = BRTUtils.parseTreeLearner("rrt:d:3");
Instances instances = InstancesTestHelper.getInstance().getDenseClassificationDataset();
LogitBoostLearner learner = new LogitBoostLearner();
learner.setLearningRate(0.1);
learner.setMetric(MetricFactory.getMetric("auc"));
learner.setTreeLearner(treeLearner);
BRT brt = learner.buildBinaryClassifier(instances, 10);
double auc = Evaluator.evalAreaUnderROC(brt, instances);
Assert.assertTrue(auc > 0.5);
}
}
```
|
Please help me generate a test for this class.
|
```package mltk.predictor.tree.ensemble.brt;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import mltk.cmdline.Argument;
import mltk.cmdline.CmdLineParser;
import mltk.cmdline.options.HoldoutValidatedLearnerOptions;
import mltk.core.Attribute;
import mltk.core.Instance;
import mltk.core.Instances;
import mltk.core.NominalAttribute;
import mltk.core.io.InstancesReader;
import mltk.predictor.evaluation.ConvergenceTester;
import mltk.predictor.evaluation.Error;
import mltk.predictor.evaluation.Metric;
import mltk.predictor.evaluation.MetricFactory;
import mltk.predictor.evaluation.SimpleMetric;
import mltk.predictor.io.PredictorWriter;
import mltk.predictor.tree.RTree;
import mltk.predictor.tree.TreeLearner;
import mltk.util.MathUtils;
import mltk.util.OptimUtils;
import mltk.util.Permutation;
import mltk.util.Random;
/**
* Class for logit boost learner.
*
* <p>
* Reference:<br>
* P. Li. Robust logitboost and adaptive base class (abc) logitboost. In <i>Proceedings of the 26th Conference on
* Uncertainty in Artificial Intelligence (UAI)</i>, Catalina Island, CA, USA, 2010.
* </p>
*
* @author Yin Lou
*
*/
public class LogitBoostLearner extends BRTLearner {
static class Options extends HoldoutValidatedLearnerOptions {
@Argument(name = "-b", description = "base learner (tree:mode:parameter) (default: rt:l:100)")
String baseLearner = "rt:l:100";
@Argument(name = "-m", description = "maximum number of iterations", required = true)
int maxNumIters = -1;
@Argument(name = "-s", description = "seed of the random number generator (default: 0)")
long seed = 0L;
@Argument(name = "-l", description = "learning rate (default: 0.01)")
double learningRate = 0.01;
}
/**
* Trains an additive logistic regression.
*
* <pre>
* Usage: mltk.predictor.tree.ensemble.brt.LogitBoostLearner
* -t train set path
* -m maximum number of iterations
* [-v] valid set path
* [-e] evaluation metric (default: default metric of task)
* [-S] convergence criteria (default: -1)
* [-r] attribute file path
* [-o] output model path
* [-V] verbose (default: true)
* [-b] base learner (tree:mode:parameter) (default: rt:l:100)
* [-s] seed of the random number generator (default: 0)
* [-l] learning rate (default: 0.01)
* </pre>
*
* @param args the command line arguments.
* @throws Exception
*/
public static void main(String[] args) throws Exception {
Options opts = new Options();
CmdLineParser parser = new CmdLineParser(LogitBoostLearner.class, opts);
Metric metric = null;
TreeLearner rtLearner = null;
try {
parser.parse(args);
if (opts.metric == null) {
metric = new Error();
} else {
metric = MetricFactory.getMetric(opts.metric);
}
// Using robust version of the base tree learner
opts.baseLearner = "r" + opts.baseLearner;
rtLearner = BRTUtils.parseTreeLearner(opts.baseLearner);
} catch (IllegalArgumentException e) {
parser.printUsage();
System.exit(1);
}
Random.getInstance().setSeed(opts.seed);
ConvergenceTester ct = ConvergenceTester.parse(opts.cc);
Instances trainSet = InstancesReader.read(opts.attPath, opts.trainPath);
LogitBoostLearner learner = new LogitBoostLearner();
learner.setLearningRate(opts.learningRate);
learner.setMaxNumIters(opts.maxNumIters);
learner.setVerbose(opts.verbose);
learner.setMetric(metric);
learner.setTreeLearner(rtLearner);
learner.setConvergenceTester(ct);
if (opts.validPath != null) {
Instances validSet = InstancesReader.read(opts.attPath, opts.validPath);
learner.setValidSet(validSet);
}
long start = System.currentTimeMillis();
BRT brt = learner.build(trainSet);
long end = System.currentTimeMillis();
System.out.println("Time: " + (end - start) / 1000.0 + " (s).");
if (opts.outputModelPath != null) {
PredictorWriter.write(brt, opts.outputModelPath);
}
}
/**
* Constructor.
*/
public LogitBoostLearner() {
}
@Override
public BRT build(Instances instances) {
if (metric == null) {
metric = new Error();
}
if (validSet != null) {
return buildClassifier(instances, validSet, maxNumIters);
} else {
return buildClassifier(instances, maxNumIters);
}
}
/**
* Builds a binary classifier.
*
* @param trainSet the training set.
* @param validSet the validation set.
* @param maxNumIters the maximum number of iterations.
* @return a binary classifier.
*/
public BRT buildBinaryClassifier(Instances trainSet, Instances validSet, int maxNumIters) {
Attribute classAttribute = trainSet.getTargetAttribute();
if (classAttribute.getType() != Attribute.Type.NOMINAL) {
throw new IllegalArgumentException("Class attribute must be nominal.");
}
NominalAttribute clazz = (NominalAttribute) classAttribute;
if (clazz.getCardinality() != 2) {
throw new IllegalArgumentException("Only binary classification is accepted.");
}
BRT brt = new BRT(1);
treeLearner.cache(trainSet);
List<Attribute> attributes = trainSet.getAttributes();
int limit = (int) (attributes.size() * alpha);
int[] indices = new int[limit];
Permutation perm = new Permutation(attributes.size());
if (alpha < 1) {
perm.permute();
}
// Backup targets and weights
double[] targetTrain = new double[trainSet.size()];
double[] weightTrain = new double[targetTrain.length];
for (int i = 0; i < targetTrain.length; i++) {
Instance instance = trainSet.get(i);
targetTrain[i] = instance.getTarget();
weightTrain[i] = instance.getWeight();
}
// Initialization
double[] predTrain = new double[targetTrain.length];
double[] probTrain = new double[targetTrain.length];
computeProbabilities(predTrain, probTrain);
double[] rTrain = new double[targetTrain.length];
OptimUtils.computePseudoResidual(predTrain, targetTrain, rTrain);
double[] predValid = new double[validSet.size()];
// Resets the convergence tester
ct.setMetric(metric);
for (int iter = 0; iter < maxNumIters; iter++) {
// Prepare attributes
if (alpha < 1) {
int[] a = perm.getPermutation();
for (int i = 0; i < indices.length; i++) {
indices[i] = a[i];
}
Arrays.sort(indices);
List<Attribute> attList = trainSet.getAttributes(indices);
trainSet.setAttributes(attList);
}
// Prepare training set
for (int i = 0; i < targetTrain.length; i++) {
Instance instance = trainSet.get(i);
double prob = probTrain[i];
double w = prob * (1 - prob);
instance.setTarget(rTrain[i] * weightTrain[i]);
instance.setWeight(w * weightTrain[i]);
}
RTree rt = (RTree) treeLearner.build(trainSet);
if (learningRate != 1) {
rt.multiply(learningRate);
}
brt.trees[0].add(rt);
for (int i = 0; i < predTrain.length; i++) {
double pred = rt.regress(trainSet.get(i));
predTrain[i] += pred;
}
for (int i = 0; i < predValid.length; i++) {
double pred = rt.regress(validSet.get(i));
predValid[i] += pred;
}
if (alpha < 1) {
// Restore attributes
trainSet.setAttributes(attributes);
}
// Update residuals and probabilities
OptimUtils.computePseudoResidual(predTrain, targetTrain, rTrain);
computeProbabilities(predTrain, probTrain);
double measure = metric.eval(predValid, validSet);
ct.add(measure);
if (verbose) {
System.out.println("Iteration " + iter + ": " + measure);
}
if (ct.isConverged()) {
break;
}
}
// Search for the best model on the validation set
int idx = ct.getBestIndex();
for (int i = brt.trees[0].size() - 1; i > idx; i--) {
brt.trees[0].removeLast();
}
// Restore targets and weights
for (int i = 0; i < targetTrain.length; i++) {
Instance instance = trainSet.get(i);
instance.setTarget(targetTrain[i]);
instance.setWeight(weightTrain[i]);
}
treeLearner.evictCache();
return brt;
}
/**
* Builds a binary classifier.
*
* @param trainSet the training set.
* @param maxNumIters the maximum number of iterations.
* @return a binary classifier.
*/
public BRT buildBinaryClassifier(Instances trainSet, int maxNumIters) {
Attribute classAttribute = trainSet.getTargetAttribute();
if (classAttribute.getType() != Attribute.Type.NOMINAL) {
throw new IllegalArgumentException("Class attribute must be nominal.");
}
NominalAttribute clazz = (NominalAttribute) classAttribute;
if (clazz.getCardinality() != 2) {
throw new IllegalArgumentException("Only binary classification is accepted.");
}
SimpleMetric simpleMetric = (SimpleMetric) metric;
BRT brt = new BRT(1);
treeLearner.cache(trainSet);
List<Attribute> attributes = trainSet.getAttributes();
int limit = (int) (attributes.size() * alpha);
int[] indices = new int[limit];
Permutation perm = new Permutation(attributes.size());
if (alpha < 1) {
perm.permute();
}
// Backup targets and weights
double[] targetTrain = new double[trainSet.size()];
double[] weightTrain = new double[targetTrain.length];
for (int i = 0; i < targetTrain.length; i++) {
Instance instance = trainSet.get(i);
targetTrain[i] = instance.getTarget();
weightTrain[i] = instance.getWeight();
}
// Initialization
double[] predTrain = new double[targetTrain.length];
double[] probTrain = new double[targetTrain.length];
computeProbabilities(predTrain, probTrain);
double[] rTrain = new double[targetTrain.length];
OptimUtils.computePseudoResidual(predTrain, targetTrain, rTrain);
List<Double> measureList = new ArrayList<>(maxNumIters);
for (int iter = 0; iter < maxNumIters; iter++) {
// Prepare attributes
if (alpha < 1) {
int[] a = perm.getPermutation();
for (int i = 0; i < indices.length; i++) {
indices[i] = a[i];
}
Arrays.sort(indices);
List<Attribute> attList = trainSet.getAttributes(indices);
trainSet.setAttributes(attList);
}
// Prepare training set
for (int i = 0; i < targetTrain.length; i++) {
Instance instance = trainSet.get(i);
double prob = probTrain[i];
double w = prob * (1 - prob);
instance.setTarget(rTrain[i] * weightTrain[i]);
instance.setWeight(w * weightTrain[i]);
}
RTree rt = (RTree) treeLearner.build(trainSet);
if (learningRate != 1) {
rt.multiply(learningRate);
}
brt.trees[0].add(rt);
for (int i = 0; i < predTrain.length; i++) {
double pred = rt.regress(trainSet.get(i));
predTrain[i] += pred;
}
if (alpha < 1) {
// Restore attributes
trainSet.setAttributes(attributes);
}
// Update residuals and probabilities
OptimUtils.computePseudoResidual(predTrain, targetTrain, rTrain);
computeProbabilities(predTrain, probTrain);
double measure = simpleMetric.eval(predTrain, targetTrain);
measureList.add(measure);
if (verbose) {
System.out.println("Iteration " + iter + ": " + measure);
}
}
// Search for the best model using the training-set metric values
int idx = metric.searchBestMetricValueIndex(measureList);
for (int i = brt.trees[0].size() - 1; i > idx; i--) {
brt.trees[0].removeLast();
}
// Restore targets and weights
for (int i = 0; i < targetTrain.length; i++) {
Instance instance = trainSet.get(i);
instance.setTarget(targetTrain[i]);
instance.setWeight(weightTrain[i]);
}
treeLearner.evictCache();
return brt;
}
/**
* Builds a classifier.
*
* @param trainSet the training set.
* @param validSet the validation set.
* @param maxNumIters the maximum number of iterations.
* @return a classifier.
*/
public BRT buildClassifier(Instances trainSet, Instances validSet, int maxNumIters) {
Attribute classAttribute = trainSet.getTargetAttribute();
if (classAttribute.getType() != Attribute.Type.NOMINAL) {
throw new IllegalArgumentException("Class attribute must be nominal.");
}
NominalAttribute clazz = (NominalAttribute) classAttribute;
final int numClasses = clazz.getCardinality();
if (numClasses == 2) {
return buildBinaryClassifier(trainSet, validSet, maxNumIters);
} else {
System.err.println("Multiclass LogitBoost, only use mis-classification rate as metric now");
final double l = learningRate * (numClasses - 1.0) / numClasses;
BRT brt = new BRT(numClasses);
treeLearner.cache(trainSet);
List<Attribute> attributes = trainSet.getAttributes();
int limit = (int) (attributes.size() * alpha);
int[] indices = new int[limit];
Permutation perm = new Permutation(attributes.size());
if (alpha < 1) {
perm.permute();
}
// Backup targets and weights
double[] targetTrain = new double[trainSet.size()];
double[] weightTrain = new double[targetTrain.length];
for (int i = 0; i < targetTrain.length; i++) {
Instance instance = trainSet.get(i);
targetTrain[i] = instance.getTarget();
weightTrain[i] = instance.getWeight();
}
double[] targetValid = new double[validSet.size()];
for (int i = 0; i < targetValid.length; i++) {
targetValid[i] = validSet.get(i).getTarget();
}
// Initialization
double[][] predTrain = new double[numClasses][targetTrain.length];
double[][] probTrain = new double[numClasses][targetTrain.length];
int[][] rTrain = new int[numClasses][targetTrain.length];
for (int k = 0; k < numClasses; k++) {
int[] rkTrain = rTrain[k];
double[] probkTrain = probTrain[k];
for (int i = 0; i < rkTrain.length; i++) {
rkTrain[i] = MathUtils.indicator(targetTrain[i] == k);
probkTrain[i] = 1.0 / numClasses;
}
}
double[][] predValid = new double[numClasses][validSet.size()];
for (int iter = 0; iter < maxNumIters; iter++) {
// Prepare attributes
if (alpha < 1) {
int[] a = perm.getPermutation();
for (int i = 0; i < indices.length; i++) {
indices[i] = a[i];
}
Arrays.sort(indices);
List<Attribute> attList = trainSet.getAttributes(indices);
trainSet.setAttributes(attList);
}
for (int k = 0; k < numClasses; k++) {
// Prepare training set
int[] rkTrain = rTrain[k];
double[] probkTrain = probTrain[k];
for (int i = 0; i < targetTrain.length; i++) {
Instance instance = trainSet.get(i);
double pk = probkTrain[i];
double t = rkTrain[i] - pk;
double w = pk * (1 - pk);
instance.setTarget(t * weightTrain[i]);
instance.setWeight(w * weightTrain[i]);
}
RTree rt = (RTree) treeLearner.build(trainSet);
rt.multiply(l);
brt.trees[k].add(rt);
double[] predkTrain = predTrain[k];
for (int i = 0; i < predkTrain.length; i++) {
double p = rt.regress(trainSet.get(i));
predkTrain[i] += p;
}
double[] predkValid = predValid[k];
for (int i = 0; i < predkValid.length; i++) {
double p = rt.regress(validSet.get(i));
predkValid[i] += p;
}
}
if (alpha < 1) {
// Restore attributes
trainSet.setAttributes(attributes);
}
// Update probabilities
computeProbabilities(predTrain, probTrain);
if (verbose) {
double error = 0;
for (int i = 0; i < targetValid.length; i++) {
double p = 0;
double max = Double.NEGATIVE_INFINITY;
for (int k = 0; k < numClasses; k++) {
if (predValid[k][i] > max) {
max = predValid[k][i];
p = k;
}
}
if (p != targetValid[i]) {
error++;
}
}
error /= targetValid.length;
System.out.println("Iteration " + iter + ": " + error);
}
}
// Restore targets and weights
for (int i = 0; i < targetTrain.length; i++) {
Instance instance = trainSet.get(i);
instance.setTarget(targetTrain[i]);
instance.setWeight(weightTrain[i]);
}
treeLearner.evictCache();
return brt;
}
}
/**
* Builds a classifier.
*
* @param trainSet the training set.
* @param maxNumIters the maximum number of iterations.
* @return a classifier.
*/
public BRT buildClassifier(Instances trainSet, int maxNumIters) {
Attribute classAttribute = trainSet.getTargetAttribute();
if (classAttribute.getType() != Attribute.Type.NOMINAL) {
throw new IllegalArgumentException("Class attribute must be nominal.");
}
NominalAttribute clazz = (NominalAttribute) classAttribute;
final int numClasses = clazz.getCardinality();
if (numClasses == 2) {
return buildBinaryClassifier(trainSet, maxNumIters);
} else {
final int n = trainSet.size();
final double l = learningRate * (numClasses - 1.0) / numClasses;
BRT brt = new BRT(numClasses);
treeLearner.cache(trainSet);
List<Attribute> attributes = trainSet.getAttributes();
int limit = (int) (attributes.size() * alpha);
int[] indices = new int[limit];
Permutation perm = new Permutation(attributes.size());
if (alpha < 1) {
perm.permute();
}
// Backup targets and weights
double[] target = new double[n];
double[] weight = new double[n];
for (int i = 0; i < n; i++) {
Instance instance = trainSet.get(i);
target[i] = instance.getTarget();
weight[i] = instance.getWeight();
}
// Initialization
double[][] predTrain = new double[numClasses][n];
double[][] probTrain = new double[numClasses][n];
int[][] rTrain = new int[numClasses][n];
for (int k = 0; k < numClasses; k++) {
int[] rkTrain = rTrain[k];
double[] probkTrain = probTrain[k];
for (int i = 0; i < n; i++) {
rkTrain[i] = MathUtils.indicator(target[i] == k);
probkTrain[i] = 1.0 / numClasses;
}
}
for (int iter = 0; iter < maxNumIters; iter++) {
// Prepare attributes
if (alpha < 1) {
int[] a = perm.getPermutation();
for (int i = 0; i < indices.length; i++) {
indices[i] = a[i];
}
Arrays.sort(indices);
List<Attribute> attList = trainSet.getAttributes(indices);
trainSet.setAttributes(attList);
}
for (int k = 0; k < numClasses; k++) {
// Prepare training set
int[] rkTrain = rTrain[k];
double[] probkTrain = probTrain[k];
for (int i = 0; i < n; i++) {
Instance instance = trainSet.get(i);
double pk = probkTrain[i];
double t = rkTrain[i] - pk;
double w = pk * (1 - pk);
instance.setTarget(t * weight[i]);
instance.setWeight(w * weight[i]);
}
RTree rt = (RTree) treeLearner.build(trainSet);
rt.multiply(l);
brt.trees[k].add(rt);
double[] predkTrain = predTrain[k];
for (int i = 0; i < n; i++) {
double p = rt.regress(trainSet.get(i));
predkTrain[i] += p;
}
}
if (alpha < 1) {
// Restore attributes
trainSet.setAttributes(attributes);
}
// Update probabilities
computeProbabilities(predTrain, probTrain);
if (verbose) {
double error = 0;
for (int i = 0; i < n; i++) {
double p = 0;
double maxProb = -1;
for (int k = 0; k < numClasses; k++) {
if (probTrain[k][i] > maxProb) {
maxProb = probTrain[k][i];
p = k;
}
}
if (p != target[i]) {
error++;
}
}
error /= n;
System.out.println("Iteration " + iter + ": " + error);
}
}
// Restore targets and weights
for (int i = 0; i < n; i++) {
Instance instance = trainSet.get(i);
instance.setTarget(target[i]);
instance.setWeight(weight[i]);
}
treeLearner.evictCache();
return brt;
}
}
@Override
public void setTreeLearner(TreeLearner treeLearner) {
if (!treeLearner.isRobust()) {
throw new IllegalArgumentException("Only robust tree learners are accepted");
}
this.treeLearner = treeLearner;
}
protected void computeProbabilities(double[] pred, double[] prob) {
for (int i = 0; i < pred.length; i++) {
prob[i] = MathUtils.sigmoid(pred[i]);
}
}
protected void computeProbabilities(double[][] pred, double[][] prob) {
for (int i = 0; i < pred[0].length; i++) {
double max = Double.NEGATIVE_INFINITY;
for (int k = 0; k < pred.length; k++) {
if (max < pred[k][i]) {
max = pred[k][i];
}
}
double sum = 0;
for (int k = 0; k < pred.length; k++) {
double p = Math.exp(pred[k][i] - max);
prob[k][i] = p;
sum += p;
}
for (int k = 0; k < pred.length; k++) {
prob[k][i] /= sum;
}
}
}
}
```
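The per-iteration reweighting in `buildBinaryClassifier` above follows the standard LogitBoost recipe: each instance's new regression target is the pseudo residual r = y - p and its weight is p(1 - p). A stand-alone numeric sketch (not MLTK code; values are illustrative only):
```// Hypothetical stand-alone sketch of the LogitBoost working response, matching
// the target/weight assignments inside buildBinaryClassifier above.
public class LogitBoostWeightSketch {
    public static void main(String[] args) {
        double pred = 0.5; // current additive score F(x)
        double y = 1.0;    // class label in {0, 1}
        double p = 1.0 / (1.0 + Math.exp(-pred)); // sigmoid(F(x)) ~= 0.622
        double r = y - p;                         // pseudo residual: the tree's target
        double w = p * (1 - p);                   // Newton weight, largest near p = 0.5
        System.out.printf("p=%.3f target=%.3f weight=%.3f%n", p, r, w);
    }
}
```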
|
```package mltk.util;
import org.junit.Assert;
import org.junit.Test;
public class OptimUtilsTest {
@Test
public void testGetProbability() {
Assert.assertEquals(0.5, OptimUtils.getProbability(0), MathUtils.EPSILON);
}
@Test
public void testGetResidual() {
Assert.assertEquals(-1.0, OptimUtils.getResidual(1.0, 0), MathUtils.EPSILON);
}
@Test
public void testGetPseudoResidual() {
Assert.assertEquals(0.5, OptimUtils.getPseudoResidual(0, 1), MathUtils.EPSILON);
Assert.assertEquals(-0.5, OptimUtils.getPseudoResidual(0, 0), MathUtils.EPSILON);
}
@Test
public void testComputeLogisticLoss() {
Assert.assertEquals(0.693147181, OptimUtils.computeLogisticLoss(0, 1), MathUtils.EPSILON);
Assert.assertEquals(0.693147181, OptimUtils.computeLogisticLoss(0, -1), MathUtils.EPSILON);
Assert.assertEquals(0.006715348, OptimUtils.computeLogisticLoss(5, 1), MathUtils.EPSILON);
Assert.assertEquals(5.006715348, OptimUtils.computeLogisticLoss(5, -1), MathUtils.EPSILON);
}
@Test
public void testIsConverged() {
Assert.assertTrue(OptimUtils.isConverged(0.100000001, 0.1, 1e-6));
Assert.assertFalse(OptimUtils.isConverged(0.15, 0.1, 1e-6));
}
}
```
|
Please help me generate a test for this class.
|
```package mltk.util;
import java.util.List;
/**
* Class for utility functions for optimization.
*
* @author Yin Lou
*
*/
public class OptimUtils {
/**
* Returns the gain for variance reduction. This method is mostly used
* in tree learners.
*
* @param sum the sum of responses.
* @param weight the total weight.
* @return the gain for variance reduction.
*/
public static double getGain(double sum, double weight) {
if (weight < MathUtils.EPSILON) {
return 0;
} else {
return sum / weight * sum;
}
}
/**
* Returns the probability of being in positive class.
*
* @param pred the prediction.
* @return the probability of being in positive class.
*/
public static double getProbability(double pred) {
return 1.0 / (1.0 + Math.exp(-pred));
}
/**
* Returns the residual.
*
* @param pred the prediction.
* @param target the target.
* @return the residual.
*/
public static double getResidual(double pred, double target) {
return target - pred;
}
/**
* Returns the pseudo residual.
*
* @param pred the prediction.
* @param cls the class label.
* @return the pseudo residual.
*/
public static double getPseudoResidual(double pred, double cls) {
return cls - getProbability(pred);
}
/**
* Computes the pseudo residuals.
*
* @param prediction the prediction array.
* @param y the class label array.
* @param residual the residual array.
*/
public static void computePseudoResidual(double[] prediction, double[] y, double[] residual) {
for (int i = 0; i < residual.length; i++) {
residual[i] = getPseudoResidual(prediction[i], y[i]);
}
}
/**
* Computes the probabilities.
*
* @param pred the prediction array.
* @param prob the probability array.
*/
public static void computeProbabilities(double[] pred, double[] prob) {
for (int i = 0; i < pred.length; i++) {
prob[i] = getProbability(pred[i]);
}
}
/**
* Computes the logistic loss for binary classification problems.
*
* @param pred the prediction.
* @param cls the class label.
* @return the logistic loss for binary classification problems.
*/
public static double computeLogisticLoss(double pred, double cls) {
if (cls == 1) {
return Math.log(1 + Math.exp(-pred));
} else {
return Math.log(1 + Math.exp(pred));
}
}
/**
* Computes the logistic loss for binary classification problems.
*
* @param pred the prediction array.
* @param y the class label array.
* @return the logistic loss for binary classification problems.
*/
public static double computeLogisticLoss(double[] pred, double[] y) {
double loss = 0;
for (int i = 0; i < pred.length; i++) {
loss += computeLogisticLoss(pred[i], y[i]);
}
return loss / y.length;
}
/**
* Computes the log loss (cross entropy) for binary classification problems.
*
* @param prob the probability.
* @param y the class label.
* @return the log loss.
*/
public static double computeLogLoss(double prob, double y) {
return computeLogLoss(prob, y, false);
}
/**
* Computes the log loss (cross entropy) for binary classification problems.
*
* @param p the input.
* @param y the class label.
* @param isRawScore {@code true} if the input is raw score.
* @return the log loss.
*/
public static double computeLogLoss(double p, double y, boolean isRawScore) {
if (isRawScore) {
p = MathUtils.sigmoid(p);
}
if (y == 1) {
return -Math.log(p);
} else {
return -Math.log(1 - p);
}
}
/**
* Computes the log loss (cross entropy) for binary classification problems.
*
* @param prob the probabilities.
* @param y the class label array.
* @return the log loss.
*/
public static double computeLogLoss(double[] prob, double[] y) {
return computeLogLoss(prob, y, false);
}
/**
* Computes the log loss (cross entropy) for binary classification problems.
*
* @param p the input.
* @param y the class label array.
* @param isRawScore {@code true} if the input is raw score.
* @return the log loss.
*/
public static double computeLogLoss(double[] p, double[] y, boolean isRawScore) {
double logLoss = 0;
for (int i = 0; i < p.length; i++) {
logLoss += computeLogLoss(p[i], y[i], isRawScore);
}
return logLoss;
}
/**
* Computes the quadratic loss for regression problems.
*
* @param residual the residual array.
* @return the quadratic loss for regression problems.
*/
public static double computeQuadraticLoss(double[] residual) {
return StatUtils.sumSq(residual) / (2 * residual.length);
}
/**
* Fits the intercept in regression problems. Residuals will be updated accordingly.
*
* @param residual the residual array.
* @return the fitted intercept.
*/
public static double fitIntercept(double[] residual) {
double delta = StatUtils.mean(residual);
VectorUtils.subtract(residual, delta);
return delta;
}
/**
* Fits the intercept via one Newton-Raphson step in binary classification problems. Predictions and residuals will be updated accordingly.
*
* @param prediction the prediction array.
* @param residual the residual array.
* @param y the class label array.
* @return the fitted intercept.
*/
public static double fitIntercept(double[] prediction, double[] residual, double[] y) {
double delta = 0;
// Use Newton-Raphson's method to approximate
// 1st derivative
double eta = 0;
// 2nd derivative
double theta = 0;
for (int i = 0; i < prediction.length; i++) {
double r = residual[i];
double t = Math.abs(r);
eta += r;
theta += t * (1 - t);
}
if (Math.abs(theta) > MathUtils.EPSILON) {
delta = eta / theta;
// Update predictions
VectorUtils.add(prediction, delta);
computePseudoResidual(prediction, y, residual);
}
return delta;
}
/**
* Returns {@code true} if the relative improvement is less than a threshold.
*
* @param prevLoss the previous loss.
* @param currLoss the current loss.
* @param epsilon the threshold.
* @return {@code true} if the relative improvement is less than a threshold.
*/
public static boolean isConverged(double prevLoss, double currLoss, double epsilon) {
if (prevLoss < MathUtils.EPSILON) {
return true;
} else {
return (prevLoss - currLoss) / prevLoss < epsilon;
}
}
/**
* Returns {@code true} if the array of metric values is converged.
*
* @param p an array of metric values.
* @param isLargerBetter {@code true} if larger value is better.
* @return {@code true} if the array of metric values is converged.
*/
public static boolean isConverged(double[] p, boolean isLargerBetter) {
final int bn = p.length;
if (p.length <= 20) {
return false;
}
double bestPerf = p[bn - 1];
double worstPerf = p[bn - 20];
for (int i = bn - 20; i < bn; i++) {
if (MathUtils.isFirstBetter(p[i], bestPerf, isLargerBetter)) {
bestPerf = p[i];
}
if (!MathUtils.isFirstBetter(p[i], worstPerf, isLargerBetter)) {
worstPerf = p[i];
}
}
double relMaxMin = Math.abs(worstPerf - bestPerf) / worstPerf;
double relImprov;
if (MathUtils.isFirstBetter(p[bn - 1], p[bn - 21], isLargerBetter)) {
relImprov = Math.abs(p[bn - 21] - p[bn - 1]) / p[bn - 21];
} else {
// Overfitting
relImprov = Double.NaN;
}
return relMaxMin < 0.02 && (Double.isNaN(relImprov) || relImprov < 0.005);
}
/**
* Returns {@code true} if the list of metric values is converged.
*
* @param list a list of metric values.
* @param isLargerBetter {@code true} if larger value is better.
* @return {@code true} if the list of metric values is converged.
*/
public static boolean isConverged(List<Double> list, boolean isLargerBetter) {
if (list.size() <= 20) {
return false;
}
final int bn = list.size();
double bestPerf = list.get(bn - 1);
double worstPerf = list.get(bn - 20);
for (int i = bn - 20; i < bn; i++) {
if (MathUtils.isFirstBetter(list.get(i), bestPerf, isLargerBetter)) {
bestPerf = list.get(i);
}
if (!MathUtils.isFirstBetter(list.get(i), worstPerf, isLargerBetter)) {
worstPerf = list.get(i);
}
}
double relMaxMin = Math.abs(worstPerf - bestPerf) / worstPerf;
double relImprov;
if (MathUtils.isFirstBetter(list.get(bn - 1), list.get(bn - 21), isLargerBetter)) {
relImprov = Math.abs(list.get(bn - 21) - list.get(bn - 1)) / list.get(bn - 21);
} else {
// Overfitting
relImprov = Double.NaN;
}
return relMaxMin < 0.02 && (Double.isNaN(relImprov) || relImprov < 0.005);
}
}
```
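A quick stand-alone numeric check (class name is mine, not MLTK code) reproducing the constants asserted in the test above; `computeLogisticLoss(pred, cls)` is log(1 + exp(-pred)) for cls == 1 and log(1 + exp(pred)) otherwise:
```// Hypothetical stand-alone check of the logistic loss constants; not part of MLTK.
public class LogisticLossCheck {
    public static void main(String[] args) {
        System.out.println(Math.log(1 + Math.exp(0)));  // ln 2 = 0.6931... (pred 0, either class)
        System.out.println(Math.log(1 + Math.exp(-5))); // 0.00671...      (pred 5, cls 1)
        System.out.println(Math.log(1 + Math.exp(5)));  // 5.00671...      (pred 5, cls -1)
    }
}
```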
|
```package mltk.predictor.tree;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import mltk.core.Attribute;
import mltk.core.Instances;
import mltk.core.InstancesTestHelper;
public class RegressionTreeLearnerTest {
@Test
public void testRegressionTreeLearner1() {
RegressionTreeLearner rtLearner = new RegressionTreeLearner();
rtLearner.setConstructionMode(RegressionTreeLearner.Mode.DEPTH_LIMITED);
rtLearner.setMaxDepth(2);
Instances instances = InstancesTestHelper.getInstance().getDenseRegressionDataset();
RegressionTree rt = rtLearner.build(instances);
TreeInteriorNode root = (TreeInteriorNode) rt.getRoot();
Assert.assertEquals(0, root.attIndex);
Assert.assertTrue(root.getLeftChild() != null);
Assert.assertTrue(root.getRightChild() != null);
}
@Test
public void testRegressionTreeLearner2() {
RegressionTreeLearner rtLearner = new RegressionTreeLearner();
rtLearner.setConstructionMode(RegressionTreeLearner.Mode.DEPTH_LIMITED);
rtLearner.setMaxDepth(2);
Instances instances = InstancesTestHelper.getInstance()
.getDenseRegressionDataset().copy();
// Apply feature selection
List<Attribute> attributes = instances.getAttributes(1);
instances.setAttributes(attributes);
RegressionTree rt = rtLearner.build(instances);
TreeInteriorNode root = (TreeInteriorNode) rt.getRoot();
Assert.assertEquals(1, root.attIndex);
}
}
```
|
Please help me generate a test for this class.
|
```package mltk.predictor.tree;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import mltk.cmdline.Argument;
import mltk.cmdline.CmdLineParser;
import mltk.cmdline.options.LearnerOptions;
import mltk.core.Attribute;
import mltk.core.Instances;
import mltk.core.io.InstancesReader;
import mltk.predictor.evaluation.Evaluator;
import mltk.predictor.io.PredictorWriter;
import mltk.util.Random;
import mltk.util.Stack;
import mltk.util.Element;
import mltk.util.OptimUtils;
import mltk.util.tuple.DoublePair;
import mltk.util.tuple.IntDoublePair;
/**
* Class for learning regression trees.
*
* @author Yin Lou
*
*/
public class RegressionTreeLearner extends RTreeLearner {
static class Options extends LearnerOptions {
@Argument(name = "-m", description = "construction mode:parameter. Construction mode can be alpha limited (a), depth limited (d), number of leaves limited (l) and minimum leaf size limited (s) (default: a:0.001)")
String mode = "a:0.001";
@Argument(name = "-s", description = "seed of the random number generator (default: 0)")
long seed = 0L;
}
/**
* Trains a regression tree.
*
* <pre>
* Usage: mltk.predictor.tree.RegressionTreeLearner
* -t train set path
* [-r] attribute file path
* [-o] output model path
* [-V] verbose (default: true)
* [-m] construction mode:parameter. Construction mode can be alpha limited (a), depth limited (d), number of leaves limited (l) and minimum leaf size limited (s) (default: a:0.001)
* [-s] seed of the random number generator (default: 0)
* </pre>
*
* @param args the command line arguments.
* @throws Exception
*/
public static void main(String[] args) throws Exception {
Options opts = new Options();
CmdLineParser parser = new CmdLineParser(RegressionTreeLearner.class, opts);
RegressionTreeLearner learner = new RegressionTreeLearner();
try {
parser.parse(args);
learner.setParameters(opts.mode);
} catch (IllegalArgumentException e) {
parser.printUsage();
System.exit(1);
}
Random.getInstance().setSeed(opts.seed);
Instances trainSet = InstancesReader.read(opts.attPath, opts.trainPath);
long start = System.currentTimeMillis();
RegressionTree rt = learner.build(trainSet);
long end = System.currentTimeMillis();
System.out.println("Time: " + (end - start) / 1000.0 + " (s).");
System.out.println(Evaluator.evalRMSE(rt, trainSet));
if (opts.outputModelPath != null) {
PredictorWriter.write(rt, opts.outputModelPath);
}
}
/**
* Enumeration of construction mode.
*
* @author Yin Lou
*
*/
public enum Mode {
DEPTH_LIMITED, NUM_LEAVES_LIMITED, ALPHA_LIMITED, MIN_LEAF_SIZE_LIMITED;
}
protected int maxDepth;
protected int maxNumLeaves;
protected int minLeafSize;
protected double alpha;
protected Mode mode;
/**
* Constructor.
*/
public RegressionTreeLearner() {
alpha = 0.01;
mode = Mode.ALPHA_LIMITED;
}
@Override
public RegressionTree build(Instances instances) {
RegressionTree rt = null;
switch (mode) {
case ALPHA_LIMITED:
rt = buildAlphaLimitedTree(instances, alpha);
break;
case NUM_LEAVES_LIMITED:
rt = buildNumLeafLimitedTree(instances, maxNumLeaves);
break;
case DEPTH_LIMITED:
rt = buildDepthLimitedTree(instances, maxDepth);
break;
case MIN_LEAF_SIZE_LIMITED:
rt = buildMinLeafSizeLimitedTree(instances, minLeafSize);
break;
default:
break;
}
return rt;
}
@Override
public void setParameters(String mode) {
String[] data = mode.split(":");
if (data.length != 2) {
throw new IllegalArgumentException();
}
switch (data[0]) {
case "a":
this.setConstructionMode(Mode.ALPHA_LIMITED);
this.setAlpha(Double.parseDouble(data[1]));
break;
case "d":
this.setConstructionMode(Mode.DEPTH_LIMITED);
this.setMaxDepth(Integer.parseInt(data[1]));
break;
case "l":
this.setConstructionMode(Mode.NUM_LEAVES_LIMITED);
this.setMaxNumLeaves(Integer.parseInt(data[1]));
break;
case "s":
this.setConstructionMode(Mode.MIN_LEAF_SIZE_LIMITED);
this.setMinLeafSize(Integer.parseInt(data[1]));
break;
default:
throw new IllegalArgumentException();
}
}
@Override
public boolean isRobust() {
return false;
}
/**
* Returns the alpha.
*
* @return the alpha.
*/
public double getAlpha() {
return alpha;
}
/**
* Returns the construction mode.
*
* @return the construction mode.
*/
public Mode getConstructionMode() {
return mode;
}
/**
* Returns the maximum depth.
*
* @return the maximum depth.
*/
public int getMaxDepth() {
return maxDepth;
}
/**
* Returns the maximum number of leaves.
*
* @return the maximum number of leaves.
*/
public int getMaxNumLeaves() {
return maxNumLeaves;
}
/**
* Returns the minimum leaf size.
*
* @return the minimum leaf size.
*/
public int getMinLeafSize() {
return minLeafSize;
}
/**
* Sets the alpha. Alpha is the maximum proportion of the training set in the leaf node.
*
* @param alpha the alpha.
*/
public void setAlpha(double alpha) {
this.alpha = alpha;
}
/**
* Sets the construction mode.
*
* @param mode the construction mode.
*/
public void setConstructionMode(Mode mode) {
this.mode = mode;
}
/**
* Sets the maximum depth.
*
* @param maxDepth the maximum depth.
*/
public void setMaxDepth(int maxDepth) {
this.maxDepth = maxDepth;
}
/**
* Sets the maximum number of leaves.
*
* @param maxNumLeaves the maximum number of leaves.
*/
public void setMaxNumLeaves(int maxNumLeaves) {
this.maxNumLeaves = maxNumLeaves;
}
/**
* Sets the minimum leaf size.
*
* @param minLeafSize the minimum leaf size.
*/
public void setMinLeafSize(int minLeafSize) {
this.minLeafSize = minLeafSize;
}
protected RegressionTree buildAlphaLimitedTree(Instances instances, double alpha) {
final int limit = (int) (alpha * instances.size());
return buildMinLeafSizeLimitedTree(instances, limit);
}
protected RegressionTree buildDepthLimitedTree(Instances instances, int maxDepth) {
RegressionTree tree = new RegressionTree();
final int limit = 5;
// stats[0]: totalWeights
// stats[1]: sum
// stats[2]: weightedMean
// stats[3]: splitEval
double[] stats = new double[4];
if (maxDepth <= 0) {
getStats(instances, stats);
// A degenerate tree is a single leaf predicting the weighted mean (stats[2])
tree.root = new RegressionTreeLeaf(stats[2]);
return tree;
}
Map<TreeNode, Dataset> datasets = new HashMap<>();
Map<TreeNode, Integer> depths = new HashMap<>();
Dataset dataset = null;
if (this.cache != null) {
dataset = Dataset.create(this.cache, instances);
} else {
dataset = Dataset.create(instances);
}
tree.root = createNode(dataset, limit, stats);
PriorityQueue<Element<TreeNode>> q = new PriorityQueue<>();
q.add(new Element<TreeNode>(tree.root, stats[2]));
datasets.put(tree.root, dataset);
depths.put(tree.root, 0);
while (!q.isEmpty()) {
Element<TreeNode> elemt = q.remove();
TreeNode node = elemt.element;
Dataset data = datasets.get(node);
int depth = depths.get(node);
if (!node.isLeaf()) {
TreeInteriorNode interiorNode = (TreeInteriorNode) node;
Dataset left = new Dataset(data.instances);
Dataset right = new Dataset(data.instances);
split(data, interiorNode, left, right);
if (depth >= maxDepth - 1) {
getStats(left.instances, stats);
interiorNode.left = new RegressionTreeLeaf(stats[2]);
getStats(right.instances, stats);
interiorNode.right = new RegressionTreeLeaf(stats[2]);
} else {
interiorNode.left = createNode(left, limit, stats);
if (!interiorNode.left.isLeaf()) {
q.add(new Element<TreeNode>(interiorNode.left, stats[3]));
datasets.put(interiorNode.left, left);
depths.put(interiorNode.left, depth + 1);
}
interiorNode.right = createNode(right, limit, stats);
if (!interiorNode.right.isLeaf()) {
q.add(new Element<TreeNode>(interiorNode.right, stats[3]));
datasets.put(interiorNode.right, right);
depths.put(interiorNode.right, depth + 1);
}
}
}
}
return tree;
}
protected RegressionTree buildMinLeafSizeLimitedTree(Instances instances, int limit) {
RegressionTree tree = new RegressionTree();
// stats[0]: totalWeights
// stats[1]: sum
// stats[2]: weightedMean
// stats[3]: splitEval
double[] stats = new double[4];
Dataset dataset = null;
if (this.cache != null) {
dataset = Dataset.create(this.cache, instances);
} else {
dataset = Dataset.create(instances);
}
Stack<TreeNode> nodes = new Stack<>();
Stack<Dataset> datasets = new Stack<>();
tree.root = createNode(dataset, limit, stats);
nodes.push(tree.root);
datasets.push(dataset);
while (!nodes.isEmpty()) {
TreeNode node = nodes.pop();
Dataset data = datasets.pop();
if (!node.isLeaf()) {
TreeInteriorNode interiorNode = (TreeInteriorNode) node;
Dataset left = new Dataset(data.instances);
Dataset right = new Dataset(data.instances);
split(data, interiorNode, left, right);
interiorNode.left = createNode(left, limit, stats);
interiorNode.right = createNode(right, limit, stats);
nodes.push(interiorNode.left);
datasets.push(left);
nodes.push(interiorNode.right);
datasets.push(right);
}
}
return tree;
}
protected RegressionTree buildNumLeafLimitedTree(Instances instances, int maxNumLeaves) {
RegressionTree tree = new RegressionTree();
final int limit = 5;
// stats[0]: totalWeights
// stats[1]: sum
// stats[2]: weightedMean
// stats[3]: splitEval
double[] stats = new double[4];
Map<TreeNode, Double> nodePred = new HashMap<>();
Map<TreeNode, Dataset> datasets = new HashMap<>();
Dataset dataset = null;
if (this.cache != null) {
dataset = Dataset.create(this.cache, instances);
} else {
dataset = Dataset.create(instances);
}
PriorityQueue<Element<TreeNode>> q = new PriorityQueue<>();
tree.root = createNode(dataset, limit, stats);
q.add(new Element<TreeNode>(tree.root, stats[2]));
datasets.put(tree.root, dataset);
nodePred.put(tree.root, stats[2]); // weighted mean, consistent with the child nodes below
int numLeaves = 0;
while (!q.isEmpty()) {
Element<TreeNode> elemt = q.remove();
TreeNode node = elemt.element;
Dataset data = datasets.get(node);
if (!node.isLeaf()) {
TreeInteriorNode interiorNode = (TreeInteriorNode) node;
Dataset left = new Dataset(data.instances);
Dataset right = new Dataset(data.instances);
split(data, interiorNode, left, right);
interiorNode.left = createNode(left, limit, stats);
if (!interiorNode.left.isLeaf()) {
nodePred.put(interiorNode.left, stats[2]);
q.add(new Element<TreeNode>(interiorNode.left, stats[3]));
datasets.put(interiorNode.left, left);
} else {
numLeaves++;
}
interiorNode.right = createNode(right, limit, stats);
if (!interiorNode.right.isLeaf()) {
nodePred.put(interiorNode.right, stats[2]);
q.add(new Element<TreeNode>(interiorNode.right, stats[3]));
datasets.put(interiorNode.right, right);
} else {
numLeaves++;
}
if (numLeaves + q.size() >= maxNumLeaves) {
break;
}
}
}
// Convert interior nodes to leaves
Map<TreeNode, TreeNode> parent = new HashMap<>();
traverse(tree.root, parent);
while (!q.isEmpty()) {
Element<TreeNode> elemt = q.remove();
TreeNode node = elemt.element;
double prediction = nodePred.get(node);
TreeInteriorNode interiorNode = (TreeInteriorNode) parent.get(node);
if (interiorNode.left == node) {
interiorNode.left = new RegressionTreeLeaf(prediction);
} else {
interiorNode.right = new RegressionTreeLeaf(prediction);
}
}
return tree;
}
protected TreeNode createNode(Dataset dataset, int limit, double[] stats) {
boolean stdIs0 = getStats(dataset.instances, stats);
final double totalWeights = stats[0];
final double sum = stats[1];
final double weightedMean = stats[2];
// 1. Check basic leaf conditions
if (dataset.instances.size() < limit || stdIs0) {
TreeNode node = new RegressionTreeLeaf(weightedMean);
return node;
}
// 2. Find best split
double bestEval = Double.POSITIVE_INFINITY;
List<IntDoublePair> splits = new ArrayList<>();
List<Attribute> attributes = dataset.instances.getAttributes();
for (int j = 0; j < attributes.size(); j++) {
int attIndex = attributes.get(j).getIndex();
String attName = attributes.get(j).getName();
List<IntDoublePair> sortedList = dataset.sortedLists.get(attName);
List<Double> uniqueValues = new ArrayList<>(sortedList.size());
List<DoublePair> histogram = new ArrayList<>(sortedList.size());
getHistogram(dataset.instances, sortedList, uniqueValues, totalWeights, sum, histogram);
if (uniqueValues.size() > 1) {
DoublePair split = split(uniqueValues, histogram, totalWeights, sum);
if (split.v2 <= bestEval) {
IntDoublePair splitPoint = new IntDoublePair(attIndex, split.v1);
if (split.v2 < bestEval) {
splits.clear();
bestEval = split.v2;
}
splits.add(splitPoint);
}
}
}
if (bestEval < Double.POSITIVE_INFINITY) {
Random rand = Random.getInstance();
IntDoublePair splitPoint = splits.get(rand.nextInt(splits.size()));
int attIndex = splitPoint.v1;
TreeNode node = new TreeInteriorNode(attIndex, splitPoint.v2);
stats[3] = bestEval + totalWeights * weightedMean * weightedMean;
return node;
} else {
TreeNode node = new RegressionTreeLeaf(weightedMean);
return node;
}
}
protected void split(Dataset data, TreeInteriorNode node, Dataset left, Dataset right) {
data.split(node.getSplitAttributeIndex(), node.getSplitPoint(), left, right);
}
protected DoublePair split(List<Double> uniqueValues, List<DoublePair> hist, double totalWeights, double sum) {
double weight1 = hist.get(0).v1;
double weight2 = totalWeights - weight1;
double sum1 = hist.get(0).v2;
double sum2 = sum - sum1;
double bestEval = -(OptimUtils.getGain(sum1, weight1) + OptimUtils.getGain(sum2, weight2));
List<Double> splits = new ArrayList<>();
splits.add((uniqueValues.get(0) + uniqueValues.get(1)) / 2);
for (int i = 1; i < uniqueValues.size() - 1; i++) {
final double w = hist.get(i).v1;
final double s = hist.get(i).v2;
weight1 += w;
weight2 -= w;
sum1 += s;
sum2 -= s;
double eval1 = OptimUtils.getGain(sum1, weight1);
double eval2 = OptimUtils.getGain(sum2, weight2);
double eval = -(eval1 + eval2);
if (eval <= bestEval) {
double split = (uniqueValues.get(i) + uniqueValues.get(i + 1)) / 2;
if (eval < bestEval) {
bestEval = eval;
splits.clear();
}
splits.add(split);
}
}
Random rand = Random.getInstance();
double split = splits.get(rand.nextInt(splits.size()));
return new DoublePair(split, bestEval);
}
protected void traverse(TreeNode node, Map<TreeNode, TreeNode> parent) {
if (!node.isLeaf()) {
TreeInteriorNode interiorNode = (TreeInteriorNode) node;
if (interiorNode.left != null) {
parent.put(interiorNode.left, node);
traverse(interiorNode.left, parent);
}
if (interiorNode.right != null) {
parent.put(interiorNode.right, node);
traverse(interiorNode.right, parent);
}
}
}
}
```
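The split scoring in `split(...)` above relies on `OptimUtils.getGain(sum, weight)`, i.e. sum^2 / weight; minimizing -(gainLeft + gainRight) is equivalent to maximizing variance reduction. A tiny stand-alone sketch of one pass over a sorted histogram (class name and data are mine, not MLTK code):
```// Hypothetical sketch of the variance-reduction split search; not MLTK code.
public class SplitGainSketch {
    static double gain(double sum, double weight) {
        return weight < 1e-12 ? 0 : sum * sum / weight; // mirrors OptimUtils.getGain
    }
    public static void main(String[] args) {
        double[] values = {1, 2, 3, 10}; // sorted unique feature values, unit weights
        double[] sums = {1, 2, 3, 10};   // per-value target sums (targets == values here)
        double total = 4, sum = 16;
        double w1 = 0, s1 = 0, bestEval = Double.POSITIVE_INFINITY, bestSplit = 0;
        for (int i = 0; i < values.length - 1; i++) {
            w1 += 1;
            s1 += sums[i];
            double eval = -(gain(s1, w1) + gain(sum - s1, total - w1));
            if (eval < bestEval) {
                bestEval = eval;
                bestSplit = (values[i] + values[i + 1]) / 2; // midpoint, as in the learner
            }
        }
        System.out.println("best split at " + bestSplit); // 6.5, isolating the outlier
    }
}
```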
|
```package mltk.predictor.tree;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import mltk.core.Attribute;
import mltk.core.Instances;
import mltk.core.InstancesTestHelper;
public class DecisionTableLearnerTest {
@Test
public void testDecisionTableLearner1() {
DecisionTableLearner rtLearner = new DecisionTableLearner();
rtLearner.setConstructionMode(DecisionTableLearner.Mode.ONE_PASS_GREEDY);
rtLearner.setMaxDepth(2);
Instances instances = InstancesTestHelper.getInstance().getDenseRegressionDataset();
DecisionTable rt = rtLearner.build(instances);
int[] attributeIndices = rt.getAttributeIndices();
Assert.assertEquals(2, attributeIndices.length);
Assert.assertEquals(0, attributeIndices[0]);
}
@Test
public void testDecisionTableLearner2() {
DecisionTableLearner rtLearner = new DecisionTableLearner();
rtLearner.setConstructionMode(DecisionTableLearner.Mode.ONE_PASS_GREEDY);
rtLearner.setMaxDepth(2);
Instances instances = InstancesTestHelper.getInstance()
.getDenseRegressionDataset().copy();
// Apply feature selection
List<Attribute> attributes = instances.getAttributes(1);
instances.setAttributes(attributes);
DecisionTable rt = rtLearner.build(instances);
int[] attributeIndices = rt.getAttributeIndices();
Assert.assertEquals(2, attributeIndices.length);
Assert.assertEquals(1, attributeIndices[0]);
}
}
```
|
Please help me generate a test for this class.
|
```package mltk.predictor.tree;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import mltk.core.Attribute;
import mltk.core.Attribute.Type;
import mltk.core.BinnedAttribute;
import mltk.core.Instance;
import mltk.core.Instances;
import mltk.core.NominalAttribute;
import mltk.util.ArrayUtils;
import mltk.util.OptimUtils;
import mltk.util.Random;
import mltk.util.StatUtils;
import mltk.util.tuple.DoublePair;
import mltk.util.tuple.IntDoublePair;
import mltk.util.tuple.LongDoublePair;
import mltk.util.tuple.LongDoublePairComparator;
/**
* Class for learning decision tables.
*
* <p>
* Reference:<br>
* Y. Lou and M. Obukhov. BDT: Boosting Decision Tables for High Accuracy and Scoring Efficiency. In <i>Proceedings of the
* 23rd ACM SIGKDD International Conference on Knowledge Discovery and Data Mining (KDD)</i>, Halifax, Nova Scotia, Canada, 2017.
* </p>
*
* This implementation differs from the paper's to better fit the design of this package.
*
* @author Yin Lou
*
*/
public class DecisionTableLearner extends RTreeLearner {
/**
* Enumeration of construction mode.
*
* @author Yin Lou
*
*/
public enum Mode {
ONE_PASS_GREEDY, MULTI_PASS_CYCLIC, MULTI_PASS_RANDOM;
}
protected Mode mode;
protected int maxDepth;
protected int numPasses;
/**
* Constructor.
*/
public DecisionTableLearner() {
mode = Mode.ONE_PASS_GREEDY;
maxDepth = 6;
numPasses = 2;
}
@Override
public void setParameters(String mode) {
String[] data = mode.split(":");
if (data.length != 2) {
throw new IllegalArgumentException();
}
this.setMaxDepth(Integer.parseInt(data[1]));
switch (data[0]) {
case "g":
this.setConstructionMode(Mode.ONE_PASS_GREEDY);
break;
case "c":
this.setConstructionMode(Mode.MULTI_PASS_CYCLIC);
this.setNumPasses(2);
break;
case "r":
this.setConstructionMode(Mode.MULTI_PASS_RANDOM);
this.setNumPasses(2);
break;
default:
throw new IllegalArgumentException();
}
}
@Override
public boolean isRobust() {
return false;
}
/**
* Returns the construction mode.
*
* @return the construction mode.
*/
public Mode getConstructionMode() {
return mode;
}
/**
* Sets the construction mode.
*
* @param mode the construction mode.
*/
public void setConstructionMode(Mode mode) {
this.mode = mode;
}
/**
* Returns the maximum depth.
*
* @return the maximum depth.
*/
public int getMaxDepth() {
return maxDepth;
}
/**
* Sets the maximum depth.
*
* @param maxDepth the maximum depth.
*/
public void setMaxDepth(int maxDepth) {
this.maxDepth = maxDepth;
}
/**
* Returns the number of passes. This parameter is used in multi-pass cyclic mode.
*
* @return the number of passes.
*/
public int getNumPasses() {
return numPasses;
}
/**
* Sets the number of passes.
*
* @param numPasses the number of passes.
*/
public void setNumPasses(int numPasses) {
this.numPasses = numPasses;
}
@Override
public DecisionTable build(Instances instances) {
DecisionTable ot = null;
switch (mode) {
case ONE_PASS_GREEDY:
ot = buildOnePassGreedy(instances, maxDepth);
break;
case MULTI_PASS_CYCLIC:
ot = buildMultiPassCyclic(instances, maxDepth, numPasses);
break;
case MULTI_PASS_RANDOM:
ot = buildMultiPassRandom(instances, maxDepth, numPasses);
break;
default:
break;
}
return ot;
}
/**
* Builds a standard oblivious regression tree using greedy tree induction.
*
* @param instances the training set.
* @param maxDepth the maximum depth.
* @return an oblivious regression tree.
*/
public DecisionTable buildOnePassGreedy(Instances instances, int maxDepth) {
// stats[0]: totalWeights
// stats[1]: sum
// stats[2]: weightedMean
double[] stats = new double[3];
Map<Long, Dataset> map = new HashMap<>(instances.size());
List<Integer> attList = new ArrayList<>(maxDepth);
List<Double> splitList = new ArrayList<>(maxDepth);
Dataset dataset = null;
if (this.cache != null) {
dataset = Dataset.create(this.cache, instances);
} else {
dataset = Dataset.create(instances);
}
map.put(Long.valueOf(0L), dataset);
if (maxDepth <= 0) {
getStats(dataset.instances, stats);
final double weightedMean = stats[2];
return new DecisionTable(
new int[] {},
new double[] {},
new long[] { 0L },
new double[] { weightedMean });
}
List<Attribute> attributes = instances.getAttributes();
List<List<Double>> featureValues = new ArrayList<>(attributes.size());
for (int j = 0; j < attributes.size(); j++) {
Attribute attribute = attributes.get(j);
List<Double> values = new ArrayList<>();
if (attribute.getType() == Type.BINNED) {
int numBins = ((BinnedAttribute) attribute).getNumBins();
for (int i = 0; i < numBins; i++) {
values.add((double) i);
}
} else if (attribute.getType() == Type.NOMINAL) {
int cardinality = ((NominalAttribute) attribute).getCardinality();
for (int i = 0; i < cardinality; i++) {
values.add((double) i);
}
} else {
Set<Double> set = new HashSet<>();
for (Instance instance : instances) {
set.add(instance.getValue(attribute));
}
values.addAll(set);
Collections.sort(values);
}
featureValues.add(values);
}
for (int d = 0; d < maxDepth; d++) {
double bestGain = Double.NEGATIVE_INFINITY;
List<IntDoublePair> splitCandidates = new ArrayList<>();
for (int j = 0; j < attributes.size(); j++) {
List<Double> values = featureValues.get(j);
if (values.size() <= 1) {
continue;
}
Attribute attribute = attributes.get(j);
int attIndex = attribute.getIndex();
String attName = attribute.getName();
double[] gains = new double[values.size() - 1];
for (Dataset data : map.values()) {
getStats(data.instances, stats);
final double totalWeights = stats[0];
final double sum = stats[1];
List<IntDoublePair> sortedList = data.sortedLists.get(attName);
List<Double> uniqueValues = new ArrayList<>(sortedList.size());
List<DoublePair> histogram = new ArrayList<>(sortedList.size());
getHistogram(data.instances, sortedList, uniqueValues, totalWeights, sum, histogram);
double[] localGains = evalSplits(uniqueValues, histogram, totalWeights, sum);
processGains(uniqueValues, localGains, values, gains);
}
int idx = StatUtils.indexOfMax(gains);
if (bestGain <= gains[idx]) {
double split = (values.get(idx) + values.get(idx + 1)) / 2;
if (bestGain < gains[idx]) {
bestGain = gains[idx];
splitCandidates.clear();
}
splitCandidates.add(new IntDoublePair(attIndex, split));
}
}
if (splitCandidates.size() == 0) {
break;
}
Random rand = Random.getInstance();
IntDoublePair split = splitCandidates.get(rand.nextInt(splitCandidates.size()));
attList.add(split.v1);
splitList.add(split.v2);
Map<Long, Dataset> mapNew = new HashMap<>(map.size() * 2);
for (Map.Entry<Long, Dataset> entry : map.entrySet()) {
Long key = entry.getKey();
Dataset data = entry.getValue();
Dataset left = new Dataset(data.instances);
Dataset right = new Dataset(data.instances);
data.split(split.v1, split.v2, left, right);
if (left.instances.size() > 0) {
Long leftKey = (key << 1) | 1L;
mapNew.put(leftKey, left);
}
if (right.instances.size() > 0) {
Long rightKey = key << 1;
mapNew.put(rightKey, right);
}
}
map = mapNew;
}
int[] attIndices = ArrayUtils.toIntArray(attList);
double[] splits = ArrayUtils.toDoubleArray(splitList);
List<LongDoublePair> list = new ArrayList<>(splits.length);
for (Map.Entry<Long, Dataset> entry : map.entrySet()) {
Long key = entry.getKey();
Dataset data = entry.getValue();
getStats(data.instances, stats);
list.add(new LongDoublePair(key, stats[2]));
}
Collections.sort(list, new LongDoublePairComparator());
long[] predIndices = new long[list.size()];
double[] predValues = new double[list.size()];
for (int i = 0; i < predIndices.length; i++) {
LongDoublePair pair = list.get(i);
predIndices[i] = pair.v1;
predValues[i] = pair.v2;
}
return new DecisionTable(attIndices, splits, predIndices, predValues);
}
/**
* Builds an oblivious regression tree using multi-pass cyclic backfitting.
*
* @param instances the training set.
* @param maxDepth the maximum depth.
* @param numPasses the number of passes.
* @return an oblivious regression tree.
*/
public DecisionTable buildMultiPassCyclic(Instances instances, int maxDepth, int numPasses) {
// stats[0]: totalWeights
// stats[1]: sum
// stats[2]: weightedMean
double[] stats = new double[3];
Map<Long, Dataset> map = new HashMap<>(instances.size());
int[] attIndices = new int[maxDepth];
double[] splits = new double[maxDepth];
Dataset dataset = null;
if (this.cache != null) {
dataset = Dataset.create(this.cache, instances);
} else {
dataset = Dataset.create(instances);
}
map.put(Long.valueOf(0L), dataset);
if (maxDepth <= 0) {
getStats(dataset.instances, stats);
final double weightedMean = stats[2];
return new DecisionTable(
new int[] {},
new double[] {},
new long[] { 0L },
new double[] { weightedMean });
}
List<Attribute> attributes = instances.getAttributes();
List<List<Double>> featureValues = new ArrayList<>(attributes.size());
for (int j = 0; j < attributes.size(); j++) {
Attribute attribute = attributes.get(j);
List<Double> values = new ArrayList<>();
if (attribute.getType() == Type.BINNED) {
int numBins = ((BinnedAttribute) attribute).getNumBins();
for (int i = 0; i < numBins; i++) {
values.add((double) i);
}
} else if (attribute.getType() == Type.NOMINAL) {
int cardinality = ((NominalAttribute) attribute).getCardinality();
for (int i = 0; i < cardinality; i++) {
values.add((double) i);
}
} else {
Set<Double> set = new HashSet<>();
for (Instance instance : instances) {
set.add(instance.getValue(attribute));
}
values.addAll(set);
Collections.sort(values);
}
featureValues.add(values);
}
for (int pass = 0; pass < numPasses; pass++) {
for (int d = 0; d < maxDepth; d++) {
double bestGain = Double.NEGATIVE_INFINITY;
List<IntDoublePair> splitCandidates = new ArrayList<>();
// Remove the split at depth d: merge each pair of sibling leaves whose keys differ only in bit (maxDepth - d - 1)
Set<Long> processedKeys = new HashSet<>();
Map<Long, Dataset> mapNew = new HashMap<>(map.size());
for (Map.Entry<Long, Dataset> entry : map.entrySet()) {
Long key = entry.getKey();
if (processedKeys.contains(key)) {
continue;
}
Dataset data = entry.getValue();
int s = maxDepth - d - 1;
Long otherKey = key ^ (1L << s);
if (map.containsKey(otherKey)) {
long check = (key >> s) & 1;
Dataset left = null;
Dataset right = null;
if (check > 0) {
left = data;
right = map.get(otherKey);
} else {
left = map.get(otherKey);
right = data;
}
// This key will be updated anyway
mapNew.put(key, Dataset.merge(left, right));
processedKeys.add(key);
processedKeys.add(otherKey);
} else {
mapNew.put(key, data);
}
}
map = mapNew;
for (int j = 0; j < attributes.size(); j++) {
Attribute attribute = attributes.get(j);
int attIndex = attribute.getIndex();
String attName = attribute.getName();
List<Double> values = featureValues.get(j);
if (values.size() <= 1) {
continue;
}
double[] gains = new double[values.size() - 1];
for (Dataset data : map.values()) {
getStats(data.instances, stats);
final double totalWeights = stats[0];
final double sum = stats[1];
List<IntDoublePair> sortedList = data.sortedLists.get(attName);
List<Double> uniqueValues = new ArrayList<>(sortedList.size());
List<DoublePair> histogram = new ArrayList<>(sortedList.size());
getHistogram(data.instances, sortedList, uniqueValues, totalWeights, sum, histogram);
double[] localGains = evalSplits(uniqueValues, histogram, totalWeights, sum);
processGains(uniqueValues, localGains, values, gains);
}
int idx = StatUtils.indexOfMax(gains);
if (bestGain <= gains[idx]) {
double split = (values.get(idx) + values.get(idx + 1)) / 2;
if (bestGain < gains[idx]) {
bestGain = gains[idx];
splitCandidates.clear();
}
splitCandidates.add(new IntDoublePair(attIndex, split));
}
}
if (splitCandidates.size() == 0) {
break;
}
Random rand = Random.getInstance();
IntDoublePair split = splitCandidates.get(rand.nextInt(splitCandidates.size()));
attIndices[d] = split.v1;
splits[d] = split.v2;
mapNew = new HashMap<>(map.size() * 2);
for (Map.Entry<Long, Dataset> entry : map.entrySet()) {
Long key = entry.getKey();
Dataset data = entry.getValue();
Dataset left = new Dataset(data.instances);
Dataset right = new Dataset(data.instances);
data.split(split.v1, split.v2, left, right);
int s = maxDepth - d - 1;
if (left.instances.size() > 0) {
Long leftKey = key | (1L << s);
mapNew.put(leftKey, left);
}
if (right.instances.size() > 0) {
Long rightKey = key & ~(1L << s);
mapNew.put(rightKey, right);
}
}
map = mapNew;
}
}
List<LongDoublePair> list = new ArrayList<>(splits.length);
for (Map.Entry<Long, Dataset> entry : map.entrySet()) {
Long key = entry.getKey();
Dataset data = entry.getValue();
getStats(data.instances, stats);
list.add(new LongDoublePair(key, stats[2]));
}
Collections.sort(list, new LongDoublePairComparator());
long[] predIndices = new long[list.size()];
double[] predValues = new double[list.size()];
for (int i = 0; i < predIndices.length; i++) {
LongDoublePair pair = list.get(i);
predIndices[i] = pair.v1;
predValues[i] = pair.v2;
}
return new DecisionTable(attIndices, splits, predIndices, predValues);
}
/**
* Builds an oblivious regression tree using multi-pass random backfitting.
*
* @param instances the training set.
* @param maxDepth the maximum depth.
* @param numPasses the number of passes.
* @return an oblivious regression tree.
*/
public DecisionTable buildMultiPassRandom(Instances instances, int maxDepth, int numPasses) {
// stats[0]: totalWeights
// stats[1]: sum
// stats[2]: weightedMean
double[] stats = new double[3];
Map<Long, Dataset> map = new HashMap<>(instances.size());
int[] attIndices = new int[maxDepth];
double[] splits = new double[maxDepth];
Dataset dataset = null;
if (this.cache != null) {
dataset = Dataset.create(this.cache, instances);
} else {
dataset = Dataset.create(instances);
}
map.put(Long.valueOf(0L), dataset);
if (maxDepth <= 0) {
getStats(dataset.instances, stats);
final double weightedMean = stats[2];
return new DecisionTable(
new int[] {},
new double[] {},
new long[] { 0L },
new double[] { weightedMean });
}
List<Attribute> attributes = instances.getAttributes();
List<List<Double>> featureValues = new ArrayList<>(attributes.size());
for (int j = 0; j < attributes.size(); j++) {
Attribute attribute = attributes.get(j);
List<Double> values = new ArrayList<>();
if (attribute.getType() == Type.BINNED) {
int numBins = ((BinnedAttribute) attribute).getNumBins();
for (int i = 0; i < numBins; i++) {
values.add((double) i);
}
} else if (attribute.getType() == Type.NOMINAL) {
int cardinality = ((NominalAttribute) attribute).getCardinality();
for (int i = 0; i < cardinality; i++) {
values.add((double) i);
}
} else {
Set<Double> set = new HashSet<>();
for (Instance instance : instances) {
set.add(instance.getValue(attribute));
}
values.addAll(set);
Collections.sort(values);
}
featureValues.add(values);
}
for (int iter = 0; iter < numPasses; iter++) {
for (int k = 0; k < maxDepth; k++) {
double bestGain = Double.NEGATIVE_INFINITY;
List<IntDoublePair> splitCandidates = new ArrayList<>();
int d = k;
if (iter > 0) {
d = Random.getInstance().nextInt(maxDepth);
}
// Remove the split at depth d: merge each pair of sibling leaves whose keys differ only in bit (maxDepth - d - 1)
Set<Long> processedKeys = new HashSet<>();
Map<Long, Dataset> mapNew = new HashMap<>(map.size());
for (Map.Entry<Long, Dataset> entry : map.entrySet()) {
Long key = entry.getKey();
if (processedKeys.contains(key)) {
continue;
}
Dataset data = entry.getValue();
int s = maxDepth - d - 1;
Long otherKey = key ^ (1L << s);
if (map.containsKey(otherKey)) {
long check = (key >> s) & 1;
Dataset left = null;
Dataset right = null;
if (check > 0) {
left = data;
right = map.get(otherKey);
} else {
left = map.get(otherKey);
right = data;
}
// This key will be updated anyway
mapNew.put(key, Dataset.merge(left, right));
processedKeys.add(key);
processedKeys.add(otherKey);
} else {
mapNew.put(key, data);
}
}
map = mapNew;
for (int j = 0; j < attributes.size(); j++) {
Attribute attribute = attributes.get(j);
int attIndex = attribute.getIndex();
String attName = attribute.getName();
List<Double> values = featureValues.get(j);
if (values.size() <= 1) {
continue;
}
double[] gains = new double[values.size() - 1];
for (Dataset data : map.values()) {
getStats(data.instances, stats);
final double totalWeights = stats[0];
final double sum = stats[1];
List<IntDoublePair> sortedList = data.sortedLists.get(attName);
List<Double> uniqueValues = new ArrayList<>(sortedList.size());
List<DoublePair> histogram = new ArrayList<>(sortedList.size());
getHistogram(data.instances, sortedList, uniqueValues, totalWeights, sum, histogram);
double[] localGains = evalSplits(uniqueValues, histogram, totalWeights, sum);
processGains(uniqueValues, localGains, values, gains);
}
int idx = StatUtils.indexOfMax(gains);
if (bestGain <= gains[idx]) {
double split = (values.get(idx) + values.get(idx + 1)) / 2;
if (bestGain < gains[idx]) {
bestGain = gains[idx];
splitCandidates.clear();
}
splitCandidates.add(new IntDoublePair(attIndex, split));
}
}
if (splitCandidates.size() == 0) {
break;
}
Random rand = Random.getInstance();
IntDoublePair split = splitCandidates.get(rand.nextInt(splitCandidates.size()));
attIndices[d] = split.v1;
splits[d] = split.v2;
mapNew = new HashMap<>(map.size() * 2);
for (Map.Entry<Long, Dataset> entry : map.entrySet()) {
Long key = entry.getKey();
Dataset data = entry.getValue();
Dataset left = new Dataset(data.instances);
Dataset right = new Dataset(data.instances);
data.split(split.v1, split.v2, left, right);
int s = maxDepth - d - 1;
if (left.instances.size() > 0) {
Long leftKey = key | (1L << s);
mapNew.put(leftKey, left);
}
if (right.instances.size() > 0) {
Long rightKey = key & ~(1L << s);
mapNew.put(rightKey, right);
}
}
map = mapNew;
}
}
List<LongDoublePair> list = new ArrayList<>(splits.length);
for (Map.Entry<Long, Dataset> entry : map.entrySet()) {
Long key = entry.getKey();
Dataset data = entry.getValue();
getStats(data.instances, stats);
list.add(new LongDoublePair(key, stats[2]));
}
Collections.sort(list, new LongDoublePairComparator());
long[] predIndices = new long[list.size()];
double[] predValues = new double[list.size()];
for (int i = 0; i < predIndices.length; i++) {
LongDoublePair pair = list.get(i);
predIndices[i] = pair.v1;
predValues[i] = pair.v2;
}
return new DecisionTable(attIndices, splits, predIndices, predValues);
}
protected void processGains(List<Double> uniqueValues, double[] localGains, List<Double> values, double[] gains) {
int i = 0;
int j = 0;
double noSplitGain = localGains[localGains.length - 1];
double minV = uniqueValues.get(0);
while (j < gains.length) {
double v2 = values.get(j);
if (v2 < minV) {
gains[j] += noSplitGain;
j++;
} else {
break;
}
}
double prevGain = localGains[i];
while (i < localGains.length && j < gains.length) {
double v1 = uniqueValues.get(i);
double v2 = values.get(j);
if (v1 == v2) {
gains[j] += localGains[i];
prevGain = localGains[i];
i++;
j++;
}
while (v1 > v2) {
gains[j] += prevGain;
j++;
v2 = values.get(j);
}
}
while (j < gains.length) {
gains[j] += noSplitGain;
j++;
}
}
protected double[] evalSplits(List<Double> uniqueValues, List<DoublePair> hist, double totalWeights, double sum) {
double weight1 = hist.get(0).v1;
double weight2 = totalWeights - weight1;
double sum1 = hist.get(0).v2;
double sum2 = sum - sum1;
double[] gains = new double[uniqueValues.size()];
gains[0] = OptimUtils.getGain(sum1, weight1) + OptimUtils.getGain(sum2, weight2);
for (int i = 1; i < uniqueValues.size() - 1; i++) {
final double w = hist.get(i).v1;
final double s = hist.get(i).v2;
weight1 += w;
weight2 -= w;
sum1 += s;
sum2 -= s;
gains[i] = OptimUtils.getGain(sum1, weight1) + OptimUtils.getGain(sum2, weight2);
}
// gain for no split
gains[uniqueValues.size() - 1] = OptimUtils.getGain(sum, totalWeights);
return gains;
}
}
```
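The builders above address leaves with a long bit key: each split contributes one bit (1 for the left partition, 0 for the right), so a depth-d table has at most 2^d leaves, and the sorted predIndices array can be searched for a key at prediction time. Below is a minimal sketch of that encoding; since Dataset.split is not shown in this excerpt, the convention that "left" holds attribute values less than or equal to the split is an assumption.

```
// Hypothetical sketch of the leaf-key encoding used by the builders above.
// Assumption (Dataset.split is not shown): "left" holds values <= split.
public final class LeafKeyDemo {

    // Computes the leaf key of one feature vector: one bit per depth,
    // set to 1 when the instance falls into the left partition.
    static long leafKey(double[] x, int[] attIndices, double[] splits) {
        long key = 0L;
        for (int d = 0; d < attIndices.length; d++) {
            key <<= 1;
            if (x[attIndices[d]] <= splits[d]) {
                key |= 1L; // left branch contributes a 1 bit
            }
        }
        return key;
    }

    public static void main(String[] args) {
        int[] attIndices = {0, 2}; // an oblivious tree tests one attribute per level
        double[] splits = {0.5, 3.0};
        double[] x = {0.2, 9.9, 4.0}; // left at depth 0, right at depth 1
        System.out.println(leafKey(x, attIndices, splits)); // prints 2 (binary 10)
    }
}
```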
|
```package mltk.predictor.evaluation;
import mltk.util.MathUtils;
import org.junit.Assert;
import org.junit.Test;
public class MAETest {
@Test
public void test() {
double[] preds = {1, 2, 3, 4};
double[] targets = {0.1, 0.2, 0.3, 0.4};
MAE metric = new MAE();
Assert.assertEquals(2.25, metric.eval(preds, targets), MathUtils.EPSILON);
}
}
```
|
Please help me generate a test for this class.
|
```package mltk.predictor.evaluation;
import mltk.core.Instances;
/**
* Class for evaluating mean absolute error (MAE).
*
* @author Yin Lou
*
*/
public class MAE extends SimpleMetric {
/**
* Constructor.
*/
public MAE() {
super(false);
}
@Override
public double eval(double[] preds, double[] targets) {
double mae = 0;
for (int i = 0; i < preds.length; i++) {
mae += Math.abs(targets[i] - preds[i]);
}
mae /= preds.length;
return mae;
}
@Override
public double eval(double[] preds, Instances instances) {
double mae = 0;
for (int i = 0; i < preds.length; i++) {
mae += Math.abs(instances.get(i).getTarget() - preds[i]);
}
mae /= preds.length;
return mae;
}
}
```
|
```package mltk.predictor.evaluation;
import mltk.util.MathUtils;
import org.junit.Assert;
import org.junit.Test;
public class LogLossTest {
@Test
public void testProb() {
double[] preds = {0.8, 0.1, 0.05, 0.9};
double[] targets = {1, 0, 0, 1};
LogLoss metric = new LogLoss(false);
Assert.assertEquals(0.485157877, metric.eval(preds, targets), MathUtils.EPSILON);
}
@Test
public void testRawScore() {
double[] preds = {5, -5, -3, 3};
double[] targets = {1, 0, 0, 1};
LogLoss metric = new LogLoss(true);
Assert.assertEquals(0.1106054, metric.eval(preds, targets), MathUtils.EPSILON);
}
}
```
|
Please help me generate a test for this class.
|
```package mltk.predictor.evaluation;
import mltk.core.Instances;
import mltk.util.OptimUtils;
/**
* Class for evaluating log loss (cross entropy).
*
* @author Yin Lou
*
*/
public class LogLoss extends SimpleMetric {
protected boolean isRawScore;
/**
* Constructor.
*/
public LogLoss() {
this(false);
}
/**
* Constructor.
*
* @param isRawScore {@code true} if raw score is expected as input.
*/
public LogLoss(boolean isRawScore) {
super(false);
this.isRawScore = isRawScore;
}
@Override
public double eval(double[] preds, double[] targets) {
return OptimUtils.computeLogLoss(preds, targets, isRawScore);
}
@Override
public double eval(double[] preds, Instances instances) {
double logLoss = 0;
for (int i = 0; i < preds.length; i++) {
logLoss += OptimUtils.computeLogLoss(preds[i], instances.get(i).getTarget(), isRawScore);
}
logLoss /= preds.length;
return logLoss;
}
/**
* Returns {@code true} if raw score is expected as input.
*
* @return {@code true} if raw score is expected as input.
*/
public boolean isRawScore() {
return isRawScore;
}
}
```
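The expected constants in LogLossTest can be reproduced by hand: each example contributes -(y * ln(p) + (1 - y) * ln(1 - p)), with p = sigmoid(score) when isRawScore is true. Note that both expected values match the sum over examples rather than the mean, which suggests, as an inference from the tests alone (the OptimUtils source is not shown here), that OptimUtils.computeLogLoss(double[], double[], boolean) returns a total. A standalone cross-check:

```
// Standalone cross-check of the LogLossTest constants (not part of mltk).
// The expected values match this *sum* over examples, not the mean —
// an inference from the tests; OptimUtils itself is not shown here.
public final class LogLossCheck {

    static double sigmoid(double a) {
        return 1 / (1 + Math.exp(-a));
    }

    static double totalLogLoss(double[] preds, double[] targets, boolean isRawScore) {
        double loss = 0;
        for (int i = 0; i < preds.length; i++) {
            double p = isRawScore ? sigmoid(preds[i]) : preds[i];
            loss += -(targets[i] * Math.log(p) + (1 - targets[i]) * Math.log(1 - p));
        }
        return loss;
    }

    public static void main(String[] args) {
        System.out.println(totalLogLoss(new double[] {0.8, 0.1, 0.05, 0.9},
                new double[] {1, 0, 0, 1}, false)); // ~0.485157877
        System.out.println(totalLogLoss(new double[] {5, -5, -3, 3},
                new double[] {1, 0, 0, 1}, true));  // ~0.1106054
    }
}
```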
|
```package mltk.util;
import org.junit.Assert;
import org.junit.Test;
public class VectorUtilsTest {
@Test
public void testAdd() {
double[] a = {1, 2, 3, 4};
double[] b = {2, 3, 4, 5};
VectorUtils.add(a, 1);
Assert.assertArrayEquals(b, a, MathUtils.EPSILON);
}
@Test
public void testSubtract() {
double[] a = {1, 2, 3, 4};
double[] b = {2, 3, 4, 5};
VectorUtils.subtract(b, 1);
Assert.assertArrayEquals(a, b, MathUtils.EPSILON);
}
@Test
public void testMultiply() {
double[] a = {1, 2, 3, 4};
double[] b = {2, 4, 6, 8};
VectorUtils.multiply(a, 2);
Assert.assertArrayEquals(b, a, MathUtils.EPSILON);
}
@Test
public void testDivide() {
double[] a = {1, 2, 3, 4};
double[] b = {2, 4, 6, 8};
VectorUtils.divide(b, 2);
Assert.assertArrayEquals(a, b, MathUtils.EPSILON);
}
@Test
public void testL2norm() {
double[] a = {1, 2, 3, 4};
Assert.assertEquals(5.477225575, VectorUtils.l2norm(a), MathUtils.EPSILON);
}
@Test
public void testL1norm() {
double[] a = {1, -2, 3, -4};
Assert.assertEquals(10, VectorUtils.l1norm(a), MathUtils.EPSILON);
}
@Test
public void testDotProduct() {
double[] a = {1, 2, 3, 4};
double[] b = {0, -1, 0, 1};
Assert.assertEquals(2, VectorUtils.dotProduct(a, b), MathUtils.EPSILON);
}
@Test
public void testCorrelation() {
double[] a = {1, 2, 3, 4};
double[] b = {2, 4, 6, 8};
double[] c = {-2, -4, -6, -8};
Assert.assertEquals(1, VectorUtils.correlation(a, b), MathUtils.EPSILON);
Assert.assertEquals(-1, VectorUtils.correlation(a, c), MathUtils.EPSILON);
}
}
```
|
Please help me generate a test for this class.
|
```package mltk.util;
import mltk.core.DenseVector;
import mltk.core.SparseVector;
import mltk.core.Vector;
/**
* Class for utility functions for real vectors.
*
* @author Yin Lou
*
*/
public class VectorUtils {
/**
* Adds a constant to all elements in the array.
*
* @param a the vector.
* @param v the constant.
*/
public static void add(double[] a, double v) {
for (int i = 0; i < a.length; i++) {
a[i] += v;
}
}
/**
* Subtracts a constant from all elements in the array.
*
* @param a the vector.
* @param v the constant.
*/
public static void subtract(double[] a, double v) {
for (int i = 0; i < a.length; i++) {
a[i] -= v;
}
}
/**
* Multiplies all elements in the array by a constant.
*
* @param a the vector.
* @param v the constant.
*/
public static void multiply(double[] a, double v) {
for (int i = 0; i < a.length; i++) {
a[i] *= v;
}
}
/**
* Divides all elements in the array by a constant.
*
* @param a the vector.
* @param v the constant.
*/
public static void divide(double[] a, double v) {
for (int i = 0; i < a.length; i++) {
a[i] /= v;
}
}
/**
* Returns the L2 norm of a vector.
*
* @param a the vector.
* @return the L2 norm of a vector.
*/
public static double l2norm(double[] a) {
return Math.sqrt(StatUtils.sumSq(a));
}
/**
* Returns the L2 norm of a vector.
*
* @param v the vector.
* @return the L2 norm of a vector.
*/
public static double l2norm(Vector v) {
return l2norm(v.getValues());
}
/**
* Returns the L1 norm of a vector.
*
* @param a the vector.
* @return the L1 norm of a vector.
*/
public static double l1norm(double[] a) {
double norm = 0;
for (double v : a) {
norm += Math.abs(v);
}
return norm;
}
/**
* Returns the L1 norm of a vector.
*
* @param v the vector.
* @return the L1 norm of a vector.
*/
public static double l1norm(Vector v) {
return l1norm(v.getValues());
}
/**
* Returns the dot product of two vectors.
*
* @param a the 1st vector.
* @param b the 2nd vector.
* @return the dot product of two vectors.
*/
public static double dotProduct(double[] a, double[] b) {
double s = 0;
for (int i = 0; i < a.length; i++) {
s += a[i] * b[i];
}
return s;
}
/**
* Returns the dot product of two vectors.
*
* @param a the 1st vector.
* @param b the 2nd vector.
* @return the dot product of two vectors.
*/
public static double dotProduct(DenseVector a, DenseVector b) {
return dotProduct(a.getValues(), b.getValues());
}
/**
* Returns the dot product of two vectors.
*
* @param a the 1st vector.
* @param b the 2nd vector.
* @return the dot product of two vectors.
*/
public static double dotProduct(SparseVector a, DenseVector b) {
int[] indices1 = a.getIndices();
double[] values1 = a.getValues();
double[] values2 = b.getValues();
double s = 0;
for (int i = 0; i < indices1.length; i++) {
s += values1[i] * values2[indices1[i]];
}
return s;
}
/**
* Returns the dot product of two vectors.
*
* @param a the 1st vector.
* @param b the 2nd vector.
* @return the dot product of two vectors.
*/
public static double dotProduct(DenseVector a, SparseVector b) {
return dotProduct(b, a);
}
/**
* Returns the dot product of two vectors.
*
* @param a the 1st vector.
* @param b the 2nd vector.
* @return the dot product of two vectors.
*/
public static double dotProduct(SparseVector a, SparseVector b) {
int[] indices1 = a.getIndices();
double[] values1 = a.getValues();
int[] indices2 = b.getIndices();
double[] values2 = b.getValues();
double s = 0;
int i = 0;
int j = 0;
while (i < indices1.length && j < indices2.length) {
if (indices1[i] < indices2[j]) {
i++;
} else if (indices1[i] > indices2[j]) {
j++;
} else {
s += values1[i] * values2[j];
i++;
j++;
}
}
return s;
}
/**
* Returns the Pearson correlation coefficient between two vectors.
*
* @param a the 1st vector.
* @param b the 2nd vector.
* @return the Pearson correlation coefficient between two vectors.
*/
public static double correlation(double[] a, double[] b) {
double mean1 = StatUtils.mean(a);
double mean2 = StatUtils.mean(b);
double x = 0;
double s1 = 0;
double s2 = 0;
for (int i = 0; i < a.length; i++) {
double d1 = (a[i] - mean1);
double d2 = (b[i] - mean2);
x += d1 * d2;
s1 += d1 * d1;
s2 += d2 * d2;
}
return x / Math.sqrt(s1 * s2);
}
}
```
|
```package mltk.predictor.evaluation;
import mltk.util.MathUtils;
import org.junit.Assert;
import org.junit.Test;
public class AUCTest {
@Test
public void test1() {
double[] preds = {0.8, 0.1, 0.05, 0.9};
double[] targets = {1, 0, 0, 1};
AUC metric = new AUC();
Assert.assertEquals(1, metric.eval(preds, targets), MathUtils.EPSILON);
}
@Test
public void test2() {
double[] preds = {0.5, 0.5, 0.5, 0.5};
double[] targets = {1, 0, 0, 1};
AUC metric = new AUC();
Assert.assertEquals(0.5, metric.eval(preds, targets), MathUtils.EPSILON);
}
}
```
|
Please help me generate a test for this class.
|
```package mltk.predictor.evaluation;
import java.util.Arrays;
import java.util.Comparator;
import mltk.core.Instances;
import mltk.util.tuple.DoublePair;
/**
* Class for evaluating area under ROC curve.
*
* @author Yin Lou
*
*/
public class AUC extends SimpleMetric {
private class DoublePairComparator implements Comparator<DoublePair> {
@Override
public int compare(DoublePair o1, DoublePair o2) {
int cmp = Double.compare(o1.v1, o2.v1);
if (cmp == 0) {
cmp = Double.compare(o1.v2, o2.v2);
}
return cmp;
}
}
/**
* Constructor.
*/
public AUC() {
super(true);
}
@Override
public double eval(double[] preds, double[] targets) {
DoublePair[] a = new DoublePair[preds.length];
for (int i = 0; i < preds.length; i++) {
a[i] = new DoublePair(preds[i], targets[i]);
}
return eval(a);
}
@Override
public double eval(double[] preds, Instances instances) {
DoublePair[] a = new DoublePair[preds.length];
for (int i = 0; i < preds.length; i++) {
a[i] = new DoublePair(preds[i], instances.get(i).getTarget());
}
return eval(a);
}
protected double eval(DoublePair[] a) {
Arrays.sort(a, new DoublePairComparator());
double[] fraction = new double[a.length];
for (int idx = 0; idx < fraction.length;) {
int begin = idx;
double pos = 0;
for (; idx < fraction.length && a[idx].v1 == a[begin].v1; idx++) {
pos += a[idx].v2;
}
double frac = pos / (idx - begin);
for (int i = begin; i < idx; i++) {
fraction[i] = frac;
}
}
double tt = 0;
double tf = 0;
double ft = 0;
double ff = 0;
for (int i = 0; i < a.length; i++) {
tf += a[i].v2;
ff += 1 - a[i].v2;
}
double area = 0;
double tpfPrev = 0;
double fpfPrev = 0;
for (int i = a.length - 1; i >= 0; i--) {
tt += fraction[i];
tf -= fraction[i];
ft += 1 - fraction[i];
ff -= 1 - fraction[i];
double tpf = tt / (tt + tf);
double fpf = 1.0 - ff / (ft + ff);
area += 0.5 * (tpf + tpfPrev) * (fpf - fpfPrev);
tpfPrev = tpf;
fpfPrev = fpf;
}
return area;
}
}
```
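The eval method above sweeps examples from highest score to lowest, accumulating true- and false-positive fractions and integrating the ROC curve with the trapezoid rule; the fraction array averages the labels of tied scores. For plain 0/1 targets this is equivalent to the Mann-Whitney statistic: the probability that a randomly chosen positive outranks a randomly chosen negative, with ties counted as one half. A small cross-check in that formulation, which agrees with both cases in AUCTest:

```
// Cross-check of AUC via the Mann-Whitney formulation (not mltk code).
// Only valid for 0/1 targets; the trapezoid implementation above also
// handles fractional targets through its fraction array.
public final class AucCheck {

    static double auc(double[] preds, double[] targets) {
        double pairs = 0;
        double wins = 0;
        for (int i = 0; i < preds.length; i++) {
            if (targets[i] != 1) continue;        // positives
            for (int j = 0; j < preds.length; j++) {
                if (targets[j] != 0) continue;    // negatives
                pairs++;
                if (preds[i] > preds[j]) wins += 1;
                else if (preds[i] == preds[j]) wins += 0.5; // ties count half
            }
        }
        return wins / pairs;
    }

    public static void main(String[] args) {
        System.out.println(auc(new double[] {0.8, 0.1, 0.05, 0.9},
                new double[] {1, 0, 0, 1})); // 1.0
        System.out.println(auc(new double[] {0.5, 0.5, 0.5, 0.5},
                new double[] {1, 0, 0, 1})); // 0.5
    }
}
```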
|
```package mltk.core.processor;
import org.junit.Assert;
import org.junit.Test;
import mltk.core.BinnedAttribute;
import mltk.core.Instances;
import mltk.core.InstancesTestHelper;
import mltk.util.MathUtils;
public class DiscretizerTest {
@Test
public void testMissingValue() {
Instances instances = InstancesTestHelper.getInstance().getDenseClassificationDatasetWMissing().copy();
Discretizer.discretize(instances, 0, 10);
Assert.assertTrue(instances.getAttributes().get(0).getClass() == BinnedAttribute.class);
for (int i = 0; i < 10; i++) {
Assert.assertTrue(instances.get(i).isMissing(0));
}
for (int i = 10; i < 20; i++) {
Assert.assertFalse(instances.get(i).isMissing(0));
Assert.assertTrue(MathUtils.isInteger(instances.get(i).getValue(0)));
}
}
}
```
|
Please help me generate a test for this class.
|
```package mltk.core.processor;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import mltk.cmdline.Argument;
import mltk.cmdline.CmdLineParser;
import mltk.core.Attribute;
import mltk.core.BinnedAttribute;
import mltk.core.Bins;
import mltk.core.Instance;
import mltk.core.Instances;
import mltk.core.Attribute.Type;
import mltk.core.io.AttributesReader;
import mltk.core.io.InstancesReader;
import mltk.core.io.InstancesWriter;
import mltk.util.Element;
import mltk.util.tuple.DoublePair;
/**
* Class for discretizers.
*
* @author Yin Lou
*
*/
public class Discretizer {
static class Options {
@Argument(name = "-r", description = "attribute file path")
String attPath = null;
@Argument(name = "-t", description = "training file path")
String trainPath = null;
@Argument(name = "-i", description = "input dataset path", required = true)
String inputPath = null;
@Argument(name = "-d", description = "discretized attribute file path")
String disAttPath = null;
@Argument(name = "-m", description = "output attribute file path")
String outputAttPath = null;
@Argument(name = "-o", description = "output dataset path", required = true)
String outputPath = null;
@Argument(name = "-n", description = "maximum num of bins (default: 256)")
int maxNumBins = 256;
}
/**
* Discretizes datasets.
*
* <pre>
* Usage: mltk.core.processor.Discretizer
* -i input dataset path
* -o output dataset path
* [-r] attribute file path
* [-t] training file path
* [-d] discretized attribute file path
* [-m] output attribute file path
* [-n] maximum num of bins (default: 256)
* </pre>
*
* @param args the command line arguments.
* @throws Exception
*/
public static void main(String[] args) throws Exception {
Options app = new Options();
CmdLineParser parser = new CmdLineParser(Discretizer.class, app);
try {
parser.parse(args);
if (app.maxNumBins < 0) {
throw new IllegalArgumentException();
}
} catch (IllegalArgumentException e) {
parser.printUsage();
System.exit(1);
}
List<Attribute> attributes = null;
if (app.trainPath != null) {
Instances trainSet = InstancesReader.read(app.attPath, app.trainPath);
attributes = trainSet.getAttributes();
for (int i = 0; i < attributes.size(); i++) {
Attribute attribute = attributes.get(i);
if (attribute.getType() == Type.NUMERIC) {
// Only discretize numeric attributes
Discretizer.discretize(trainSet, i, app.maxNumBins);
}
}
} else if (app.disAttPath != null) {
attributes = AttributesReader.read(app.disAttPath).v1;
} else {
parser.printUsage();
System.exit(1);
}
Instances instances = InstancesReader.read(app.attPath, app.inputPath);
List<Attribute> attrs = instances.getAttributes();
for (int i = 0; i < attrs.size(); i++) {
Attribute attr = attrs.get(i);
if (attr.getType() == Type.NUMERIC) {
BinnedAttribute binnedAttr = (BinnedAttribute) attributes.get(i);
// Only discretize numeric attributes
Discretizer.discretize(instances, i, binnedAttr.getBins());
}
}
if (app.outputAttPath != null) {
InstancesWriter.write(instances, app.outputAttPath, app.outputPath);
} else {
InstancesWriter.write(instances, app.outputPath);
}
}
/**
* Computes bins for an array of values.
*
* @param x the vector of input data.
* @param maxNumBins the maximum number of bins.
* @return bins for the array of values.
*/
public static Bins computeBins(double[] x, int maxNumBins) {
List<Element<Double>> list = new ArrayList<>();
for (double v : x) {
if (!Double.isNaN(v)) {
list.add(new Element<Double>(1.0, v));
}
}
return computeBins(list, maxNumBins);
}
/**
* Computes bins for a specified attribute.
*
* @param instances the dataset to discretize.
* @param attIndex the attribute index.
* @param maxNumBins the maximum number of bins.
* @return bins for a specified attribute.
*/
public static Bins computeBins(Instances instances, int attIndex, int maxNumBins) {
Attribute attribute = instances.getAttributes().get(attIndex);
List<Element<Double>> list = new ArrayList<>();
for (Instance instance : instances) {
if (!instance.isMissing(attribute.getIndex())) {
list.add(new Element<Double>(instance.getWeight(), instance.getValue(attribute)));
}
}
return computeBins(list, maxNumBins);
}
/**
* Computes bins for a list of weighted values.
*
* @param list the list of weighted values.
* @param maxNumBins the maximum number of bins.
* @return bins for the list of values.
*/
public static Bins computeBins(List<Element<Double>> list, int maxNumBins) {
Collections.sort(list);
List<DoublePair> stats = new ArrayList<>();
getStats(list, stats);
if (stats.size() <= maxNumBins) {
double[] a = new double[stats.size()];
for (int i = 0; i < a.length; i++) {
a[i] = stats.get(i).v1;
}
return new Bins(a, a);
} else {
double totalWeight = 0;
for (DoublePair stat : stats) {
totalWeight += stat.v2;
}
double binSize = totalWeight / maxNumBins;
List<Double> boundaryList = new ArrayList<>();
List<Double> medianList = new ArrayList<>();
int start = 0;
double weight = 0;
for (int i = 0; i < stats.size(); i++) {
weight += stats.get(i).v2;
totalWeight -= stats.get(i).v2;
if (weight >= binSize) {
if (i == start) {
boundaryList.add(stats.get(start).v1);
medianList.add(stats.get(start).v1);
weight = 0;
start = i + 1;
} else {
double d1 = weight - binSize;
double d2 = stats.get(i).v2 - d1;
if (d1 < d2) {
boundaryList.add(stats.get(i).v1);
medianList.add(getMedian(stats, start, weight / 2));
start = i + 1;
weight = 0;
} else {
weight -= stats.get(i).v2;
boundaryList.add(stats.get(i - 1).v1);
medianList.add(getMedian(stats, start, weight / 2));
start = i;
weight = stats.get(i).v2;
}
}
binSize = (totalWeight + weight) / (maxNumBins - boundaryList.size());
} else if (i == stats.size() - 1) {
boundaryList.add(stats.get(i).v1);
medianList.add(getMedian(stats, start, weight / 2));
}
}
double[] boundaries = new double[boundaryList.size()];
double[] medians = new double[medianList.size()];
for (int i = 0; i < boundaries.length; i++) {
boundaries[i] = boundaryList.get(i);
medians[i] = medianList.get(i);
}
return new Bins(boundaries, medians);
}
}
/**
* Discretizes an attribute using bins.
*
* @param instances the dataset to discretize.
* @param attIndex the attribute index.
* @param bins the bins.
*/
public static void discretize(Instances instances, int attIndex, Bins bins) {
Attribute attribute = instances.getAttributes().get(attIndex);
BinnedAttribute binnedAttribute = new BinnedAttribute(attribute.getName(), bins);
binnedAttribute.setIndex(attribute.getIndex());
instances.getAttributes().set(attIndex, binnedAttribute);
for (Instance instance : instances) {
if (!instance.isMissing(attribute.getIndex())) {
int v = bins.getIndex(instance.getValue(attribute.getIndex()));
instance.setValue(attribute.getIndex(), v);
}
}
}
/**
* Discretizes an attribute with a specified maximum number of bins.
*
* @param instances the dataset to discretize.
* @param attIndex the attribute index.
* @param maxNumBins the maximum number of bins.
*/
public static void discretize(Instances instances, int attIndex, int maxNumBins) {
Bins bins = computeBins(instances, attIndex, maxNumBins);
discretize(instances, attIndex, bins);
}
static double getMedian(List<DoublePair> stats, int start, double midPoint) {
double weight = 0;
for (int i = start; i < stats.size(); i++) {
weight += stats.get(i).v2;
if (weight >= midPoint) {
return stats.get(i).v1;
}
}
return stats.get((start + stats.size()) / 2).v1;
}
static void getStats(List<Element<Double>> list, List<DoublePair> stats) {
if (list.size() == 0) {
return;
}
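// Note: the Element objects built in computeBins pass the weight as the
// first constructor argument and the value as the second; reading them
// back here, 'element' holds the weight and 'weight' holds the value.
// (Inferred from the usage in this file; the Element source is not shown.)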
double totalWeight = list.get(0).element;
double lastValue = list.get(0).weight;
for (int i = 1; i < list.size(); i++) {
Element<Double> element = list.get(i);
double value = element.weight;
double weight = element.element;
if (value != lastValue) {
stats.add(new DoublePair(lastValue, totalWeight));
lastValue = value;
totalWeight = weight;
} else {
totalWeight += weight;
}
}
stats.add(new DoublePair(lastValue, totalWeight));
}
/**
* Constructor.
*/
public Discretizer() {
}
}
```
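Beyond the command-line entry point, the static API can be called directly: compute equal-frequency bins from one dataset and reuse them elsewhere, so training and test data share a single discretization. A minimal sketch with made-up values:

```
// Minimal usage sketch of the static Discretizer API (hypothetical values).
import mltk.core.Bins;
import mltk.core.processor.Discretizer;

public final class DiscretizerDemo {

    public static void main(String[] args) {
        double[] x = {0.1, 0.4, 0.4, 0.7, 1.5, 2.0, 2.2, 9.0};
        // Weighted equal-frequency binning into at most 4 bins.
        Bins bins = Discretizer.computeBins(x, 4);
        for (double b : bins.getBoundaries()) {
            System.out.println("upper bound: " + b);
        }
        // Map a new value into the learned bins.
        System.out.println("bin index of 1.0: " + bins.getIndex(1.0));
    }
}
```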
|
```package mltk.predictor.evaluation;
import mltk.util.MathUtils;
import org.junit.Assert;
import org.junit.Test;
public class RMSETest {
@Test
public void test() {
double[] preds = {1, 2, 3, 4};
double[] targets = {1.1, 1.9, 3.2, 4};
RMSE metric = new RMSE();
Assert.assertEquals(0.122474487, metric.eval(preds, targets), MathUtils.EPSILON);
}
}
```
|
Please help me generate a test for this class.
|
```package mltk.predictor.evaluation;
import mltk.core.Instances;
/**
* Class for evaluating root mean squared error (RMSE).
*
* @author Yin Lou
*
*/
public class RMSE extends SimpleMetric {
/**
* Constructor.
*/
public RMSE() {
super(false);
}
@Override
public double eval(double[] preds, double[] targets) {
double rmse = 0;
for (int i = 0; i < preds.length; i++) {
double d = targets[i] - preds[i];
rmse += d * d;
}
rmse = Math.sqrt(rmse / preds.length);
return rmse;
}
@Override
public double eval(double[] preds, Instances instances) {
double rmse = 0;
for (int i = 0; i < preds.length; i++) {
double d = instances.get(i).getTarget() - preds[i];
rmse += d * d;
}
rmse = Math.sqrt(rmse / preds.length);
return rmse;
}
}
```
|
```package mltk.core;
import org.junit.Assert;
import org.junit.Test;
import mltk.util.MathUtils;
public class BinsTest {
@Test
public void testBins() {
Bins bins = new Bins(new double[] {1, 5, 6}, new double[] {0.5, 2.5, 5.5});
Assert.assertEquals(0, bins.getIndex(-1));
Assert.assertEquals(0, bins.getIndex(0.3));
Assert.assertEquals(0, bins.getIndex(1));
Assert.assertEquals(1, bins.getIndex(1.1));
Assert.assertEquals(1, bins.getIndex(5));
Assert.assertEquals(2, bins.getIndex(5.5));
Assert.assertEquals(2, bins.getIndex(6.5));
Assert.assertEquals(0.5, bins.getValue(0), MathUtils.EPSILON);
Assert.assertEquals(2.5, bins.getValue(1), MathUtils.EPSILON);
Assert.assertEquals(5.5, bins.getValue(2), MathUtils.EPSILON);
}
}
```
|
Please help me generate a test for this class.
|
```package mltk.core;
import mltk.util.ArrayUtils;
/**
* Class for bins. Each bin is defined by its upper bound and median.
*
* @author Yin Lou
*
*/
public class Bins {
/**
* The upper bounds for each bin
*/
protected double[] boundaries;
/**
* The medians for each bin
*/
protected double[] medians;
protected Bins() {
}
/**
* Constructor.
*
* @param boundaries the upper bounds for each bin.
* @param medians the medians for each bin.
*/
public Bins(double[] boundaries, double[] medians) {
if (boundaries.length != medians.length) {
throw new IllegalArgumentException("Boundary size doesn't match medians size");
}
this.boundaries = boundaries;
this.medians = medians;
}
/**
* Returns the number of bins.
*
* @return the number of bins.
*/
public int size() {
return boundaries.length;
}
/**
* Returns the bin index given a real value using binary search.
*
* @param value the real value to discretize.
* @return the discretized index.
*/
public int getIndex(double value) {
if (value < boundaries[0]) {
return 0;
} else if (value >= boundaries[boundaries.length - 1]) {
return boundaries.length - 1;
} else {
return ArrayUtils.findInsertionPoint(boundaries, value);
}
}
/**
* Returns the median of a bin.
*
* @param index the index of the bin.
* @return the median of the bin.
*/
public double getValue(int index) {
return medians[index];
}
/**
* Returns the upper bounds for each bin.
*
* @return the upper bounds for each bin.
*/
public double[] getBoundaries() {
return boundaries;
}
/**
* Returns the medians for each bin.
*
* @return the medians for each bin.
*/
public double[] getMedians() {
return medians;
}
}
```
|
```package mltk.util;
import org.junit.Assert;
import org.junit.Test;
public class StatUtilsTest {
private int[] a = {1, 4, 3, 2};
private double[] b = {-1.2, 1.2, -5.3, 5.3};
@Test
public void testMax() {
Assert.assertEquals(4, StatUtils.max(a));
Assert.assertEquals(5.3, StatUtils.max(b), MathUtils.EPSILON);
}
@Test
public void testIndexOfMax() {
Assert.assertEquals(1, StatUtils.indexOfMax(a));
Assert.assertEquals(3, StatUtils.indexOfMax(b));
}
@Test
public void testMin() {
Assert.assertEquals(1, StatUtils.min(a));
Assert.assertEquals(-5.3, StatUtils.min(b), MathUtils.EPSILON);
}
@Test
public void testIndexOfMin() {
Assert.assertEquals(0, StatUtils.indexOfMin(a));
Assert.assertEquals(2, StatUtils.indexOfMin(b));
}
@Test
public void testSum() {
Assert.assertEquals(0, StatUtils.sum(b), MathUtils.EPSILON);
}
@Test
public void testSumSq() {
Assert.assertEquals(59.06, StatUtils.sumSq(b), MathUtils.EPSILON);
Assert.assertEquals(b[0] * b[0], StatUtils.sumSq(b, 0, 1), MathUtils.EPSILON);
}
@Test
public void testMean() {
Assert.assertEquals(0, StatUtils.mean(b), MathUtils.EPSILON);
}
@Test
public void testVariance() {
Assert.assertEquals(19.686666667, StatUtils.variance(b), MathUtils.EPSILON);
}
@Test
public void testStd() {
Assert.assertEquals(Math.sqrt(19.686666667), StatUtils.sd(b), MathUtils.EPSILON);
}
@Test
public void testRms() {
Assert.assertEquals(Math.sqrt(59.06 / b.length), StatUtils.rms(b), MathUtils.EPSILON);
}
}
```
|
Please help me generate a test for this class.
|
```package mltk.util;
/**
* Class for utility functions for computing statistics.
*
* @author Yin Lou
*
*/
public class StatUtils {
/**
* Returns the maximum element in an array.
*
* @param a the array.
* @return the maximum element in an array.
*/
public static int max(int[] a) {
int max = a[0];
for (int i = 1; i < a.length; i++) {
if (a[i] > max) {
max = a[i];
}
}
return max;
}
/**
* Returns the maximum element in an array.
*
* @param a the array.
* @return the maximum element in an array.
*/
public static double max(double[] a) {
double max = a[0];
for (int i = 1; i < a.length; i++) {
if (a[i] > max) {
max = a[i];
}
}
return max;
}
/**
* Returns the index of maximum element.
*
* @param a the array.
* @return the index of maximum element.
*/
public static int indexOfMax(int[] a) {
int max = a[0];
int idx = 0;
for (int i = 1; i < a.length; i++) {
if (a[i] > max) {
max = a[i];
idx = i;
}
}
return idx;
}
/**
* Returns the index of maximum element.
*
* @param a the array.
* @return the index of maximum element.
*/
public static int indexOfMax(double[] a) {
double max = a[0];
int idx = 0;
for (int i = 1; i < a.length; i++) {
if (a[i] > max) {
max = a[i];
idx = i;
}
}
return idx;
}
/**
* Returns the minimum element in an array.
*
* @param a the array.
* @return the minimum element in an array.
*/
public static int min(int[] a) {
int min = a[0];
for (int i = 1; i < a.length; i++) {
if (a[i] < min) {
min = a[i];
}
}
return min;
}
/**
* Returns the minimum element in an array.
*
* @param a the array.
* @return the minimum element in an array.
*/
public static double min(double[] a) {
double min = a[0];
for (int i = 1; i < a.length; i++) {
if (a[i] < min) {
min = a[i];
}
}
return min;
}
/**
* Returns the index of minimum element.
*
* @param a the array.
* @return the index of minimum element.
*/
public static int indexOfMin(int[] a) {
int min = a[0];
int idx = 0;
for (int i = 1; i < a.length; i++) {
if (a[i] < min) {
min = a[i];
idx = i;
}
}
return idx;
}
/**
* Returns the index of minimum element.
*
* @param a the array.
* @return the index of minimum element.
*/
public static int indexOfMin(double[] a) {
double min = a[0];
int idx = 0;
for (int i = 1; i < a.length; i++) {
if (a[i] < min) {
min = a[i];
idx = i;
}
}
return idx;
}
/**
* Returns the sum of elements in an array.
*
* @param a the array.
* @return the sum of elements in an array.
*/
public static double sum(double[] a) {
double sum = 0;
for (double v : a) {
sum += v;
}
return sum;
}
/**
* Returns the sum of squares.
*
* @param a the array.
* @return the sum of squares.
*/
public static double sumSq(double[] a) {
return sumSq(a, 0, a.length);
}
/**
* Returns the sum of squares within a specific range.
*
* @param a the array.
* @param fromIndex the index of the first element (inclusive).
* @param toIndex the index of the last element (exclusive).
* @return the sum of squares.
*/
public static double sumSq(double[] a, int fromIndex, int toIndex) {
double sq = 0.0;
for (int i = fromIndex; i < toIndex; i++) {
sq += a[i] * a[i];
}
return sq;
}
/**
* Returns the mean.
*
* @param a the array.
* @return the mean.
*/
public static double mean(double[] a) {
return mean(a, a.length);
}
/**
* Returns the mean.
*
* @param a the array.
* @param n the total number of elements.
* @return the mean.
*/
public static double mean(double[] a, int n) {
double avg = 0.0;
for (double v : a) {
avg += v;
}
return avg / n;
}
/**
* Returns the variance.
*
* @param a the array.
* @return the variance.
*/
public static double variance(double[] a) {
return variance(a, a.length);
}
/**
* Returns the variance.
*
* @param a the array.
* @param n the total number of elements.
* @return the variance.
*/
public static double variance(double[] a, int n) {
double avg = mean(a, n);
double sq = 0.0;
for (double v : a) {
double d = v - avg;
sq += d * d;
}
return sq / (n - 1.0);
}
/**
* Returns the standard deviation.
*
* @param a the array.
* @return the standard deviation.
*/
public static double sd(double[] a) {
return sd(a, a.length);
}
/**
* Returns the standard deviation.
*
* @param a the array.
* @param n the total number of elements.
* @return the standard deviation.
*/
public static double sd(double[] a, int n) {
return Math.sqrt(variance(a, n));
}
/**
* Returns the root mean square.
*
* @param a the array.
* @return the root mean square.
*/
public static double rms(double[] a) {
double rms = 0.0;
for (double v : a) {
rms += v * v;
}
rms /= a.length;
return Math.sqrt(rms);
}
/**
* Returns the mean absolute deviation around a central point.
*
* @param a the array.
* @param centralPoint the central point.
* @return the mean absolute deviation around a central point.
*/
public static double mad(double[] a, double centralPoint) {
double mad = 0.0;
for (double v : a) {
mad += Math.abs(v - centralPoint);
}
return mad / a.length;
}
}
```
|
```package mltk.util;
import org.junit.Assert;
import org.junit.Test;
public class MathUtilsTest {
@Test
public void testEquals() {
Assert.assertTrue(MathUtils.equals(0.1, 0.10000001));
Assert.assertFalse(MathUtils.equals(0.0, 1.0));
}
@Test
public void testIndicator() {
Assert.assertEquals(1, MathUtils.indicator(true));
Assert.assertEquals(0, MathUtils.indicator(false));
}
@Test
public void testIsFirstBetter() {
Assert.assertTrue(MathUtils.isFirstBetter(0.5, 0, true));
Assert.assertFalse(MathUtils.isFirstBetter(0.5, 0, false));
}
@Test
public void testIsInteger() {
Assert.assertTrue(MathUtils.isInteger(1.0));
Assert.assertFalse(MathUtils.isInteger(1.1));
}
@Test
public void testIsZero() {
Assert.assertTrue(MathUtils.isZero(MathUtils.EPSILON / 2));
Assert.assertFalse(MathUtils.isZero(MathUtils.EPSILON * 2));
}
@Test
public void testSigmoid() {
Assert.assertEquals(0.5, MathUtils.sigmoid(0), MathUtils.EPSILON);
}
@Test
public void testSign() {
Assert.assertEquals(1, MathUtils.sign(0.5));
Assert.assertEquals(0, MathUtils.sign(0.0));
Assert.assertEquals(-1, MathUtils.sign(-0.5));
Assert.assertEquals(1, MathUtils.sign(2));
Assert.assertEquals(0, MathUtils.sign(0));
Assert.assertEquals(-1, MathUtils.sign(-2));
}
}
```
|
Please help me generate a test for this class.
|
```package mltk.util;
/**
* Class for utility functions for math.
*
* @author Yin Lou
*
*/
public class MathUtils {
/**
* 1e-8
*/
public static final double EPSILON = 1e-8;
/**
* log(2)
*/
public static final double LOG2 = Math.log(2);
/**
* Returns {@code true} if two doubles are equal to within {@link mltk.util.MathUtils#EPSILON}.
*
* @param a the 1st number.
* @param b the 2nd number.
* @return {@code true} if two doubles are equal to within {@link mltk.util.MathUtils#EPSILON}.
*/
public static boolean equals(double a, double b) {
return Math.abs(a - b) < EPSILON;
}
/**
* Returns 1 if the input is true and 0 otherwise.
*
* @param b the input.
* @return 1 if the input is true and 0 otherwise.
*/
public static int indicator(boolean b) {
return b ? 1 : 0;
}
/**
* Returns {@code true} if the first value is better.
*
* @param a the 1st value.
* @param b the 2nd value.
* @param isLargerBetter {@code true} if larger values are better.
* @return {@code true} if the first value is better.
*/
public static boolean isFirstBetter(double a, double b, boolean isLargerBetter) {
if (isLargerBetter) {
return a > b;
} else {
return a < b;
}
}
/**
* Returns {@code true} if the floating number is integer.
*
* @param v the floating number.
* @return {@code true} if the floating number is integer.
*/
public static boolean isInteger(double v) {
return (v % 1) == 0;
}
/**
* Returns {@code true} if the floating number is zero.
*
* @param v the floating number.
* @return {@code true} if the floating number is zero.
*/
public static boolean isZero(double v) {
return Math.abs(v) < EPSILON;
}
/**
* Returns the value of a sigmoid function.
*
* @param a the number.
* @return the value of a sigmoid function.
*/
public static double sigmoid(double a) {
return 1 / (1 + Math.exp(-a));
}
/**
* Returns the sign of a number.
*
* @param a the number.
* @return the sign of a number.
*/
public static int sign(double a) {
if (a < 0) {
return -1;
} else if (a > 0) {
return 1;
} else {
return 0;
}
}
/**
* Returns the sign of a number.
*
* @param a the number.
* @return the sign of a number.
*/
public static int sign(int a) {
if (a < 0) {
return -1;
} else if (a > 0) {
return 1;
} else {
return 0;
}
}
/**
* Performs division and returns default value when division by zero.
*
* @param a the numerator.
* @param b the denominator.
* @param dv the default value.
* @return a / b or default value when division by zero.
*/
public static double divide(double a, double b, double dv) {
return isZero(b) ? dv : a / b;
}
}
```
|
```package com.lakesidemutual.customercore.tests.domain.customer;
import static org.junit.Assert.assertEquals;
import java.util.Calendar;
import java.util.Date;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import com.lakesidemutual.customercore.domain.customer.Address;
import com.lakesidemutual.customercore.domain.customer.CustomerAggregateRoot;
import com.lakesidemutual.customercore.domain.customer.CustomerProfileEntity;
import com.lakesidemutual.customercore.tests.TestUtils;
@RunWith(SpringRunner.class)
@ActiveProfiles("test")
public class CustomerAggregateRootTests {
private CustomerAggregateRoot customerA;
@Before
public void setUp() {
customerA = TestUtils.createTestCustomer("rgpp0wkpec", "Max", "Mustermann",
TestUtils.createDate(1, Calendar.JANUARY, 1990), "Oberseestrasse 10", "8640", "Rapperswil",
"max@example.com", "055 222 41 11");
}
@Test
public void whenAddressChanges_updateMoveHistory() {
final String oldStreetAddress = "Oberseestrasse 10";
final String oldPostalCode = "8640";
final String oldCity = "Rapperswil";
final String newStreetAddress = "Musterstrasse 1";
final String newPostalCode = "1234";
final String newCity = "Musterstadt";
Address newAddress = new Address(newStreetAddress, newPostalCode, newCity);
customerA.moveToAddress(newAddress);
assertEquals(1, customerA.getCustomerProfile().getMoveHistory().size());
Address oldAddress = customerA.getCustomerProfile().getMoveHistory().iterator().next();
assertEquals(oldStreetAddress, oldAddress.getStreetAddress());
assertEquals(oldPostalCode, oldAddress.getPostalCode());
assertEquals(oldCity, oldAddress.getCity());
assertEquals(newStreetAddress, customerA.getCustomerProfile().getCurrentAddress().getStreetAddress());
assertEquals(newPostalCode, customerA.getCustomerProfile().getCurrentAddress().getPostalCode());
assertEquals(newCity, customerA.getCustomerProfile().getCurrentAddress().getCity());
}
@Test
public void whenAddressDoesntChange_dontUpdateMoveHistory() {
final String oldStreetAddress = "Oberseestrasse 10";
final String oldPostalCode = "8640";
final String oldCity = "Rapperswil";
Address oldAddress = new Address(oldStreetAddress, oldPostalCode, oldCity);
customerA.moveToAddress(oldAddress);
assertEquals(0, customerA.getCustomerProfile().getMoveHistory().size());
assertEquals(oldStreetAddress, customerA.getCustomerProfile().getCurrentAddress().getStreetAddress());
assertEquals(oldPostalCode, customerA.getCustomerProfile().getCurrentAddress().getPostalCode());
assertEquals(oldCity, customerA.getCustomerProfile().getCurrentAddress().getCity());
}
@Test
public void whenExistingCustomerId_thenUpdateCustomerProfile() {
final String newFirstname = "Maxima";
final String newLastname = "Musterfrau";
final Date newBirthday = TestUtils.createDate(1, Calendar.JANUARY, 1990);
final String oldStreetAddress = "Oberseestrasse 10";
final String oldPostalCode = "8640";
final String oldCity = "Rapperswil";
final String newStreetAddress = "Musterstrasse 1";
final String newPostalCode = "1234";
final String newCity = "Musterstadt";
final String newEmail = "maxima@example.com";
final String newPhoneNumber = "055 222 41 11";
Address newAddress = new Address(newStreetAddress, newPostalCode, newCity);
CustomerProfileEntity updatedCustomerProfile = new CustomerProfileEntity(newFirstname, newLastname, newBirthday, newAddress, newEmail, newPhoneNumber);
customerA.updateCustomerProfile(updatedCustomerProfile);
CustomerProfileEntity customerProfile = customerA.getCustomerProfile();
assertEquals(newFirstname, customerProfile.getFirstname());
assertEquals(newLastname, customerProfile.getLastname());
assertEquals(newBirthday, customerProfile.getBirthday());
assertEquals(newStreetAddress, customerProfile.getCurrentAddress().getStreetAddress());
assertEquals(newPostalCode, customerProfile.getCurrentAddress().getPostalCode());
assertEquals(newCity, customerProfile.getCurrentAddress().getCity());
assertEquals(newEmail, customerProfile.getEmail());
assertEquals(newPhoneNumber, customerProfile.getPhoneNumber());
assertEquals(1, customerA.getCustomerProfile().getMoveHistory().size());
Address oldAddress = customerA.getCustomerProfile().getMoveHistory().iterator().next();
assertEquals(oldStreetAddress, oldAddress.getStreetAddress());
assertEquals(oldPostalCode, oldAddress.getPostalCode());
assertEquals(oldCity, oldAddress.getCity());
}
}
```
|
Please help me generate a test for this class.
|
```package com.lakesidemutual.customercore.domain.customer;
import javax.persistence.Embedded;
import javax.persistence.EmbeddedId;
import javax.persistence.Entity;
import javax.persistence.Table;
import org.microserviceapipatterns.domaindrivendesign.RootEntity;
import io.github.adr.embedded.MADR;
/**
* CustomerAggregateRoot is the root entity of the Customer aggregate. Note that there is
* no class for the Customer aggregate, so the package itself can be seen as the aggregate.
*/
@Entity
@Table(name = "customers")
public class CustomerAggregateRoot implements RootEntity {
@EmbeddedId
private CustomerId id;
@Embedded
private CustomerProfileEntity customerProfile;
public CustomerAggregateRoot() {
}
public CustomerAggregateRoot(CustomerId id, CustomerProfileEntity customerProfile) {
this.id = id;
this.customerProfile = customerProfile;
}
public CustomerProfileEntity getCustomerProfile() {
return customerProfile;
}
public CustomerId getId() {
return id;
}
@MADR(
value = 1,
title = "Data transfer between interface layer and domain layer",
contextAndProblem = "Need to pass information from the interfaces layer to the domain layer without introducing a layering violation",
alternatives = {
"Pass existing domain objects",
"Pass the DTOs directly",
"Pass the components of the DTO",
"Add a new value type in the domain layer and use it as parameter object"
},
chosenAlternative = "Pass existing domain objects",
justification = "This solution doesn't introduce a layering violation and it is simple because it doesn't require any additional classes."
)
public void moveToAddress(Address address) {
customerProfile.moveToAddress(address);
}
public void updateCustomerProfile(CustomerProfileEntity updatedCustomerProfile) {
customerProfile.setFirstname(updatedCustomerProfile.getFirstname());
customerProfile.setLastname(updatedCustomerProfile.getLastname());
customerProfile.setBirthday(updatedCustomerProfile.getBirthday());
customerProfile.moveToAddress(updatedCustomerProfile.getCurrentAddress());
customerProfile.setEmail(updatedCustomerProfile.getEmail());
customerProfile.setPhoneNumber(CustomerFactory.formatPhoneNumber(updatedCustomerProfile.getPhoneNumber()));
}
}
```
|
```package com.lakesidemutual.customercore.tests.interfaces;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.hasSize;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.Optional;
import org.hamcrest.Matcher;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Bean;
import org.springframework.data.domain.Sort;
import org.springframework.http.MediaType;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.ResultMatcher;
import org.springframework.web.servlet.HandlerInterceptor;
import com.lakesidemutual.customercore.application.CustomerService;
import com.lakesidemutual.customercore.domain.customer.Address;
import com.lakesidemutual.customercore.domain.customer.CustomerAggregateRoot;
import com.lakesidemutual.customercore.domain.customer.CustomerFactory;
import com.lakesidemutual.customercore.domain.customer.CustomerProfileEntity;
import com.lakesidemutual.customercore.infrastructure.CustomerRepository;
import com.lakesidemutual.customercore.interfaces.CustomerInformationHolder;
import com.lakesidemutual.customercore.interfaces.dtos.customer.CustomerProfileUpdateRequestDto;
import com.lakesidemutual.customercore.tests.TestUtils;
@RunWith(SpringRunner.class)
@ActiveProfiles("test")
@WebMvcTest(value = CustomerInformationHolder.class)
@WithMockUser
public class CustomerInformationHolderTests {
private CustomerAggregateRoot customerA;
private CustomerAggregateRoot customerB;
private CustomerAggregateRoot customerC;
@TestConfiguration
static class AuthenticationControllerTestContextConfiguration {
@Bean
public HandlerInterceptor rateLimitInterceptor() {
// This makes sure that the rate limiter is not active during unit-testing.
return new HandlerInterceptor() {};
}
@Bean
public CustomerService customerService() {
return new CustomerService();
}
}
@Autowired
private MockMvc mvc;
@MockBean
private CustomerRepository customerRepository;
@MockBean
private CustomerFactory customerFactory;
@Before
public void setUp() {
customerA = TestUtils.createTestCustomer("rgpp0wkpec", "Max", "Mustermann",
TestUtils.createDate(1, Calendar.JANUARY, 1990), "Oberseestrasse 10", "8640", "Rapperswil",
"max@example.com", "055 222 41 11");
customerB = TestUtils.createTestCustomer("btpchn7eg8", "Hans", "Mustermann",
TestUtils.createDate(1, Calendar.JANUARY, 1990), "Oberseestrasse 11", "8640", "Rapperswil",
"hans@example.com", "055 222 41 12");
customerC = TestUtils.createTestCustomer("5xvivyzxvc", "Anna", "Musterfrau",
TestUtils.createDate(1, Calendar.JANUARY, 1990), "Oberseestrasse 12", "8640", "Rapperswil",
"anna@example.com", "055 222 41 13");
}
@Test
public void whenNewCustomerIsCreated_thenReturnNewCustomer() throws Exception {
String firstname = "Max";
String lastname = "Mustermann";
Date birthday = TestUtils.createDate(1, Calendar.JANUARY, 1990);
String streetAddress = "Oberseestrasse 10";
String postalCode = "8640";
String city = "Rapperswil";
String email = "max@example.com";
String phoneNumber = "055 222 41 11";
CustomerProfileUpdateRequestDto registrationDto = new CustomerProfileUpdateRequestDto(
firstname, lastname, birthday, streetAddress,
postalCode, city, email, phoneNumber);
Address currentAddress = new Address(streetAddress, postalCode, city);
CustomerProfileEntity customerProfile = new CustomerProfileEntity(
firstname, lastname, birthday, currentAddress,
email, phoneNumber);
Mockito.when(customerFactory.create(customerProfile)).thenReturn(customerA);
mvc.perform(post("/customers").content(TestUtils.asJsonString(registrationDto))
.contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
.andExpect(new CustomerResultMatcher("$", customerA));
}
@Test
public void whenCustomersExist_thenGetAllCustomersShouldReturnAllCustomers() throws Exception {
Mockito.when(customerRepository.findAll(Sort.by(Sort.Direction.ASC, "customerProfile.firstname", "customerProfile.lastname"))).thenReturn(Arrays.asList(customerA, customerB, customerC));
mvc.perform(get("/customers"))
.andExpect(status().isOk())
.andExpect(jsonPath("$.customers", hasSize(3)))
.andExpect(new CustomerResultMatcher("$.customers[0]", customerA))
.andExpect(new CustomerResultMatcher("$.customers[1]", customerB))
.andExpect(new CustomerResultMatcher("$.customers[2]", customerC));
}
@Test
public void whenExistingCustomerIdIsUsed_thenCustomerShouldBeReturned() throws Exception {
Mockito.when(customerRepository.findById(customerA.getId())).thenReturn(Optional.of(customerA));
Mockito.when(customerRepository.findById(customerB.getId())).thenReturn(Optional.of(customerB));
Mockito.when(customerRepository.findById(customerC.getId())).thenReturn(Optional.of(customerC));
mvc.perform(get("/customers/" + customerA.getId().toString()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.customers", hasSize(1)))
.andExpect(new CustomerResultMatcher("$.customers[0]", customerA));
mvc.perform(get("/customers/" + customerB.getId().toString()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.customers", hasSize(1)))
.andExpect(new CustomerResultMatcher("$.customers[0]", customerB));
mvc.perform(get("/customers/" + customerC.getId().toString()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.customers", hasSize(1)))
.andExpect(new CustomerResultMatcher("$.customers[0]", customerC));
mvc.perform(get("/customers/" + customerA.getId().toString() + "," + customerB.getId().toString() + "," + customerC.getId().toString()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.customers", hasSize(3)))
.andExpect(new CustomerResultMatcher("$.customers[0]", customerA))
.andExpect(new CustomerResultMatcher("$.customers[1]", customerB))
.andExpect(new CustomerResultMatcher("$.customers[2]", customerC));
}
@Test
public void whenExistingCustomerIdIsUsedWithFieldsParameter_thenCustomerFieldsShouldBeReturned() throws Exception {
Mockito.when(customerRepository.findById(customerA.getId())).thenReturn(Optional.of(customerA));
mvc.perform(get("/customers/" + customerA.getId().toString() + "/?fields=firstname"))
.andExpect(status().isOk())
.andExpect(jsonPath("$.customers", hasSize(1)))
.andExpect(jsonPath("$.customers[0].firstname", is(customerA.getCustomerProfile().getFirstname())))
.andExpect(jsonPath("$.customers[0].lastname").doesNotExist())
.andExpect(jsonPath("$.customers[0].streetAddress").doesNotExist())
.andExpect(jsonPath("$.customers[0].email").doesNotExist());
mvc.perform(get("/customers/" + customerA.getId().toString() + "/?fields=lastname,streetAddress,email"))
.andExpect(status().isOk())
.andExpect(jsonPath("$.customers", hasSize(1)))
.andExpect(jsonPath("$.customers[0].firstname").doesNotExist())
.andExpect(jsonPath("$.customers[0].lastname", is(customerA.getCustomerProfile().getLastname())))
.andExpect(jsonPath("$.customers[0].streetAddress", is(customerA.getCustomerProfile().getCurrentAddress().getStreetAddress())))
.andExpect(jsonPath("$.customers[0].email", is(customerA.getCustomerProfile().getEmail())));
}
@Test
public void whenNoCustomersExist_thenGetAllCustomersShouldReturnEmptyArray() throws Exception {
mvc.perform(get("/customers")).andExpect(status().isOk())
.andExpect(jsonPath("$.size", is(0)))
.andExpect(jsonPath("$.customers", hasSize(0)));
}
@Test
public void whenNonexistingCustomerIdIsUsed_thenEmptyArrayShouldBeReturned() throws Exception {
mvc.perform(get("/customers/" + customerA.getId().toString()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.customers", hasSize(0)));
mvc.perform(get("/customers/" + customerB.getId().toString()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.customers", hasSize(0)));
mvc.perform(get("/customers/" + customerC.getId().toString()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.customers", hasSize(0)));
}
@Test
public void whenExistingCustomerIdIsUsed_thenCustomerProfileCanBeUpdated() throws Exception {
CustomerAggregateRoot updatedCustomerA = TestUtils.createTestCustomer("rgpp0wkpec", "Maxima", "Musterfrau",
TestUtils.createDate(1, Calendar.APRIL, 1990), "Musterstrasse 1", "1234", "Musterstadt",
"maxima@example.com", "055 222 41 11");
CustomerProfileEntity profile = updatedCustomerA.getCustomerProfile();
Address address = profile.getCurrentAddress();
CustomerProfileUpdateRequestDto profileUpdateRequestDto = new CustomerProfileUpdateRequestDto(
profile.getFirstname(), profile.getLastname(), profile.getBirthday(), address.getStreetAddress(),
address.getPostalCode(), address.getCity(), profile.getEmail(), profile.getPhoneNumber());
Mockito.when(customerRepository.findById(customerA.getId())).thenReturn(Optional.of(customerA));
mvc.perform(
put("/customers/" + customerA.getId().getId())
.content(TestUtils.asJsonString(profileUpdateRequestDto))
.contentType(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
.andExpect(new CustomerResultMatcher("$", updatedCustomerA));
}
@Test
public void whenNonexistingCustomerIdIsUsed_thenCustomerProfileUpdateFails() throws Exception {
CustomerAggregateRoot updatedCustomerA = TestUtils.createTestCustomer("rgpp0wkpec", "Maxima", "Musterfrau",
TestUtils.createDate(1, Calendar.APRIL, 1990), "Musterstrasse 1", "1234", "Musterstadt",
"maxima@example.com", "055 222 41 11");
CustomerProfileEntity profile = updatedCustomerA.getCustomerProfile();
CustomerProfileUpdateRequestDto profileUpdateRequestDto = new CustomerProfileUpdateRequestDto(
profile.getFirstname(), profile.getLastname(), profile.getBirthday(), profile.getCurrentAddress().getStreetAddress(),
profile.getCurrentAddress().getPostalCode(), profile.getCurrentAddress().getCity(), profile.getEmail(), profile.getPhoneNumber());
mvc.perform(
put("/customers/" + customerA.getId().getId())
.content(TestUtils.asJsonString(profileUpdateRequestDto))
.contentType(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON))
.andExpect(status().isNotFound());
}
}
final class CustomerResultMatcher implements ResultMatcher {
private String jsonPathPrefix;
private CustomerAggregateRoot customer;
CustomerResultMatcher(String jsonPathPrefix, CustomerAggregateRoot customer) {
this.jsonPathPrefix = jsonPathPrefix;
this.customer = customer;
}
@Override
public void match(MvcResult result) throws Exception {
JsonMatcher jsonMatcher = new JsonMatcher(jsonPathPrefix);
jsonMatcher.matchJson(result, ".customerId", customer.getId().toString());
new CustomerProfileResultMatcher(jsonPathPrefix, customer.getCustomerProfile()).match(result);
}
}
final class CustomerProfileResultMatcher implements ResultMatcher {
private String jsonPathPrefix;
private CustomerProfileEntity profile;
CustomerProfileResultMatcher(String jsonPathPrefix, CustomerProfileEntity profile) {
this.jsonPathPrefix = jsonPathPrefix;
this.profile = profile;
}
@Override
public void match(MvcResult result) throws Exception {
JsonMatcher jsonMatcher = new JsonMatcher(jsonPathPrefix);
jsonMatcher.matchJson(result, ".firstname", profile.getFirstname());
jsonMatcher.matchJson(result, ".lastname", profile.getLastname());
// FIXME Some default seems to have changed
// jsonMatcher.matchJson(result, ".birthday", TestUtils.createISO8601Timestamp(profile.getBirthday()));
jsonMatcher.matchJson(result, ".streetAddress",
profile.getCurrentAddress().getStreetAddress());
jsonMatcher.matchJson(result, ".postalCode",
profile.getCurrentAddress().getPostalCode());
jsonMatcher.matchJson(result, ".city", profile.getCurrentAddress().getCity());
jsonMatcher.matchJson(result, ".email", profile.getEmail());
jsonMatcher.matchJson(result, ".phoneNumber", profile.getPhoneNumber());
}
}
final class JsonMatcher {
private String jsonPathPrefix;
JsonMatcher(String jsonPathPrefix) {
this.jsonPathPrefix = jsonPathPrefix;
}
<T> void matchJson(MvcResult result, String jsonPath, Matcher<T> matcher) throws Exception {
jsonPath(jsonPathPrefix + jsonPath, matcher).match(result);
}
void matchJson(MvcResult result, String jsonPath, String expected) throws Exception {
jsonPath(jsonPathPrefix + jsonPath, is(expected)).match(result);
}
}
```
|
Please help me generate a test for this class.
|
```package com.lakesidemutual.policymanagement.interfaces;
import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo;
import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn;
import java.util.List;
import java.util.stream.Collectors;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.hateoas.Link;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import com.lakesidemutual.policymanagement.domain.customer.CustomerId;
import com.lakesidemutual.policymanagement.domain.policy.PolicyAggregateRoot;
import com.lakesidemutual.policymanagement.infrastructure.CustomerCoreRemoteProxy;
import com.lakesidemutual.policymanagement.infrastructure.PolicyRepository;
import com.lakesidemutual.policymanagement.interfaces.dtos.customer.CustomerDto;
import com.lakesidemutual.policymanagement.interfaces.dtos.customer.CustomerIdDto;
import com.lakesidemutual.policymanagement.interfaces.dtos.customer.CustomerNotFoundException;
import com.lakesidemutual.policymanagement.interfaces.dtos.customer.PaginatedCustomerResponseDto;
import com.lakesidemutual.policymanagement.interfaces.dtos.policy.PolicyDto;
/**
* This REST controller gives clients access to the customer data. It is an example of the
* <i>Information Holder Resource</i> pattern. This particular one is a special type of information holder called <i>Master Data Holder</i>.
*
* @see <a href="https://www.microservice-api-patterns.org/patterns/responsibility/endpointRoles/InformationHolderResource">Information Holder Resource</a>
* @see <a href="https://www.microservice-api-patterns.org/patterns/responsibility/informationHolderEndpointTypes/MasterDataHolder">Master Data Holder</a>
*/
@RestController
@RequestMapping("/customers")
public class CustomerInformationHolder {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
@Autowired
private PolicyRepository policyRepository;
@Autowired
private CustomerCoreRemoteProxy customerCoreRemoteProxy;
@Operation(summary = "Get all customers.")
@GetMapping
public ResponseEntity<PaginatedCustomerResponseDto> getCustomers(
@Parameter(description = "search terms to filter the customers by name", required = false) @RequestParam(value = "filter", required = false, defaultValue = "") String filter,
@Parameter(description = "the maximum number of customers per page", required = false) @RequestParam(value = "limit", required = false, defaultValue = "10") Integer limit,
@Parameter(description = "the offset of the page's first customer", required = false) @RequestParam(value = "offset", required = false, defaultValue = "0") Integer offset) {
logger.debug("Fetching a page of customers (offset={},limit={},filter='{}')", offset, limit, filter);
PaginatedCustomerResponseDto paginatedResponseIn = customerCoreRemoteProxy.getCustomers(filter, limit, offset);
PaginatedCustomerResponseDto paginatedResponseOut = createPaginatedCustomerResponseDto(
paginatedResponseIn.getFilter(),
paginatedResponseIn.getLimit(),
paginatedResponseIn.getOffset(),
paginatedResponseIn.getSize(),
paginatedResponseIn.getCustomers());
return ResponseEntity.ok(paginatedResponseOut);
}
private PaginatedCustomerResponseDto createPaginatedCustomerResponseDto(String filter, Integer limit, Integer offset, int size, List<CustomerDto> customerDtos) {
customerDtos.forEach(this::addCustomerLinks);
PaginatedCustomerResponseDto paginatedCustomerResponseDto = new PaginatedCustomerResponseDto(filter, limit, offset, size, customerDtos);
paginatedCustomerResponseDto.add(linkTo(methodOn(CustomerInformationHolder.class).getCustomers(filter, limit, offset)).withSelfRel());
if (offset > 0) {
paginatedCustomerResponseDto.add(linkTo(
methodOn(CustomerInformationHolder.class).getCustomers(filter, limit, Math.max(0, offset - limit)))
.withRel("prev"));
}
if (offset < size - limit) {
paginatedCustomerResponseDto.add(linkTo(methodOn(CustomerInformationHolder.class).getCustomers(filter, limit, offset + limit))
.withRel("next"));
}
return paginatedCustomerResponseDto;
}
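// Illustrative arithmetic (values assumed): with limit=10 and size=35, a request
// at offset=10 carries "self" (offset=10), "prev" (offset=0) and "next" (offset=20)
// links; at offset=0 the "prev" link is omitted, and at offset=30 the "next" link is.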
/**
* The CustomerDto could contain a nested list containing the customer's policies. However, many clients may not be
* interested in the policies when they access the customer resource. To avoid sending large messages containing lots
* of data that is seldom or never needed, we instead add a link to a separate endpoint that returns the customer's policies.
* This is an example of the <i>Linked Information Holder</i> pattern.
*
* @see <a href="https://www.microservice-api-patterns.org/patterns/quality/referenceManagement/LinkedInformationHolder">Linked Information Holder</a>
*/
private void addCustomerLinks(CustomerDto customerDto) {
CustomerIdDto customerId = new CustomerIdDto(customerDto.getCustomerId());
Link selfLink = linkTo(methodOn(CustomerInformationHolder.class).getCustomer(customerId)).withSelfRel();
Link policiesLink = linkTo(methodOn(CustomerInformationHolder.class).getPolicies(customerId, "")).withRel("policies");
customerDto.add(selfLink);
customerDto.add(policiesLink);
}
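// Sketch of a resulting HAL-style representation (values illustrative, not taken
// from an actual response):
// {
//   "customerId": "rgpp0wkpec",
//   "_links": {
//     "self":     { "href": ".../customers/rgpp0wkpec" },
//     "policies": { "href": ".../customers/rgpp0wkpec/policies" }
//   }
// }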
/**
* Returns the customer with the given customer id. Example Usage:
*
* <pre>
* <code>
* GET http://localhost:8090/customers/rgpp0wkpec
*
* {
* "customerId" : "rgpp0wkpec",
* "firstname" : "Max",
* "lastname" : "Mustermann",
* "birthday" : "1989-12-31T23:00:00.000+0000",
* "streetAddress" : "Oberseestrasse 10",
* "postalCode" : "8640",
* "city" : "Rapperswil",
* "email" : "admin@example.com",
* "phoneNumber" : "055 222 4111",
* "moveHistory" : [ ]
* }
* </code>
* </pre>
* If the given customer id is not valid, an error response with HTTP Status Code 404 is returned. The response body contains additional
* information about the error in JSON form. This is an example of the <a href="https://www.microservice-api-patterns.org/patterns/quality/qualityManagementAndGovernance/ErrorReport">Error Report</a>
* pattern:
* <pre>
* <code>
* GET http://localhost:8090/customers/123
*
* {
* "timestamp" : "2018-09-18T08:28:44.644+0000",
* "status" : 404,
* "error" : "Not Found",
* "message" : "Failed to find a customer with id '123'.",
* "path" : "/customers/123"
* }
* </code>
* </pre>
*
* @see <a href="https://www.microservice-api-patterns.org/patterns/quality/qualityManagementAndGovernance/ErrorReport">https://www.microservice-api-patterns.org/patterns/quality/qualityManagementAndGovernance/ErrorReport</a>
*/
@Operation(summary = "Get customer with a given customer id.")
@GetMapping(value = "/{customerIdDto}")
public ResponseEntity<CustomerDto> getCustomer(
@Parameter(description = "the customer's unique id", required = true) @PathVariable CustomerIdDto customerIdDto) {
CustomerId customerId = new CustomerId(customerIdDto.getId());
logger.debug("Fetching a customer with id '{}'", customerId.getId());
CustomerDto customer = customerCoreRemoteProxy.getCustomer(customerId);
if(customer == null) {
final String errorMessage = "Failed to find a customer with id '" + customerId.getId() + "'";
logger.warn(errorMessage);
throw new CustomerNotFoundException(errorMessage);
}
addCustomerLinks(customer);
return ResponseEntity.ok(customer);
}
@Operation(summary = "Get a customer's policies.")
@GetMapping(value = "/{customerIdDto}/policies")
public ResponseEntity<List<PolicyDto>> getPolicies(
@Parameter(description = "the customer's unique id", required = true) @PathVariable CustomerIdDto customerIdDto,
@Parameter(description = "a comma-separated list of the fields that should be expanded in the response", required = false) @RequestParam(value = "expand", required = false, defaultValue = "") String expand) {
CustomerId customerId = new CustomerId(customerIdDto.getId());
logger.debug("Fetching policies for customer with id '{}' (fields='{}')", customerId.getId(), expand);
List<PolicyAggregateRoot> policies = policyRepository.findAllByCustomerIdOrderByCreationDateDesc(customerId);
List<PolicyDto> policyDtos = policies.stream().map(p -> createPolicyDto(p, expand)).collect(Collectors.toList());
return ResponseEntity.ok(policyDtos);
}
private PolicyDto createPolicyDto(PolicyAggregateRoot policy, String expand) {
PolicyDto policyDto = PolicyDto.fromDomainObject(policy);
if(expand.equals("customer")) {
CustomerDto customer = customerCoreRemoteProxy.getCustomer(policy.getCustomerId());
policyDto.setCustomer(customer);
}
Link selfLink = linkTo(methodOn(PolicyInformationHolder.class).getPolicy(policy.getId(), expand)).withSelfRel();
policyDto.add(selfLink);
return policyDto;
}
}
```
|
```package com.lakesidemutual.customercore.tests.interfaces.dtos.customer;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.json.JsonTest;
import org.springframework.boot.test.json.JacksonTester;
import org.springframework.test.context.junit4.SpringRunner;
import com.lakesidemutual.customercore.interfaces.dtos.customer.AddressDto;
@RunWith(SpringRunner.class)
@JsonTest
public class AddressDtoTests {
@Autowired
private JacksonTester<AddressDto> json;
@Test
public void deserializeJson() throws Exception {
String content = "{\"streetAddress\":\"Oberseestrasse 10\",\"postalCode\":\"8640\",\"city\":\"Rapperswil\"}";
AddressDto address = json.parseObject(content);
assertThat(address.getStreetAddress()).isEqualTo("Oberseestrasse 10");
assertThat(address.getPostalCode()).isEqualTo("8640");
assertThat(address.getCity()).isEqualTo("Rapperswil");
}
@Test
public void serializeJson() throws Exception {
AddressDto address = new AddressDto("Oberseestrasse 10", "8640", "Rapperswil");
assertJsonPropertyEquals(address, "@.streetAddress", "Oberseestrasse 10");
assertJsonPropertyEquals(address, "@.postalCode", "8640");
assertJsonPropertyEquals(address, "@.city", "Rapperswil");
}
private void assertJsonPropertyEquals(AddressDto address, String key, String value) throws Exception {
assertThat(json.write(address)).extractingJsonPathStringValue(key).isEqualTo(value);
}
}
```
|
Please help me generate a test for this class.
|
```package com.lakesidemutual.customermanagement.interfaces.dtos;
/**
* AddressDto is a data transfer object (DTO) that represents the postal address of a customer.
* */
public class AddressDto {
private String streetAddress;
private String postalCode;
private String city;
public AddressDto() {
}
public String getStreetAddress() {
return streetAddress;
}
public String getPostalCode() {
return postalCode;
}
public String getCity() {
return city;
}
public void setStreetAddress(String streetAddress) {
this.streetAddress = streetAddress;
}
public void setPostalCode(String postalCode) {
this.postalCode = postalCode;
}
public void setCity(String city) {
this.city = city;
}
}
```
|
```package com.lakesidemutual.policymanagement.tests.infrastructure;
import static org.assertj.core.api.Assertions.assertThat;
import java.math.BigDecimal;
import java.util.Calendar;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
import org.springframework.test.context.junit4.SpringRunner;
import com.lakesidemutual.policymanagement.domain.customer.CustomerId;
import com.lakesidemutual.policymanagement.domain.policy.PolicyAggregateRoot;
import com.lakesidemutual.policymanagement.infrastructure.PolicyRepository;
import com.lakesidemutual.policymanagement.tests.TestUtils;
@RunWith(SpringRunner.class)
@DataJpaTest
public class PolicyRepositoryTests {
@Autowired
private PolicyRepository policyRepository;
@Autowired
private TestEntityManager entityManager;
@Before
public void setUp() {
entityManager.persist(TestUtils.createTestPolicy("h3riovf4xq", "rgpp0wkpec", TestUtils.createDate(2, Calendar.JANUARY, 2019), TestUtils.createDate(1, Calendar.FEBRUARY, 2019), TestUtils.createDate(1, Calendar.FEBRUARY, 2020), BigDecimal.valueOf(1500), BigDecimal.valueOf(1000000), BigDecimal.valueOf(250)));
entityManager.persist(TestUtils.createTestPolicy("h3riovf5xq", "rgpp0wkpec", TestUtils.createDate(7, Calendar.JANUARY, 2019), TestUtils.createDate(1, Calendar.FEBRUARY, 2019), TestUtils.createDate(1, Calendar.FEBRUARY, 2020), BigDecimal.valueOf(1500), BigDecimal.valueOf(100000), BigDecimal.valueOf(190)));
entityManager.persist(TestUtils.createTestPolicy("h3riovf6xq", "rgpp0wkpec", TestUtils.createDate(3, Calendar.JANUARY, 2019), TestUtils.createDate(1, Calendar.FEBRUARY, 2019), TestUtils.createDate(1, Calendar.FEBRUARY, 2020), BigDecimal.valueOf(1500), BigDecimal.valueOf(10000), BigDecimal.valueOf(120)));
entityManager.persist(TestUtils.createTestPolicy("h3riovf7xq", "rgpp1wkpec", TestUtils.createDate(5, Calendar.JANUARY, 2019), TestUtils.createDate(1, Calendar.FEBRUARY, 2019), TestUtils.createDate(1, Calendar.FEBRUARY, 2020), BigDecimal.valueOf(1500), BigDecimal.valueOf(1000000), BigDecimal.valueOf(180)));
entityManager.persist(TestUtils.createTestPolicy("h3riovf8xq", "rgpp2wkpec", TestUtils.createDate(4, Calendar.JANUARY, 2019), TestUtils.createDate(1, Calendar.FEBRUARY, 2019), TestUtils.createDate(1, Calendar.FEBRUARY, 2020), BigDecimal.valueOf(1500), BigDecimal.valueOf(1000000), BigDecimal.valueOf(200)));
}
@Test
public void testFindAllByCustomerIdOrderByCreationDateDesc1() throws Exception {
List<PolicyAggregateRoot> policies = policyRepository.findAllByCustomerIdOrderByCreationDateDesc(new CustomerId("rgpp0wkpec"));
assertThat(policies).hasSize(3);
PolicyAggregateRoot policy1 = policies.get(0);
PolicyAggregateRoot policy2 = policies.get(1);
PolicyAggregateRoot policy3 = policies.get(2);
assertThat(policy1.getCreationDate()).isEqualTo(TestUtils.createDate(7, Calendar.JANUARY, 2019));
assertThat(policy2.getCreationDate()).isEqualTo(TestUtils.createDate(3, Calendar.JANUARY, 2019));
assertThat(policy3.getCreationDate()).isEqualTo(TestUtils.createDate(2, Calendar.JANUARY, 2019));
}
@Test
public void testFindAllByCustomerIdOrderByCreationDateDesc2() throws Exception {
List<PolicyAggregateRoot> policies = policyRepository.findAllByCustomerIdOrderByCreationDateDesc(new CustomerId("rgpp1wkpec"));
assertThat(policies).hasSize(1);
PolicyAggregateRoot policy1 = policies.get(0);
assertThat(policy1.getCreationDate()).isEqualTo(TestUtils.createDate(5, Calendar.JANUARY, 2019));
}
@Test
public void testFindAllByCustomerIdOrderByCreationDateDesc3() throws Exception {
List<PolicyAggregateRoot> policies = policyRepository.findAllByCustomerIdOrderByCreationDateDesc(new CustomerId("abcdef"));
assertThat(policies).isEmpty();
}
}
```
|
Please help me generate a test for this class.
|
```package com.lakesidemutual.policymanagement.infrastructure;
import java.util.List;
import org.microserviceapipatterns.domaindrivendesign.Repository;
import org.springframework.data.jpa.repository.JpaRepository;
import com.lakesidemutual.policymanagement.domain.customer.CustomerId;
import com.lakesidemutual.policymanagement.domain.policy.PolicyAggregateRoot;
import com.lakesidemutual.policymanagement.domain.policy.PolicyId;
/**
* The PolicyRepository can be used to read and write PolicyAggregateRoot objects from and to the backing database. Spring automatically
* searches for interfaces that extend the JpaRepository interface and creates a corresponding Spring bean for each of them. For more information
* on repositories visit the <a href="https://docs.spring.io/spring-data/jpa/docs/current/reference/html/">Spring Data JPA - Reference Documentation</a>.
* */
public interface PolicyRepository extends JpaRepository<PolicyAggregateRoot, PolicyId>, Repository {
default PolicyId nextId() {
return PolicyId.random();
}
public List<PolicyAggregateRoot> findAllByCustomerIdOrderByCreationDateDesc(CustomerId customerId);
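// Usage sketch (hypothetical caller code; Spring provides the bean, as noted above):
//   @Autowired
//   private PolicyRepository policyRepository;
//   ...
//   PolicyId newId = policyRepository.nextId();
//   List<PolicyAggregateRoot> policies =
//       policyRepository.findAllByCustomerIdOrderByCreationDateDesc(new CustomerId("rgpp0wkpec"));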
}
```
|
```package example.customers;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import example.customers.Customer.Address;
import example.customers.Customer.Address.Location;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
/**
* Integration tests for {@link CustomerRepository}.
*
* @author Oliver Gierke
*/
@SpringBootTest
@RunWith(SpringRunner.class)
public class CustomerRepositoryIntegrationTest {
@Autowired CustomerRepository repository;
@Test
public void savesAndFindsCustomer() {
Customer customer = repository.save(new Customer("Dave", "Matthews",
new Address("street", "zipCode", "city", new Location(55.349451, -131.673817))));
assertThat(repository.findOne(customer.getId()), is(customer));
}
}
```
|
Please help me generate a test for this class.
|
```package example.customers;
import java.util.UUID;
import org.springframework.data.repository.CrudRepository;
/**
* @author Oliver Gierke
*/
public interface CustomerRepository extends CrudRepository<Customer, UUID> {}
```
|
```package example.stores;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import example.stores.Store.Address;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.Metrics;
import org.springframework.data.geo.Point;
import org.springframework.test.context.junit4.SpringRunner;
/**
* Integration tests for {@link StoreRepository}.
*
* @author Oliver Gierke
*/
@SpringBootTest
@RunWith(SpringRunner.class)
public class StoreRepositoryIntegrationTests {
@Autowired StoreRepository repository;
@Before
@After
public void setUp() {
repository.deleteAll();
}
@Test
public void findsStoresByLocation() {
Point location = new Point(-73.995146, 40.740337);
Store store = new Store("Foo", new Address("street", "city", "zip", location));
store = repository.save(store);
Page<Store> stores = repository.findByAddressLocationNear(location, new Distance(1.0, Metrics.KILOMETERS),
new PageRequest(0, 10));
assertThat(stores.getContent(), hasSize(1));
assertThat(stores.getContent(), hasItem(store));
}
}
```
|
Please help me generate a test for this class.
|
```package example.stores;
import lombok.Value;
import java.util.UUID;
import org.springframework.data.annotation.Id;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexed;
import org.springframework.data.mongodb.core.mapping.Document;
/**
* Entity to represent a {@link Store}.
*
* @author Oliver Gierke
*/
@Value
@Document
public class Store {
@Id UUID id = UUID.randomUUID();
String name;
Address address;
@Value
public static class Address {
String street, city, zip;
@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE) Point location;
}
}
```
|
```package org.adblockplus.libadblockplus.android.webview.content_type;
import android.net.Uri;
import org.adblockplus.ContentType;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
import static org.adblockplus.libadblockplus.HttpClient.HEADER_ACCEPT;
import static org.adblockplus.libadblockplus.HttpClient.HEADER_CONTENT_LENGTH;
import static org.adblockplus.libadblockplus.HttpClient.HEADER_REQUESTED_WITH;
import static org.adblockplus.libadblockplus.HttpClient.HEADER_REQUESTED_WITH_XMLHTTPREQUEST;
import static org.adblockplus.libadblockplus.HttpClient.MIME_TYPE_TEXT_HTML;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
public class HeadersContentTypeDetectorTest extends BaseContentTypeDetectorTest
{
private static final Uri URI_EXAMPLE = parseUri("https://example.com");
private static final Map<String, String> XML_HEADER = new HashMap<String, String>()
{{
put(HEADER_REQUESTED_WITH, HEADER_REQUESTED_WITH_XMLHTTPREQUEST);
}};
private static final Map<String, String> BROKEN_XML_HEADER = new HashMap<String, String>()
{{
put(HEADER_REQUESTED_WITH, "OtherValue");
}};
private static final Map<String, String> SUBDOCUMENT_HEADER = new HashMap<String, String>()
{{
put(HEADER_ACCEPT, MIME_TYPE_TEXT_HTML);
}};
private static final Map<String, String> CONTENT_LENGTH_HEADER = new HashMap<String, String>()
{{
put(HEADER_CONTENT_LENGTH, "1000");
}};
private final HeadersContentTypeDetector detector = new HeadersContentTypeDetector();
@Test
public void testHeaderRequests()
{
assertEquals(ContentType.XMLHTTPREQUEST,
detector.detect(mockRequest(URI_EXAMPLE, XML_HEADER)));
assertEquals(ContentType.SUBDOCUMENT,
detector.detect(mockRequest(URI_EXAMPLE, SUBDOCUMENT_HEADER)));
// not detected
assertNull(detector.detect(mockRequest(URI_EXAMPLE, BROKEN_XML_HEADER)));
// totally different header (expected not detected)
assertNull(detector.detect(mockRequest(URI_EXAMPLE, CONTENT_LENGTH_HEADER)));
}
}
```
|
Please help me generate a test for this class.
|
```package org.adblockplus.libadblockplus.android.webview.content_type;
import android.webkit.WebResourceRequest;
import org.adblockplus.ContentType;
import java.util.Map;
import timber.log.Timber;
import static org.adblockplus.libadblockplus.HttpClient.HEADER_ACCEPT;
import static org.adblockplus.libadblockplus.HttpClient.HEADER_REQUESTED_WITH;
import static org.adblockplus.libadblockplus.HttpClient.HEADER_REQUESTED_WITH_XMLHTTPREQUEST;
import static org.adblockplus.libadblockplus.HttpClient.MIME_TYPE_TEXT_HTML;
/**
* Detects content type based on headers
* <p>
* It has limited functionality and can detect only
* two types of content:
* - {@link ContentType#XMLHTTPREQUEST} and
* - {@link ContentType#SUBDOCUMENT}
* <p>
* Should be used in {@link OrderedContentTypeDetector}
*/
public class HeadersContentTypeDetector implements ContentTypeDetector
{
@Override
public ContentType detect(final WebResourceRequest request)
{
final Map<String, String> headers = request.getRequestHeaders();
final boolean isXmlHttpRequest =
headers.containsKey(HEADER_REQUESTED_WITH) &&
HEADER_REQUESTED_WITH_XMLHTTPREQUEST.equals(headers.get(HEADER_REQUESTED_WITH));
if (isXmlHttpRequest)
{
Timber.w("using xmlhttprequest content type");
return ContentType.XMLHTTPREQUEST;
}
final String acceptType = headers.get(HEADER_ACCEPT);
if (acceptType != null && acceptType.contains(MIME_TYPE_TEXT_HTML))
{
Timber.w("using subdocument content type");
return ContentType.SUBDOCUMENT;
}
// not detected
return null;
}
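// Minimal usage sketch (the detector is normally chained inside an
// OrderedContentTypeDetector, as the class comment notes):
//   ContentTypeDetector detector = new HeadersContentTypeDetector();
//   ContentType type = detector.detect(request); // null when neither header matches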
}
```
|
```package org.adblockplus.libadblockplus.test;
import org.adblockplus.libadblockplus.FileSystemUtils;
import org.adblockplus.libadblockplus.JsValue;
import org.adblockplus.libadblockplus.MockFileSystem;
import org.junit.Test;
import java.io.File;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.TestCase.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
public class FileSystemTest extends BaseJsEngineTest
{
protected final MockFileSystem mockFileSystem = new MockFileSystem();
// get file path relative to basePath
protected String unresolve(final String filename)
{
return FileSystemUtils.unresolve(basePath, new File(filename));
}
@Override
public void setUp()
{
setUpFileSystem(mockFileSystem);
super.setUp();
}
@Test
public void testWriteError()
{
mockFileSystem.success = false;
jsEngine.evaluate("let error = true; _fileSystem.write('foo', 'bar', function(e) {error = e})").dispose();
final JsValue error = jsEngine.evaluate("error");
assertFalse(error.isUndefined());
assertNotNull(error.asString());
error.dispose();
}
private ReadResult readFile()
{
jsEngine.evaluate("let result = {}; _fileSystem.read('', function(r) {result.content = r.content;}, function(error) {result.error = error;})").dispose();
final JsValue content = jsEngine.evaluate("result.content");
final JsValue error = jsEngine.evaluate("result.error");
return new ReadResult(content, error);
}
@Test
public void testRead()
{
final String CONTENT = "foo";
mockFileSystem.contentToRead = CONTENT;
final ReadResult result = readFile();
assertEquals(CONTENT, result.getContent().asString());
assertTrue(result.getError().isUndefined());
result.dispose();
}
@Test
public void testReadError()
{
mockFileSystem.success = false;
final ReadResult result = readFile();
assertTrue(result.getContent().isUndefined());
assertFalse(result.getError().isUndefined());
assertNotNull(result.getError().asString());
result.dispose();
}
@Test
public void testReadException()
{
mockFileSystem.exception = true;
final ReadResult result = readFile();
assertTrue(result.getContent().isUndefined());
assertFalse(result.getError().isUndefined());
assertNotNull(result.getError().asString());
result.dispose();
}
@Test
public void testWrite()
{
jsEngine.evaluate("let error = true; _fileSystem.write('foo', 'bar', function(e) {error = e})").dispose();
assertNotNull(mockFileSystem.lastWrittenFile);
assertEquals("foo", unresolve(mockFileSystem.lastWrittenFile));
assertNotNull(mockFileSystem.lastWrittenContent);
assertEquals("bar", mockFileSystem.lastWrittenContent);
final JsValue value = jsEngine.evaluate("error");
assertTrue(value.isUndefined());
value.dispose();
}
@Test
public void testWriteException()
{
mockFileSystem.exception = true;
jsEngine.evaluate("let error = true; _fileSystem.write('foo', 'bar', function(e) {error = e})").dispose();
final JsValue error = jsEngine.evaluate("error");
assertFalse(error.isUndefined());
assertNotNull(error.asString());
error.dispose();
}
@Test
public void testMoveError()
{
mockFileSystem.success = false;
jsEngine.evaluate("let error; _fileSystem.move('foo', 'bar', function(e) {error = e})").dispose();
final JsValue error = jsEngine.evaluate("error");
assertFalse(error.isUndefined());
assertNotNull(error.asString());
error.dispose();
}
@Test
public void testMove()
{
jsEngine.evaluate("let error = true; _fileSystem.move('foo', 'bar', function(e) {error = e})").dispose();
assertNotNull(mockFileSystem.movedFrom);
assertEquals("foo", unresolve(mockFileSystem.movedFrom));
assertNotNull(mockFileSystem.movedTo);
assertEquals("bar", unresolve(mockFileSystem.movedTo));
final JsValue value = jsEngine.evaluate("error");
assertTrue(value.isUndefined());
value.dispose();
}
@Test
public void testMoveException()
{
mockFileSystem.exception = true;
jsEngine.evaluate("let error; _fileSystem.move('foo', 'bar', function(e) {error = e})").dispose();
final JsValue error = jsEngine.evaluate("error");
assertFalse(error.isUndefined());
assertNotNull(error.asString());
error.dispose();
}
@Test
public void testRemoveError()
{
mockFileSystem.success = false;
jsEngine.evaluate("let error = true; _fileSystem.remove('foo', function(e) {error = e})").dispose();
final JsValue error = jsEngine.evaluate("error");
assertFalse(error.isUndefined());
assertNotNull(error.asString());
error.dispose();
}
@Test
public void testRemove()
{
jsEngine.evaluate("let error = true; _fileSystem.remove('foo', function(e) {error = e})").dispose();
assertNotNull(mockFileSystem.removedFile);
assertEquals("foo", unresolve(mockFileSystem.removedFile));
final JsValue value = jsEngine.evaluate("error");
assertTrue(value.isUndefined());
value.dispose();
}
@Test
public void testRemoveException()
{
mockFileSystem.exception = true;
jsEngine.evaluate("let error = true; _fileSystem.remove('foo', function(e) {error = e})").dispose();
final JsValue error = jsEngine.evaluate("error");
assertFalse(error.isUndefined());
assertNotNull(error.asString());
error.dispose();
}
@Test
public void testStat()
{
final boolean EXISTS = true;
final long MODIFIED = 1337L;
mockFileSystem.statExists = EXISTS;
mockFileSystem.statLastModified = MODIFIED;
jsEngine.evaluate("let result; _fileSystem.stat('foo', function(r) {result = r})").dispose();
assertNotNull(mockFileSystem.statFile);
assertEquals("foo", unresolve(mockFileSystem.statFile));
final JsValue resultError = jsEngine.evaluate("result.error");
assertTrue(resultError.isUndefined());
resultError.dispose();
final JsValue exists = jsEngine.evaluate("result.exists");
assertTrue(exists.isBoolean());
assertEquals(EXISTS, exists.asBoolean());
exists.dispose();
final JsValue modified = jsEngine.evaluate("result.lastModified");
assertTrue(modified.isNumber());
assertEquals(MODIFIED, modified.asLong());
modified.dispose();
}
@Test
public void testStatError()
{
mockFileSystem.success = false;
jsEngine.evaluate("let result; _fileSystem.stat('foo', function(r) {result = r})").dispose();
final JsValue error = jsEngine.evaluate("result.error");
assertFalse(error.isUndefined());
assertNotNull(error.asString());
error.dispose();
}
@Test
public void testStatException()
{
mockFileSystem.exception = true;
jsEngine.evaluate("let result; _fileSystem.stat('foo', function(r) {result = r})").dispose();
final JsValue error = jsEngine.evaluate("result.error");
assertFalse(error.isUndefined());
assertNotNull(error.asString());
error.dispose();
}
private static class ReadResult
{
private final JsValue content;
private final JsValue error;
public ReadResult(final JsValue content, final JsValue error)
{
this.content = content;
this.error = error;
}
public JsValue getContent()
{
return content;
}
public JsValue getError()
{
return error;
}
public void dispose()
{
content.dispose();
error.dispose();
}
}
}
```
|
Please help me generate a test for this class.
|
```package org.adblockplus.libadblockplus;
import java.nio.ByteBuffer;
public abstract class FileSystem
{
static
{
System.loadLibrary(BuildConfig.nativeLibraryName);
registerNatives();
}
/**
* Result of a stat operation, i.e. information about a file.
*/
public static class StatResult
{
private boolean exists;
private long modified;
public StatResult(final boolean exists, final long modified)
{
this.exists = exists;
this.modified = modified;
}
public boolean isExists()
{
return exists;
}
public long getModified()
{
return modified;
}
}
/**
* Default callback type for asynchronous filesystem calls.
*/
public static class Callback implements Disposable
{
protected final long ptr;
private final Disposer disposer;
Callback(final long ptr)
{
this.ptr = ptr;
this.disposer = new Disposer(this, new DisposeWrapper(this.ptr));
}
private static final class DisposeWrapper implements Disposable
{
private final long ptr;
public DisposeWrapper(final long ptr)
{
this.ptr = ptr;
}
@Override
public void dispose()
{
callbackDtor(this.ptr);
}
}
@Override
public void dispose()
{
this.disposer.dispose();
}
/**
* @param error An error string. Empty is success.
*/
public void onFinished(final String error)
{
callbackOnFinished(this.ptr, error);
}
}
/**
* Callback type for the asynchronous Read call.
*/
public static class ReadCallback implements Disposable
{
protected final long ptr;
private final Disposer disposer;
ReadCallback(final long ptr)
{
this.ptr = ptr;
this.disposer = new Disposer(this, new DisposeWrapper(this.ptr));
}
private static final class DisposeWrapper implements Disposable
{
private final long ptr;
public DisposeWrapper(final long ptr)
{
this.ptr = ptr;
}
@Override
public void dispose()
{
readCallbackDtor(this.ptr);
}
}
@Override
public void dispose()
{
this.disposer.dispose();
}
/**
* @param output byte buffer with the file content,
* (*direct* buffer, allocated with `ByteBuffer.allocateDirect`)
*/
public void onFinished(final ByteBuffer output)
{
readCallbackOnFinished(this.ptr, output);
}
}
/**
* Callback type for the asynchronous Stat call.
*/
public static class StatCallback implements Disposable
{
protected final long ptr;
private final Disposer disposer;
StatCallback(final long ptr)
{
this.ptr = ptr;
this.disposer = new Disposer(this, new DisposeWrapper(this.ptr));
}
private static final class DisposeWrapper implements Disposable
{
private final long ptr;
public DisposeWrapper(final long ptr)
{
this.ptr = ptr;
}
@Override
public void dispose()
{
statCallbackDtor(this.ptr);
}
}
@Override
public void dispose()
{
this.disposer.dispose();
}
/**
* @param result StatResult data.
* @param error error string. `Null` if no error.
*/
public void onFinished(final StatResult result, final String error)
{
statCallbackOnFinished(this.ptr, result, error);
}
}
/**
* Reads from a file.
* @param filename File name.
* @param doneCallback The callback called on completion with the input data.
* @param errorCallback The callback called if an error occurred.
*/
public abstract void read(final String filename,
final ReadCallback doneCallback,
final Callback errorCallback);
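// Assumed completion contract, inferred from the callback types above: an
// implementation invokes doneCallback.onFinished(buffer) with a *direct*
// ByteBuffer on success, or errorCallback.onFinished(errorString) on failure.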
/**
* Writes to a file.
* @param filename File name.
* @param data The data to write, *direct* buffer (allocated with `env->NewDirectByteBuffer()`)
* @param callback The callback called on completion.
*/
public abstract void write(final String filename,
final ByteBuffer data,
final Callback callback);
/**
* Moves a file (i.e. renames it).
* @param fromFilename Current file name.
* @param toFilename New file name.
* @param callback The callback called on completion.
*/
public abstract void move(final String fromFilename,
final String toFilename,
final Callback callback);
/**
* Removes a file.
* @param filename File name.
* @param callback The callback called on completion.
*/
public abstract void remove(final String filename,
final Callback callback);
/**
* Retrieves information about a file.
* @param filename File name.
* @param callback The callback called on completion.
*/
public abstract void stat(final String filename,
final StatCallback callback);
private static native void callbackOnFinished(long ptr, String error);
private static native void callbackDtor(long ptr);
private static native void readCallbackOnFinished(long ptr, ByteBuffer output);
private static native void readCallbackDtor(long ptr);
private static native void statCallbackOnFinished(long ptr, StatResult result, String error);
private static native void statCallbackDtor(long ptr);
private static native void registerNatives();
}
```
|
```package org.adblockplus.libadblockplus.test;
import org.adblockplus.libadblockplus.HeaderEntry;
import org.adblockplus.libadblockplus.HttpClient;
import org.adblockplus.libadblockplus.JsValue;
import org.adblockplus.libadblockplus.MockHttpClient;
import org.adblockplus.libadblockplus.ServerResponse;
import org.adblockplus.libadblockplus.android.Utils;
import org.junit.Test;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.LinkedList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
public class HttpClientTest extends BaseFilterEngineTest
{
private static final int RESPONSE_STATUS = 123;
private static final String HEADER_KEY = "Foo";
private static final String HEADER_VALUE = "Bar";
private static final Charset CHARSET = StandardCharsets.UTF_8;
private static final String RESPONSE = "(responseText)";
private final MockHttpClient mockHttpClient = new MockHttpClient();
@Override
public void setUp()
{
final ServerResponse response = new ServerResponse();
response.setResponseStatus(RESPONSE_STATUS);
response.setStatus(ServerResponse.NsStatus.OK);
response.setResponse(Utils.stringToByteBuffer(RESPONSE, CHARSET));
final List<HeaderEntry> headers = new LinkedList<>();
headers.add(new HeaderEntry(HEADER_KEY, HEADER_VALUE));
response.setResponseHeaders(headers);
mockHttpClient.response = response;
setUpHttpClient(mockHttpClient);
super.setUp();
}
@Test
public void testSuccessfulRequest()
{
jsEngine.evaluate(
"let foo; _webRequest.GET('http://example.com/', {X: 'Y'}, function(result) {foo = result;} )").dispose();
waitForDefined("foo");
assertTrue(mockHttpClient.called.get());
assertNotNull(mockHttpClient.getSpecificRequest("http://example.com/", HttpClient.REQUEST_METHOD_GET));
final JsValue foo = jsEngine.evaluate("foo");
assertFalse(foo.isUndefined());
foo.dispose();
final JsValue status = jsEngine.evaluate("foo.status");
assertEquals(
ServerResponse.NsStatus.OK.getStatusCode(),
status.asLong());
status.dispose();
final JsValue responseStatus = jsEngine.evaluate("foo.responseStatus");
assertEquals(
Long.valueOf(RESPONSE_STATUS).longValue(),
responseStatus.asLong());
responseStatus.dispose();
final JsValue responseText = jsEngine.evaluate("foo.responseText");
assertEquals(RESPONSE, responseText.asString());
responseText.dispose();
final JsValue respHeaders = jsEngine.evaluate("JSON.stringify(foo.responseHeaders)");
assertEquals(
"{\"" + HEADER_KEY + "\":\"" + HEADER_VALUE + "\"}",
respHeaders.asString());
respHeaders.dispose();
}
@Test
public void testRequestException()
{
mockHttpClient.exception.set(true);
jsEngine.evaluate(
"let foo; _webRequest.GET('http://example.com/', {X: 'Y'}, function(result) {foo = result;} )").dispose();
waitForDefined("foo");
assertTrue(mockHttpClient.called.get());
assertNotNull(mockHttpClient.getSpecificRequest("http://example.com/", HttpClient.REQUEST_METHOD_GET));
final JsValue foo = jsEngine.evaluate("foo");
assertFalse(foo.isUndefined());
foo.dispose();
final JsValue status = jsEngine.evaluate("foo.status");
assertEquals(
ServerResponse.NsStatus.ERROR_FAILURE.getStatusCode(),
status.asLong());
status.dispose();
}
}
```
|
Please help me generate a test for this class.
|
```package org.adblockplus.libadblockplus;
public abstract class HttpClient
{
public static final String HEADER_REFERRER = "Referer";
public static final String HEADER_REQUESTED_WITH = "X-Requested-With";
public static final String HEADER_REQUESTED_WITH_XMLHTTPREQUEST = "XMLHttpRequest";
public static final String HEADER_REQUESTED_RANGE = "Range";
public static final String HEADER_LOCATION = "Location";
public static final String HEADER_COOKIE = "Cookie";
public static final String HEADER_USER_AGENT = "User-Agent";
public static final String HEADER_ACCEPT = "Accept";
public static final String HEADER_REFRESH = "Refresh";
// use lower-case strings, as all header keys in WebResponse are lower-cased
public static final String HEADER_SET_COOKIE = "set-cookie";
public static final String HEADER_WWW_AUTHENTICATE = "www-authenticate";
public static final String HEADER_PROXY_AUTHENTICATE = "proxy-authenticate";
public static final String HEADER_EXPIRES = "expires";
public static final String HEADER_DATE = "date";
public static final String HEADER_RETRY_AFTER = "retry-after";
public static final String HEADER_LAST_MODIFIED = "last-modified";
public static final String HEADER_VIA = "via";
public static final String HEADER_SITEKEY = "x-adblock-key";
public static final String HEADER_CONTENT_TYPE = "content-type";
public static final String HEADER_CONTENT_LENGTH = "content-length";
public static final String HEADER_CSP = "content-security-policy";
static
{
System.loadLibrary(BuildConfig.nativeLibraryName);
registerNatives();
}
public static final int STATUS_CODE_OK = 200;
/**
* Possible values for request method argument (see `request(..)` method)
*/
public static final String REQUEST_METHOD_GET = "GET";
public static final String REQUEST_METHOD_POST = "POST";
public static final String REQUEST_METHOD_HEAD = "HEAD";
public static final String REQUEST_METHOD_OPTIONS = "OPTIONS";
public static final String REQUEST_METHOD_PUT = "PUT";
public static final String REQUEST_METHOD_DELETE = "DELETE";
public static final String REQUEST_METHOD_TRACE = "TRACE";
/**
* Some MIME types
*/
public static final String MIME_TYPE_TEXT_HTML = "text/html";
/**
* Checks if HTTP status code is a redirection.
* @param httpStatusCode HTTP status code to check.
* @return True for redirect status code.
*/
public static boolean isRedirectCode(final int httpStatusCode)
{
return httpStatusCode >= 300 && httpStatusCode <= 399;
}
/**
* HTTP status cannot be greater than 599 or less than 100.
*
* @param httpStatusCode HTTP status code to check.
* @return True when status is valid
*/
public static boolean isValidCode(final int httpStatusCode)
{
return httpStatusCode >= 100 && httpStatusCode <= 599;
}
/**
* Checks if HTTP status code is a success code.
* @param httpStatusCode HTTP status code to check.
* @return True for success status code.
*/
public static boolean isSuccessCode(final int httpStatusCode)
{
return httpStatusCode >= STATUS_CODE_OK && httpStatusCode <= 299;
}
/**
* Checks if HTTP status code means no content.
* See: https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html
* @param httpStatusCode HTTP status code to check.
* @return True for no content code.
*/
public static boolean isNoContentCode(final int httpStatusCode)
{
return httpStatusCode == 204 || httpStatusCode == 304 ||
(httpStatusCode >= 100 && httpStatusCode <= 199);
}
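// Worked examples, following directly from the ranges above:
//   isRedirectCode(302) == true     isRedirectCode(200)  == false
//   isSuccessCode(204)  == true     isSuccessCode(300)   == false
//   isValidCode(99)     == false    isNoContentCode(304) == true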
/**
* Generic callback
*/
public interface Callback
{
/**
* @param response server response.
*/
void onFinished(final ServerResponse response);
}
/**
* Callback type invoked when the server response is ready (used from JNI code).
*/
public static class JniCallback implements Callback, Disposable
{
protected final long ptr;
private final Disposer disposer;
public JniCallback(final long ptr)
{
this.ptr = ptr;
this.disposer = new Disposer(this, new DisposeWrapper(this.ptr));
}
private static final class DisposeWrapper implements Disposable
{
private final long ptr;
public DisposeWrapper(final long ptr)
{
this.ptr = ptr;
}
@Override
public void dispose()
{
callbackDtor(this.ptr);
}
}
@Override
public void dispose()
{
this.disposer.dispose();
}
/**
* @param response server response.
*/
@Override
public void onFinished(final ServerResponse response)
{
callbackOnFinished(this.ptr, response);
}
}
/**
* Performs a HTTP request.
* @param request HttpRequest
* @param callback to invoke when the server response is ready.
*/
public abstract void request(final HttpRequest request, final Callback callback);
private static native void callbackOnFinished(long ptr, ServerResponse response);
private static native void callbackDtor(long ptr);
private static native void registerNatives();
}
```
|
```package org.adblockplus.libadblockplus.test;
import org.adblockplus.libadblockplus.JsValue;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class DefaultPropertiesAppInfoJsObjectTest extends BaseJsEngineTest
{
@Test
public void testDefaultProperties()
{
final JsValue version = jsEngine.evaluate("_appInfo.version");
assertEquals("1.0", version.asString());
version.dispose();
final JsValue name = jsEngine.evaluate("_appInfo.name");
assertEquals("libadblockplus-android", name.asString());
name.dispose();
final JsValue application = jsEngine.evaluate("_appInfo.application");
assertEquals("android", application.asString());
application.dispose();
final JsValue appVersion = jsEngine.evaluate("_appInfo.applicationVersion");
assertEquals("0", appVersion.asString());
appVersion.dispose();
final JsValue locale = jsEngine.evaluate("_appInfo.locale");
assertEquals("en-US", locale.asString());
locale.dispose();
}
}
```
|
Please help me generate a test for this class.
|
```package org.adblockplus;
import org.jetbrains.annotations.NotNull;
/**
* Data class which identifies the application when downloading {@link Subscription}s.
*
* In most cases it is recommended NOT to create an {@link AppInfo} object; just pass a null value to
* {@link AdblockEngineFactory#getAdblockEngineBuilder} so that the correct {@link AppInfo} object is generated
* automatically.
*/
public class AppInfo
{
private final String version = "1.0";
private final String name = "libadblockplus-android";
private final String application;
private final String applicationVersion;
private final String locale;
private AppInfo(@NotNull final String application, @NotNull final String applicationVersion,
@NotNull final String locale)
{
this.application = application;
this.applicationVersion = applicationVersion;
this.locale = locale;
}
/**
* Creates {@link AppInfo.Builder} object.
* @return {@link Builder} object.
*/
@NotNull
public static Builder builder()
{
return new Builder();
}
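// Usage sketch (placeholder values; passing null to
// AdblockEngineFactory#getAdblockEngineBuilder is usually preferred, see above):
//   AppInfo info = AppInfo.builder()
//       .setApplication("org.example.browser")
//       .setApplicationVersion("1.2.3")
//       .setLocale("de-DE")
//       .build();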
/**
* {@link AppInfo} builder class.
*/
public static class Builder
{
private String application = "android";
private String applicationVersion = "0";
private String locale = "en-US";
private Builder()
{
}
/**
* Sets application name.
* @param application application name
* @return {@link Builder} to allow chaining
*/
public Builder setApplication(@NotNull final String application)
{
this.application = application;
return this;
}
/**
* Sets application version.
* @param applicationVersion application version
* @return {@link Builder} to allow chaining
*/
public Builder setApplicationVersion(@NotNull final String applicationVersion)
{
this.applicationVersion = applicationVersion;
return this;
}
/**
* Sets application locale.
* @param locale application locale
* @return {@link Builder} to allow chaining
*/
public Builder setLocale(@NotNull final String locale)
{
this.locale = locale;
return this;
}
/**
* Builds the {@link AppInfo} object.
* @note In most cases it is recommended NOT to create an {@link AppInfo} object; just pass a null value to
* {@link AdblockEngineFactory#getAdblockEngineBuilder} so that the correct {@link AppInfo} object is generated
* automatically.
* @return {@link AppInfo} object
*/
public AppInfo build()
{
return new AppInfo(this.application, this.applicationVersion, this.locale);
}
}
}
```
|
```package org.adblockplus.libadblockplus;
import java.util.List;
public class TestEventCallback extends EventCallback
{
private boolean called;
private List<JsValue> params;
public TestEventCallback()
{
reset();
}
public boolean isCalled()
{
return called;
}
public List<JsValue> getParams()
{
return params;
}
public void reset()
{
this.called = false;
this.params = null;
}
@Override
public void eventCallback(final List<JsValue> params)
{
this.called = true;
this.params = params;
}
}
```
|
Please help me generate a test for this class.
|
```package org.adblockplus.libadblockplus;
import java.util.List;
public abstract class EventCallback implements Disposable
{
private final Disposer disposer;
protected final long ptr;
static
{
System.loadLibrary(BuildConfig.nativeLibraryName);
registerNatives();
}
public EventCallback()
{
this.ptr = ctor(this);
this.disposer = new Disposer(this, new DisposeWrapper(this.ptr));
}
public abstract void eventCallback(List<JsValue> params);
@Override
public void dispose()
{
this.disposer.dispose();
}
private static final class DisposeWrapper implements Disposable
{
private final long ptr;
public DisposeWrapper(final long ptr)
{
this.ptr = ptr;
}
@Override
public void dispose()
{
dtor(this.ptr);
}
}
private static native void registerNatives();
private static native long ctor(Object obj);
private static native void dtor(long ptr);
}
```
|
```package org.adblockplus.libadblockplus.android.settings;
import org.adblockplus.ConnectionType;
import org.junit.Test;
import java.util.LinkedList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class AdblockSettingsTest
{
private static AdblockSettings buildModel(final int subscriptionsCount,
final int allowlistedDomainsCount)
{
final AdblockSettings settings = new AdblockSettings();
settings.setAdblockEnabled(true);
settings.setAcceptableAdsEnabled(true);
settings.setAllowedConnectionType(ConnectionType.WIFI);
final List<SubscriptionInfo> subscriptions = new LinkedList<>();
for (int i = 0; i < subscriptionsCount; i++)
{
subscriptions.add(new SubscriptionInfo("URL" + (i + 1), "Title" + (i + 1)));
}
settings.setSelectedSubscriptions(subscriptions);
final List<String> domains = new LinkedList<>();
for (int i = 0; i < allowlistedDomainsCount; i++)
{
domains.add("www.domain" + (i + 1) + ".com");
}
settings.setAllowlistedDomains(domains);
return settings;
}
@Test
public void testAdblockEnabled()
{
final AdblockSettings settings = new AdblockSettings();
settings.setAdblockEnabled(true);
assertTrue(settings.isAdblockEnabled());
settings.setAdblockEnabled(false);
assertFalse(settings.isAdblockEnabled());
}
@Test
public void testAcceptableAds()
{
final AdblockSettings settings = new AdblockSettings();
settings.setAcceptableAdsEnabled(true);
assertTrue(settings.isAcceptableAdsEnabled());
settings.setAcceptableAdsEnabled(false);
assertFalse(settings.isAcceptableAdsEnabled());
}
@Test
public void testAllowedConnectionType()
{
final AdblockSettings settings = new AdblockSettings();
for (ConnectionType eachConnectionType : ConnectionType.values())
{
settings.setAllowedConnectionType(eachConnectionType);
assertEquals(eachConnectionType, settings.getAllowedConnectionType());
}
}
@Test
public void testSubscriptions()
{
for (int i = 0; i < 3; i++)
{
final AdblockSettings settings = buildModel(i, 1);
assertEquals(i, settings.getSelectedSubscriptions().size());
}
}
@Test
public void testAllowlistedDomains()
{
for (int i = 0; i < 3; i++)
{
final AdblockSettings settings = buildModel(1, i);
assertEquals(i, settings.getAllowlistedDomains().size());
}
}
}
```
|
Please help me generate a test for this class.
|
```package org.adblockplus.libadblockplus.android.settings;
import org.adblockplus.ConnectionType;
import java.io.Serializable;
import java.util.List;
/**
* Adblock settings
*/
public class AdblockSettings implements Serializable
{
private volatile boolean adblockEnabled;
private volatile boolean acceptableAdsEnabled;
private List<SubscriptionInfo> selectedSubscriptions;
private List<SubscriptionInfo> availableSubscriptions;
private List<String> allowlistedDomains;
private ConnectionType allowedConnectionType;
public boolean isAdblockEnabled()
{
return adblockEnabled;
}
public void setAdblockEnabled(final boolean adblockEnabled)
{
this.adblockEnabled = adblockEnabled;
}
public boolean isAcceptableAdsEnabled()
{
return acceptableAdsEnabled;
}
public void setAcceptableAdsEnabled(final boolean acceptableAdsEnabled)
{
this.acceptableAdsEnabled = acceptableAdsEnabled;
}
public List<SubscriptionInfo> getSelectedSubscriptions()
{
return selectedSubscriptions;
}
public void setSelectedSubscriptions(final List<SubscriptionInfo> selectedSubscriptions)
{
this.selectedSubscriptions = selectedSubscriptions;
}
public List<SubscriptionInfo> getAvailableSubscriptions()
{
return availableSubscriptions;
}
public void setAvailableSubscriptions(final List<SubscriptionInfo> availableSubscriptions)
{
this.availableSubscriptions = availableSubscriptions;
}
public List<String> getAllowlistedDomains()
{
return allowlistedDomains;
}
public void setAllowlistedDomains(final List<String> allowlistedDomains)
{
this.allowlistedDomains = allowlistedDomains;
}
public ConnectionType getAllowedConnectionType()
{
return allowedConnectionType;
}
public void setAllowedConnectionType(final ConnectionType allowedConnectionType)
{
this.allowedConnectionType = allowedConnectionType;
}
@Override
public String toString()
{
return "AdblockSettings{" +
"adblockEnabled=" + adblockEnabled +
", acceptableAdsEnabled=" + acceptableAdsEnabled +
", availableSubscriptions:" + (availableSubscriptions != null ? availableSubscriptions.size() : 0) +
", selectedSubscriptions:" + (selectedSubscriptions != null ? selectedSubscriptions.size() : 0) +
", allowlistedDomains:" + (allowlistedDomains != null ? allowlistedDomains.size() : 0) +
", allowedConnectionType=" + (allowedConnectionType != null ? allowedConnectionType.getValue() : "null") +
'}';
}
}
```
|
```package org.adblockplus.libadblockplus.android.webview.content_type;
import android.net.Uri;
import org.adblockplus.ContentType;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
import static org.adblockplus.libadblockplus.HttpClient.HEADER_ACCEPT;
import static org.adblockplus.libadblockplus.HttpClient.HEADER_REQUESTED_WITH;
import static org.adblockplus.libadblockplus.HttpClient.HEADER_REQUESTED_WITH_XMLHTTPREQUEST;
import static org.adblockplus.libadblockplus.HttpClient.MIME_TYPE_TEXT_HTML;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
public class OrderedContentTypeDetectorTest extends BaseContentTypeDetectorTest
{
private static final Uri URI_IMAGE = parseUri("https://www.example.com/file.jpg?name=value");
private static final Map<String, String> XML_HEADER = new HashMap<String, String>()
{{
put(HEADER_REQUESTED_WITH, HEADER_REQUESTED_WITH_XMLHTTPREQUEST);
}};
private static final Map<String, String> SUBDOCUMENT_HEADER = new HashMap<String, String>()
{{
put(HEADER_ACCEPT, MIME_TYPE_TEXT_HTML);
}};
@Test
public void testEmptyDetector()
{
assertNull(new OrderedContentTypeDetector().detect(mockRequest(URI_IMAGE, XML_HEADER)));
}
@Test
public void testProperDetectingOrder()
{
final OrderedContentTypeDetector regexFirst = new OrderedContentTypeDetector(
new UrlFileExtensionTypeDetector(),
new HeadersContentTypeDetector()
);
final OrderedContentTypeDetector headersFirst = new OrderedContentTypeDetector(
new HeadersContentTypeDetector(),
new UrlFileExtensionTypeDetector()
);
assertEquals(ContentType.IMAGE,
regexFirst.detect(mockRequest(URI_IMAGE, XML_HEADER)));
assertEquals(ContentType.XMLHTTPREQUEST,
headersFirst.detect(mockRequest(URI_IMAGE, XML_HEADER)));
assertEquals(ContentType.SUBDOCUMENT,
headersFirst.detect(mockRequest(URI_IMAGE, SUBDOCUMENT_HEADER)));
}
}
```
|
Please help me generate a test for this class.
|
```package org.adblockplus.libadblockplus.android.webview.content_type;
import android.webkit.WebResourceRequest;
import org.adblockplus.ContentType;
/**
 * Detects the content type using {@link HeadersContentTypeDetector}
 * and {@link UrlFileExtensionTypeDetector}.
 * <p>
 * Can accept an arbitrary list of content type detectors.
 * <p>
 * {@link ContentType#XMLHTTPREQUEST} is detected separately,
 * just by checking the `HEADER_REQUESTED_WITH_XMLHTTPREQUEST` header.
*/
public class OrderedContentTypeDetector implements ContentTypeDetector
{
private final ContentTypeDetector[] detectors;
/**
* Creates an instance of a `MultipleContentTypeDetector`
* with provided detectors
* <p>
* At the moment only {@link HeadersContentTypeDetector}
* and {@link UrlFileExtensionTypeDetector} exists
*
* @param detectors an array of instances of {@link ContentTypeDetector}
*/
public OrderedContentTypeDetector(final ContentTypeDetector... detectors)
{
this.detectors = detectors;
}
@Override
public ContentType detect(final WebResourceRequest request)
{
ContentType contentType;
for (final ContentTypeDetector detector : detectors)
{
contentType = detector.detect(request);
      // if contentType == null, the detector was unable
      // to determine the content type
if (contentType != null)
{
return contentType;
}
}
    // if no detector produced a result, it's safe to return null
return null;
}
}
```
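To make the short-circuit behavior concrete, here is a small sketch. It assumes `ContentTypeDetector` is a single-method interface (so a lambda can stand in for a detector); `someRequest` is a placeholder `WebResourceRequest`:

```java
// Detectors are consulted in order; the first non-null result wins.
ContentTypeDetector neverDetects = request -> null;
ContentTypeDetector alwaysImage = request -> ContentType.IMAGE;

OrderedContentTypeDetector ordered =
    new OrderedContentTypeDetector(neverDetects, alwaysImage);
// ordered.detect(someRequest) returns ContentType.IMAGE: the null result
// of the first detector is skipped and the second detector decides.
```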
|
```package org.adblockplus.libadblockplus.test;
import org.adblockplus.libadblockplus.sitekey.PublicKeyHolder;
import org.adblockplus.libadblockplus.sitekey.PublicKeyHolderImpl;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
public class PublicKeyHolderImplTest
{
private final Random random = new Random();
private PublicKeyHolder publicKeyHolder;
@Before
public void setUp()
{
publicKeyHolder = new PublicKeyHolderImpl();
}
private String generateString()
{
return String.valueOf(Math.abs(random.nextLong()));
}
@Test
public void testPutGet()
{
final String url = generateString();
final String publicKey = generateString();
assertFalse(publicKeyHolder.contains(url));
publicKeyHolder.put(url, publicKey);
assertTrue(publicKeyHolder.contains(url));
assertEquals(publicKey, publicKeyHolder.get(url));
}
@Test
public void testPutGetAny()
{
final String url = generateString();
final String publicKey = generateString();
assertFalse(publicKeyHolder.contains(url));
publicKeyHolder.put(url, publicKey);
assertTrue(publicKeyHolder.contains(url));
final List<String> list = new ArrayList<>();
    assertNull(publicKeyHolder.getAny(list, null));
assertEquals("", publicKeyHolder.getAny(list, ""));
list.add(url);
assertEquals(publicKey, publicKeyHolder.getAny(list, null));
final String testUrlAfter = "testUrlAfter";
list.add(testUrlAfter);
assertFalse(publicKeyHolder.contains(testUrlAfter));
assertEquals(publicKey, publicKeyHolder.getAny(list, null));
final String testUrlBefore = "testUrlBefore";
list.add(0, testUrlBefore);
assertFalse(publicKeyHolder.contains(testUrlBefore));
assertEquals(publicKey, publicKeyHolder.getAny(list, null));
}
@Test
public void testGetAnyForTwo()
{
final String url1 = generateString();
final String publicKey1 = generateString();
assertFalse(publicKeyHolder.contains(url1));
publicKeyHolder.put(url1, publicKey1);
assertTrue(publicKeyHolder.contains(url1));
final String url2 = generateString();
final String publicKey2 = generateString();
assertFalse(publicKeyHolder.contains(url2));
publicKeyHolder.put(url2, publicKey2);
assertTrue(publicKeyHolder.contains(url2));
final List<String> list = new ArrayList<>();
list.add(url1);
assertEquals(publicKey1, publicKeyHolder.getAny(list, null));
list.clear();
list.add(url2);
assertEquals(publicKey2, publicKeyHolder.getAny(list, null));
// undefined behaviour (because map does not have order)
list.add(url1);
final String publicKey = publicKeyHolder.getAny(list, null);
assertNotNull(publicKey);
assertTrue(publicKey1.equals(publicKey) || publicKey2.equals(publicKey));
}
@Test
public void testStripPadding()
{
final String publicKey = "somePublicKey";
assertNull(PublicKeyHolderImpl.stripPadding(null));
assertEquals("", PublicKeyHolderImpl.stripPadding(""));
assertEquals(publicKey, PublicKeyHolderImpl.stripPadding(publicKey));
assertEquals(publicKey, PublicKeyHolderImpl.stripPadding(publicKey + "="));
assertEquals(publicKey, PublicKeyHolderImpl.stripPadding(publicKey + "=="));
}
}
```
|
Please help me generate a test for this class.
|
```package org.adblockplus.libadblockplus.sitekey;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Thread-safe implementation of PublicKeyHolder
*/
public class PublicKeyHolderImpl implements PublicKeyHolder
{
/**
* Strip `=` padding at the end of base64 public key
* @param publicKey full public key
* @return public key with stripped padding or `null`
*/
public static String stripPadding(final String publicKey)
{
if (publicKey == null)
{
return null;
}
final StringBuilder sb = new StringBuilder(publicKey);
while (sb.length() > 0 && sb.charAt(sb.length() - 1) == '=')
{
sb.deleteCharAt(sb.length() - 1);
}
return sb.toString();
}
private Map<String, String> map = Collections.synchronizedMap(new HashMap<String, String>());
@Override
public boolean contains(final String url)
{
return map.containsKey(url);
}
@Override
public String get(final String url)
{
return map.get(url);
}
@Override
public String getAny(final List<String> urls, final String defaultValue)
{
for (final String url : urls)
{
final String publicKey = get(url);
if (publicKey != null)
{
return publicKey;
}
}
return defaultValue;
}
@Override
public void put(final String url, final String publicKey)
{
map.put(url, publicKey);
}
@Override
public void clear()
{
map.clear();
}
}
```
|
```package sample.model.support;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import sample.context.support.IdGenerator;
public class IdGeneratorMock implements IdGenerator {
private final ConcurrentMap<String, AtomicLong> uidMap = new ConcurrentHashMap<>();
@Override
public String generate(String key) {
uidMap.computeIfAbsent(key, k -> new AtomicLong(0));
return String.valueOf(uidMap.get(key).incrementAndGet());
}
}
```
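A quick sketch of the mock's contract: each key gets its own counter starting at 1, returned as a decimal string.

```java
IdGeneratorMock gen = new IdGeneratorMock();
gen.generate("CashInOut"); // "1"
gen.generate("CashInOut"); // "2"
gen.generate("Other");     // "1" -- counters are independent per key
```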
|
Please help me generate a test for this class.
|
```package sample.model;
import java.util.Map;
import java.util.function.Function;
import org.springframework.stereotype.Component;
import lombok.RequiredArgsConstructor;
import sample.context.DomainHelper;
import sample.context.support.IdGenerator;
import sample.model.asset.CashInOut;
/**
* Domain-specific implementation of IdGenerator
*/
@Component
@RequiredArgsConstructor(staticName = "of")
public class DomainIdGenerator implements IdGenerator {
private final DomainHelper dh;
private final Map<String, Function<Long, String>> uidMap = Map.of(
CashInOut.class.getSimpleName(), id -> CashInOut.formatId(id));
/** {@inheritDoc} */
@Override
public String generate(String key) {
if (!uidMap.containsKey(key)) {
throw new IllegalArgumentException("Unsupported generation key. [" + key + "]");
}
return uidMap.get(key).apply(dh.setting().nextId(key));
}
}
```
|
```package sample.model.asset;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.math.BigDecimal;
import java.time.LocalDate;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import sample.model.BusinessDayHandler;
import sample.model.DataFixtures;
import sample.model.DomainTester;
import sample.model.DomainTester.DomainTesterBuilder;
import sample.model.support.HolidayAccessorMock;
// low: Simple normal system verification only
public class CashBalanceTest {
private DomainTester tester;
private BusinessDayHandler businessDay;
@BeforeEach
public void before() {
tester = DomainTesterBuilder.from(CashBalance.class).build();
businessDay = BusinessDayHandler.of(tester.time(), new HolidayAccessorMock());
}
@AfterEach
public void after() {
tester.close();
}
@Test
public void add() {
LocalDate baseDay = businessDay.day();
tester.tx(rep -> {
CashBalance cb = rep.save(DataFixtures.cb("test1", baseDay, "USD", "10.02"));
// 10.02 + 11.51 = 21.53
assertEquals(new BigDecimal("21.53"), cb.add(rep, new BigDecimal("11.51")).getAmount());
// 21.53 + 11.516 = 33.04 (Fractional Rounding Confirmation)
assertEquals(new BigDecimal("33.04"), cb.add(rep, new BigDecimal("11.516")).getAmount());
// 33.04 - 41.51 = -8.47 (Negative value/negative residual allowance)
assertEquals(new BigDecimal("-8.47"), cb.add(rep, new BigDecimal("-41.51")).getAmount());
});
}
@Test
public void getOrNew() {
LocalDate baseDay = businessDay.day();
LocalDate baseMinus1Day = businessDay.day(-1);
tester.tx(rep -> {
rep.save(DataFixtures.cb("test1", baseDay, "JPY", "1000"));
rep.save(DataFixtures.cb("test2", baseMinus1Day, "JPY", "3000"));
// Verification of balances in existence
var cbNormal = CashBalance.getOrNew(rep, "test1", "JPY");
assertEquals("test1", cbNormal.getAccountId());
assertEquals(baseDay, cbNormal.getBaseDay());
assertEquals(new BigDecimal("1000"), cbNormal.getAmount());
// Verification of carryover of balances that do not exist on the base date
var cbRoll = CashBalance.getOrNew(rep, "test2", "JPY");
assertEquals("test2", cbRoll.getAccountId());
assertEquals(baseDay, cbRoll.getBaseDay());
assertEquals(new BigDecimal("3000"), cbRoll.getAmount());
// Verification of generation of accounts that do not hold balances
var cbNew = CashBalance.getOrNew(rep, "test3", "JPY");
assertEquals("test3", cbNew.getAccountId());
assertEquals(baseDay, cbNew.getBaseDay());
assertEquals(BigDecimal.ZERO, cbNew.getAmount());
});
}
}
```
|
Please help me generate a test for this class.
|
```package sample.model.asset;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.Optional;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import lombok.Data;
import sample.context.DomainEntity;
import sample.context.orm.OrmRepository;
import sample.model.constraints.Amount;
import sample.model.constraints.Currency;
import sample.model.constraints.ISODate;
import sample.model.constraints.ISODateTime;
import sample.model.constraints.IdStr;
import sample.util.Calculator;
import sample.util.TimePoint;
/**
* Represents the account balance.
*/
@Entity
@Data
public class CashBalance implements DomainEntity {
private static final String SequenceId = "cash_balance_id_seq";
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = SequenceId)
@SequenceGenerator(name = SequenceId, sequenceName = SequenceId, allocationSize = 1)
private Long id;
/** account Id */
@IdStr
private String accountId;
@ISODate
private LocalDate baseDay;
@Currency
private String currency;
@Amount
private BigDecimal amount;
@ISODateTime
private LocalDateTime updateDate;
/**
* Reflects the specified amount in the balance.
* low Although Currency is used here, the actual number of currency digits and
* fractional processing definitions are managed in the DB, configuration files,
* etc.
*/
public CashBalance add(final OrmRepository rep, BigDecimal addAmount) {
int scale = java.util.Currency.getInstance(currency).getDefaultFractionDigits();
this.setAmount(Calculator.of(amount)
.scale(scale, RoundingMode.DOWN)
.add(addAmount)
.decimal());
return rep.update(this);
}
/**
* Retrieves the balance of the designated account.
* (If it does not exist, it will be retrieved after the carryover is saved.)
* low: Proper consideration of multiple currencies and detailed screening is
* not the main point, so I will skip it.
*/
public static CashBalance getOrNew(final OrmRepository rep, String accountId, String currency) {
LocalDate baseDay = rep.dh().time().day();
var jpql = """
SELECT c
FROM CashBalance c
WHERE c.accountId=?1 AND c.currency=?2 AND c.baseDay=?3
ORDER BY c.baseDay DESC
""";
Optional<CashBalance> m = rep.tmpl().get(jpql, accountId, currency, baseDay);
return m.orElseGet(() -> create(rep, accountId, currency));
}
private static CashBalance create(final OrmRepository rep, String accountId, String currency) {
TimePoint now = rep.dh().time().tp();
var jpql = """
SELECT c
FROM CashBalance c
WHERE c.accountId=?1 AND c.currency=?2
ORDER BY c.baseDay DESC
""";
Optional<CashBalance> current = rep.tmpl().get(jpql, accountId, currency);
var amount = BigDecimal.ZERO;
if (current.isPresent()) { // balance carried forward
amount = current.get().getAmount();
}
var m = new CashBalance();
m.setAccountId(accountId);
m.setBaseDay(now.getDay());
m.setCurrency(currency);
m.setAmount(amount);
m.setUpdateDate(now.getDate());
return rep.save(m);
}
}
```
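One detail worth spelling out: the rounding scale in `add` comes from the ISO currency's default fraction digits, so the same amount is truncated differently per currency. A sketch of that lookup (standard `java.util.Currency` behavior):

```java
import java.util.Currency;

// JPY has no minor unit, USD has two; add() rounds down to these scales.
int jpyScale = Currency.getInstance("JPY").getDefaultFractionDigits(); // 0
int usdScale = Currency.getInstance("USD").getDefaultFractionDigits(); // 2
```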
|
```package sample.context;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.Test;
import sample.context.support.ResourceBundleHandler;
public class ResourceBundleHandlerTest {
@Test
public void checkLabel() {
ResourceBundleHandler handler = new ResourceBundleHandler();
assertTrue(0 < handler.labels("messages").size());
}
}
```
|
Please help me generate a test for this class.
|
```package sample.context.support;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.ResourceBundle;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import org.springframework.context.support.ResourceBundleMessageSource;
import org.springframework.stereotype.Component;
/**
* Provides simple access to ResourceBundle.
* <p>
* This component should be used in message properties for i18n applications,
* such as providing a list of labels via API.
* <p>
* ResourceBundle has different characteristics (map concept) from the standard
* MessageSource, which is intended for simple string conversion. Therefore, it
* is managed in a separate instance.
*/
@Component
public class ResourceBundleHandler {
private static final String DEFAULT_ENCODING = "UTF-8";
private final Map<String, ResourceBundle> bundleMap = new ConcurrentHashMap<>();
/**
* Returns the ResourceBundle of the specified message source.
* <p>
* It is not necessary to include the extension (.properties) in the basename.
*/
public ResourceBundle get(String basename) {
return get(basename, Locale.getDefault());
}
public synchronized ResourceBundle get(String basename, Locale locale) {
bundleMap.putIfAbsent(keyname(basename, locale),
ResourceBundleFactory.create(basename, locale, DEFAULT_ENCODING));
return bundleMap.get(keyname(basename, locale));
}
private String keyname(String basename, Locale locale) {
return basename + "_" + locale.toLanguageTag();
}
/**
* Returns the label key and value Map of the specified message source.
* <p>
* It is not necessary to include the extension (.properties) in the basename.
*/
public Map<String, String> labels(String basename) {
return labels(basename, Locale.getDefault());
}
public Map<String, String> labels(String basename, Locale locale) {
ResourceBundle bundle = get(basename, locale);
return bundle.keySet().stream().collect(Collectors.toMap(
key -> key,
key -> bundle.getString(key)));
}
/**
* Factory to acquire ResourceBundle via Spring's MessageSource.
* <p>
* Allows specification of encoding for property files.
*/
public static class ResourceBundleFactory extends ResourceBundleMessageSource {
/** Return ResourceBundle. */
public static ResourceBundle create(String basename, Locale locale, String encoding) {
var factory = new ResourceBundleFactory();
factory.setDefaultEncoding(encoding);
return Optional.ofNullable(factory.getResourceBundle(basename, locale))
.orElseThrow(() -> new IllegalArgumentException(
"No resource file with the specified basename was found. [" + basename + "]"));
}
}
}
```
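A minimal usage sketch, assuming a `messages.properties` bundle is on the classpath (as in the test above):

```java
import java.util.Locale;
import java.util.Map;

ResourceBundleHandler handler = new ResourceBundleHandler();
// Resolves messages_<locale>.properties with the usual ResourceBundle fallback.
Map<String, String> labels = handler.labels("messages", Locale.ENGLISH);
labels.forEach((key, value) -> System.out.println(key + "=" + value));
```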
|
```package sample.model.master;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import sample.model.DataFixtures;
import sample.model.DomainTester;
import sample.model.DomainTester.DomainTesterBuilder;
public class StaffAuthorityTest {
private DomainTester tester;
@BeforeEach
public void before() {
tester = DomainTesterBuilder.from(StaffAuthority.class).build();
tester.txInitializeData(rep -> {
DataFixtures.staffAuth("staffA", "ID000001", "ID000002", "ID000003").forEach((auth) -> rep.save(auth));
DataFixtures.staffAuth("staffB", "ID000001", "ID000002").forEach((auth) -> rep.save(auth));
});
}
@AfterEach
public void after() {
tester.close();
}
@Test
public void find() {
tester.tx(rep -> {
assertEquals(3, StaffAuthority.find(rep, "staffA").size());
assertEquals(2, StaffAuthority.find(rep, "staffB").size());
});
}
}
```
|
Please help me generate a test for this class.
|
```package sample.model.master;
import java.util.List;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import lombok.Data;
import sample.context.DomainEntity;
import sample.context.orm.OrmRepository;
import sample.model.constraints.IdStr;
import sample.model.constraints.Name;
/**
 * Represents the authority assigned to a staff member.
*/
@Entity
@Data
public class StaffAuthority implements DomainEntity {
private static final String SequenceId = "staff_authority_id_seq";
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = SequenceId)
@SequenceGenerator(name = SequenceId, sequenceName = SequenceId, allocationSize = 1)
private Long id;
@IdStr
private String staffId;
/** Authority Name. */
@Name
private String authority;
/** Returns a list of privileges associated with the staff ID. */
public static List<StaffAuthority> find(final OrmRepository rep, String staffId) {
var jpql = "SELECT sa FROM StaffAuthority sa WHERE sa.staffId=?1";
return rep.tmpl().find(jpql, staffId);
}
}
```
|
```package sample.model.master;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.time.LocalDate;
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import sample.model.DataFixtures;
import sample.model.DomainTester;
import sample.model.DomainTester.DomainTesterBuilder;
import sample.model.master.Holiday.FindHoliday;
import sample.model.master.Holiday.RegHoliday;
import sample.model.master.Holiday.RegHolidayItem;
import sample.util.DateUtils;
public class HolidayTest {
private DomainTester tester;
@BeforeEach
public void before() {
tester = DomainTesterBuilder.from(Holiday.class).build();
tester.txInitializeData(rep -> {
Stream.of("2015-09-21", "2015-09-22", "2015-09-23", "2016-09-21")
.map(DataFixtures::holiday)
.forEach(m -> rep.save(m));
});
}
@AfterEach
public void after() {
tester.close();
}
@Test
public void get() {
tester.tx(rep -> {
Optional<Holiday> day = Holiday.get(rep, LocalDate.of(2015, 9, 22));
assertTrue(day.isPresent());
assertEquals(LocalDate.of(2015, 9, 22), day.get().getDay());
});
}
@Test
public void find() {
tester.tx(rep -> {
assertEquals(3, Holiday.find(rep,
FindHoliday.builder().year(2015).build()).size());
assertEquals(1, Holiday.find(rep,
FindHoliday.builder().year(2016).build()).size());
});
}
@Test
public void register() {
List<RegHolidayItem> items = Stream.of("2016-09-21", "2016-09-22", "2016-09-23")
.map(s -> RegHolidayItem.builder()
.holiday(DateUtils.day(s))
.name("休日")
.build())
.toList();
tester.tx(rep -> {
Holiday.register(rep, RegHoliday
.builder()
.year(2016)
.list(items)
.build());
assertEquals(3, Holiday.find(rep,
FindHoliday.builder().year(2016).build()).size());
});
}
}
```
|
Please help me generate a test for this class.
|
```package sample.model.master;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Optional;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import jakarta.validation.Valid;
import lombok.Builder;
import lombok.Data;
import sample.context.DomainMetaEntity;
import sample.context.Dto;
import sample.context.ErrorKeys;
import sample.context.ValidationException;
import sample.context.orm.OrmRepository;
import sample.model.constraints.Category;
import sample.model.constraints.CategoryEmpty;
import sample.model.constraints.ISODate;
import sample.model.constraints.ISODateTime;
import sample.model.constraints.IdStr;
import sample.model.constraints.Name;
import sample.model.constraints.NameEmpty;
import sample.model.constraints.OutlineEmpty;
import sample.model.constraints.Year;
import sample.util.DateUtils;
/**
* Represents a holiday master.
*/
@Entity
@Data
public class Holiday implements DomainMetaEntity {
private static final String SequenceId = "holiday_id_seq";
public static final String CategoryDefault = "default";
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = SequenceId)
@SequenceGenerator(name = SequenceId, sequenceName = SequenceId, allocationSize = 1)
private Long id;
/** Holiday classification (currency, country, financial institution, etc.) */
@Category
private String category;
@ISODate
@Column(name = "holiday", nullable = false)
private LocalDate day;
@NameEmpty(max = 40)
private String name;
@OutlineEmpty
private String outline;
@ISODateTime
private LocalDateTime createDate;
@IdStr
private String createId;
@ISODateTime
private LocalDateTime updateDate;
@IdStr
private String updateId;
public static Optional<Holiday> get(final OrmRepository rep, LocalDate day) {
return get(rep, CategoryDefault, day);
}
public static Optional<Holiday> get(final OrmRepository rep, String category, LocalDate day) {
var jpql = """
SELECT h FROM Holiday h WHERE h.category=?1 AND h.day=?2
""";
return rep.tmpl().get(jpql, category, day);
}
public static Holiday load(final OrmRepository rep, LocalDate day) {
return load(rep, CategoryDefault, day);
}
public static Holiday load(final OrmRepository rep, String category, LocalDate day) {
return get(rep, category, day)
.orElseThrow(
() -> ValidationException.of(ErrorKeys.EntityNotFound, day.toString()));
}
public static List<Holiday> find(final OrmRepository rep, final FindHoliday param) {
var category = param.category != null ? param.category : CategoryDefault;
var fromDay = LocalDate.ofYearDay(param.year, 1);
var toDay = DateUtils.dayTo(param.year);
var jpql = """
SELECT h
FROM Holiday h
WHERE h.category=?1 AND h.day BETWEEN ?2 AND ?3
ORDER BY h.day
""";
return rep.tmpl().find(jpql, category, fromDay, toDay);
}
@Builder
public static record FindHoliday(
@CategoryEmpty String category,
@Year Integer year) {
}
/**
* Register holiday master.
* <p>
* Batch registration after deleting all holidays for the specified year.
*/
public static void register(final OrmRepository rep, final RegHoliday param) {
var category = param.category != null ? param.category : CategoryDefault;
var fromDay = LocalDate.ofYearDay(param.year, 1);
var toDay = DateUtils.dayTo(param.year);
var jpqlDel = """
DELETE FROM Holiday h WHERE h.category=?1 AND h.day BETWEEN ?2 AND ?3
""";
rep.tmpl().execute(jpqlDel, category, fromDay, toDay);
rep.flushAndClear();
param.list.stream()
.filter(v -> DateUtils.includes(v.holiday(), fromDay, toDay))
.forEach(v -> rep.saveOrUpdate(v.create(param)));
}
@Builder
public static record RegHoliday(
@CategoryEmpty String category,
@Year Integer year,
@Valid List<RegHolidayItem> list) implements Dto {
}
@Builder
public static record RegHolidayItem(
@ISODate LocalDate holiday,
@Name(max = 40) String name) implements Dto {
public Holiday create(RegHoliday parent) {
var m = new Holiday();
m.setCategory(parent.category != null ? parent.category : CategoryDefault);
m.setDay(this.holiday);
m.setName(this.name);
return m;
}
}
}
```
|
```package sample.util;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.math.BigDecimal;
import org.junit.jupiter.api.Test;
public class ConvertUtilsTest {
@Test
public void checkQuietly() {
assertEquals(Long.valueOf(8), ConvertUtils.quietlyLong("8"));
assertNull(ConvertUtils.quietlyLong("a"));
assertEquals(Integer.valueOf(8), ConvertUtils.quietlyInt("8"));
assertNull(ConvertUtils.quietlyInt("a"));
assertEquals(new BigDecimal("8.3"), ConvertUtils.quietlyDecimal("8.3"));
assertNull(ConvertUtils.quietlyDecimal("a"));
assertTrue(ConvertUtils.quietlyBool("true"));
assertFalse(ConvertUtils.quietlyBool("a"));
}
@Test
public void checkSubstring() {
assertEquals("あ𠮷い", ConvertUtils.substring("あ𠮷い", 0, 3));
assertEquals("𠮷", ConvertUtils.substring("あ𠮷い", 1, 2));
assertEquals("𠮷い", ConvertUtils.substring("あ𠮷い", 1, 3));
assertEquals("い", ConvertUtils.substring("あ𠮷い", 2, 3));
assertEquals("あ𠮷", ConvertUtils.left("あ𠮷い", 2));
assertEquals("あ𠮷", ConvertUtils.leftStrict("あ𠮷い", 6, "UTF-8"));
}
}
```
|
Please help me generate a test for this class.
|
```package sample.util;
import java.math.BigDecimal;
import java.util.Optional;
/** Supports various type/string conversions. */
public abstract class ConvertUtils {
/** Converts to Long without exception. (null if conversion is not possible) */
public static Long quietlyLong(Object value) {
try {
return Optional.ofNullable(value).map(v -> Long.parseLong(v.toString())).orElse(null);
} catch (NumberFormatException e) {
return null;
}
}
/**
* Converts to Integer without exception. (null if conversion is not possible)
*/
public static Integer quietlyInt(Object value) {
try {
return Optional.ofNullable(value).map(v -> Integer.parseInt(v.toString())).orElse(null);
} catch (NumberFormatException e) {
return null;
}
}
/**
* Convert to BigDecimal without exception. (null if conversion is not possible)
*/
public static BigDecimal quietlyDecimal(Object value) {
try {
return Optional.ofNullable(value).map((v) -> new BigDecimal(v.toString())).orElse(null);
} catch (NumberFormatException e) {
return null;
}
}
/**
* Converts to Boolean without exception. (false if conversion is not possible)
*/
public static Boolean quietlyBool(Object value) {
return Optional.ofNullable(value).map((v) -> Boolean.parseBoolean(v.toString())).orElse(false);
}
/** Extracts the specified string. (Surrogate pairs supported) */
public static String substring(String text, int start, int end) {
if (text == null)
return null;
int spos = text.offsetByCodePoints(0, start);
int epos = text.length() < end ? text.length() : end;
return text.substring(spos, text.offsetByCodePoints(spos, epos - start));
}
/**
* Obtains a string with a specified number of characters from the left.
* (Surrogate pairs supported)
*/
public static String left(String text, int len) {
return substring(text, 0, len);
}
/** Obtains a string with the specified number of bytes from the left. */
public static String leftStrict(String text, int lenByte, String charset) {
StringBuilder sb = new StringBuilder();
try {
int cnt = 0;
for (int i = 0; i < text.length(); i++) {
String v = text.substring(i, i + 1);
byte[] b = v.getBytes(charset);
if (lenByte < cnt + b.length) {
break;
} else {
sb.append(v);
cnt += b.length;
}
}
} catch (Exception e) {
throw new IllegalArgumentException(e);
}
return sb.toString();
}
}
```
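The surrogate-pair handling above is easiest to see with a character outside the BMP (𠮷 occupies two Java chars but one code point):

```java
String s = "あ𠮷い";             // s.length() == 4, but only 3 code points
ConvertUtils.substring(s, 1, 2); // "𠮷" -- indices count code points
// A plain s.substring(1, 2) would return only half of the surrogate pair.
```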
|
```package sample.model.support;
import java.util.HashMap;
import java.util.Map;
import sample.context.support.AppSetting;
import sample.context.support.AppSettingHandler;
public class AppSettingHandlerMock implements AppSettingHandler {
private final Map<String, String> valueMap = new HashMap<>();
@Override
public AppSetting setting(String id) {
this.valueMap.putIfAbsent(id, null);
return AppSetting.of(id, this.valueMap.get(id));
}
@Override
public AppSetting change(String id, String value) {
this.valueMap.put(id, value);
return this.setting(id);
}
@Override
public long nextId(String id) {
this.valueMap.putIfAbsent(id, "0");
var v = Long.valueOf(this.valueMap.get(id)) + 1;
this.valueMap.put(id, String.valueOf(v));
return v;
}
}
```
|
Please help me generate a test for this class.
|
```package sample.context.support;
import java.util.Optional;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import lombok.extern.slf4j.Slf4j;
import sample.context.orm.repository.SystemRepository;
/**
* Provides access to application configuration information.
*/
public interface AppSettingHandler {
/** Returns application configuration information. */
AppSetting setting(String id);
/** Change application configuration information. */
AppSetting change(String id, String value);
    /** Returns the next automatically numbered value for the given id. */
long nextId(String id);
@Component
@Slf4j
public static class AppSettingHandlerImpl implements AppSettingHandler {
public static final String CacheItemKey = "AppSettingHandler.appSetting";
private static final String UIDKeyPrefix = "uid.";
private final SystemRepository rep;
public AppSettingHandlerImpl(SystemRepository rep) {
this.rep = rep;
}
/** {@inheritDoc} */
@Override
@Cacheable(cacheNames = CacheItemKey, key = "#id")
@Transactional(SystemRepository.BeanNameTx)
public AppSetting setting(String id) {
Optional<AppSetting> setting = rep.get(AppSetting.class, id);
if (setting.isEmpty()) {
log.warn("Initial registered settings do not exist [{}]", id);
return rep.save(AppSetting.of(id, null));
} else {
return setting.get();
}
}
/** {@inheritDoc} */
@Override
@CacheEvict(cacheNames = CacheItemKey, key = "#id")
@Transactional(SystemRepository.BeanNameTx)
public AppSetting change(String id, String value) {
return AppSetting.load(rep, id).change(rep, value);
}
/** {@inheritDoc} */
@Override
@Transactional(transactionManager = SystemRepository.BeanNameTx, propagation = Propagation.REQUIRES_NEW)
public synchronized long nextId(String id) {
String uidKey = UIDKeyPrefix + id;
if (rep.get(AppSetting.class, uidKey).isEmpty()) {
rep.save(AppSetting.of(uidKey, "0"));
rep.flushAndClear();
}
var setting = rep.loadForUpdate(AppSetting.class, uidKey);
long nextId = setting.longValue() + 1;
setting.setValue(String.valueOf(nextId));
rep.update(setting);
return nextId;
}
}
}
```
|
```package sample.util;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.time.LocalDate;
import java.time.LocalDateTime;
import org.junit.jupiter.api.Test;
public class TimePointTest {
@Test
public void checkInit() {
LocalDate targetDay = LocalDate.of(2015, 8, 28);
LocalDateTime targetDate = LocalDateTime.of(2015, 8, 29, 1, 23, 31);
TimePoint tp = TimePoint.of(targetDay, targetDate);
assertEquals(targetDay, tp.day());
assertEquals(targetDate, tp.date());
TimePoint tpDay = TimePoint.of(targetDay);
assertEquals(targetDay, tpDay.day());
assertEquals(targetDay.atStartOfDay(), tpDay.date());
TimePoint now = TimePoint.now();
assertNotNull(now.day());
assertNotNull(now.date());
}
@Test
public void checkCompare() {
LocalDate targetDay = LocalDate.of(2015, 8, 28);
LocalDateTime targetDate = LocalDateTime.of(2015, 8, 29, 1, 23, 31);
TimePoint tp = TimePoint.of(targetDay, targetDate);
assertTrue(tp.equalsDay(LocalDate.of(2015, 8, 28)));
assertFalse(tp.equalsDay(LocalDate.of(2015, 8, 27)));
assertFalse(tp.equalsDay(LocalDate.of(2015, 8, 29)));
assertTrue(tp.beforeDay(LocalDate.of(2015, 8, 29)));
assertFalse(tp.beforeDay(LocalDate.of(2015, 8, 28)));
assertFalse(tp.beforeDay(LocalDate.of(2015, 8, 27)));
assertTrue(tp.afterDay(LocalDate.of(2015, 8, 27)));
assertFalse(tp.afterDay(LocalDate.of(2015, 8, 28)));
assertFalse(tp.afterDay(LocalDate.of(2015, 8, 29)));
assertTrue(tp.beforeEqualsDay(LocalDate.of(2015, 8, 29)));
assertTrue(tp.beforeEqualsDay(LocalDate.of(2015, 8, 28)));
assertFalse(tp.beforeEqualsDay(LocalDate.of(2015, 8, 27)));
assertTrue(tp.afterEqualsDay(LocalDate.of(2015, 8, 27)));
assertTrue(tp.afterEqualsDay(LocalDate.of(2015, 8, 28)));
assertFalse(tp.afterEqualsDay(LocalDate.of(2015, 8, 29)));
}
}
```
|
Please help me generate a test for this class.
|
```package sample.util;
import java.io.Serializable;
import java.time.Clock;
import java.time.LocalDate;
import java.time.LocalDateTime;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import sample.model.constraints.ISODate;
import sample.model.constraints.ISODateTime;
/**
* Represents a LocalDate/LocalDateTime pair.
* <p>
* This is intended for use in cases where the business day switch does not
* occur at 0:00.
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public class TimePoint implements Serializable {
private static final long serialVersionUID = 1L;
/** Date (business day) */
@ISODate
private LocalDate day;
/** System date and time in date */
@ISODateTime
private LocalDateTime date;
public LocalDate day() {
return getDay();
}
public LocalDateTime date() {
return getDate();
}
/** Is it the same as the specified date? (day == targetDay) */
public boolean equalsDay(LocalDate targetDay) {
return day.compareTo(targetDay) == 0;
}
/** Is it earlier than the specified date? (day < targetDay) */
public boolean beforeDay(LocalDate targetDay) {
return day.compareTo(targetDay) < 0;
}
  /** Is it on or before the specified date? (day <= targetDay) */
public boolean beforeEqualsDay(LocalDate targetDay) {
return day.compareTo(targetDay) <= 0;
}
/** Is it later than the specified date? (targetDay < day) */
public boolean afterDay(LocalDate targetDay) {
return 0 < day.compareTo(targetDay);
}
  /** Is it on or after the specified date? (targetDay <= day) */
public boolean afterEqualsDay(LocalDate targetDay) {
return 0 <= day.compareTo(targetDay);
}
/** Generate a TimePoint based on date/time. */
public static TimePoint of(LocalDate day, LocalDateTime date) {
return new TimePoint(day, date);
}
/** Generate a TimePoint based on a date. */
public static TimePoint of(LocalDate day) {
return of(day, day.atStartOfDay());
}
  /** Generates a TimePoint for the current date-time. */
public static TimePoint now() {
LocalDateTime now = LocalDateTime.now();
return of(now.toLocalDate(), now);
}
  /** Generates a TimePoint for the current date-time of the given clock. */
public static TimePoint now(Clock clock) {
LocalDateTime now = LocalDateTime.now(clock);
return of(now.toLocalDate(), now);
}
}
```
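The "business day switch does not occur at 0:00" remark becomes clearer with an explicit pair in which the business day has already rolled over while the wall clock has not:

```java
// Business day is already Jan 1 while the system clock still reads Dec 31 23:30.
TimePoint tp = TimePoint.of(LocalDate.of(2015, 1, 1),
    LocalDateTime.of(2014, 12, 31, 23, 30));
tp.equalsDay(LocalDate.of(2015, 1, 1)); // true -- comparisons use the business day
tp.date().toLocalDate();                // 2014-12-31 -- the raw timestamp's date
```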
|
```package sample.model;
import sample.ApplicationProperties;
import sample.context.DomainHelper;
import sample.context.Timestamper;
import sample.context.support.AppSettingHandler;
import sample.context.support.IdGenerator;
import sample.model.support.AppSettingHandlerMock;
import sample.model.support.IdGeneratorMock;
import sample.model.support.TimestamperMock;
public class MockDomainHelper implements DomainHelper {
private final TimestamperMock time = TimestamperMock.of(null, null);
private final IdGeneratorMock uid = new IdGeneratorMock();
private final AppSettingHandlerMock setting = new AppSettingHandlerMock();
private final ApplicationProperties props = new ApplicationProperties();
@Override
public Timestamper time() {
return this.time;
}
@Override
public IdGenerator uid() {
return this.uid;
}
@Override
public AppSettingHandler setting() {
return this.setting;
}
@Override
public ApplicationProperties props() {
return this.props;
}
}
```
|
Please help me generate a test for this class.
|
```package sample.context;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.stereotype.Component;
import lombok.RequiredArgsConstructor;
import sample.ApplicationProperties;
import sample.context.actor.Actor;
import sample.context.actor.ActorSession;
import sample.context.spring.ObjectProviderAccessor;
import sample.context.support.AppSettingHandler;
import sample.context.support.IdGenerator;
/**
* Provides access to infrastructure layer components required for domain
* processing.
*/
public interface DomainHelper {
    /** Returns the currently logged-in use-case actor. */
default Actor actor() {
return ActorSession.actor();
}
/** Returns date/time utility. */
Timestamper time();
/** Returns UID Generator */
IdGenerator uid();
/** Returns the application configuration utility. */
AppSettingHandler setting();
/** Returns application properties. */
ApplicationProperties props();
/**
* DomainHelper implementation considering lazy loading
* <p>
* Use this for use with DI containers.
*/
@Component
@RequiredArgsConstructor(staticName = "of")
public static class DomainHelperProviderImpl implements DomainHelper {
private final ApplicationProperties props;
private final ObjectProvider<Timestamper> time;
private final ObjectProvider<IdGenerator> uid;
private final ObjectProvider<AppSettingHandler> setting;
private final ObjectProviderAccessor accessor;
/** {@inheritDoc} */
@Override
public Timestamper time() {
return this.accessor.bean(this.time, Timestamper.class);
}
/** {@inheritDoc} */
@Override
public IdGenerator uid() {
return this.accessor.bean(uid, IdGenerator.class);
}
/** {@inheritDoc} */
@Override
public AppSettingHandler setting() {
return this.accessor.bean(setting, AppSettingHandler.class);
}
/** {@inheritDoc} */
@Override
public ApplicationProperties props() {
return this.props;
}
}
}
```
|
```package sample.util;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.math.BigDecimal;
import java.math.RoundingMode;
import org.junit.jupiter.api.Test;
public class CalculatorTest {
@Test
public void calculation() {
// (10 + 2 - 4) * 4 / 8 = 4
assertEquals(
4,
Calculator.of(10).add(2).subtract(4).multiply(4).divideBy(8).intValue());
// (12.4 + 0.033 - 2.33) * 0.3 / 3.3 = 0.91 (RoundingMode.DOWN)
assertEquals(
new BigDecimal("0.91"),
Calculator.of(12.4).scale(2).add(0.033).subtract(2.33).multiply(0.3).divideBy(3.3).decimal());
}
@Test
public void roundingAlways() {
// 3.333 -> 3.334 -> 3.335 (= 3.34)
assertEquals(
new BigDecimal("3.34"),
Calculator.of(3.333).scale(2, RoundingMode.HALF_UP)
.add(0.001).add(0.001).decimal());
// 3.333 -> 3.330 -> 3.330 (= 3.33)
assertEquals(
new BigDecimal("3.33"),
Calculator.of(3.333).scale(2, RoundingMode.HALF_UP).roundingAlways(true)
.add(0.001).add(0.001).decimal());
}
}
```
|
Please help me generate a test for this class.
|
```package sample.util;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.concurrent.atomic.AtomicReference;
/**
* Calculation utility.
* <p>
 * This calculator is not thread-safe.
*/
public final class Calculator {
private final AtomicReference<BigDecimal> value = new AtomicReference<>();
private int scale = 0;
private RoundingMode mode = RoundingMode.DOWN;
  /** Whether to apply rounding after every intermediate calculation. */
private boolean roundingAlways = false;
private int defaultScale = 18;
private Calculator(Number v) {
try {
this.value.set(new BigDecimal(v.toString()));
} catch (NumberFormatException e) {
this.value.set(BigDecimal.ZERO);
}
}
private Calculator(BigDecimal v) {
this.value.set(v);
}
/**
* Set scale value.
* <p>
* call it before a calculation.
*/
public Calculator scale(int scale) {
return scale(scale, RoundingMode.DOWN);
}
/**
* Set scale value.
* <p>
* call it before a calculation.
*/
public Calculator scale(int scale, RoundingMode mode) {
this.scale = scale;
this.mode = mode;
return this;
}
/**
* Set roundingAlways value.
* <p>
* call it before a calculation.
*/
public Calculator roundingAlways(boolean roundingAlways) {
this.roundingAlways = roundingAlways;
return this;
}
public Calculator add(Number v) {
try {
add(new BigDecimal(v.toString()));
} catch (NumberFormatException e) {
}
return this;
}
public Calculator add(BigDecimal v) {
value.set(rounding(value.get().add(v)));
return this;
}
private BigDecimal rounding(BigDecimal v) {
return roundingAlways ? v.setScale(scale, mode) : v;
}
public Calculator subtract(Number v) {
try {
subtract(new BigDecimal(v.toString()));
} catch (NumberFormatException e) {
}
return this;
}
public Calculator subtract(BigDecimal v) {
value.set(rounding(value.get().subtract(v)));
return this;
}
public Calculator multiply(Number v) {
try {
multiply(new BigDecimal(v.toString()));
} catch (NumberFormatException e) {
}
return this;
}
public Calculator multiply(BigDecimal v) {
value.set(rounding(value.get().multiply(v)));
return this;
}
public Calculator divideBy(Number v) {
try {
divideBy(new BigDecimal(v.toString()));
} catch (NumberFormatException e) {
}
return this;
}
public Calculator divideBy(BigDecimal v) {
BigDecimal ret = roundingAlways ? value.get().divide(v, scale, mode)
: value.get().divide(v, defaultScale, mode);
value.set(ret);
return this;
}
/** Return a calculation result. */
public int intValue() {
return decimal().intValue();
}
/** Return a calculation result. */
public long longValue() {
return decimal().longValue();
}
/** Return a calculation result. */
public BigDecimal decimal() {
BigDecimal v = value.get();
return v != null ? v.setScale(scale, mode) : BigDecimal.ZERO;
}
public static Calculator of() {
return new Calculator(BigDecimal.ZERO);
}
public static Calculator of(Number v) {
return new Calculator(v);
}
public static Calculator of(BigDecimal v) {
return new Calculator(v);
}
}
```
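Note how `divideBy` interacts with `scale`: without `roundingAlways` the division keeps the internal default scale of 18 digits, and the configured scale is only applied by the final `decimal()` call. A small sketch:

```java
import java.math.BigDecimal;

// 1 / 3 at scale 2, RoundingMode.DOWN: the division keeps 18 digits
// internally; decimal() trims the result to 0.33 at the very end.
BigDecimal v = Calculator.of(1).scale(2).divideBy(3).decimal(); // 0.33
```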
|
```package sample.model.asset;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.math.BigDecimal;
import java.time.LocalDate;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import sample.model.BusinessDayHandler;
import sample.model.DataFixtures;
import sample.model.DomainTester;
import sample.model.DomainTester.DomainTesterBuilder;
import sample.model.account.Account;
import sample.model.support.HolidayAccessorMock;
// low: Focus on simple verification
public class AssetTest {
private DomainTester tester;
private BusinessDayHandler businessDay;
@BeforeEach
public void before() {
tester = DomainTesterBuilder.from(
Account.class, CashBalance.class, Cashflow.class, CashInOut.class).build();
businessDay = BusinessDayHandler.of(tester.time(), new HolidayAccessorMock());
}
@AfterEach
public void after() {
tester.close();
}
@Test
public void canWithdraw() {
// 10000 + (1000 - 2000) - 8000 = 1000
tester.tx(rep -> {
rep.save(DataFixtures.account("test"));
rep.save(DataFixtures
.cb("test", LocalDate.of(2014, 11, 18), "JPY", "10000"));
rep.save(DataFixtures
.cf("test", "1000", LocalDate.of(2014, 11, 18), LocalDate.of(2014, 11, 20)));
rep.save(DataFixtures
.cf("test", "-2000", LocalDate.of(2014, 11, 19), LocalDate.of(2014, 11, 21)));
rep.save(DataFixtures.cio(rep.dh().uid(), businessDay, "test", "8000", true));
assertTrue(Asset.of("test")
.canWithdraw(rep, "JPY", new BigDecimal("1000"), LocalDate.of(2014, 11, 21)));
assertFalse(Asset.of("test")
.canWithdraw(rep, "JPY", new BigDecimal("1001"), LocalDate.of(2014, 11, 21)));
});
}
}
```
|
Please help me generate a test for this class.
|
```package sample.model.asset;
import java.math.BigDecimal;
import java.time.LocalDate;
import lombok.Getter;
import sample.context.orm.OrmRepository;
import sample.util.Calculator;
/**
* Represents the asset concept of the account.
* Entity under asset is handled across the board.
* low: In actual development, considerations for multi-currency and
* in-execution/binding cash flow actions can be quite complex for some
* services.
*/
@Getter
public class Asset {
private final String accountId;
private Asset(String accountId) {
this.accountId = accountId;
}
/**
* Determines if a withdrawal is possible.
* <p>
* 0 <= account balance + unrealized cash flow - (withdrawal request bound
* amount + withdrawal request amount)
* low: Since this is a judgment only, the scale specification is omitted. When
* returning the surplus amount, specify it properly.
*/
public boolean canWithdraw(final OrmRepository rep, String currency, BigDecimal absAmount, LocalDate valueDay) {
var calc = Calculator.of(CashBalance.getOrNew(rep, accountId, currency).getAmount());
Cashflow.findUnrealize(rep, accountId, currency, valueDay).stream().forEach((cf) -> calc.add(cf.getAmount()));
CashInOut.findUnprocessed(rep, accountId, currency, true)
.forEach((withdrawal) -> calc.add(withdrawal.getAbsAmount().negate()));
calc.add(absAmount.negate());
return 0 <= calc.decimal().signum();
}
public static Asset of(String accountId) {
return new Asset(accountId);
}
}
```
|
```package sample.util;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.Test;
public class CheckerTest {
@Test
public void match() {
assertTrue(Checker.match(Regex.rAlnum, "19azAZ"));
assertFalse(Checker.match(Regex.rAlnum, "19azAZ-"));
assertTrue(Checker.match(Regex.rKanji, "漢字"));
assertFalse(Checker.match(Regex.rAlnum, "漢字ひらがな"));
}
@Test
public void len() {
assertTrue(Checker.len("テスト文字列", 6));
assertFalse(Checker.len("テスト文字列超", 6));
// surrogate pair check
assertTrue("テスト文字𩸽".length() == 7);
assertTrue(Checker.len("テスト文字𩸽", 6));
}
}
```
|
Please help me generate a test for this class.
|
```package sample.util;
/**
* Represents a simple input checker.
*/
public abstract class Checker {
/**
   * Does the string match the regular expression? (null is accepted and returns true)
* <p>
* It is recommended to use the Regex constant for the regex argument.
*/
public static boolean match(String regex, Object v) {
return v != null ? v.toString().matches(regex) : true;
}
  /** Character-count check: true if the length is max or less (surrogate pair aware). */
public static boolean len(String v, int max) {
return wordSize(v) <= max;
}
private static int wordSize(String v) {
return v.codePointCount(0, v.length());
}
}
```
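As the test above shows, `len` counts code points rather than UTF-16 chars; the distinction only matters for supplementary characters:

```java
String s = "テスト文字𩸽"; // 𩸽 is a surrogate pair: s.length() == 7
Checker.len(s, 6);         // true -- 6 code points
Checker.len(s, 5);         // false
```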
|
```package sample.util;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Optional;
import org.junit.jupiter.api.Test;
public class DateUtilsTest {
private final LocalDateTime targetDate = LocalDateTime.of(2015, 8, 29, 1, 23, 31);
private final LocalDate targetDay = LocalDate.of(2015, 8, 29);
@Test
public void checkConvert() {
assertEquals(targetDay, DateUtils.day("2015-08-29"));
assertEquals(
targetDate,
DateUtils.date("2015-08-29T01:23:31", DateTimeFormatter.ISO_LOCAL_DATE_TIME));
assertEquals(
Optional.of(targetDate),
DateUtils.dateOpt("2015-08-29T01:23:31", DateTimeFormatter.ISO_LOCAL_DATE_TIME));
assertEquals(Optional.empty(), DateUtils.dateOpt(null, DateTimeFormatter.ISO_LOCAL_DATE_TIME));
assertEquals(targetDate, DateUtils.date("20150829012331", "yyyyMMddHHmmss"));
assertEquals(LocalDateTime.of(2015, 8, 29, 0, 0, 0), DateUtils.dateByDay(targetDay));
assertEquals(LocalDateTime.of(2015, 8, 29, 23, 59, 59), DateUtils.dateTo(targetDay));
}
@Test
public void checkFormat() {
assertEquals(
"01:23:31",
DateUtils.dateFormat(targetDate, DateTimeFormatter.ISO_LOCAL_TIME));
assertEquals("08/29 01:23", DateUtils.dateFormat(targetDate, "MM/dd HH:mm"));
}
@Test
public void checkSupport() {
LocalDate startDay = LocalDate.of(2015, 8, 1);
LocalDate endDay = LocalDate.of(2015, 8, 31);
assertTrue(DateUtils.between(startDay, endDay).isPresent());
assertFalse(DateUtils.between(startDay, null).isPresent());
assertFalse(DateUtils.between(null, endDay).isPresent());
assertEquals(30, DateUtils.between(startDay, endDay).get().getDays()); // Note that it is not 31
LocalDateTime startDate = LocalDateTime.of(2015, 8, 1, 01, 23, 31);
LocalDateTime endDate = LocalDateTime.of(2015, 8, 31, 00, 23, 31);
assertTrue(DateUtils.between(startDate, endDate).isPresent());
assertFalse(DateUtils.between(startDate, null).isPresent());
assertFalse(DateUtils.between(null, endDate).isPresent());
assertEquals(29, DateUtils.between(startDate, endDate).get().toDays()); // Note that it is not 30
assertTrue(DateUtils.isWeekend(LocalDate.of(2015, 8, 29)));
assertFalse(DateUtils.isWeekend(LocalDate.of(2015, 8, 28)));
}
}
```
|
Please help me generate a test for this class.
|
```package sample.util;
import java.time.DayOfWeek;
import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.Period;
import java.time.Year;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalQuery;
import java.util.Date;
import java.util.Optional;
import org.apache.commons.lang3.StringUtils;
import org.springframework.util.Assert;
/**
* Represents a frequently used date/time utility.
*/
public abstract class DateUtils {
private static WeekendQuery WeekendQuery = new WeekendQuery();
/** Converts a given string (YYYY-MM-DD) to LocalDate. */
public static LocalDate day(String dayStr) {
return dayOpt(dayStr).orElse(null);
}
/** Converts to LocalDate based on the specified string and format type. */
public static LocalDate day(String dateStr, DateTimeFormatter formatter) {
return dayOpt(dateStr, formatter).orElse(null);
}
/** Converts to LocalDate based on the specified string and format string. */
public static LocalDate day(String dateStr, String format) {
return day(dateStr, DateTimeFormatter.ofPattern(format));
}
  /** Converts a given string (YYYY-MM-DD) to LocalDate. */
public static Optional<LocalDate> dayOpt(String dayStr) {
if (StringUtils.isBlank(dayStr)) {
return Optional.empty();
}
return Optional.of(LocalDate.parse(dayStr.trim(), DateTimeFormatter.ISO_LOCAL_DATE));
}
/** Converts to LocalDate based on the specified string and format type. */
public static Optional<LocalDate> dayOpt(String dateStr, DateTimeFormatter formatter) {
if (StringUtils.isBlank(dateStr)) {
return Optional.empty();
}
return Optional.of(LocalDate.parse(dateStr.trim(), formatter));
}
/** Converts to LocalDate based on the specified string and format string. */
public static Optional<LocalDate> dayOpt(String dateStr, String format) {
return dayOpt(dateStr, DateTimeFormatter.ofPattern(format));
}
/** Converts from Date to LocalDateTime. */
public static LocalDateTime date(Date date) {
return LocalDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault());
}
/** Converts from LocalDateTime to Date. */
public static Date date(LocalDateTime date) {
return Date.from(date.atZone(ZoneId.systemDefault()).toInstant());
}
/** Converts to LocalDateTime based on the specified string and format type. */
public static LocalDateTime date(String dateStr, DateTimeFormatter formatter) {
return dateOpt(dateStr, formatter).orElse(null);
}
/**
* Converts to LocalDateTime based on the specified string and format string.
*/
public static LocalDateTime date(String dateStr, String format) {
return date(dateStr, DateTimeFormatter.ofPattern(format));
}
/** Converts from Date to LocalDateTime. */
public static Optional<LocalDateTime> dateOpt(Date date) {
return date == null ? Optional.empty() : Optional.of(date(date));
}
/** Converts from LocalDateTime to Date. */
public static Optional<Date> dateOpt(LocalDateTime date) {
return date == null ? Optional.empty() : Optional.of(date(date));
}
/** Converts to LocalDateTime based on the specified string and format type. */
public static Optional<LocalDateTime> dateOpt(String dateStr, DateTimeFormatter formatter) {
if (StringUtils.isBlank(dateStr))
return Optional.empty();
return Optional.of(LocalDateTime.parse(dateStr.trim(), formatter));
}
/** Converts to LocalDateTime based on the specified string and format string. */
public static Optional<LocalDateTime> dateOpt(String dateStr, String format) {
return dateOpt(dateStr, DateTimeFormatter.ofPattern(format));
}
/** Converts the given date to LocalDateTime. */
public static LocalDateTime dateByDay(LocalDate day) {
return dateByDayOpt(day).orElse(null);
}
/** Converts the given date to LocalDateTime. */
public static Optional<LocalDateTime> dateByDayOpt(LocalDate day) {
return Optional.ofNullable(day).map((v) -> v.atStartOfDay());
}
/**
 * Returns 23:59:59 on the specified date, i.e. one second before the start
 * of the following day.
 */
public static LocalDateTime dateTo(LocalDate day) {
return dateToOpt(day).orElse(null);
}
/**
 * Returns 23:59:59 on the specified date, i.e. one second before the start
 * of the following day.
 */
public static Optional<LocalDateTime> dateToOpt(LocalDate day) {
return Optional.ofNullable(day).map((v) -> v.atTime(23, 59, 59));
}
/**
 * Formats the given date as an ISO string (YYYY-MM-DD).
 */
public static String dayFormat(LocalDate day) {
return dayFormatOpt(day).orElse(null);
}
/**
 * Formats the given date as an ISO string (YYYY-MM-DD).
 */
public static Optional<String> dayFormatOpt(LocalDate day) {
return Optional.ofNullable(day).map((v) -> v.format(DateTimeFormatter.ISO_LOCAL_DATE));
}
/**
 * Formats the given LocalDateTime as a string using the specified formatter.
 */
public static String dateFormat(LocalDateTime date, DateTimeFormatter formatter) {
return dateFormatOpt(date, formatter).orElse(null);
}
/**
 * Formats the given LocalDateTime as a string using the specified formatter.
 */
public static Optional<String> dateFormatOpt(LocalDateTime date, DateTimeFormatter formatter) {
return Optional.ofNullable(date).map((v) -> v.format(formatter));
}
/**
 * Formats the given LocalDateTime as a string using the specified format
 * pattern.
 */
public static String dateFormat(LocalDateTime date, String format) {
return dateFormatOpt(date, format).orElse(null);
}
/**
 * Formats the given LocalDateTime as a string using the specified format
 * pattern.
 */
public static Optional<String> dateFormatOpt(LocalDateTime date, String format) {
return Optional.ofNullable(date).map((v) -> v.format(DateTimeFormatter.ofPattern(format)));
}
/** Get the date interval. */
public static Optional<Period> between(LocalDate start, LocalDate end) {
if (start == null || end == null) {
return Optional.empty();
}
return Optional.of(Period.between(start, end));
}
/** Get the interval between LocalDateTime. */
public static Optional<Duration> between(LocalDateTime start, LocalDateTime end) {
if (start == null || end == null) {
return Optional.empty();
}
return Optional.of(Duration.between(start, end));
}
/** true if targetDay <= baseDay */
public static boolean isBeforeEquals(LocalDate baseDay, LocalDate targetDay) {
return targetDay.isBefore(baseDay) || targetDay.isEqual(baseDay);
}
/** true if baseDay <= targetDay */
public static boolean isAfterEquals(LocalDate baseDay, LocalDate targetDay) {
return targetDay.isAfter(baseDay) || targetDay.isEqual(baseDay);
}
/** true if targetDate <= baseDate */
public static boolean isBeforeEquals(LocalDateTime baseDate, LocalDateTime targetDate) {
return targetDate.isBefore(baseDate) || targetDate.isEqual(baseDate);
}
/** true if baseDate <= targetDate */
public static boolean isAfterEquals(LocalDateTime baseDate, LocalDateTime targetDate) {
return targetDate.isAfter(baseDate) || targetDate.isEqual(baseDate);
}
/** true if targetDay is included in the period */
public static boolean includes(LocalDate targetDay, LocalDate fromDay, LocalDate toDay) {
return isAfterEquals(fromDay, targetDay) && isBeforeEquals(toDay, targetDay);
}
/** true if targetDate is included in the period */
public static boolean includes(LocalDateTime targetDate, LocalDateTime fromDate, LocalDateTime toDate) {
return isAfterEquals(fromDate, targetDate) && isBeforeEquals(toDate, targetDate);
}
/**
 * Determines whether the specified day is a weekend (Saturday or Sunday).
 * The argument must not be null.
 */
public static boolean isWeekend(LocalDate day) {
Assert.notNull(day, "day is required.");
return day.query(WEEKEND_QUERY);
}
/** Get the last day of the designated year. */
public static LocalDate dayTo(int year) {
return LocalDate.ofYearDay(year, Year.of(year).isLeap() ? 366 : 365);
}
/** {@code TemporalQuery<Boolean>} for weekend determination. */
public static class WeekendQuery implements TemporalQuery<Boolean> {
@Override
public Boolean queryFrom(TemporalAccessor temporal) {
var dayOfWeek = DayOfWeek.of(temporal.get(ChronoField.DAY_OF_WEEK));
return dayOfWeek == DayOfWeek.SATURDAY || dayOfWeek == DayOfWeek.SUNDAY;
}
}
}
```
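A few helpers (dayTo, the isBeforeEquals/isAfterEquals comparisons, includes, and the legacy Date round-trip) are not exercised by the test above; a supplementary sketch under the same JUnit 5 conventions could look like this (the class name DateUtilsAdditionalTest is ours, not from the source):
```package sample.util;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.Date;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class DateUtilsAdditionalTest {
    @Test
    public void checkYearBoundary() {
        // dayTo returns December 31st of the given year, leap year or not
        assertEquals(LocalDate.of(2016, 12, 31), DateUtils.dayTo(2016));
        assertEquals(LocalDate.of(2015, 12, 31), DateUtils.dayTo(2015));
    }
    @Test
    public void checkComparisons() {
        LocalDate base = LocalDate.of(2015, 8, 29);
        // equal days count as both before-or-equal and after-or-equal
        assertTrue(DateUtils.isBeforeEquals(base, base));
        assertTrue(DateUtils.isAfterEquals(base, base));
        assertTrue(DateUtils.includes(base, LocalDate.of(2015, 8, 1), LocalDate.of(2015, 8, 31)));
        assertFalse(DateUtils.includes(LocalDate.of(2015, 9, 1), LocalDate.of(2015, 8, 1), LocalDate.of(2015, 8, 31)));
    }
    @Test
    public void checkDateRoundTrip() {
        // both conversions use the system default zone, so the round trip is lossless
        LocalDateTime original = LocalDateTime.of(2015, 8, 29, 1, 23, 31);
        Date legacy = DateUtils.date(original);
        assertEquals(original, DateUtils.date(legacy));
    }
}
```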
|
```package org.camelcookbook.parallelprocessing.endpointconsumers;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
/**
* Test class that exercises parallel consumption from a SEDA endpoint.
*/
public class EndpointConsumersSedaTest extends CamelTestSupport {
@Override
public RouteBuilder createRouteBuilder() {
return new EndpointConsumersSedaRoute();
}
@Test
public void testParallelConsumption() throws InterruptedException {
final int messageCount = 100;
MockEndpoint mockOut = getMockEndpoint("mock:out");
mockOut.setExpectedMessageCount(messageCount);
mockOut.setResultWaitTime(5000);
for (int i = 0; i < messageCount; i++) {
template.sendBody("seda:in", "Message[" + i + "]");
}
assertMockEndpointsSatisfied();
}
}
```
|
Please help me generate a test for this class.
|
```package org.camelcookbook.parallelprocessing.endpointconsumers;
import org.apache.camel.builder.RouteBuilder;
/**
* Route that demonstrates increasing the number of consumers on a SEDA endpoint.
*/
public class EndpointConsumersSedaRoute extends RouteBuilder {
@Override
public void configure() throws Exception {
from("seda:in?concurrentConsumers=10")
.delay(200)
.log("Processing ${body}:${threadName}")
.to("mock:out");
}
}
```
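To verify that the ten concurrent consumers actually share the work, a companion sketch could record the name of each delivering thread (the class name and the more-than-one-thread assertion are ours, and the assertion is inherently timing-sensitive):
```package org.camelcookbook.parallelprocessing.endpointconsumers;
import java.util.Set;
import java.util.concurrent.ConcurrentSkipListSet;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
public class EndpointConsumersSedaThreadsTest extends CamelTestSupport {
    @Override
    public RouteBuilder createRouteBuilder() {
        return new EndpointConsumersSedaRoute();
    }
    @Test
    public void testMessagesSpreadAcrossConsumerThreads() throws InterruptedException {
        final int messageCount = 50;
        MockEndpoint mockOut = getMockEndpoint("mock:out");
        mockOut.setExpectedMessageCount(messageCount);
        mockOut.setResultWaitTime(10000);
        // record the name of every thread that delivers a message to the mock
        final Set<String> threadNames = new ConcurrentSkipListSet<>();
        mockOut.whenAnyExchangeReceived(exchange -> threadNames.add(Thread.currentThread().getName()));
        for (int i = 0; i < messageCount; i++) {
            template.sendBody("seda:in", "Message[" + i + "]");
        }
        assertMockEndpointsSatisfied();
        // with 10 concurrent consumers and a 200ms delay per message,
        // a single thread could not have kept up
        assertTrue("expected several consumer threads", threadNames.size() > 1);
    }
}
```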
|
```package org.camelcookbook.routing.multicast;
import org.apache.camel.EndpointInject;
import org.apache.camel.Produce;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
public class MulticastExceptionHandlingInStrategyTest extends CamelTestSupport {
public static final String MESSAGE_BODY = "Message to be multicast";
@Produce(uri = "direct:start")
protected ProducerTemplate template;
@EndpointInject(uri = "mock:first")
private MockEndpoint mockFirst;
@EndpointInject(uri = "mock:second")
private MockEndpoint mockSecond;
@EndpointInject(uri = "mock:afterMulticast")
private MockEndpoint afterMulticast;
@EndpointInject(uri = "mock:exceptionHandler")
private MockEndpoint exceptionHandler;
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new MulticastExceptionHandlingInStrategyRoute();
}
@Test
public void testMessageRoutedToMulticastEndpoints() throws InterruptedException {
mockFirst.setExpectedMessageCount(1);
mockFirst.message(0).body().isEqualTo(MESSAGE_BODY);
mockSecond.setExpectedMessageCount(1);
afterMulticast.setExpectedMessageCount(1);
afterMulticast.message(0)
.predicate().simple("${header.multicast_exception} != null");
exceptionHandler.setExpectedMessageCount(1);
String response = (String) template.requestBody(MESSAGE_BODY);
assertEquals("Oops,All OK here", response);
assertMockEndpointsSatisfied();
}
}
```
|
Please help me generate a test for this class.
|
```package org.camelcookbook.routing.multicast;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
/**
* Multicast example with exceptions handled in the AggregationStrategy.
*/
public class MulticastExceptionHandlingInStrategyRoute extends RouteBuilder {
@Override
public void configure() throws Exception {
from("direct:start")
.multicast().aggregationStrategy(new ExceptionHandlingAggregationStrategy())
.to("direct:first")
.to("direct:second")
.end()
.log("continuing with ${body}")
.to("mock:afterMulticast")
.transform(body()); // copy the In message to the Out message; this will become the route response
from("direct:first")
.onException(Exception.class)
.log("Caught exception")
.to("mock:exceptionHandler")
.transform(constant("Oops"))
.end()
.to("mock:first")
.process(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
throw new IllegalStateException("something went horribly wrong");
}
});
from("direct:second")
.to("mock:second")
.transform(constant("All OK here"));
}
}
```
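The ExceptionHandlingAggregationStrategy referenced by this route is not shown here. A minimal sketch consistent with the test's expectations (comma-joined branch responses and a multicast_exception header) might look like the following; the actual cookbook implementation may differ:
```package org.camelcookbook.routing.multicast;
import org.apache.camel.Exchange;
import org.apache.camel.processor.aggregate.AggregationStrategy;
public class ExceptionHandlingAggregationStrategy implements AggregationStrategy {
    @Override
    public Exchange aggregate(Exchange oldExchange, Exchange newExchange) {
        // swallow a failed branch's exception, but flag it for downstream steps
        if (newExchange.getException() != null) {
            newExchange.getIn().setHeader("multicast_exception", newExchange.getException());
            newExchange.setException(null);
        }
        if (oldExchange == null) {
            return newExchange; // the first response becomes the seed
        }
        // join the branch responses and carry any exception flag forward
        String joined = oldExchange.getIn().getBody(String.class) + ","
            + newExchange.getIn().getBody(String.class);
        oldExchange.getIn().setBody(joined);
        Object flagged = newExchange.getIn().getHeader("multicast_exception");
        if (flagged != null) {
            oldExchange.getIn().setHeader("multicast_exception", flagged);
        }
        return oldExchange;
    }
}
```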
|
```package org.camelcookbook.splitjoin.aggregateintervals;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
/**
* Test class that demonstrates aggregation using completion intervals.
*/
public class AggregateCompletionIntervalTest extends CamelTestSupport {
@Override
public RouteBuilder createRouteBuilder() {
return new AggregateCompletionIntervalRoute();
}
@Test
public void testAggregation() throws InterruptedException {
MockEndpoint mockOut = getMockEndpoint("mock:out");
mockOut.setMinimumExpectedMessageCount(6);
sendAndSleep("direct:in", "One", "group", "odd");
sendAndSleep("direct:in", "Two", "group", "even");
sendAndSleep("direct:in", "Three", "group", "odd");
sendAndSleep("direct:in", "Four", "group", "even");
sendAndSleep("direct:in", "Five", "group", "odd");
sendAndSleep("direct:in", "Six", "group", "even");
sendAndSleep("direct:in", "Seven", "group", "odd");
sendAndSleep("direct:in", "Eight", "group", "even");
sendAndSleep("direct:in", "Nine", "group", "odd");
sendAndSleep("direct:in", "Ten", "group", "even");
assertMockEndpointsSatisfied();
}
private void sendAndSleep(String endpointUri, String body, String headerName, String headerValue) throws InterruptedException {
template.sendBodyAndHeader(endpointUri, body, headerName, headerValue);
Thread.sleep(100);
}
}
```
|
Please help me generate a test for this class.
|
```package org.camelcookbook.splitjoin.aggregateintervals;
import org.apache.camel.builder.RouteBuilder;
import org.camelcookbook.splitjoin.aggregate.SetAggregationStrategy;
class AggregateCompletionIntervalRoute extends RouteBuilder {
@Override
public void configure() throws Exception {
from("direct:in")
.log("${threadName} - ${body}")
.aggregate(header("group"), new SetAggregationStrategy())
.completionSize(10).completionInterval(400)
.log("${threadName} - out")
.delay(500)
.to("mock:out")
.end();
}
}
```
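The SetAggregationStrategy imported above is defined elsewhere in the cookbook. A minimal sketch consistent with its use here, collecting correlated message bodies into a Set, might be:
```package org.camelcookbook.splitjoin.aggregate;
import java.util.HashSet;
import java.util.Set;
import org.apache.camel.Exchange;
import org.apache.camel.processor.aggregate.AggregationStrategy;
public class SetAggregationStrategy implements AggregationStrategy {
    @Override
    public Exchange aggregate(Exchange oldExchange, Exchange newExchange) {
        String body = newExchange.getIn().getBody(String.class);
        if (oldExchange == null) {
            // the first message for this correlation key seeds the Set
            Set<String> set = new HashSet<>();
            set.add(body);
            newExchange.getIn().setBody(set);
            return newExchange;
        }
        @SuppressWarnings("unchecked")
        Set<String> set = oldExchange.getIn().getBody(Set.class);
        set.add(body);
        return oldExchange;
    }
}
```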
|
```package org.camelcookbook.extend.binding;
import org.junit.Assert;
import org.junit.Test;
/**
* Unit test for {@link MyBean}
*/
public class MyBeanTest {
@Test
public void testHello() {
MyBean bean = new MyBean();
final String result = bean.sayHello("Scott", false);
Assert.assertEquals("Hello Scott", result);
}
@Test
public void testHelloHipster() {
MyBean bean = new MyBean();
final String result = bean.sayHello("Scott", true);
Assert.assertEquals("Yo Scott", result);
}
}
```
|
Please help me generate a test for this class.
|
```package org.camelcookbook.extend.binding;
public class MyBean {
public String sayHello(String name, boolean hipster) {
return (hipster) ? ("Yo " + name) : ("Hello " + name);
}
}
```
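Given the package name, MyBean is presumably meant to be invoked through Camel's bean parameter binding; a hypothetical route sketch (the class name MyBeanRoute and the endpoint URIs are ours) could look like this:
```package org.camelcookbook.extend.binding;
import org.apache.camel.builder.RouteBuilder;
public class MyBeanRoute extends RouteBuilder {
    @Override
    public void configure() throws Exception {
        from("direct:start")
            // bind the message body to 'name' and a header to 'hipster'
            .bean(MyBean.class, "sayHello(${body}, ${header.hipster})")
            .to("mock:result");
    }
}
```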
|
```package org.camelcookbook.extend.predicate;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
public class PredicateInlineTest extends CamelTestSupport {
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new MyPredicateInlineRoute();
}
@Test
public void testMyPredicateInline() throws Exception {
final String newYork = "<someXml><city>New York</city></someXml>";
final String boston = "<someXml><city>Boston</city></someXml>";
getMockEndpoint("mock:boston").expectedBodiesReceived(boston);
template.sendBody("direct:start", newYork);
template.sendBody("direct:start", boston);
assertMockEndpointsSatisfied();
}
}
```
|
Please help me generate a test for this class.
|
```package org.camelcookbook.extend.predicate;
public class MyPredicate {
public boolean isWhatIWant(String body) {
return ((body != null) && body.contains("Boston"));
}
}
```
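The MyPredicateInlineRoute that the test drives is not shown. A minimal sketch consistent with the test's expectations, routing only bodies matching the predicate to mock:boston, might be (the actual route may differ):
```package org.camelcookbook.extend.predicate;
import org.apache.camel.builder.RouteBuilder;
public class MyPredicateInlineRoute extends RouteBuilder {
    @Override
    public void configure() throws Exception {
        from("direct:start")
            .choice()
                // bean binding passes the message body to isWhatIWant(String)
                .when().method(MyPredicate.class, "isWhatIWant")
                    .to("mock:boston")
            .end();
    }
}
```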
|
```package org.camelcookbook.splitjoin.splitaggregate;
import java.util.Arrays;
import java.util.Collections;
import java.util.Set;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
/**
* Demonstrates splitting a payload, processing each fragment, and re-aggregating the results.
*/
public class SplitAggregateExceptionHandlingTest extends CamelTestSupport {
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new SplitAggregateExceptionHandlingRoute();
}
@Test
public void testHandlesException() throws Exception {
String[] array = new String[]{"one", "two", "three"};
MockEndpoint mockOut = getMockEndpoint("mock:out");
mockOut.expectedMessageCount(1);
template.sendBody("direct:in", array);
assertMockEndpointsSatisfied();
Exchange exchange = mockOut.getReceivedExchanges().get(0);
@SuppressWarnings("unchecked")
Set<String> backendResponses = Collections.checkedSet(exchange.getIn().getBody(Set.class), String.class);
assertTrue(backendResponses.containsAll(Arrays.asList("Processed: one", "Failed: two", "Processed: three")));
}
}
```
|
Please help me generate a test for this class.
|
```package org.camelcookbook.splitjoin.splitaggregate;
import org.apache.camel.builder.RouteBuilder;
public class SplitAggregateExceptionHandlingRoute extends RouteBuilder {
@Override
public void configure() throws Exception {
from("direct:in")
.split(body(), new ExceptionHandlingSetAggregationStrategy())
.inOut("direct:someBackEnd")
.end()
.to("mock:out");
from("direct:someBackEnd")
.choice()
.when(simple("${header.CamelSplitIndex} == 1"))
.throwException(new IllegalStateException())
.otherwise()
.transform(simple("Processed: ${body}"))
.end();
}
}
```
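The ExceptionHandlingSetAggregationStrategy is not shown here. A minimal sketch consistent with the test's expected "Processed:"/"Failed:" entries might look like the following; the cookbook's actual class may differ:
```package org.camelcookbook.splitjoin.splitaggregate;
import java.util.HashSet;
import java.util.Set;
import org.apache.camel.Exchange;
import org.apache.camel.processor.aggregate.AggregationStrategy;
public class ExceptionHandlingSetAggregationStrategy implements AggregationStrategy {
    @Override
    public Exchange aggregate(Exchange oldExchange, Exchange newExchange) {
        String body;
        if (newExchange.getException() != null) {
            // swallow the fragment's exception and record the failure instead
            body = "Failed: " + newExchange.getIn().getBody(String.class);
            newExchange.setException(null);
        } else {
            body = newExchange.getIn().getBody(String.class);
        }
        if (oldExchange == null) {
            Set<String> results = new HashSet<>();
            results.add(body);
            newExchange.getIn().setBody(results);
            return newExchange;
        }
        @SuppressWarnings("unchecked")
        Set<String> results = oldExchange.getIn().getBody(Set.class);
        results.add(body);
        return oldExchange;
    }
}
```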
|
```package org.camelcookbook.transactions.rollback;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.spring.CamelSpringTestSupport;
import org.camelcookbook.transactions.dao.AuditLogDao;
import org.junit.Test;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
* Demonstrates the behavior of marking a transaction for rollback, while continuing processing.
*/
public class RollbackMarkRollbackOnlySpringTest extends CamelSpringTestSupport {
@Override
protected AbstractApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("META-INF/spring/rollbackMarkRollbackOnly-context.xml");
}
@Test
public void testTransactedRollback() throws InterruptedException {
AuditLogDao auditLogDao = getMandatoryBean(AuditLogDao.class, "auditLogDao");
String message = "this message will explode";
assertEquals(0, auditLogDao.getAuditCount(message));
// the message does not proceed further down the route after the rollback statement
MockEndpoint mockCompleted = getMockEndpoint("mock:out");
mockCompleted.setExpectedMessageCount(0);
// no exception is thrown despite the transaction rolling back
template.sendBody("direct:transacted", message);
assertMockEndpointsSatisfied();
assertEquals(0, auditLogDao.getAuditCount(message)); // the insert was rolled back
}
}
```
|
Please help me generate a test for this class.
|
```package org.camelcookbook.transactions.rollback;
import org.apache.camel.builder.RouteBuilder;
/**
* Demonstrates the use of the markRollbackOnly statement to roll back the transaction without throwing an exception.
*/
public class RollbackMarkRollbackOnlyRoute extends RouteBuilder {
@Override
public void configure() throws Exception {
from("direct:transacted")
.transacted()
.log("Processing message: ${body}")
.setHeader("message", body())
.to("sql:insert into audit_log (message) values (:#message)")
.choice()
.when(simple("${body} contains 'explode'"))
.log("Message cannot be processed further - rolling back insert")
.markRollbackOnly()
.otherwise()
.log("Message processed successfully")
.end()
.to("mock:out");
}
}
```
|
```package org.camelcookbook.transactions.rollback;
import javax.sql.DataSource;
import org.apache.camel.CamelContext;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.component.sql.SqlComponent;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.impl.SimpleRegistry;
import org.apache.camel.spring.spi.SpringTransactionPolicy;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.camelcookbook.transactions.dao.AuditLogDao;
import org.camelcookbook.transactions.dao.MessageDao;
import org.camelcookbook.transactions.utils.EmbeddedDataSourceFactory;
import org.junit.Test;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
/**
* Demonstrates the behavior of marking the last transaction for rollback.
*/
public class RollbackMarkRollbackOnlyLastTest extends CamelTestSupport {
private AuditLogDao auditLogDao;
private MessageDao messageDao;
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RollbackMarkRollbackOnlyLastRoute();
}
@Override
protected CamelContext createCamelContext() throws Exception {
SimpleRegistry registry = new SimpleRegistry();
DataSource dataSource = EmbeddedDataSourceFactory.getDataSource("sql/schema.sql");
DataSourceTransactionManager transactionManager = new DataSourceTransactionManager(dataSource);
registry.put("transactionManager", transactionManager);
{
SpringTransactionPolicy propagationRequiresNew = new SpringTransactionPolicy();
propagationRequiresNew.setTransactionManager(transactionManager);
propagationRequiresNew.setPropagationBehaviorName("PROPAGATION_REQUIRES_NEW");
registry.put("PROPAGATION_REQUIRES_NEW", propagationRequiresNew);
}
{
SpringTransactionPolicy propagationRequiresNew2 = new SpringTransactionPolicy();
propagationRequiresNew2.setTransactionManager(transactionManager);
propagationRequiresNew2.setPropagationBehaviorName("PROPAGATION_REQUIRES_NEW");
registry.put("PROPAGATION_REQUIRES_NEW-2", propagationRequiresNew2);
}
auditLogDao = new AuditLogDao(dataSource);
messageDao = new MessageDao(dataSource);
CamelContext camelContext = new DefaultCamelContext(registry);
SqlComponent sqlComponent = new SqlComponent();
sqlComponent.setDataSource(dataSource);
camelContext.addComponent("sql", sqlComponent);
return camelContext;
}
@Test
public void testFailure() throws InterruptedException {
String message = "this message will explode";
assertEquals(0, auditLogDao.getAuditCount(message));
// the outer route will continue to run as though nothing happened
MockEndpoint mockOut1 = getMockEndpoint("mock:out1");
mockOut1.setExpectedMessageCount(1);
mockOut1.message(0).body().isEqualTo(message);
// processing will not have reached the mock endpoint in the sub-route
MockEndpoint mockOut2 = getMockEndpoint("mock:out2");
mockOut2.setExpectedMessageCount(0);
template.sendBody("direct:route1", message);
assertMockEndpointsSatisfied();
assertEquals(0, auditLogDao.getAuditCount(message));
assertEquals(1, messageDao.getMessageCount(message));
}
}
```
|
Please help me generate a test for this class.
|
```package org.camelcookbook.transactions.rollback;
import org.apache.camel.builder.RouteBuilder;
/**
* Demonstrates the behavior of marking the last transaction for rollback.
*/
public class RollbackMarkRollbackOnlyLastRoute extends RouteBuilder {
@Override
public void configure() {
from("direct:route1").id("route1")
.setHeader("message", simple("${body}"))
.policy("PROPAGATION_REQUIRES_NEW").id("tx1")
.to("sql:insert into messages (message) values (:#message)")
.to("direct:route2")
.to("mock:out1")
.end();
from("direct:route2").id("route2")
.policy("PROPAGATION_REQUIRES_NEW-2").id("tx2")
.to("sql:insert into audit_log (message) values (:#message)")
.choice()
.when(simple("${body} contains 'explode'"))
.log("Message cannot be processed further - rolling back insert")
.markRollbackOnlyLast()
.otherwise()
.log("Message processed successfully")
.end()
.to("mock:out2")
.end();
}
}
```
|
```package org.camelcookbook.routing.throttler;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
public class ThrottlerDynamicTest extends CamelTestSupport {
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new ThrottlerDynamicRoute();
}
@Test
public void testThrottleDynamic() throws Exception {
final int throttleRate = 3;
final int messageCount = throttleRate + 2;
getMockEndpoint("mock:unthrottled").expectedMessageCount(messageCount);
getMockEndpoint("mock:throttled").expectedMessageCount(throttleRate);
getMockEndpoint("mock:after").expectedMessageCount(throttleRate);
for (int i = 0; i < messageCount; i++) {
Exchange exchange = getMandatoryEndpoint("direct:start").createExchange();
{
Message in = exchange.getIn();
in.setHeader("throttleRate", throttleRate);
in.setBody("Camel Rocks");
}
template.asyncSend("direct:start", exchange);
}
// the test will stop once all of the conditions have been met
// the only way this set of conditions can happen is if 2
// messages are currently suspended for throttling
assertMockEndpointsSatisfied();
}
}
```
|
Please help me generate a test for this class.
|
```package org.camelcookbook.routing.throttler;
import org.apache.camel.builder.RouteBuilder;
public class ThrottlerDynamicRoute extends RouteBuilder {
@Override
public void configure() throws Exception {
from("direct:start")
.to("mock:unthrottled")
.throttle(header("ThrottleRate")).timePeriodMillis(10000)
.to("mock:throttled")
.end()
.to("mock:after");
}
}
```
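Because the throttle expression is re-evaluated for every exchange, each message can carry its own rate ceiling; note too that Camel message headers are case-insensitive, which is why the test's "throttleRate" header satisfies the route's header("ThrottleRate") lookup. A hypothetical companion test (the class name is ours):
```package org.camelcookbook.routing.throttler;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
public class ThrottlerPerMessageRateTest extends CamelTestSupport {
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new ThrottlerDynamicRoute();
    }
    @Test
    public void testEachMessageCarriesItsOwnRate() throws Exception {
        getMockEndpoint("mock:throttled").expectedMessageCount(1);
        // the rate ceiling travels with the message itself
        template.sendBodyAndHeader("direct:start", "slow lane", "ThrottleRate", 1);
        assertMockEndpointsSatisfied();
    }
}
```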
|
```package org.camelcookbook.examples.testing.java;
import org.apache.camel.builder.RouteBuilder;
/**
* Route builder that performs a slow transformation on the body of the exchange.
*/
public class SlowlyTransformingRoute extends RouteBuilder {
private String sourceUri;
private String targetUri;
public void setSourceUri(String sourceUri) {
this.sourceUri = sourceUri;
}
public void setTargetUri(String targetUri) {
this.targetUri = targetUri;
}
@Override
public void configure() throws Exception {
from(sourceUri)
.to("seda:transformBody");
from("seda:transformBody?concurrentConsumers=15")
.transform(simple("Modified: ${body}"))
.delay(100) // simulate a slow transformation
.to("seda:sendTransformed");
from("seda:sendTransformed")
.resequence().simple("${header.mySequenceId}").stream()
.to(targetUri);
}
}
```
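A test sketch for this route (the class name and endpoint URIs are ours) could wire the configurable URIs to direct/mock endpoints and assert that the stream resequencer restores the order given by the mySequenceId header:
```package org.camelcookbook.examples.testing.java;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
public class SlowlyTransformingRouteTest extends CamelTestSupport {
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        SlowlyTransformingRoute route = new SlowlyTransformingRoute();
        route.setSourceUri("direct:in");
        route.setTargetUri("mock:out");
        return route;
    }
    @Test
    public void testBodiesTransformedInOriginalOrder() throws InterruptedException {
        final int messageCount = 10;
        MockEndpoint mockOut = getMockEndpoint("mock:out");
        mockOut.setExpectedMessageCount(messageCount);
        // the resequencer should deliver in mySequenceId order
        // despite the 15 parallel transformer threads
        for (int i = 0; i < messageCount; i++) {
            mockOut.message(i).body().isEqualTo("Modified: Message[" + i + "]");
        }
        for (int i = 0; i < messageCount; i++) {
            template.sendBodyAndHeader("direct:in", "Message[" + i + "]", "mySequenceId", i);
        }
        assertMockEndpointsSatisfied();
    }
}
```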
|
Please help me generate a test for this class.
|
```package org.camelcookbook.monitoring.log;
import org.apache.camel.builder.RouteBuilder;
public class LogRoute extends RouteBuilder {
@Override
public void configure() throws Exception {
from("direct:start")
.to("log:myLog")
.to("mock:result");
from("direct:startAll")
.to("log:myLog?level=INFO&showAll=true&multiline=true")
.to("mock:result");
}
}
```
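A minimal test sketch for LogRoute (the class name is ours): the log endpoint only writes to the logger, so both routes should deliver the unchanged body to mock:result.
```package org.camelcookbook.monitoring.log;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
public class LogRouteTest extends CamelTestSupport {
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new LogRoute();
    }
    @Test
    public void testLogIsPassThrough() throws InterruptedException {
        MockEndpoint mockResult = getMockEndpoint("mock:result");
        mockResult.expectedBodiesReceived("Camel Rocks", "Camel Rocks");
        // the log endpoint must not alter the message on either route
        template.sendBody("direct:start", "Camel Rocks");
        template.sendBody("direct:startAll", "Camel Rocks");
        assertMockEndpointsSatisfied();
    }
}
```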
|