gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/**
 * Copyright 2011-2019 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.testdriver.directio;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.io.Text;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

import com.asakusafw.runtime.directio.DataFormat;
import com.asakusafw.runtime.directio.hadoop.HadoopDataSource;
import com.asakusafw.runtime.directio.hadoop.HadoopDataSourceProfile;
import com.asakusafw.testdriver.core.DataModelReflection;
import com.asakusafw.testdriver.core.DataModelSource;
import com.asakusafw.testdriver.core.SpiExporterRetriever;

/**
 * Test for {@link DirectFileOutputRetriever}.
 *
 * <p>Runs every test twice via JUnit's {@link Parameterized} runner: once with
 * {@code MockStreamFormat} and once with {@code MockFileFormat}, so that both
 * stream-based and file-based {@link DataFormat} code paths are exercised.</p>
 *
 * <p>Each test registers a Direct I/O data source rooted at a JUnit
 * {@link TemporaryFolder}, so all file operations are confined to a scratch
 * directory that is deleted after the test.</p>
 */
@RunWith(Parameterized.class)
public class DirectFileOutputRetrieverTest {

    /**
     * Profile context.
     */
    // Builds the Direct I/O profile/configuration consumed by the testee.
    @Rule
    public final ProfileContext profile = new ProfileContext();

    /**
     * A temporary folder.
     */
    // Backing directory for the "root" Hadoop data source in each test.
    @Rule
    public final TemporaryFolder folder = new TemporaryFolder();

    // Data format class under test; injected by the Parameterized runner.
    private final Class<? extends DataFormat<Text>> format;

    /**
     * Returns the parameters.
     * @return the parameters
     */
    @Parameters
    public static List<Object[]> data() {
        return Arrays.asList(new Object[][] {
                { MockStreamFormat.class },
                { MockFileFormat.class },
        });
    }

    /**
     * Creates a new instance.
     * @param format the format.
     */
    public DirectFileOutputRetrieverTest(Class<? extends DataFormat<Text>> format) {
        this.format = format;
    }

    /**
     * truncate.
     * @throws Exception if failed
     */
    @Test
    public void truncate() throws Exception {
        profile.add("root", HadoopDataSource.class, "/");
        profile.add("root", HadoopDataSourceProfile.KEY_PATH,
                folder.getRoot().toURI().toURL().toString());
        profile.put();

        DirectFileOutputRetriever testee = new DirectFileOutputRetriever();
        File file = put("base/hoge.txt", "Hello, world!");
        File deep = put("base/d/e/e/p/hoge.txt", "Hello, world!");
        File outer = put("outer/hoge.txt", "Hello, world!");
        assertThat(file.exists(), is(true));
        assertThat(deep.exists(), is(true));
        assertThat(outer.exists(), is(true));

        testee.truncate(
                new MockOutputDescription("base", "something", format),
                profile.getTextContext());

        // Everything under the output base path ("base"), including nested
        // directories, must be removed; a sibling directory must survive.
        assertThat(file.exists(), is(false));
        assertThat(deep.exists(), is(false));
        assertThat(outer.exists(), is(true));
    }

    /**
     * truncate with placeholders.
     * @throws Exception if failed
     */
    @Test
    public void truncate_placeholders() throws Exception {
        profile.add("root", HadoopDataSource.class, "/");
        profile.add("root", HadoopDataSourceProfile.KEY_PATH,
                folder.getRoot().toURI().toURL().toString());
        profile.put();

        DirectFileOutputRetriever testee = new DirectFileOutputRetriever();
        File file = put("base/hoge.txt", "Hello, world!");
        File deep = put("base/d/e/e/p/hoge.txt", "Hello, world!");
        File outer = put("outer/hoge.txt", "Hello, world!");
        assertThat(file.exists(), is(true));
        assertThat(deep.exists(), is(true));
        assertThat(outer.exists(), is(true));

        // Resource pattern contains an "{id}" placeholder; truncation must
        // still clear the whole base path.
        testee.truncate(
                new MockOutputDescription("base", "output-{id}", format),
                profile.getTextContext());

        assertThat(file.exists(), is(false));
        assertThat(deep.exists(), is(false));
        assertThat(outer.exists(), is(true));
    }

    /**
     * truncate with wildcard.
     * @throws Exception if failed
     */
    @Test
    public void truncate_wildcard() throws Exception {
        profile.add("root", HadoopDataSource.class, "/");
        profile.add("root", HadoopDataSourceProfile.KEY_PATH,
                folder.getRoot().toURI().toURL().toString());
        profile.put();

        DirectFileOutputRetriever testee = new DirectFileOutputRetriever();
        File file = put("base/hoge.txt", "Hello, world!");
        File deep = put("base/d/e/e/p/hoge.txt", "Hello, world!");
        File outer = put("outer/hoge.txt", "Hello, world!");
        assertThat(file.exists(), is(true));
        assertThat(deep.exists(), is(true));
        assertThat(outer.exists(), is(true));

        // Resource pattern contains a "*" wildcard; same expectation as above.
        testee.truncate(
                new MockOutputDescription("base", "output-*", format),
                profile.getTextContext());

        assertThat(file.exists(), is(false));
        assertThat(deep.exists(), is(false));
        assertThat(outer.exists(), is(true));
    }

    /**
     * truncate empty target.
     * @throws Exception if failed
     */
    @Test
    public void truncate_empty() throws Exception {
        profile.add("root", HadoopDataSource.class, "/");
        profile.add("root", HadoopDataSourceProfile.KEY_PATH,
                folder.getRoot().toURI().toURL().toString());
        profile.put();

        // Truncating a path that contains no files must simply succeed.
        DirectFileOutputRetriever testee = new DirectFileOutputRetriever();
        testee.truncate(
                new MockOutputDescription("base", "something", format),
                profile.getTextContext());
    }

    /**
     * truncate with variables in path.
     * @throws Exception if failed
     */
    @Test
    public void truncate_variable() throws Exception {
        profile.add("root", HadoopDataSource.class, "/");
        profile.add("root", HadoopDataSourceProfile.KEY_PATH,
                folder.getRoot().toURI().toURL().toString());
        profile.put();

        DirectFileOutputRetriever testee = new DirectFileOutputRetriever();
        File file = put("base/hoge.txt", "Hello, world!");
        File deep = put("base/d/e/e/p/hoge.txt", "Hello, world!");
        File outer = put("outer/hoge.txt", "Hello, world!");
        assertThat(file.exists(), is(true));
        assertThat(deep.exists(), is(true));
        assertThat(outer.exists(), is(true));

        // "${target}" must be resolved to "base" via the supplied variables.
        testee.truncate(
                new MockOutputDescription("${target}", "something", format),
                profile.getTextContext("target", "base"));

        assertThat(file.exists(), is(false));
        assertThat(deep.exists(), is(false));
        assertThat(outer.exists(), is(true));
    }

    /**
     * simple input.
     * @throws Exception if failed
     */
    @Test
    public void createInput() throws Exception {
        profile.add("root", HadoopDataSource.class, "/");
        profile.add("root", HadoopDataSourceProfile.KEY_PATH,
                folder.getRoot().toURI().toURL().toString());
        profile.put();
        put("base/output.txt", "Hello, world!");

        DirectFileOutputRetriever testee = new DirectFileOutputRetriever();
        DataModelSource input = testee.createSource(
                new MockTextDefinition(),
                new MockOutputDescription("base", "output.txt", format),
                profile.getTextContext());
        List<String> list = get(input);
        assertThat(list, is(Arrays.asList("Hello, world!")));
    }

    /**
     * output multiple records.
     * @throws Exception if failed
     */
    @Test
    public void createInput_multirecord() throws Exception {
        profile.add("root", HadoopDataSource.class, "/");
        profile.add("root", HadoopDataSourceProfile.KEY_PATH,
                folder.getRoot().toURI().toURL().toString());
        profile.put();
        put("base/output.txt", "Hello1", "Hello2", "Hello3");

        DirectFileOutputRetriever testee = new DirectFileOutputRetriever();
        DataModelSource input = testee.createSource(
                new MockTextDefinition(),
                new MockOutputDescription("base", "output.txt", format),
                profile.getTextContext());
        List<String> list = get(input);

        // Record order is not asserted, only membership and count.
        assertThat(list.size(), is(3));
        assertThat(list, hasItem("Hello1"));
        assertThat(list, hasItem("Hello2"));
        assertThat(list, hasItem("Hello3"));
    }

    /**
     * output multiple files.
     * @throws Exception if failed
     */
    @Test
    public void createInput_multifile() throws Exception {
        profile.add("root", HadoopDataSource.class, "/");
        profile.add("root", HadoopDataSourceProfile.KEY_PATH,
                folder.getRoot().toURI().toURL().toString());
        profile.put();
        put("base/output-1.txt", "Hello1");
        put("base/output-2.txt", "Hello2");
        put("base/output-3.txt", "Hello3");

        DirectFileOutputRetriever testee = new DirectFileOutputRetriever();
        // "{value}" placeholder must match all three physical files.
        DataModelSource input = testee.createSource(
                new MockTextDefinition(),
                new MockOutputDescription("base", "{value}.txt", format),
                profile.getTextContext());
        List<String> list = get(input);
        assertThat(list.size(), is(3));
        assertThat(list, hasItem("Hello1"));
        assertThat(list, hasItem("Hello2"));
        assertThat(list, hasItem("Hello3"));
    }

    /**
     * output with variables.
     * @throws Exception if failed
     */
    @Test
    public void createInput_variables() throws Exception {
        // NOTE(review): unlike the other tests this registers the data source
        // under "vars" with path "base" and passes a File for the second
        // profile entry — presumably a ProfileContext overload that sets the
        // data source directory; confirm against ProfileContext.
        profile.add("vars", HadoopDataSource.class, "base");
        profile.add("vars", new File(folder.getRoot(), "testing"));
        profile.put();
        put("testing/output.txt", "Hello, world!");

        DirectFileOutputRetriever testee = new DirectFileOutputRetriever();
        // Both base path and resource pattern contain variables to resolve.
        DataModelSource input = testee.createSource(
                new MockTextDefinition(),
                new MockOutputDescription("${vbase}", "${voutput}.txt", format),
                profile.getTextContext("vbase", "base", "voutput", "output"));
        List<String> list = get(input);
        assertThat(list, is(Arrays.asList("Hello, world!")));
    }

    /**
     * output with placeholders.
     * @throws Exception if failed
     */
    @Test
    public void createInput_placeholders() throws Exception {
        profile.add("root", HadoopDataSource.class, "/");
        profile.add("root", HadoopDataSourceProfile.KEY_PATH,
                folder.getRoot().toURI().toURL().toString());
        profile.put();
        put("base/output-1.txt", "Hello1");
        put("base/output-2.txt", "Hello2");
        put("base/output-3.txt", "Hello3");

        DirectFileOutputRetriever testee = new DirectFileOutputRetriever();
        DataModelSource input = testee.createSource(
                new MockTextDefinition(),
                new MockOutputDescription("base", "output-{id}.txt", format),
                profile.getTextContext());
        List<String> list = get(input);
        assertThat(list.size(), is(3));
        assertThat(list, hasItem("Hello1"));
        assertThat(list, hasItem("Hello2"));
        assertThat(list, hasItem("Hello3"));
    }

    /**
     * output with wildcard.
     * @throws Exception if failed
     */
    @Test
    public void createInput_wildcard() throws Exception {
        profile.add("root", HadoopDataSource.class, "/");
        profile.add("root", HadoopDataSourceProfile.KEY_PATH,
                folder.getRoot().toURI().toURL().toString());
        profile.put();
        put("base/output-1.txt", "Hello1");
        put("base/output-2.txt", "Hello2");
        put("base/output-3.txt", "Hello3");

        DirectFileOutputRetriever testee = new DirectFileOutputRetriever();
        DataModelSource input = testee.createSource(
                new MockTextDefinition(),
                new MockOutputDescription("base", "output-*.txt", format),
                profile.getTextContext());
        List<String> list = get(input);
        assertThat(list.size(), is(3));
        assertThat(list, hasItem("Hello1"));
        assertThat(list, hasItem("Hello2"));
        assertThat(list, hasItem("Hello3"));
    }

    /**
     * configuration is not found.
     * @throws Exception if failed
     */
    // profile.put() is intentionally never called: no configuration exists.
    @Test(expected = IOException.class)
    public void no_config() throws Exception {
        DirectFileOutputRetriever testee = new DirectFileOutputRetriever();
        testee.truncate(
                new MockOutputDescription("base", "something", format),
                profile.getTextContext());
    }

    /**
     * datasource is not found.
     * @throws Exception if failed
     */
    // The only data source is mounted at "other", so "base" has no owner.
    @Test(expected = IOException.class)
    public void no_datasource() throws Exception {
        profile.add("root", HadoopDataSource.class, "other");
        profile.add("root", HadoopDataSourceProfile.KEY_PATH,
                folder.getRoot().toURI().toURL().toString());
        profile.put();
        DirectFileOutputRetriever testee = new DirectFileOutputRetriever();
        testee.truncate(
                new MockOutputDescription("base", "something", format),
                profile.getTextContext());
    }

    /**
     * using SPI.
     * @throws Exception if failed
     */
    // Verifies the retriever is discoverable through the ServiceLoader-based
    // SpiExporterRetriever facade rather than by direct instantiation.
    @Test
    public void spi() throws Exception {
        profile.add("root", HadoopDataSource.class, "/");
        profile.add("root", HadoopDataSourceProfile.KEY_PATH,
                folder.getRoot().toURI().toURL().toString());
        profile.put();
        put("base/output.txt", "Hello, world!");

        SpiExporterRetriever testee = new SpiExporterRetriever(getClass().getClassLoader());
        DataModelSource input = testee.createSource(
                new MockTextDefinition(),
                new MockOutputDescription("base", "output.txt", format),
                profile.getTextContext());
        List<String> list = get(input);
        assertThat(list, is(Arrays.asList("Hello, world!")));
    }

    // Drains the source into a list of record strings, always closing it.
    private List<String> get(DataModelSource input) throws IOException {
        try {
            MockTextDefinition def = new MockTextDefinition();
            List<String> results = new ArrayList<>();
            while (true) {
                DataModelReflection next = input.next();
                if (next == null) {
                    break;
                }
                results.add(def.toObject(next).toString());
            }
            return results;
        } finally {
            input.close();
        }
    }

    // Creates a UTF-8 text file (one line per argument) below the temporary
    // folder, creating intermediate directories as needed.
    private File put(String targetPath, String... contents) throws IOException {
        File target = new File(folder.getRoot(), targetPath);
        target.getParentFile().mkdirs();
        try (PrintWriter w = new PrintWriter(target, "UTF-8")) {
            for (String line : contents) {
                w.println(line);
            }
        }
        return target;
    }
}
/**
 * Copyright (C) 2016 Hyphenate Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.ucai.superwechat.adapter;

import java.util.List;

import com.hyphenate.chat.EMClient;
import com.hyphenate.easeui.utils.EaseUserUtils;
import com.hyphenate.util.EasyUtils;

import cn.ucai.superwechat.R;
import cn.ucai.superwechat.db.InviteMessgeDao;
import cn.ucai.superwechat.domain.InviteMessage;
import cn.ucai.superwechat.domain.InviteMessage.InviteMesageStatus;
import cn.ucai.superwechat.ui.NewFriendsMsgActivity;
import cn.ucai.superwechat.utils.MFGT;

import android.app.Activity;
import android.app.ProgressDialog;
import android.content.ContentValues;
import android.content.Context;
import android.text.TextUtils;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;

/**
 * List adapter for the "new friends / group invitations" screen.
 *
 * <p>Renders one row per {@link InviteMessage} and, for pending requests,
 * wires "agree"/"refuse" buttons that call the Hyphenate (EaseMob) server
 * APIs on a background thread and then persist the new status through
 * {@link InviteMessgeDao}.</p>
 *
 * <p>NOTE(review): {@code context} is cast to {@link Activity} inside the
 * async callbacks — this adapter assumes it is always constructed with an
 * Activity context.</p>
 */
public class NewFriendsMsgAdapter extends ArrayAdapter<InviteMessage> {

    private Context context;
    // DAO used to persist status changes after accept/refuse succeeds.
    private InviteMessgeDao messgeDao;

    public NewFriendsMsgAdapter(Context context, int textViewResourceId, List<InviteMessage> objects) {
        super(context, textViewResourceId, objects);
        this.context = context;
        messgeDao = new InviteMessgeDao(context);
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        final ViewHolder holder;
        if (convertView == null) {
            // First use of this row: inflate and cache the child views.
            holder = new ViewHolder();
            convertView = View.inflate(context, R.layout.em_row_invite_msg, null);
            holder.LayoutInvite = (LinearLayout) convertView.findViewById(R.id.layout_intent);
            holder.avator = (ImageView) convertView.findViewById(R.id.avatar);
            holder.reason = (TextView) convertView.findViewById(R.id.message);
            holder.name = (TextView) convertView.findViewById(R.id.name);
            holder.agree = (Button) convertView.findViewById(R.id.agree);
            holder.status = (TextView) convertView.findViewById(R.id.user_state);
            holder.groupContainer = (LinearLayout) convertView.findViewById(R.id.ll_group);
            holder.groupname = (TextView) convertView.findViewById(R.id.tv_groupName);
            // holder.time = (TextView) convertView.findViewById(R.id.time);
            convertView.setTag(holder);
        } else {
            // Recycled row: reuse the cached holder.
            holder = (ViewHolder) convertView.getTag();
        }

        // Localized labels reused across the status branches below.
        String str1 = context.getResources().getString(R.string.Has_agreed_to_your_friend_request);
        String str2 = context.getResources().getString(R.string.agree);
        String str3 = context.getResources().getString(R.string.Request_to_add_you_as_a_friend);
        String str4 = context.getResources().getString(R.string.Apply_to_the_group_of);
        String str5 = context.getResources().getString(R.string.Has_agreed_to);
        String str6 = context.getResources().getString(R.string.Has_refused_to);
        String str7 = context.getResources().getString(R.string.refuse);
        String str8 = context.getResources().getString(R.string.invite_join_group);
        String str9 = context.getResources().getString(R.string.accept_join_group);
        String str10 = context.getResources().getString(R.string.refuse_join_group);

        final InviteMessage msg = getItem(position);
        if (msg != null) {
            // Reset recycled state; branches below re-enable what they need.
            holder.agree.setVisibility(View.INVISIBLE);
            if (msg.getGroupId() != null) {
                // show group name
                holder.groupContainer.setVisibility(View.VISIBLE);
                holder.groupname.setText(msg.getGroupName());
            } else {
                holder.groupContainer.setVisibility(View.GONE);
            }
            EaseUserUtils.setAppUserAvatarByPath(context, msg.getAvatar(), holder.avator, msg.getGroupId());
            holder.reason.setText(msg.getReason());
            holder.name.setText(msg.getUsernick());
            // holder.time.setText(DateUtils.getTimestampString(new
            // Date(msg.getTime())));

            if (msg.getStatus() == InviteMesageStatus.BEAGREED) {
                // The other party accepted our earlier friend request.
                holder.status.setVisibility(View.INVISIBLE);
                holder.reason.setText(str1);
            } else if (msg.getStatus() == InviteMesageStatus.BEINVITEED
                    || msg.getStatus() == InviteMesageStatus.BEAPPLYED
                    || msg.getStatus() == InviteMesageStatus.GROUPINVITATION) {
                // Pending request: show an enabled "agree" button.
                holder.agree.setVisibility(View.VISIBLE);
                holder.agree.setEnabled(true);
                // holder.agree.setBackgroundResource(android.R.drawable.btn_default);
                holder.agree.setText(str2);
                // NOTE(review): the refuse control is configured but kept
                // GONE here — presumably revealed elsewhere; confirm.
                holder.status.setVisibility(View.GONE);
                holder.status.setEnabled(true);
                holder.status.setBackgroundResource(android.R.drawable.btn_default);
                holder.status.setText(str7);

                // Substitute a default reason text when none was supplied.
                if (msg.getStatus() == InviteMesageStatus.BEINVITEED) {
                    if (msg.getReason() == null) {
                        // use default text
                        holder.reason.setText(str3);
                    }
                } else if (msg.getStatus() == InviteMesageStatus.BEAPPLYED) {
                    // application to join group
                    if (TextUtils.isEmpty(msg.getReason())) {
                        holder.reason.setText(str4 + msg.getGroupName());
                    }
                } else if (msg.getStatus() == InviteMesageStatus.GROUPINVITATION) {
                    if (TextUtils.isEmpty(msg.getReason())) {
                        holder.reason.setText(str8 + msg.getGroupName());
                    }
                }

                // set click listener
                holder.agree.setOnClickListener(new OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        // accept invitation
                        acceptInvitation(holder.agree, holder.status, msg);
                    }
                });
                holder.status.setOnClickListener(new OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        // decline invitation
                        refuseInvitation(holder.agree, holder.status, msg);
                    }
                });
            } else if (msg.getStatus() == InviteMesageStatus.AGREED) {
                // Already handled: show a plain, disabled status label.
                holder.status.setVisibility(View.VISIBLE);
                holder.status.setText(str5);
                holder.status.setBackgroundDrawable(null);
                holder.status.setEnabled(false);
            } else if (msg.getStatus() == InviteMesageStatus.REFUSED) {
                holder.status.setVisibility(View.VISIBLE);
                holder.status.setText(str6);
                holder.status.setBackgroundDrawable(null);
                holder.status.setEnabled(false);
            } else if (msg.getStatus() == InviteMesageStatus.GROUPINVITATION_ACCEPTED) {
                holder.status.setVisibility(View.VISIBLE);
                String str = msg.getGroupInviter() + str9 + msg.getGroupName();
                holder.status.setText(str);
                holder.status.setBackgroundDrawable(null);
                holder.status.setEnabled(false);
            } else if (msg.getStatus() == InviteMesageStatus.GROUPINVITATION_DECLINED) {
                holder.status.setVisibility(View.VISIBLE);
                String str = msg.getGroupInviter() + str10 + msg.getGroupName();
                holder.status.setText(str);
                holder.status.setBackgroundDrawable(null);
                holder.status.setEnabled(false);
            }

            // Tapping the row opens the sender's friend-detail screen.
            holder.LayoutInvite.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    MFGT.gotoFriend((NewFriendsMsgActivity) context, msg.getFrom());
                }
            });
        }
        return convertView;
    }

    /**
     * accept invitation
     */
    // Calls the matching Hyphenate accept API for the message's status on a
    // background thread, marks the message AGREED in the local DB, then
    // updates the row's buttons on the UI thread. Errors are reported via a
    // Toast; the message status is left unchanged in that case.
    private void acceptInvitation(final Button buttonAgree, final TextView buttonRefuse,
            final InviteMessage msg) {
        final ProgressDialog pd = new ProgressDialog(context);
        String str1 = context.getResources().getString(R.string.Are_agree_with);
        final String str2 = context.getResources().getString(R.string.Has_agreed_to);
        final String str3 = context.getResources().getString(R.string.Agree_with_failure);
        pd.setMessage(str1);
        pd.setCanceledOnTouchOutside(false);
        pd.show();
        new Thread(new Runnable() {
            public void run() {
                // call api
                try {
                    if (msg.getStatus() == InviteMesageStatus.BEINVITEED) { // accept be friends
                        EMClient.getInstance().contactManager().acceptInvitation(msg.getFrom());
                    } else if (msg.getStatus() == InviteMesageStatus.BEAPPLYED) { // accept application to join group
                        EMClient.getInstance().groupManager().acceptApplication(msg.getFrom(), msg.getGroupId());
                    } else if (msg.getStatus() == InviteMesageStatus.GROUPINVITATION) {
                        EMClient.getInstance().groupManager().acceptInvitation(msg.getGroupId(), msg.getGroupInviter());
                    }
                    msg.setStatus(InviteMesageStatus.AGREED);
                    // update database
                    ContentValues values = new ContentValues();
                    values.put(InviteMessgeDao.COLUMN_NAME_STATUS, msg.getStatus().ordinal());
                    messgeDao.updateMessage(msg.getId(), values);
                    ((Activity) context).runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            pd.dismiss();
                            // Hide "agree" and repurpose the status view as a
                            // disabled "has agreed" label.
                            buttonAgree.setText(str2);
                            buttonAgree.setBackgroundDrawable(null);
                            buttonAgree.setEnabled(false);
                            buttonAgree.setVisibility(View.GONE);
                            buttonRefuse.setVisibility(View.VISIBLE);
                            buttonRefuse.setText(str2);
                            buttonRefuse.setBackgroundDrawable(null);
                        }
                    });
                } catch (final Exception e) {
                    ((Activity) context).runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            pd.dismiss();
                            Toast.makeText(context, str3 + e.getMessage(), Toast.LENGTH_LONG).show();
                        }
                    });
                }
            }
        }).start();
    }

    /**
     * decline invitation
     */
    // Mirror of acceptInvitation(): calls the matching decline API, marks the
    // message REFUSED in the local DB, and updates the row on the UI thread.
    private void refuseInvitation(final Button buttonAgree, final TextView buttonRefuse,
            final InviteMessage msg) {
        final ProgressDialog pd = new ProgressDialog(context);
        String str1 = context.getResources().getString(R.string.Are_refuse_with);
        final String str2 = context.getResources().getString(R.string.Has_refused_to);
        final String str3 = context.getResources().getString(R.string.Refuse_with_failure);
        pd.setMessage(str1);
        pd.setCanceledOnTouchOutside(false);
        pd.show();
        new Thread(new Runnable() {
            public void run() {
                // call api
                try {
                    if (msg.getStatus() == InviteMesageStatus.BEINVITEED) { // decline the invitation
                        EMClient.getInstance().contactManager().declineInvitation(msg.getFrom());
                    } else if (msg.getStatus() == InviteMesageStatus.BEAPPLYED) { // decline application to join group
                        EMClient.getInstance().groupManager().declineApplication(msg.getFrom(), msg.getGroupId(), "");
                    } else if (msg.getStatus() == InviteMesageStatus.GROUPINVITATION) {
                        EMClient.getInstance().groupManager().declineInvitation(msg.getGroupId(), msg.getGroupInviter(), "");
                    }
                    msg.setStatus(InviteMesageStatus.REFUSED);
                    // update database
                    ContentValues values = new ContentValues();
                    values.put(InviteMessgeDao.COLUMN_NAME_STATUS, msg.getStatus().ordinal());
                    messgeDao.updateMessage(msg.getId(), values);
                    ((Activity) context).runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            pd.dismiss();
                            buttonRefuse.setText(str2);
                            buttonRefuse.setBackgroundDrawable(null);
                            buttonRefuse.setEnabled(false);
                            buttonAgree.setVisibility(View.INVISIBLE);
                        }
                    });
                } catch (final Exception e) {
                    ((Activity) context).runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            pd.dismiss();
                            Toast.makeText(context, str3 + e.getMessage(), Toast.LENGTH_SHORT).show();
                        }
                    });
                }
            }
        }).start();
    }

    // View cache for one list row (standard ViewHolder pattern).
    private static class ViewHolder {
        LinearLayout LayoutInvite;  // whole-row container, opens friend detail
        ImageView avator;           // sender avatar
        TextView name;              // sender nickname
        TextView reason;            // request reason / default prompt
        Button agree;               // "agree" action for pending requests
        TextView status;            // status label, doubles as refuse control
        LinearLayout groupContainer; // group-name section, shown for group msgs
        TextView groupname;
        // TextView time;
    }
}
/* * Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ensembl.healthcheck.testcase.eg_compara; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.util.Arrays; import java.util.List; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.ensembl.healthcheck.DatabaseRegistryEntry; import org.ensembl.healthcheck.DatabaseType; import org.ensembl.healthcheck.ReportManager; import org.ensembl.healthcheck.Team; import org.ensembl.healthcheck.testcase.AbstractTemplatedTestCase; import org.ensembl.healthcheck.util.AbstractStringMapRowMapper; import org.ensembl.healthcheck.util.CollectionUtils; import org.ensembl.healthcheck.util.DBUtils; import org.ensembl.healthcheck.util.DefaultMapRowMapper; import org.ensembl.healthcheck.util.MapRowMapper; public class EGForeignKeyMethodLinkSpeciesSetId extends AbstractTemplatedTestCase { public EGForeignKeyMethodLinkSpeciesSetId() { setTeamResponsible(Team.ENSEMBL_GENOMES); appliesToType(DatabaseType.COMPARA); setDescription("Checks the consistency of MLSS foreign keys"); } @Override protected boolean runTest(DatabaseRegistryEntry dbre) { boolean result = true; result &= assertNoEmptyNames(dbre); result &= assertNoSource(dbre); result &= assertMlssIdForeignKeysAndRanges(dbre); result &= assertMlssGeneTreeRootOrphans(dbre); result &= assertGeneTreeRootOrphans(dbre); result 
&= assertMlssGenomicAlignOrphans(dbre);
		result &= assertGenomicAlignOrphans(dbre);
		result &= assertMethodLinkSpeciesSetCounts(dbre);
		return result;
	}

	/**
	 * Check for MLSS which lack a name.
	 *
	 * @param dbre the database to check
	 * @return true when every method_link_species_set row has a usable name
	 */
	protected boolean assertNoEmptyNames(DatabaseRegistryEntry dbre) {
		boolean result = true;
		Connection con = dbre.getConnection();
		// A literal string 'NULL' is treated the same as SQL NULL here.
		int numOfUnsetNames = DBUtils.getRowCount(con,
				"SELECT count(*) FROM method_link_species_set WHERE name = 'NULL' OR name IS NULL");
		if (numOfUnsetNames > 0) {
			ReportManager.problem(this, con,
					"FAILED method_link_species_set table contains " + numOfUnsetNames
							+ " with no name");
			result = false;
		}
		return result;
	}

	/**
	 * Check for MLSS which lack a source.
	 *
	 * @param dbre the database to check
	 * @return true when every method_link_species_set row has a usable source
	 */
	protected boolean assertNoSource(DatabaseRegistryEntry dbre) {
		boolean result = true;
		Connection con = dbre.getConnection();
		// As with names, a literal 'NULL' string counts as missing.
		int numOfUnsetSources = DBUtils.getRowCount(con,
				"SELECT count(*) FROM method_link_species_set WHERE source = 'NULL' OR source IS NULL");
		if (numOfUnsetSources > 0) {
			ReportManager.problem(this, con,
					"FAILED method_link_species_set table contains " + numOfUnsetSources
							+ " with no source");
			result = false;
		}
		return result;
	}

	/**
	 * Loops through all known method link types from
	 * {@link #getMethodLinkTypeToTable()} and uses
	 * {@link #getMethodLinkTypeRange()} to assert that all link method link
	 * species set identifiers have the correct method_link_id range
	 */
	protected boolean assertMlssIdForeignKeysAndRanges(DatabaseRegistryEntry dbre) {
		boolean result = true;
		Connection con = dbre.getConnection();
		Map<String, String> typeToTable = getMethodLinkTypeToTable();
		Map<String, List<Integer>> typeToRanges = getMethodLinkTypeRange();
		for (Map.Entry<String, String> entry : typeToTable.entrySet()) {
			String type = entry.getKey();
			String table = entry.getValue();
			// ranges holds [lower, upper); see getMethodLinkTypeRange()
			List<Integer> ranges = typeToRanges.get(type);
			Integer lower = ranges.get(0);
			Integer upper = ranges.get(1);
			// Forward check: every MLSS in the id range must be referenced by the data table...
			result &= checkForOrphansWithConstraint(con, "method_link_species_set",
					"method_link_species_set_id", table, "method_link_species_set_id",
					"method_link_id >= " + lower + " and method_link_id < " + upper);
			// ...and reverse check: every data row must point at an existing MLSS.
			result &= checkForOrphans(con, table, "method_link_species_set_id",
					"method_link_species_set", "method_link_species_set_id");
		}
		return result;
	}

	/**
	 * Check for the number of MLSS unlinked to a protein tree and those protein
	 * tree members unlinked to a MLSS
	 */
	protected boolean assertMlssGeneTreeRootOrphans(DatabaseRegistryEntry dbre) {
		return checkForOrphansWithConstraint(dbre.getConnection(),
				"method_link_species_set", "method_link_species_set_id",
				"gene_tree_root", "method_link_species_set_id",
				"method_link_id IN (SELECT method_link_id FROM method_link WHERE class LIKE 'ProteinTree.%')");
	}

	/**
	 * Reverse of {@code assertMlssGeneTreeRootOrphans}: every gene_tree_root row
	 * must reference an existing MLSS.
	 */
	protected boolean assertGeneTreeRootOrphans(DatabaseRegistryEntry dbre) {
		return checkForOrphans(dbre.getConnection(), "gene_tree_root",
				"method_link_species_set_id", "method_link_species_set",
				"method_link_species_set_id");
	}

	/**
	 * Checks that every genomic-alignment MLSS (method_link_id 1..99 is the
	 * pairwise/multiple alignment range by convention in this schema) is
	 * referenced by at least one genomic_align_block row.
	 */
	protected boolean assertMlssGenomicAlignOrphans(DatabaseRegistryEntry dbre) {
		return checkForOrphansWithConstraint(dbre.getConnection(),
				"method_link_species_set", "method_link_species_set_id",
				"genomic_align_block", "method_link_species_set_id",
				"method_link_id BETWEEN 1 AND 99");
	}

	/**
	 * Reverse check: every genomic_align_block row must reference an existing
	 * MLSS.
	 */
	protected boolean assertGenomicAlignOrphans(DatabaseRegistryEntry dbre) {
		return checkForOrphans(dbre.getConnection(), "genomic_align_block",
				"method_link_species_set_id", "method_link_species_set",
				"method_link_species_set_id");
	}

	// Hashed out because we do not do this kind of analysis yet
	// protected boolean assertNCTreeMethodLinkSpeciesSet(
	// DatabaseRegistryEntry dbre) {
	// return checkForOrphansWithConstraint(
	// dbre.getConnection(),
	// "method_link_species_set",
	// "method_link_species_set_id",
	// "nc_tree_member",
	// "method_link_species_set_id",
	// "method_link_id IN (SELECT method_link_id FROM method_link WHERE class LIKE 'NCTree.%')");
	// }

	/**
	 * loops through all method link species sets where an expected count is
	 * known from {@link #getMethodLinkTypeToExpectedCounts()} and asserts that
	 * the number of species in the method link species set is equal to one of
	 * those values
	 */
	protected boolean assertMethodLinkSpeciesSetCounts(DatabaseRegistryEntry dbre) {
		boolean result = true;
		Map<String, List<Long>> methodLinkToMlssId = getMethodLinkTypeToMlssId(dbre);
		Map<Long, Integer> mlssIdToCount = getMlssIdCount(dbre);
		Map<String, List<Integer>> methodLinkTypeExpectedCounts = getMethodLinkTypeToExpectedCounts();
		for (Map.Entry<String, List<Long>> methodLink : methodLinkToMlssId.entrySet()) {
			String methodLinkType = methodLink.getKey();
			// Types without a configured expectation are not checked at all.
			if (!methodLinkTypeExpectedCounts.containsKey(methodLinkType))
				continue;
			for (Long methodLinkSpeciesSetId : methodLink.getValue()) {
				Integer count = mlssIdToCount.get(methodLinkSpeciesSetId);
				if (count != null) {
					// The actual species count must match one of the allowed values.
					boolean countOkay = false;
					List<Integer> expectedCounts = methodLinkTypeExpectedCounts.get(methodLinkType);
					for (int expected : expectedCounts) {
						if (count == expected) {
							countOkay = true;
							break;
						}
					}
					if (!countOkay) {
						result = false;
						String expecteds = StringUtils.join(expectedCounts, ',');
						ReportManager.problem(this, dbre.getConnection(),
								"MLSS ID " + methodLinkSpeciesSetId + " of type "
										+ methodLinkType + " count was " + count
										+ ". We expected [" + expecteds + "]");
					}
				}
				else {
					// MLSS with no species_set rows at all: report but keep result
					// unchanged, matching the original behaviour of this check.
					ReportManager.problem(this, dbre.getConnection(),
							"No count found for MLSS ID " + methodLinkSpeciesSetId
									+ " of type " + methodLinkType);
				}
			}
		}
		return result;
	}

	/**
	 * Maps each MLSS id to the number of species in its species set.
	 */
	protected Map<Long, Integer> getMlssIdCount(DatabaseRegistryEntry dbre) {
		return getTemplate(dbre)
				.queryForMap(
						"select mlss.method_link_species_set_id, count(*) from method_link_species_set mlss join species_set ss using (species_set_id) group by mlss.method_link_species_set_id",
						new DefaultMapRowMapper<Long, Integer>(Long.class, Integer.class));
	}

	/**
	 * Maps each method link type name to the list of MLSS ids using it.
	 */
	protected Map<String, List<Long>> getMethodLinkTypeToMlssId(DatabaseRegistryEntry dbre) {
		// Accumulates multiple MLSS ids per method link type key.
		MapRowMapper<String, List<Long>> mapper = new AbstractStringMapRowMapper<List<Long>>() {
			@Override
			public List<Long> mapRow(ResultSet resultSet, int position) throws SQLException {
				List<Long> longs = CollectionUtils.createArrayList();
				existingObject(longs, resultSet, position);
				return longs;
			}

			@Override
			public void existingObject(List<Long> currentValue, ResultSet resultSet, int position)
					throws SQLException {
				// Column 2 is mlss.method_link_species_set_id.
				currentValue.add(resultSet.getLong(2));
			}
		};
		return getTemplate(dbre)
				.queryForMap(
						"select ml.type, mlss.method_link_species_set_id from method_link ml join method_link_species_set mlss using (method_link_id)",
						mapper);
	}

	/**
	 * Configured expected species counts per method link type. Pairwise
	 * methods expect exactly 2 species; paralogues may be within one species.
	 */
	protected Map<String, List<Integer>> getMethodLinkTypeToExpectedCounts() {
		Map<String, List<Integer>> output = CollectionUtils.createHashMap();
		List<Integer> pairwise = Arrays.asList(2);
		output.put("ENSEMBL_ORTHOLOGUES", pairwise);
		output.put("ENSEMBL_PARALOGUES", Arrays.asList(1, 2));
		output.put("BLASTZ_NET", pairwise);
		output.put("LASTZ_NET", pairwise);
		output.put("TRANSLATED_BLAT_NET", pairwise);
		return output;
	}

	/**
	 * Valid method_link_id ranges per method link type, as [lower, upper)
	 * two-element lists consumed by {@link #assertMlssIdForeignKeysAndRanges}.
	 */
	protected Map<String, List<Integer>> getMethodLinkTypeRange() {
		Map<String, List<Integer>> output = CollectionUtils.createHashMap();
		output.put("ENSEMBL_ORTHOLOGUES", Arrays.asList(201, 300));
		output.put("ENSEMBL_PARALOGUES", Arrays.asList(201, 300));
		output.put("SYNTENY", Arrays.asList(101, 200));
		output.put("FAMILY", Arrays.asList(301, 400));
		return output;
	}

	/**
	 * Data table referencing method_link_species_set_id for each method link
	 * type; consumed by {@link #assertMlssIdForeignKeysAndRanges}.
	 */
	protected Map<String, String> getMethodLinkTypeToTable() {
		Map<String, String> output = CollectionUtils.createHashMap();
		output.put("ENSEMBL_ORTHOLOGUES", "homology");
		output.put("ENSEMBL_PARALOGUES", "homology");
		output.put("SYNTENY", "synteny_region");
		output.put("FAMILY", "family");
		return output;
	}
}
/*
 * Copyright 2014-2015 CyberVision, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kaaproject.kaa.server.operations.service.akka.actors.core;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

import org.kaaproject.kaa.common.TransportType;
import org.kaaproject.kaa.common.dto.EndpointProfileDto;
import org.kaaproject.kaa.common.dto.NotificationDto;
import org.kaaproject.kaa.server.operations.service.akka.actors.core.ChannelMap.ChannelMetaData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Mutable state holder for a single endpoint actor: its open communication
 * channels, cached endpoint profile, attached user, event sequence number,
 * notification subscription states and user-configuration hash.
 * <p>
 * Not thread-safe; intended to be owned and mutated by one actor only.
 */
public class EndpointActorState {

    private static final Logger LOG = LoggerFactory.getLogger(EndpointActorState.class);

    /** The map of active communication channels. */
    private final ChannelMap channelMap;
    /** Endpoint key, used for logging only. */
    private final String endpointKey;
    /** Actor key, used for logging only. */
    private final String actorKey;
    /** Cached endpoint profile; null until {@link #setProfile} is called. */
    private EndpointProfileDto endpointProfile;
    /** User currently associated with this actor (may differ from the profile's user). */
    private String userId;
    /** True while a user registration request is in flight. */
    private boolean userRegistrationRequestSent;
    /** Timestamp of the last observed activity, in milliseconds. */
    private long lastActivityTime;
    /** Highest processed event sequence number; MIN_VALUE means "none yet". */
    private int processedEventSeqNum = Integer.MIN_VALUE;
    /** Notification topic id -> last seen sequence number. */
    private Map<String, Integer> subscriptionStates;
    // NOTE: field renamed from the misspelled "ucfHashIntialized"; the public
    // accessor isUcfHashRequiresIntialization() keeps its original (sic) name
    // for API compatibility.
    private boolean ucfHashInitialized;
    /** Last known user-configuration hash reported by the endpoint. */
    private byte[] ucfHash;

    /**
     * Creates state for the given endpoint/actor key pair.
     *
     * @param endpointKey endpoint identifier used for logging
     * @param actorKey    actor identifier used for logging
     */
    public EndpointActorState(String endpointKey, String actorKey) {
        this.endpointKey = endpointKey;
        this.actorKey = actorKey;
        this.channelMap = new ChannelMap(endpointKey, actorKey);
        this.subscriptionStates = new HashMap<>();
    }

    public void addChannel(ChannelMetaData channel) {
        this.channelMap.addChannel(channel);
    }

    /** @return true when no communication channels are currently registered */
    public boolean isNoChannels() {
        return channelMap.isEmpty();
    }

    List<ChannelMetaData> getChannelsByType(TransportType type) {
        return this.channelMap.getByTransportType(type);
    }

    /** Collects the union of channels registered for any of the given transport types. */
    Set<ChannelMetaData> getChannelsByTypes(TransportType... types) {
        Set<ChannelMetaData> channels = new HashSet<>();
        for (TransportType type : types) {
            channels.addAll(channelMap.getByTransportType(type));
        }
        return channels;
    }

    ChannelMetaData getChannelByRequestId(UUID requestId) {
        return channelMap.getByRequestId(requestId);
    }

    ChannelMetaData getChannelById(UUID requestId) {
        return channelMap.getById(requestId);
    }

    public void removeChannel(ChannelMetaData channel) {
        channelMap.removeChannel(channel);
    }

    String getUserId() {
        return userId;
    }

    void setUserId(String userId) {
        this.userId = userId;
    }

    EndpointProfileDto getProfile() {
        return endpointProfile;
    }

    void setProfile(EndpointProfileDto endpointProfile) {
        this.endpointProfile = endpointProfile;
    }

    boolean isProfileSet() {
        return this.endpointProfile != null;
    }

    /** @return the user id recorded on the profile, or null when no profile is set */
    String getProfileUserId() {
        if (endpointProfile != null) {
            return endpointProfile.getEndpointUserId();
        } else {
            return null;
        }
    }

    /** Requires a profile to be set; throws NPE otherwise (unchanged behaviour). */
    void setProfileUserId(String userId) {
        endpointProfile.setEndpointUserId(userId);
    }

    /** @return true when a profile exists and carries a non-empty user id */
    boolean isValidForUser() {
        return endpointProfile != null && endpointProfile.getEndpointUserId() != null
                && !endpointProfile.getEndpointUserId().isEmpty();
    }

    /** @return true when the profile is user-attached and has event class family versions */
    boolean isValidForEvents() {
        return endpointProfile != null && endpointProfile.getEndpointUserId() != null
                && !endpointProfile.getEndpointUserId().isEmpty()
                && endpointProfile.getEcfVersionStates() != null
                && !endpointProfile.getEcfVersionStates().isEmpty();
    }

    /** @return true when the actor's user differs from the one on the profile */
    boolean userIdMismatch() {
        return userId != null && !userId.equals(getProfileUserId());
    }

    boolean isUserRegistrationPending() {
        return userRegistrationRequestSent;
    }

    void setUserRegistrationPending(boolean userRegistrationRequestSent) {
        this.userRegistrationRequestSent = userRegistrationRequestSent;
    }

    long getLastActivityTime() {
        return lastActivityTime;
    }

    void setLastActivityTime(long time) {
        this.lastActivityTime = time;
    }

    int getEventSeqNumber() {
        return processedEventSeqNum;
    }

    void resetEventSeqNumber() {
        processedEventSeqNum = Integer.MIN_VALUE;
    }

    void setEventSeqNumber(int maxSentEventSeqNum) {
        processedEventSeqNum = maxSentEventSeqNum;
    }

    /** Stores a defensive copy of the given subscription states. */
    public void setSubscriptionStates(Map<String, Integer> subscriptionStates) {
        this.subscriptionStates = new HashMap<>(subscriptionStates);
    }

    public Map<String, Integer> getSubscriptionStates() {
        return subscriptionStates;
    }

    /**
     * @return true when the actor is user-attached but the user-configuration
     *         hash has not yet been initialized via {@link #setUcfHash(byte[])}
     */
    public boolean isUcfHashRequiresIntialization() {
        if (!isValidForUser()) {
            return false;
        }
        return !ucfHashInitialized;
    }

    /**
     * @return true when the locally known hash differs from the one stored on
     *         the profile, i.e. a user-configuration update must be delivered
     */
    public boolean isUserConfigurationUpdatePending() {
        if (!isValidForUser() || isUcfHashRequiresIntialization()) {
            return false;
        }
        return !Arrays.equals(ucfHash, endpointProfile.getUserConfigurationHash());
    }

    public void setUcfHash(byte[] ucfHash) {
        this.ucfHashInitialized = true;
        this.ucfHash = ucfHash;
    }

    public byte[] getUcfHash() {
        return ucfHash;
    }

    /**
     * Filters out notifications whose topic the endpoint is no longer
     * subscribed to.
     *
     * @param notifications candidate notifications
     * @return only those notifications with an active topic subscription
     */
    public List<NotificationDto> filter(List<NotificationDto> notifications) {
        List<NotificationDto> list = new ArrayList<>(notifications.size());
        for (NotificationDto nf : notifications) {
            if (subscriptionStates.containsKey(nf.getTopicId())) {
                list.add(nf);
            } else {
                LOG.trace("[{}][{}] Notification {} is no longer valid due to subscription state", endpointKey, actorKey, nf);
            }
        }
        return list;
    }
}
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.timeseries.date.localdate;

import java.io.Serializable;
import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.Objects;

import org.threeten.bp.LocalDate;

import com.opengamma.timeseries.ObjectTimeSeries;
import com.opengamma.timeseries.ObjectTimeSeriesOperators;
import com.opengamma.timeseries.ObjectTimeSeriesOperators.BinaryOperator;
import com.opengamma.timeseries.ObjectTimeSeriesOperators.UnaryOperator;
import com.opengamma.timeseries.date.AbstractDateObjectTimeSeries;
import com.opengamma.timeseries.date.DateObjectTimeSeries;

/**
 * Standard immutable implementation of {@code LocalDateObjectTimeSeries}.
 * <p>
 * Dates are stored internally as a sorted {@code int[]} (via
 * {@code LocalDateToIntConverter}) with a parallel values array, enabling
 * binary-search lookups. Instances are immutable and thread-safe.
 *
 * @param <V> the value being viewed over time
 */
public final class ImmutableLocalDateObjectTimeSeries<V>
    extends AbstractDateObjectTimeSeries<LocalDate, V>
    implements LocalDateObjectTimeSeries<V>, Serializable {

  /** Empty instance. */
  private static final ImmutableLocalDateObjectTimeSeries<?> EMPTY_SERIES =
      new ImmutableLocalDateObjectTimeSeries<>(new int[0], new Object[0]);

  /** Serialization version. */
  private static final long serialVersionUID = -43654613865187568L;

  /**
   * The times in the series.
   */
  private final int[] _times;
  /**
   * The values in the series.
   */
  private final V[] _values;

  //-------------------------------------------------------------------------
  /**
   * Creates an empty builder, used to create time-series.
   * <p>
   * The builder has methods to create and modify a time-series.
   *
   * @param <V> the value being viewed over time
   * @return the time-series builder, not null
   */
  public static <V> LocalDateObjectTimeSeriesBuilder<V> builder() {
    return new ImmutableLocalDateObjectTimeSeriesBuilder<>();
  }

  //-------------------------------------------------------------------------
  /**
   * Obtains the shared empty time-series instance.
   *
   * @param <V> the value being viewed over time
   * @return the time-series, not null
   */
  @SuppressWarnings("unchecked")
  public static <V> ImmutableLocalDateObjectTimeSeries<V> ofEmpty() {
    return (ImmutableLocalDateObjectTimeSeries<V>) EMPTY_SERIES;
  }

  /**
   * Obtains a time-series from a single date and value.
   *
   * @param <V> the value being viewed over time
   * @param date the singleton date, not null
   * @param value the singleton value
   * @return the time-series, not null
   */
  public static <V> ImmutableLocalDateObjectTimeSeries<V> of(final LocalDate date, final V value) {
    Objects.requireNonNull(date, "date");
    final int[] timesArray = new int[] {LocalDateToIntConverter.convertToInt(date)};
    @SuppressWarnings("unchecked")
    final V[] valuesArray = (V[]) new Object[] {value};
    return new ImmutableLocalDateObjectTimeSeries<>(timesArray, valuesArray);
  }

  /**
   * Obtains a time-series from matching arrays of dates and values.
   *
   * @param <V> the value being viewed over time
   * @param dates the date array, not null
   * @param values the value array, not null
   * @return the time-series, not null
   */
  public static <V> ImmutableLocalDateObjectTimeSeries<V> of(final LocalDate[] dates, final V[] values) {
    final int[] timesArray = convertToIntArray(dates);
    final V[] valuesArray = values.clone();
    validate(timesArray, valuesArray);
    return new ImmutableLocalDateObjectTimeSeries<>(timesArray, valuesArray);
  }

  /**
   * Obtains a time-series from matching arrays of dates and values.
   *
   * @param <V> the value being viewed over time
   * @param dates the date array, not null
   * @param values the value array, not null
   * @return the time-series, not null
   */
  public static <V> ImmutableLocalDateObjectTimeSeries<V> of(final int[] dates, final V[] values) {
    validate(dates, values);
    final int[] timesArray = dates.clone();
    final V[] valuesArray = values.clone();
    return new ImmutableLocalDateObjectTimeSeries<>(timesArray, valuesArray);
  }

  /**
   * Obtains a time-series from matching arrays of dates and values.
   *
   * @param <V> the value being viewed over time
   * @param dates the date list, not null
   * @param values the value list, not null
   * @return the time-series, not null
   */
  public static <V> ImmutableLocalDateObjectTimeSeries<V> of(final Collection<LocalDate> dates, final Collection<V> values) {
    final int[] timesArray = convertToIntArray(dates);
    @SuppressWarnings("unchecked")
    final V[] valuesArray = (V[]) values.toArray();
    validate(timesArray, valuesArray);
    return new ImmutableLocalDateObjectTimeSeries<>(timesArray, valuesArray);
  }

  /**
   * Obtains a time-series from another time-series.
   *
   * @param <V> the value being viewed over time
   * @param timeSeries the time-series, not null
   * @return the time-series, not null
   */
  @SuppressWarnings("unchecked")
  public static <V> ImmutableLocalDateObjectTimeSeries<V> of(final DateObjectTimeSeries<?, V> timeSeries) {
    // already immutable, so safe to return as-is
    if (timeSeries instanceof ImmutableLocalDateObjectTimeSeries) {
      return (ImmutableLocalDateObjectTimeSeries<V>) timeSeries;
    }
    final DateObjectTimeSeries<?, V> other = timeSeries;
    final int[] timesArray = other.timesArrayFast();
    final V[] valuesArray = other.valuesArray();
    return new ImmutableLocalDateObjectTimeSeries<>(timesArray, valuesArray);
  }

  //-------------------------------------------------------------------------
  /**
   * Obtains a time-series from another time-series.
   *
   * @param <V> the value being viewed over time
   * @param timeSeries the time-series, not null
   * @return the time-series, not null
   */
  public static <V> ImmutableLocalDateObjectTimeSeries<V> from(final ObjectTimeSeries<LocalDate, V> timeSeries) {
    if (timeSeries instanceof DateObjectTimeSeries) {
      return of((DateObjectTimeSeries<LocalDate, V>) timeSeries);
    }
    final int[] timesArray = convertToIntArray(timeSeries.timesArray());
    final V[] valuesArray = timeSeries.valuesArray();
    return new ImmutableLocalDateObjectTimeSeries<>(timesArray, valuesArray);
  }

  //-------------------------------------------------------------------------
  /**
   * Validates the data before creation.
   *
   * @param <V> the value being viewed over time
   * @param times the times, not null
   * @param values the values, not null
   */
  private static <V> void validate(final int[] times, final V[] values) {
    if (times == null || values == null) {
      throw new NullPointerException("Array must not be null");
    }
    // check lengths
    if (times.length != values.length) {
      throw new IllegalArgumentException("Arrays are of different sizes: " + times.length + ", " + values.length);
    }
    // check dates are ordered (non-decreasing; equal adjacent dates are allowed here)
    int maxTime = Integer.MIN_VALUE;
    for (final int time : times) {
      if (time < maxTime) {
        throw new IllegalArgumentException("dates must be ordered");
      }
      maxTime = time;
    }
  }

  /**
   * Creates an instance.
   * <p>
   * Arrays are stored without copying; callers must not retain references.
   *
   * @param times the times, not null
   * @param values the values, not null
   */
  ImmutableLocalDateObjectTimeSeries(final int[] times, final V[] values) {
    _times = times;
    _values = values;
  }

  //-------------------------------------------------------------------------
  /**
   * Converts a collection of dates to an array of int.
   *
   * @param times a collection of dates
   * @return an array of int
   */
  static int[] convertToIntArray(final Collection<LocalDate> times) {
    final int[] timesArray = new int[times.size()];
    int i = 0;
    for (final LocalDate time : times) {
      timesArray[i++] = LocalDateToIntConverter.convertToInt(time);
    }
    return timesArray;
  }

  /**
   * Converts an array of dates to an array of int.
   *
   * @param dates a collection of dates
   * @return an array of int
   */
  static int[] convertToIntArray(final LocalDate[] dates) {
    final int[] timesArray = new int[dates.length];
    for (int i = 0; i < timesArray.length; i++) {
      timesArray[i] = LocalDateToIntConverter.convertToInt(dates[i]);
    }
    return timesArray;
  }

  /**
   * Creates an immutable entry of date and value.
   *
   * @param key the key
   * @param value the value
   * @return an entry
   * @param <V> the type of the data
   */
  static <V> Entry<LocalDate, V> makeMapEntry(final LocalDate key, final V value) {
    return new SimpleImmutableEntry<>(key, value);
  }

  //-------------------------------------------------------------------------
  @Override
  protected int convertToInt(final LocalDate date) {
    return LocalDateToIntConverter.convertToInt(date);
  }

  @Override
  protected LocalDate convertFromInt(final int date) {
    return LocalDateToIntConverter.convertToLocalDate(date);
  }

  @Override
  protected LocalDate[] createArray(final int size) {
    return new LocalDate[size];
  }

  //-------------------------------------------------------------------------
  @Override
  public int size() {
    return _times.length;
  }

  //-------------------------------------------------------------------------
  @Override
  public boolean containsTime(final int date) {
    // binary search is valid because _times is kept sorted
    final int binarySearch = Arrays.binarySearch(_times, date);
    return binarySearch >= 0;
  }

  @Override
  public V getValue(final int date) {
    final int binarySearch = Arrays.binarySearch(_times, date);
    if (binarySearch >= 0) {
      return _values[binarySearch];
    }
    return null;
  }

  @Override
  public int getTimeAtIndexFast(final int index) {
    return _times[index];
  }

  @Override
  public V getValueAtIndex(final int index) {
    return _values[index];
  }

  //-------------------------------------------------------------------------
  @Override
  public int getEarliestTimeFast() {
    try {
      return _times[0];
    } catch (final IndexOutOfBoundsException ex) {
      throw new NoSuchElementException("Series is empty");
    }
  }

  @Override
  public V getEarliestValue() {
    try {
      return _values[0];
    } catch (final IndexOutOfBoundsException ex) {
      throw new NoSuchElementException("Series is empty");
    }
  }

  @Override
  public int getLatestTimeFast() {
    try {
      return _times[_times.length - 1];
    } catch (final IndexOutOfBoundsException ex) {
      throw new NoSuchElementException("Series is empty");
    }
  }

  @Override
  public V getLatestValue() {
    try {
      return _values[_values.length - 1];
    } catch (final IndexOutOfBoundsException ex) {
      throw new NoSuchElementException("Series is empty");
    }
  }

  //-------------------------------------------------------------------------
  @Override
  public int[] timesArrayFast() {
    // defensive copy preserves immutability
    return _times.clone();
  }

  @Override
  public V[] valuesArray() {
    // defensive copy preserves immutability
    return _values.clone();
  }

  //-------------------------------------------------------------------------
  @Override
  public LocalDateObjectEntryIterator<V> iterator() {
    return new LocalDateObjectEntryIterator<V>() {
      // -1 means the iterator has not been started yet
      private int _index = -1;

      @Override
      public boolean hasNext() {
        return _index + 1 < size();
      }

      @Override
      public Entry<LocalDate, V> next() {
        if (!hasNext()) {
          throw new NoSuchElementException("No more elements in the iteration");
        }
        _index++;
        final int date = ImmutableLocalDateObjectTimeSeries.this.getTimeAtIndexFast(_index);
        final V value = ImmutableLocalDateObjectTimeSeries.this.getValueAtIndex(_index);
        return makeMapEntry(ImmutableLocalDateObjectTimeSeries.this.convertFromInt(date), value);
      }

      @Override
      public int nextTimeFast() {
        if (!hasNext()) {
          throw new NoSuchElementException("No more elements in the iteration");
        }
        _index++;
        return ImmutableLocalDateObjectTimeSeries.this.getTimeAtIndexFast(_index);
      }

      @Override
      public LocalDate nextTime() {
        return ImmutableLocalDateObjectTimeSeries.this.convertFromInt(nextTimeFast());
      }

      @Override
      public int currentTimeFast() {
        if (_index < 0) {
          throw new IllegalStateException("Iterator has not yet been started");
        }
        return ImmutableLocalDateObjectTimeSeries.this.getTimeAtIndexFast(_index);
      }

      @Override
      public LocalDate currentTime() {
        return ImmutableLocalDateObjectTimeSeries.this.convertFromInt(currentTimeFast());
      }

      @Override
      public V currentValue() {
        if (_index < 0) {
          throw new IllegalStateException("Iterator has not yet been started");
        }
        return ImmutableLocalDateObjectTimeSeries.this.getValueAtIndex(_index);
      }

      @Override
      public int currentIndex() {
        return _index;
      }

      @Override
      public void remove() {
        throw new UnsupportedOperationException("Immutable iterator");
      }
    };
  }

  //-------------------------------------------------------------------------
  @Override
  public LocalDateObjectTimeSeries<V> subSeries(final LocalDate startTime, final LocalDate endTime) {
    // default bounds: inclusive start, exclusive end
    return subSeriesFast(convertToInt(startTime), true, convertToInt(endTime), false);
  }

  @Override
  public LocalDateObjectTimeSeries<V> subSeries(final LocalDate startTime, final boolean includeStart, final LocalDate endTime, final boolean includeEnd) {
    return subSeriesFast(convertToInt(startTime), includeStart, convertToInt(endTime), includeEnd);
  }

  @Override
  public LocalDateObjectTimeSeries<V> subSeriesFast(final int startTime, final int endTime) {
    return subSeriesFast(startTime, true, endTime, false);
  }

  @Override
  public LocalDateObjectTimeSeries<V> subSeriesFast(final int startTime, final boolean includeStart, final int endTime, final boolean includeEnd) {
    if (endTime < startTime) {
      throw new IllegalArgumentException("Invalid subSeries: endTime < startTime");
    }
    // special case when this is empty
    if (isEmpty()) {
      return ofEmpty();
    }
    // special case for start equals end
    if (startTime == endTime) {
      if (includeStart && includeEnd) {
        final int pos = Arrays.binarySearch(_times, startTime);
        if (pos >= 0) {
          return new ImmutableLocalDateObjectTimeSeries<>(new int[] {startTime}, Arrays.copyOfRange(_values, pos, pos + 1));
        }
      }
      return ofEmpty();
    }
    // normalize to include start and exclude end
    int start = startTime;
    if (!includeStart) {
      start++;
    }
    int end = endTime;
    if (includeEnd) {
      // guard against overflow when end is already Integer.MAX_VALUE
      if (end != Integer.MAX_VALUE) {
        end++;
      }
    }
    // calculate (negative binarySearch result encodes the insertion point)
    int startPos = Arrays.binarySearch(_times, start);
    startPos = startPos >= 0 ? startPos : -(startPos + 1);
    int endPos = Arrays.binarySearch(_times, end);
    endPos = endPos >= 0 ? endPos : -(endPos + 1);
    if (includeEnd && end == Integer.MAX_VALUE) {
      endPos = _times.length;
    }
    final int[] timesArray = Arrays.copyOfRange(_times, startPos, endPos);
    final V[] valuesArray = Arrays.copyOfRange(_values, startPos, endPos);
    return new ImmutableLocalDateObjectTimeSeries<>(timesArray, valuesArray);
  }

  //-------------------------------------------------------------------------
  @Override
  public LocalDateObjectTimeSeries<V> head(final int numItems) {
    if (numItems == size()) {
      return this;
    }
    final int[] timesArray = Arrays.copyOfRange(_times, 0, numItems);
    final V[] valuesArray = Arrays.copyOfRange(_values, 0, numItems);
    return new ImmutableLocalDateObjectTimeSeries<>(timesArray, valuesArray);
  }

  @Override
  public LocalDateObjectTimeSeries<V> tail(final int numItems) {
    final int size = size();
    if (numItems == size) {
      return this;
    }
    final int[] timesArray = Arrays.copyOfRange(_times, size - numItems, size);
    final V[] valuesArray = Arrays.copyOfRange(_values, size - numItems, size);
    return new ImmutableLocalDateObjectTimeSeries<>(timesArray, valuesArray);
  }

  @Override
  @SuppressWarnings("unchecked")
  public LocalDateObjectTimeSeries<V> lag(final int days) {
    // shifts values relative to times: negative lags drop the tail, positive drop the head
    final int[] times = timesArrayFast();
    final V[] values = valuesArray();
    if (days == 0) {
      return new ImmutableLocalDateObjectTimeSeries<>(times, values);
    } else if (days < 0) {
      if (-days < times.length) {
        final int[] resultTimes = new int[times.length + days]; // remember days is -ve
        System.arraycopy(times, 0, resultTimes, 0, times.length + days);
        final V[] resultValues = (V[]) new Object[times.length + days];
        System.arraycopy(values, -days, resultValues, 0, times.length + days);
        return new ImmutableLocalDateObjectTimeSeries<>(resultTimes, resultValues);
      }
      return ImmutableLocalDateObjectTimeSeries.ofEmpty();
    } else { // if (days > 0) {
      if (days < times.length) {
        final int[] resultTimes = new int[times.length - days]; // remember days is +ve
        System.arraycopy(times, days, resultTimes, 0, times.length - days);
        final V[] resultValues = (V[]) new Object[times.length - days];
        System.arraycopy(values, 0, resultValues, 0, times.length - days);
        return new ImmutableLocalDateObjectTimeSeries<>(resultTimes, resultValues);
      }
      return ImmutableLocalDateObjectTimeSeries.ofEmpty();
    }
  }

  //-------------------------------------------------------------------------
  @Override
  public ImmutableLocalDateObjectTimeSeries<V> newInstance(final LocalDate[] dates, final V[] values) {
    return of(dates, values);
  }

  //-------------------------------------------------------------------------
  @Override
  public LocalDateObjectTimeSeries<V> operate(final UnaryOperator<V> operator) {
    final V[] valuesArray = valuesArray();
    for (int i = 0; i < valuesArray.length; i++) {
      valuesArray[i] = operator.operate(valuesArray[i]);
    }
    return new ImmutableLocalDateObjectTimeSeries<>(_times, valuesArray);  // immutable, so can share times
  }

  @Override
  public LocalDateObjectTimeSeries<V> operate(final V other, final BinaryOperator<V> operator) {
    final V[] valuesArray = valuesArray();
    for (int i = 0; i < valuesArray.length; i++) {
      valuesArray[i] = operator.operate(valuesArray[i], other);
    }
    return new ImmutableLocalDateObjectTimeSeries<>(_times, valuesArray);  // immutable, so can share times
  }

  // Merge-style intersection: only dates present in BOTH series are kept.
  @Override
  @SuppressWarnings("unchecked")
  public LocalDateObjectTimeSeries<V> operate(final DateObjectTimeSeries<?, V> other, final BinaryOperator<V> operator) {
    final int[] aTimes = timesArrayFast();
    final V[] aValues = valuesArray();
    int aCount = 0;
    final int[] bTimes = other.timesArrayFast();
    final V[] bValues = other.valuesArray();
    int bCount = 0;
    final int[] resTimes = new int[aTimes.length + bTimes.length];
    final V[] resValues = (V[]) new Object[resTimes.length];
    int resCount = 0;
    while (aCount < aTimes.length && bCount < bTimes.length) {
      if (aTimes[aCount] == bTimes[bCount]) {
        resTimes[resCount] = aTimes[aCount];
        resValues[resCount] = operator.operate(aValues[aCount], bValues[bCount]);
        resCount++;
        aCount++;
        bCount++;
      } else if (aTimes[aCount] < bTimes[bCount]) {
        aCount++;
      } else { // if (aTimes[aCount] > bTimes[bCount]) {
        bCount++;
      }
    }
    // trim the over-allocated result arrays to the actual size
    final int[] trimmedTimes = new int[resCount];
    final V[] trimmedValues = (V[]) new Object[resCount];
    System.arraycopy(resTimes, 0, trimmedTimes, 0, resCount);
    System.arraycopy(resValues, 0, trimmedValues, 0, resCount);
    return new ImmutableLocalDateObjectTimeSeries<>(trimmedTimes, trimmedValues);
  }

  // Merge-style union: dates in either series are kept; the operator combines matches.
  @SuppressWarnings("unchecked")
  @Override
  public LocalDateObjectTimeSeries<V> unionOperate(final DateObjectTimeSeries<?, V> other, final BinaryOperator<V> operator) {
    final int[] aTimes = timesArrayFast();
    final V[] aValues = valuesArray();
    int aCount = 0;
    final int[] bTimes = other.timesArrayFast();
    final V[] bValues = other.valuesArray();
    int bCount = 0;
    final int[] resTimes = new int[aTimes.length + bTimes.length];
    final V[] resValues = (V[]) new Object[resTimes.length];
    int resCount = 0;
    while (aCount < aTimes.length || bCount < bTimes.length) {
      if (aCount >= aTimes.length) {
        // a exhausted: bulk-copy the remainder of b
        final int bRemaining = bTimes.length - bCount;
        System.arraycopy(bTimes, bCount, resTimes, resCount, bRemaining);
        System.arraycopy(bValues, bCount, resValues, resCount, bRemaining);
        resCount += bRemaining;
        break;
      } else if (bCount >= bTimes.length) {
        // b exhausted: bulk-copy the remainder of a
        final int aRemaining = aTimes.length - aCount;
        System.arraycopy(aTimes, aCount, resTimes, resCount, aRemaining);
        System.arraycopy(aValues, aCount, resValues, resCount, aRemaining);
        resCount += aRemaining;
        break;
      } else if (aTimes[aCount] == bTimes[bCount]) {
        resTimes[resCount] = aTimes[aCount];
        resValues[resCount] = operator.operate(aValues[aCount], bValues[bCount]);
        resCount++;
        aCount++;
        bCount++;
      } else if (aTimes[aCount] < bTimes[bCount]) {
        resTimes[resCount] = aTimes[aCount];
        resValues[resCount] = aValues[aCount];
        resCount++;
        aCount++;
      } else { // if (aTimes[aCount] > bTimes[bCount]) {
        resTimes[resCount] = bTimes[bCount];
        resValues[resCount] = bValues[bCount];
        resCount++;
        bCount++;
      }
    }
    // trim the over-allocated result arrays to the actual size
    final int[] trimmedTimes = new int[resCount];
    final V[] trimmedValues = (V[]) new Object[resCount];
    System.arraycopy(resTimes, 0, trimmedTimes, 0, resCount);
    System.arraycopy(resValues, 0, trimmedValues, 0, resCount);
    return new ImmutableLocalDateObjectTimeSeries<>(trimmedTimes, trimmedValues);
  }

  //-------------------------------------------------------------------------
  @Override
  public LocalDateObjectTimeSeries<V> intersectionFirstValue(final DateObjectTimeSeries<?, V> other) {
    return operate(other, ObjectTimeSeriesOperators.<V>firstOperator());
  }

  @Override
  public LocalDateObjectTimeSeries<V> intersectionSecondValue(final DateObjectTimeSeries<?, V> other) {
    return operate(other, ObjectTimeSeriesOperators.<V>secondOperator());
  }

  @Override
  public LocalDateObjectTimeSeries<V> noIntersectionOperation(final DateObjectTimeSeries<?, V> other) {
    return unionOperate(other, ObjectTimeSeriesOperators.<V>noIntersectionOperator());
  }

  //-------------------------------------------------------------------------
  @Override
  public LocalDateObjectTimeSeriesBuilder<V> toBuilder() {
    return ImmutableLocalDateObjectTimeSeries.<V>builder().putAll(this);
  }

  //-------------------------------------------------------------------------
  @Override
  public boolean equals(final Object obj) {
    if (obj == this) {
      return true;
    }
    // fast path: compare internal arrays directly for the same implementation
    if (obj instanceof ImmutableLocalDateObjectTimeSeries) {
      final ImmutableLocalDateObjectTimeSeries<?> other = (ImmutableLocalDateObjectTimeSeries<?>) obj;
      return Arrays.equals(_times, other._times) &&
              Arrays.equals(_values, other._values);
    }
    // general path: any DateObjectTimeSeries with equal times and values
    if (obj instanceof DateObjectTimeSeries) {
      final DateObjectTimeSeries<?, ?> other = (DateObjectTimeSeries<?, ?>) obj;
      return Arrays.equals(timesArrayFast(), other.timesArrayFast()) &&
              Arrays.equals(valuesArray(), other.valuesArray());
    }
    return false;
  }

  @Override
  public int hashCode() {
    return Arrays.hashCode(timesArrayFast()) ^ Arrays.hashCode(valuesArray());
  }

}
package com.beecavegames.persistence;

import java.io.IOException;
import java.io.Serializable;
import java.time.Duration;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import com.beecavegames.concurrency.LabeledCallable;
import com.beecavegames.persistence.transcoders.NullTranscoder;
import com.beecavegames.stats.Counter;
import com.beecavegames.stats.StatsTimer;
import com.beecavegames.stats.Timer;

import lombok.Getter;
import lombok.Lombok;
import lombok.Setter;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import net.rubyeye.xmemcached.CASOperation;
import net.rubyeye.xmemcached.GetsResponse;
import net.rubyeye.xmemcached.MemcachedClient;
import net.rubyeye.xmemcached.exception.MemcachedException;
import net.rubyeye.xmemcached.transcoders.Transcoder;

/**
 * Base xmemcached-backed {@link KVStoreClient}.
 *
 * <p>Adds three concerns on top of a raw {@link MemcachedClient}: (1) every key is
 * prefixed with {@code namespace} on the way in and stripped on the way out of bulk
 * reads; (2) read/write/failure/timeout counters are reported through the injected
 * stats agent, sampled once every {@code sampleRate} operations; (3) async variants
 * run on the injected {@link ExecutorService} with {@code asyncMemcacheTimeout}.
 *
 * <p>Synchronous failures are logged and converted to a benign return value
 * ({@code false}, {@code -1}) for write-style operations, while read-style
 * operations rethrow via {@link Lombok#sneakyThrow}.
 */
@Slf4j
public abstract class AbstractXMemcacheClient extends AbstractMemcacheClient implements KVStoreClient {

    private final Counter MC_READ_OPERATION, MC_WRITE_OPERATION, MC_OPERATION_FAILURES, MC_OPERATION_TIMEOUTS;

    // FIX: the bulk-get timer previously used operation_write and the set timer
    // operation_read (copy-paste swap); gets are reads and sets are writes.
    private static final Timer TIMER_GET =
            Timer.create(PersistenceCounters.Category.Memcache, PersistenceCounters.Memcache.operation_read, "elapsed"),
        TIMER_GET_BULK =
            Timer.create(PersistenceCounters.Category.Memcache, PersistenceCounters.Memcache.operation_read, "elapsed"),
        TIMER_SET =
            Timer.create(PersistenceCounters.Category.Memcache, PersistenceCounters.Memcache.operation_write, "elapsed");

    private static final Transcoder<Object> NULL_TRANSCODER = new NullTranscoder();

    // When false, touch() is a no-op returning false (server too old, etc.).
    @Getter @Setter private boolean touchSupported = true;
    // Prefix prepended to every key; "" disables namespacing.
    @Getter @Setter private String namespace = "";
    // Per-operation timeout used by the async variants.
    @Getter @Setter private Duration asyncMemcacheTimeout = Duration.ofMillis(2500);

    protected ExecutorService executor;
    protected String poolName;

    private final AtomicInteger ops = new AtomicInteger();
    private int sampleRate = 10;
    private float sampleRateFraction = 0.1f;

    /** Supplies the underlying client; resolved per call so pools can swap instances. */
    protected abstract MemcachedClient getClient();

    protected AbstractXMemcacheClient(String serverId, StatisticsAgent stats, ExecutorService executor, String poolName) {
        setStats(stats);
        this.executor = executor;
        this.poolName = poolName;
        MC_READ_OPERATION = Counter.create(PersistenceCounters.Category.Memcache,
                PersistenceCounters.Memcache.operation_read, serverId, poolName);
        MC_WRITE_OPERATION = Counter.create(PersistenceCounters.Category.Memcache,
                PersistenceCounters.Memcache.operation_write, serverId, poolName);
        MC_OPERATION_FAILURES = Counter.create(PersistenceCounters.Category.Memcache,
                PersistenceCounters.Memcache.operation_failures, serverId, poolName);
        MC_OPERATION_TIMEOUTS = Counter.create(PersistenceCounters.Category.Memcache,
                PersistenceCounters.Memcache.operation_timeouts, serverId, poolName);
    }

    /** Sets how many operations elapse between stat samples. */
    public void setStatisticsSampleInterval(int opsBetweenSamples) {
        sampleRate = opsBetweenSamples;
        sampleRateFraction = 1.0f / sampleRate;
    }

    /** Logs a non-timeout failure and bumps the failure counter. */
    protected void error(MemcachedClient client, String method, String key, Exception to) {
        log.warn("Error in method={} on key={}", method, key, to);
        if (this.stats != null) {
            stats.monitor(MC_OPERATION_FAILURES);
        }
    }

    /** Logs a timeout/interrupt and bumps the timeout counter. */
    private void timeout(String method, String key, Exception to) {
        log.warn("Timeout in method={} on key={}", method, key, to);
        // FIX: restore the interrupt flag so callers/pools can observe the interrupt
        // instead of it being silently swallowed here.
        if (to instanceof InterruptedException) {
            Thread.currentThread().interrupt();
        }
        if (stats != null) {
            stats.monitor(MC_OPERATION_TIMEOUTS);
        }
    }

    /** Samples a write; returns true on the 1-in-sampleRate calls that report stats. */
    private boolean writeOperation(int keyCount) {
        if (stats != null) {
            if (ops.incrementAndGet() > sampleRate) {
                ops.set(0);
                stats.monitor(MC_WRITE_OPERATION, keyCount, sampleRateFraction);
                return true;
            }
        }
        return false;
    }

    /** Samples a read; returns true on the 1-in-sampleRate calls that report stats. */
    private boolean readOperation(int keyCount) {
        if (stats != null) {
            if (ops.incrementAndGet() > sampleRate) {
                ops.set(0);
                stats.monitor(MC_READ_OPERATION, keyCount, sampleRateFraction);
                return true;
            }
        }
        return false;
    }

    @Override
    public void shutdown() {
        final MemcachedClient client = getClient();
        try {
            client.shutdown();
        } catch (IOException e) {
            log.warn("Exception in shutdown", e);
        }
    }

    @SneakyThrows
    @Override
    public <T extends Serializable> boolean add(String key, int exp, T o, Transcoder<T> tc) {
        final MemcachedClient client = getClient();
        writeOperation(1);
        return client.add(namespaceKey(key), exp, o, tc);
    }

    @Override
    public boolean add(String key, int exp, Serializable o) {
        final MemcachedClient client = getClient();
        try {
            writeOperation(1);
            return client.add(namespaceKey(key), exp, o);
        } catch (TimeoutException | InterruptedException to) {
            timeout("add", key, to);
            return false;
        } catch (MemcachedException e) {
            error(client, "add", key, e);
            return false;
        }
    }

    @Override
    public <T extends Serializable> boolean set(String key, int exp, T o, Transcoder<T> tc) {
        final MemcachedClient client = getClient();
        // The timer participates in try-with-resources; its close() can throw IOException.
        try (StatsTimer timer = writeOperation(1) ? getTimer(TIMER_SET) : null) {
            return client.set(namespaceKey(key), exp, o, tc);
        } catch (TimeoutException | InterruptedException to) {
            timeout("set", key, to);
            return false;
        } catch (MemcachedException e) {
            error(client, "set", key, e);
            return false;
        } catch (IOException e) {
            error(client, "set", key, e);
            Lombok.sneakyThrow(e);
            return false;
        }
    }

    @Override
    public <T extends Serializable> boolean replace(String key, int exp, T o) {
        final MemcachedClient client = getClient();
        try (StatsTimer timer = writeOperation(1) ? getTimer(TIMER_SET) : null) {
            return client.replace(namespaceKey(key), exp, o);
        } catch (TimeoutException | InterruptedException to) {
            // FIX: was reported as "set"; report the actual operation.
            timeout("replace", key, to);
            return false;
        } catch (MemcachedException e) {
            error(client, "replace", key, e);
            return false;
        } catch (IOException e) {
            error(client, "replace", key, e);
            Lombok.sneakyThrow(e);
            return false;
        }
    }

    @Override
    public <T extends Serializable> boolean replace(String key, int exp, T o, Transcoder<T> tc) {
        final MemcachedClient client = getClient();
        try (StatsTimer timer = writeOperation(1) ? getTimer(TIMER_SET) : null) {
            return client.replace(namespaceKey(key), exp, o, tc);
        } catch (TimeoutException | InterruptedException to) {
            // FIX: was reported as "set"; report the actual operation.
            timeout("replace", key, to);
            return false;
        } catch (MemcachedException e) {
            error(client, "replace", key, e);
            return false;
        } catch (IOException e) {
            error(client, "replace", key, e);
            Lombok.sneakyThrow(e);
            return false;
        }
    }

    /** Fire-and-forget set; delegates to {@link #asyncSet(String, int, Serializable)}. */
    public void setNoReply(String key, int expiration, Serializable value) {
        asyncSet(key, expiration, value);
    }

    /** Fire-and-forget set with an explicit transcoder. */
    public <T extends Serializable> void setNoReply(String key, int expiration, T value, Transcoder<T> transcoder) {
        asyncSet(key, expiration, value, transcoder);
    }

    @Override
    public boolean set(String key, int exp, Serializable o) {
        final MemcachedClient client = getClient();
        try (StatsTimer timer = writeOperation(1) ? getTimer(TIMER_SET) : null) {
            return client.set(namespaceKey(key), exp, o);
        } catch (TimeoutException | InterruptedException to) {
            timeout("set", key, to);
            return false;
        } catch (MemcachedException e) {
            error(client, "set", key, e);
            return false;
        } catch (IOException e) {
            error(client, "set", key, e);
            Lombok.sneakyThrow(e);
            return false;
        }
    }

    @Override
    public Future<Boolean> asyncSet(final String key, final int exp, final Serializable o) {
        return executor.submit(new LabeledCallable<Boolean>("mcAsyncSet") {
            public Boolean call() {
                final MemcachedClient client = getClient();
                try {
                    writeOperation(1);
                    return client.set(namespaceKey(key), exp, o, asyncMemcacheTimeout.toMillis());
                } catch (TimeoutException | InterruptedException to) {
                    timeout("asyncSet", key, to);
                    return false;
                } catch (MemcachedException e) {
                    error(client, "asyncSet", key, e);
                    return false;
                }
            }
        });
    }

    @Override
    public <T extends Serializable> Future<Boolean> asyncSet(final String key, final int exp, final T o,
            final Transcoder<T> transcoder) {
        return executor.submit(new LabeledCallable<Boolean>("mcAsyncSetWithTC") {
            public Boolean call() {
                final MemcachedClient client = getClient();
                try {
                    writeOperation(1);
                    return client.set(namespaceKey(key), exp, o, transcoder, asyncMemcacheTimeout.toMillis());
                } catch (TimeoutException | InterruptedException to) {
                    timeout("asyncSet", key, to);
                    return false;
                } catch (MemcachedException e) {
                    error(client, "asyncSet", key, e);
                    return false;
                }
            }
        });
    }

    @Override
    public Future<Serializable> asyncGet(final String key) {
        return executor.submit(new LabeledCallable<Serializable>("mcAsyncGet") {
            public Serializable call() {
                final MemcachedClient client = getClient();
                try {
                    readOperation(1);
                    return client.get(namespaceKey(key), asyncMemcacheTimeout.toMillis());
                } catch (TimeoutException | InterruptedException to) {
                    timeout("asyncGet", key, to);
                    Lombok.sneakyThrow(to);
                    return null;
                } catch (MemcachedException e) {
                    error(client, "asyncGet", key, e);
                    Lombok.sneakyThrow(e);
                    return null;
                }
            }
        });
    }

    @Override
    public <T extends Serializable> Future<T> asyncGet(final String key, final Transcoder<T> transcoder) {
        return executor.submit(new LabeledCallable<T>("mcAsyncGet") {
            public T call() {
                final MemcachedClient client = getClient();
                try {
                    readOperation(1);
                    return client.get(namespaceKey(key), asyncMemcacheTimeout.toMillis(), transcoder);
                } catch (TimeoutException | InterruptedException to) {
                    timeout("asyncGet", key, to);
                    Lombok.sneakyThrow(to);
                    return null;
                } catch (MemcachedException e) {
                    error(client, "asyncGet", key, e);
                    Lombok.sneakyThrow(e);
                    return null;
                }
            }
        });
    }

    @SuppressWarnings("unchecked")
    @Override
    public <T extends Serializable> T get(String key) {
        final MemcachedClient client = getClient();
        try (StatsTimer timer = readOperation(1) ? getTimer(TIMER_GET) : null) {
            return (T) client.get(namespaceKey(key));
        } catch (TimeoutException | InterruptedException to) {
            timeout("get", key, to);
            Lombok.sneakyThrow(to);
            return null;
        } catch (MemcachedException e) {
            error(client, "get", key, e);
            Lombok.sneakyThrow(e);
            return null;
        } catch (IOException e) {
            error(client, "get", key, e);
            Lombok.sneakyThrow(e);
            return null;
        }
    }

    @Override
    public <V extends Serializable> GetsResponse<V> gets(String key) {
        final MemcachedClient client = getClient();
        try {
            readOperation(1);
            return client.gets(namespaceKey(key));
        } catch (TimeoutException | InterruptedException to) {
            timeout("gets", key, to);
            Lombok.sneakyThrow(to);
            return null;
        } catch (MemcachedException e) {
            error(client, "gets", key, e);
            Lombok.sneakyThrow(e);
            return null;
        }
    }

    @Override
    public boolean delete(String key, long cas) {
        final MemcachedClient client = getClient();
        try {
            writeOperation(1);
            return client.delete(namespaceKey(key), cas);
        } catch (TimeoutException | InterruptedException to) {
            // FIX: was reported as "gets"; report the actual operation.
            timeout("delete", key, to);
            Lombok.sneakyThrow(to);
            return false;
        } catch (MemcachedException e) {
            error(client, "delete", key, e);
            Lombok.sneakyThrow(e);
            return false;
        }
    }

    public <T extends Serializable> Future<GetsResponse<T>> asyncGets(final String key) {
        return executor.submit(new LabeledCallable<GetsResponse<T>>("mcAsyncGets") {
            public GetsResponse<T> call() {
                final MemcachedClient client = getClient();
                try {
                    readOperation(1);
                    return client.gets(namespaceKey(key), asyncMemcacheTimeout.toMillis());
                } catch (TimeoutException | InterruptedException to) {
                    timeout("asyncGets", key, to);
                    Lombok.sneakyThrow(to);
                    return null;
                } catch (MemcachedException e) {
                    error(client, "asyncGets", key, e);
                    Lombok.sneakyThrow(e);
                    return null;
                }
            }
        });
    }

    /** Existence probe: fetches with a null transcoder so the value is never deserialized. */
    public boolean exists(String key) {
        final MemcachedClient client = getClient();
        try {
            readOperation(1);
            return client.get(namespaceKey(key), NULL_TRANSCODER) != null;
        } catch (TimeoutException | InterruptedException to) {
            timeout("exists", key, to);
            return false;
        } catch (MemcachedException e) {
            // FIX: was reported as "add"; report the actual operation.
            error(client, "exists", key, e);
            return false;
        }
    }

    public <T extends Serializable> Future<Map<String, T>> asyncGetBulk(final Collection<String> keys) {
        return executor.submit(new LabeledCallable<Map<String, T>>("mcAsyncGetBulk") {
            @SuppressWarnings("unchecked")
            public Map<String, T> call() {
                final MemcachedClient client = getClient();
                try {
                    readOperation(keys.size());
                    return (Map<String, T>) deprefix(client.get(namespaceKeys(keys), asyncMemcacheTimeout.toMillis()));
                } catch (TimeoutException | InterruptedException to) {
                    timeout("asyncGetBulk", keys.toString(), to);
                    Lombok.sneakyThrow(to);
                    return null;
                } catch (MemcachedException e) {
                    error(client, "asyncGetBulk", keys.toString(), e);
                    Lombok.sneakyThrow(e);
                    return null;
                }
            }
        });
    }

    public boolean touch(String key, int exp) {
        final MemcachedClient client = getClient();
        try {
            if (touchSupported) {
                writeOperation(1);
                return client.touch(namespaceKey(key), exp);
            } else {
                return false;
            }
        } catch (TimeoutException | InterruptedException to) {
            timeout("touch", key, to);
            return false;
        } catch (MemcachedException e) {
            error(client, "touch", key, e);
            return false;
        }
    }

    /** Increments, creating the counter at {@code def} if absent; -1 on failure. */
    public long incr(String key, long by, long def) {
        final MemcachedClient client = getClient();
        try {
            writeOperation(1);
            return client.incr(namespaceKey(key), by, def);
        } catch (TimeoutException | InterruptedException to) {
            timeout("incr", key, to);
            return -1;
        } catch (MemcachedException e) {
            error(client, "incr", key, e);
            return -1;
        }
    }

    public boolean delete(String key) {
        final MemcachedClient client = getClient();
        try {
            writeOperation(1);
            return client.delete(namespaceKey(key));
        } catch (TimeoutException | InterruptedException to) {
            timeout("delete", key, to);
            return false;
        } catch (MemcachedException e) {
            error(client, "delete", key, e);
            return false;
        }
    }

    @Override
    public long incr(String key, int amount) {
        final MemcachedClient client = getClient();
        try {
            writeOperation(1);
            return client.incr(namespaceKey(key), amount);
        } catch (TimeoutException | InterruptedException to) {
            timeout("incr", key, to);
            return -1;
        } catch (MemcachedException e) {
            error(client, "incr", key, e);
            return -1;
        }
    }

    @Override
    public long decr(String key, int amount) {
        final MemcachedClient client = getClient();
        try {
            writeOperation(1);
            return client.decr(namespaceKey(key), amount);
        } catch (TimeoutException | InterruptedException to) {
            timeout("decr", key, to);
            return -1;
        } catch (MemcachedException e) {
            error(client, "decr", key, e);
            return -1;
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    public <T extends Serializable> Map<String, T> getBulk(Set<String> keys) {
        final MemcachedClient client = getClient();
        try (StatsTimer timer = readOperation(keys.size()) ? getTimer(TIMER_GET_BULK) : null) {
            return (Map<String, T>) deprefix(client.get(namespaceKeys(keys)));
        } catch (TimeoutException | InterruptedException to) {
            timeout("getBulk", keys.toString(), to);
            Lombok.sneakyThrow(to);
            return null;
        } catch (MemcachedException e) {
            error(client, "getBulk", keys.toString(), e);
            Lombok.sneakyThrow(e);
            return null;
        } catch (IOException e) {
            error(client, "getBulk", keys.toString(), e);
            Lombok.sneakyThrow(e);
            return null;
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    public <T extends Serializable> Map<String, T> getBulk(Set<String> keys, Transcoder<T> transcoder) {
        final MemcachedClient client = getClient();
        try (StatsTimer timer = readOperation(keys.size()) ? getTimer(TIMER_GET_BULK) : null) {
            return (Map<String, T>) deprefix(client.get(namespaceKeys(keys), transcoder));
        } catch (TimeoutException | InterruptedException to) {
            timeout("getBulk", keys.toString(), to);
            Lombok.sneakyThrow(to);
            return null;
        } catch (MemcachedException e) {
            error(client, "getBulk", keys.toString(), e);
            Lombok.sneakyThrow(e);
            return null;
        } catch (IOException e) {
            error(client, "getBulk", keys.toString(), e);
            Lombok.sneakyThrow(e);
            return null;
        }
    }

    /**
     * Strips the namespace prefix from the keys of a bulk-get result. Keys that do not
     * carry the prefix are passed through unchanged.
     */
    @SuppressWarnings("unchecked")
    private Map<String, Serializable> deprefix(Map<String, ?> map) {
        if (map == null) {
            return null;
        } else if (map.isEmpty()) {
            return (Map<String, Serializable>) map;
        } else {
            Map<String, Serializable> rv = new HashMap<>(map.size());
            String namespaceKey = namespace;
            for (Entry<String, ?> entry : map.entrySet()) {
                String key = entry.getKey();
                if (key.startsWith(namespaceKey)) {
                    rv.put(key.substring(namespaceKey.length()), (Serializable) entry.getValue());
                } else {
                    rv.put(key, (Serializable) entry.getValue());
                }
            }
            return rv;
        }
    }

    @Override
    public boolean append(String key, Serializable value) {
        final MemcachedClient client = getClient();
        try {
            writeOperation(1);
            return client.append(namespaceKey(key), value);
        } catch (TimeoutException | InterruptedException to) {
            timeout("append", key, to);
            return false;
        } catch (MemcachedException e) {
            error(client, "append", key, e);
            return false;
        }
    }

    @Override
    public Future<Map<String, Serializable>> asyncGetBulk(final Set<String> keys) {
        return executor.submit(new LabeledCallable<Map<String, Serializable>>("mcAsyncGetBulk") {
            @Override
            public Map<String, Serializable> call() {
                final MemcachedClient client = getClient();
                try {
                    readOperation(keys.size());
                    // FIX: this overload previously returned the raw map, leaking
                    // namespace-prefixed keys to callers; deprefix for consistency with
                    // getBulk(...) and asyncGetBulk(Collection).
                    return deprefix(client.get(namespaceKeys(keys), asyncMemcacheTimeout.toMillis()));
                } catch (TimeoutException | InterruptedException to) {
                    timeout("asyncGetBulk", keys.toString(), to);
                    Lombok.sneakyThrow(to);
                    return null;
                } catch (MemcachedException e) {
                    error(client, "asyncGetBulk", keys.toString(), e);
                    Lombok.sneakyThrow(e);
                    return null;
                }
            }
        });
    }

    /** Prepends the configured namespace to a single key. */
    protected String namespaceKey(String key) {
        return namespace + key;
    }

    /** Prepends the configured namespace to every key in the collection. */
    private Collection<String> namespaceKeys(Collection<String> keyStrs) {
        final String prefix = namespace;
        return keyStrs.stream().map(s -> prefix + s).collect(Collectors.toList());
    }

    @Override
    public <V extends Serializable> V get(String key, Transcoder<V> transcoder) {
        final MemcachedClient client = getClient();
        try (StatsTimer timer = readOperation(1) ? getTimer(TIMER_GET) : null) {
            return client.get(namespaceKey(key), transcoder);
        } catch (TimeoutException | InterruptedException to) {
            timeout("get", key, to);
            Lombok.sneakyThrow(to);
            return null;
        } catch (MemcachedException e) {
            error(client, "get", key, e);
            Lombok.sneakyThrow(e);
            return null;
        } catch (IOException e) {
            error(client, "get", key, e);
            Lombok.sneakyThrow(e);
            return null;
        }
    }

    /** Returns a sampled stats timer, or null when stats are disabled. */
    private StatsTimer getTimer(Timer timer) {
        if (stats != null) {
            return new StatsTimer(stats, timer, false, sampleRate);
        }
        return null;
    }

    @Override
    public long incr(String key, int amount, long def) {
        final MemcachedClient client = getClient();
        try {
            writeOperation(1);
            return client.incr(namespaceKey(key), amount, def);
        } catch (TimeoutException | InterruptedException to) {
            timeout("incr", key, to);
            return -1;
        } catch (MemcachedException e) {
            error(client, "incr", key, e);
            return -1;
        }
    }

    @Override
    public void flush() {
        final MemcachedClient client = getClient();
        try {
            client.flushAll();
        } catch (Exception e) {
            log.warn("Exception in flush", e);
        }
    }

    /**
     * Compare-and-swap loop: when the key is absent, attempts an add with
     * {@code operation.getNewValue(0, null)}; otherwise delegates to the client's CAS.
     * Retries up to {@code operation.getMaxTries()} times; returns false on exhaustion.
     *
     * @throws TimeoutException if the underlying client times out
     */
    @Override
    public <T extends Serializable> boolean cas(String key, int exp, CASOperation<T> operation) throws TimeoutException {
        key = namespaceKey(key);
        MemcachedClient client = getClient();
        int maxTries = operation.getMaxTries();
        while (maxTries-- > 0) {
            GetsResponse<T> response = null;
            try {
                response = client.gets(key);
                if (response == null) {
                    // Add case: no existing value under the key.
                    T newValue = operation.getNewValue(0, null);
                    if (newValue != null) {
                        if (client.add(key, exp, newValue)) {
                            return true;
                        } else {
                            log.info("Retrying");
                            continue;
                        }
                    } else {
                        return true;
                    }
                } else {
                    // CAS case: a value exists; let the client perform the swap.
                    return client.cas(key, exp, response, operation);
                }
            } catch (CASFailException cfe) {
                // Operation just wants to fail the whole thing.
                // NOTE(review): this falls through and keeps retrying until maxTries is
                // exhausted instead of failing immediately — confirm that is intended.
            } catch (CASDeleteException cde) {
                try {
                    return client.delete(key, response.getCas(), 500);
                } catch (InterruptedException | MemcachedException e) {
                    log.warn("Exception in cas", e);
                }
            } catch (InterruptedException | MemcachedException e) {
                log.warn("Exception in cas", e);
            } catch (AbortCAS abort) {
                return true;
            }
        }
        return false;
    }
}
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.runtime;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.mockito.AdditionalMatchers.find;
import static org.mockito.AdditionalMatchers.not;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.contains;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.util.BlazeClock;
import com.google.devtools.build.lib.util.io.AnsiTerminalPrinter;
import com.google.devtools.build.lib.vfs.FileSystem;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem;
import com.google.devtools.build.lib.view.test.TestStatus.BlazeTestStatus;
import com.google.devtools.build.lib.view.test.TestStatus.FailedTestCasesStatus;
import com.google.devtools.build.lib.view.test.TestStatus.TestCase;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.InOrder;
import org.mockito.Mockito;

/**
 * Tests for {@link TestSummary} and its terminal rendering via
 * {@code TestSummaryPrinter}. Output checks are done with a mocked
 * {@link AnsiTerminalPrinter} and regex matchers ({@code find}/{@code contains}).
 */
@RunWith(JUnit4.class)
public class TestSummaryTest {

  // Regex fragment matching any (possibly empty) run of characters, non-greedily.
  private static final String ANY_STRING = ".*?";
  private static final String PATH = "package";
  private static final String TARGET_NAME = "name";

  private ConfiguredTarget stubTarget;
  // Four fake per-run timings; also used to derive the fully-cached run count.
  private static final ImmutableList<Long> SMALL_TIMING = ImmutableList.of(1L, 2L, 3L, 4L);

  private static final int CACHED = SMALL_TIMING.size();
  private static final int NOT_CACHED = 0;

  private FileSystem fs;
  private TestSummary.Builder basicBuilder;

  @Before
  public final void createFileSystem() throws Exception {
    fs = new InMemoryFileSystem(BlazeClock.instance());
    stubTarget = stubTarget();
    basicBuilder = getTemplateBuilder();
  }

  // A baseline passing, uncached, locally-run summary that tests specialize.
  private TestSummary.Builder getTemplateBuilder() {
    return TestSummary.newBuilder()
        .setTarget(stubTarget)
        .setStatus(BlazeTestStatus.PASSED)
        .setNumCached(NOT_CACHED)
        .setActionRan(true)
        .setRanRemotely(false)
        .setWasUnreportedWrongSize(false);
  }

  // Converts path strings to Paths on the in-memory filesystem.
  private List<Path> getPathList(String... names) {
    List<Path> list = new ArrayList<>();
    for (String name : names) {
      list.add(fs.getPath(name));
    }
    return list;
  }

  @Test
  public void testShouldProperlyTestLabels() throws Exception {
    ConfiguredTarget target = target("somepath", "MyTarget");
    String expectedString = ANY_STRING + "//somepath:MyTarget" + ANY_STRING;
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummary summaryStatus = createTestSummary(target, BlazeTestStatus.PASSED, CACHED);
    TestSummaryPrinter.print(summaryStatus, terminalPrinter, true, false);
    terminalPrinter.print(find(expectedString));
  }

  @Test
  public void testShouldPrintPassedStatus() throws Exception {
    String expectedString = ANY_STRING + "INFO" + ANY_STRING + BlazeTestStatus.PASSED + ANY_STRING;
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummary summary = createTestSummary(stubTarget, BlazeTestStatus.PASSED, NOT_CACHED);
    TestSummaryPrinter.print(summary, terminalPrinter, true, false);
    terminalPrinter.print(find(expectedString));
  }

  @Test
  public void testShouldPrintFailedStatus() throws Exception {
    String expectedString = ANY_STRING + "ERROR" + ANY_STRING + BlazeTestStatus.FAILED + ANY_STRING;
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummary summary = createTestSummary(stubTarget, BlazeTestStatus.FAILED, NOT_CACHED);
    TestSummaryPrinter.print(summary, terminalPrinter, true, false);
    terminalPrinter.print(find(expectedString));
  }

  // Statuses like FAILED_TO_BUILD should produce no terminal output at all.
  private void assertShouldNotPrint(BlazeTestStatus status) throws Exception {
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummaryPrinter.print(
        createTestSummary(stubTarget, status, NOT_CACHED), terminalPrinter, true, false);
    verify(terminalPrinter, never()).print(anyString());
  }

  @Test
  public void testShouldNotPrintFailedToBuildStatus() throws Exception {
    assertShouldNotPrint(BlazeTestStatus.FAILED_TO_BUILD);
  }

  @Test
  public void testShouldNotPrintHaltedStatus() throws Exception {
    assertShouldNotPrint(BlazeTestStatus.BLAZE_HALTED_BEFORE_TESTING);
  }

  @Test
  public void testShouldPrintCachedStatus() throws Exception {
    String expectedString = ANY_STRING + "\\(cached" + ANY_STRING;
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummary summary = createTestSummary(stubTarget, BlazeTestStatus.PASSED, CACHED);
    TestSummaryPrinter.print(summary, terminalPrinter, true, false);
    terminalPrinter.print(find(expectedString));
  }

  @Test
  public void testPartialCachedStatus() throws Exception {
    String expectedString = ANY_STRING + "\\(3/4 cached" + ANY_STRING;
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummary summary = createTestSummary(stubTarget, BlazeTestStatus.PASSED, CACHED - 1);
    TestSummaryPrinter.print(summary, terminalPrinter, true, false);
    terminalPrinter.print(find(expectedString));
  }

  @Test
  public void testIncompleteCached() throws Exception {
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummary summary = createTestSummary(stubTarget, BlazeTestStatus.INCOMPLETE, CACHED - 1);
    TestSummaryPrinter.print(summary, terminalPrinter, true, false);
    verify(terminalPrinter).print(not(contains("cached")));
  }

  @Test
  public void testShouldPrintUncachedStatus() throws Exception {
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummary summary = createTestSummary(stubTarget, BlazeTestStatus.PASSED, NOT_CACHED);
    TestSummaryPrinter.print(summary, terminalPrinter, true, false);
    verify(terminalPrinter).print(not(contains("cached")));
  }

  @Test
  public void testNoTiming() throws Exception {
    String expectedString = ANY_STRING + "INFO" + ANY_STRING + BlazeTestStatus.PASSED;
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummary summary = createTestSummary(stubTarget, BlazeTestStatus.PASSED, NOT_CACHED);
    TestSummaryPrinter.print(summary, terminalPrinter, true, false);
    terminalPrinter.print(find(expectedString));
  }

  @Test
  public void testBuilder() throws Exception {
    // No need to copy if built twice in a row; no direct setters on the object.
    TestSummary summary = basicBuilder.build();
    TestSummary sameSummary = basicBuilder.build();
    assertSame(summary, sameSummary);

    basicBuilder.addTestTimes(ImmutableList.of(40L));
    TestSummary summaryCopy = basicBuilder.build();
    assertEquals(summary.getTarget(), summaryCopy.getTarget());
    assertEquals(summary.getStatus(), summaryCopy.getStatus());
    assertEquals(summary.numCached(), summaryCopy.numCached());
    assertNotSame(summary, summaryCopy);
    assertEquals(0, summary.totalRuns());
    assertEquals(1, summaryCopy.totalRuns());

    // Check that the builder can add a new warning to the copy,
    // despite the immutability of the original.
    basicBuilder.addTestTimes(ImmutableList.of(60L));

    TestSummary fiftyCached = basicBuilder.setNumCached(50).build();
    assertEquals(summary.getStatus(), fiftyCached.getStatus());
    assertEquals(50, fiftyCached.numCached());
    assertEquals(2, fiftyCached.totalRuns());

    TestSummary sixtyCached = basicBuilder.setNumCached(60).build();
    assertEquals(60, sixtyCached.numCached());
    assertEquals(50, fiftyCached.numCached());

    TestSummary failedCacheTemplate = TestSummary.newBuilderFromExisting(fiftyCached)
        .setStatus(BlazeTestStatus.FAILED)
        .build();
    assertEquals(50, failedCacheTemplate.numCached());
    assertEquals(BlazeTestStatus.FAILED, failedCacheTemplate.getStatus());
  }

  @Test
  public void testSingleTime() throws Exception {
    String expectedString = ANY_STRING + "INFO" + ANY_STRING + BlazeTestStatus.PASSED + ANY_STRING
        + "in 3.4s";
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummary summary = basicBuilder.addTestTimes(ImmutableList.of(3412L)).build();
    TestSummaryPrinter.print(summary, terminalPrinter, true, false);
    terminalPrinter.print(find(expectedString));
  }

  @Test
  public void testNoTime() throws Exception {
    // The last part matches anything not containing "in".
    String expectedString = ANY_STRING + "INFO" + ANY_STRING + BlazeTestStatus.PASSED + "(?!in)*";
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummary summary = basicBuilder.addTestTimes(ImmutableList.of(3412L)).build();
    // Third argument false: timing information is suppressed.
    TestSummaryPrinter.print(summary, terminalPrinter, false, false);
    terminalPrinter.print(find(expectedString));
  }

  @Test
  public void testMultipleTimes() throws Exception {
    String expectedString = ANY_STRING + "INFO" + ANY_STRING + BlazeTestStatus.PASSED + ANY_STRING
        + "\n Stats over 3 runs: max = 3.0s, min = 1.0s, " + "avg = 2.0s, dev = 0.8s";
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummary summary = basicBuilder
        .addTestTimes(ImmutableList.of(1000L, 2000L, 3000L))
        .build();
    TestSummaryPrinter.print(summary, terminalPrinter, true, false);
    terminalPrinter.print(find(expectedString));
  }

  @Test
  public void testCoverageDataReferences() throws Exception {
    List<Path> paths = getPathList("/cov1.dat", "/cov2.dat", "/cov3.dat", "/cov4.dat");
    FileSystemUtils.writeContentAsLatin1(paths.get(1), "something");
    // NOTE(review): paths.get(3) is written twice (empty, then non-empty) and
    // paths.get(2) is never written — possibly an intended overwrite, but it
    // looks like a typo for paths.get(2); confirm against printer semantics.
    FileSystemUtils.writeContentAsLatin1(paths.get(3), "");
    FileSystemUtils.writeContentAsLatin1(paths.get(3), "something else");
    TestSummary summary = basicBuilder.addCoverageFiles(paths).build();
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummaryPrinter.print(summary, terminalPrinter, true, false);
    verify(terminalPrinter).print(find(ANY_STRING + "INFO" + ANY_STRING + BlazeTestStatus.PASSED));
    verify(terminalPrinter).print(find(" /cov2.dat"));
    verify(terminalPrinter).print(find(" /cov4.dat"));
  }

  @Test
  public void testFlakyAttempts() throws Exception {
    String expectedString = ANY_STRING + "WARNING" + ANY_STRING + BlazeTestStatus.FLAKY
        + ANY_STRING + ", failed in 2 out of 3";
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummary summary = basicBuilder
        .setStatus(BlazeTestStatus.FLAKY)
        .addPassedLogs(getPathList("/a"))
        .addFailedLogs(getPathList("/b", "/c"))
        .build();
    TestSummaryPrinter.print(summary, terminalPrinter, true, false);
    terminalPrinter.print(find(expectedString));
  }

  @Test
  public void testNumberOfFailedRuns() throws Exception {
    String expectedString = ANY_STRING + "ERROR" + ANY_STRING + BlazeTestStatus.FAILED
        + ANY_STRING + "in 2 out of 3";
    AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummary summary = basicBuilder
        .setStatus(BlazeTestStatus.FAILED)
        .addPassedLogs(getPathList("/a"))
        .addFailedLogs(getPathList("/b", "/c"))
        .build();
    TestSummaryPrinter.print(summary, terminalPrinter, true, false);
    terminalPrinter.print(find(expectedString));
  }

  @Test
  public void testFileNamesNotShown() throws Exception {
    List<TestCase> emptyDetails = ImmutableList.of();
    TestSummary summary = basicBuilder
        .setStatus(BlazeTestStatus.FAILED)
        .addPassedLogs(getPathList("/apple"))
        .addFailedLogs(getPathList("/pear"))
        .addCoverageFiles(getPathList("/maracuja"))
        .addFailedTestCases(emptyDetails, FailedTestCasesStatus.FULL)
        .build();

    // Check that only //package:name is printed.
    AnsiTerminalPrinter printer = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummaryPrinter.print(summary, printer, true, true);
    verify(printer).print(contains("//package:name"));
  }

  @Test
  public void testMessageShownWhenTestCasesMissing() throws Exception {
    ImmutableList<TestCase> emptyList = ImmutableList.of();
    TestSummary summary = createTestSummaryWithDetails(
        BlazeTestStatus.FAILED, emptyList, FailedTestCasesStatus.NOT_AVAILABLE);

    AnsiTerminalPrinter printer = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummaryPrinter.print(summary, printer, true, true);
    verify(printer).print(contains("//package:name"));
    verify(printer).print(contains("not available"));
  }

  @Test
  public void testMessageShownForPartialResults() throws Exception {
    ImmutableList<TestCase> testCases =
        ImmutableList.of(newDetail("orange", TestCase.Status.FAILED, 1500L));
    TestSummary summary = createTestSummaryWithDetails(BlazeTestStatus.FAILED, testCases,
        FailedTestCasesStatus.PARTIAL);

    AnsiTerminalPrinter printer = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummaryPrinter.print(summary, printer, true, true);
    verify(printer).print(contains("//package:name"));
    verify(printer).print(find("FAILED.*orange"));
    verify(printer).print(contains("incomplete"));
  }

  // Builds a leaf TestCase detail with the given name, status and duration.
  private TestCase newDetail(String name, TestCase.Status status, long duration) {
    return TestCase.newBuilder()
        .setName(name)
        .setStatus(status)
        .setRunDurationMillis(duration).build();
  }

  @Test
  public void testTestCaseNamesShownWhenNeeded() throws Exception {
    TestCase detailPassed = newDetail("strawberry", TestCase.Status.PASSED, 1000L);
    TestCase detailFailed = newDetail("orange", TestCase.Status.FAILED, 1500L);

    TestSummary summaryPassed = createTestSummaryWithDetails(
        BlazeTestStatus.PASSED, Arrays.asList(detailPassed));

    TestSummary summaryFailed = createTestSummaryWithDetails(
        BlazeTestStatus.FAILED, Arrays.asList(detailPassed, detailFailed));
    assertEquals(BlazeTestStatus.FAILED, summaryFailed.getStatus());

    AnsiTerminalPrinter printerPassed = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummaryPrinter.print(summaryPassed, printerPassed, true, true);
    verify(printerPassed).print(contains("//package:name"));

    AnsiTerminalPrinter printerFailed = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummaryPrinter.print(summaryFailed, printerFailed, true, true);
    verify(printerFailed).print(contains("//package:name"));
    verify(printerFailed).print(find("FAILED.*orange *\\(1\\.5"));
  }

  @Test
  public void testTestCaseNamesOrdered() throws Exception {
    TestCase[] details = {
      newDetail("apple", TestCase.Status.FAILED, 1000L),
      newDetail("banana", TestCase.Status.FAILED, 1000L),
      newDetail("cranberry", TestCase.Status.FAILED, 1000L)
    };

    // The exceedingly dumb approach: writing all the permutations down manually
    // is simply easier than any way of generating them.
    int[][] permutations = {
      { 0, 1, 2 },
      { 0, 2, 1 },
      { 1, 0, 2 },
      { 1, 2, 0 },
      { 2, 0, 1 },
      { 2, 1, 0 }
    };

    for (int[] permutation : permutations) {
      List<TestCase> permutatedDetails = new ArrayList<>();

      for (int element : permutation) {
        permutatedDetails.add(details[element]);
      }

      TestSummary summary = createTestSummaryWithDetails(BlazeTestStatus.FAILED, permutatedDetails);

      // A mock that checks the ordering of method calls
      AnsiTerminalPrinter printer = Mockito.mock(AnsiTerminalPrinter.class);
      TestSummaryPrinter.print(summary, printer, true, true);
      InOrder order = Mockito.inOrder(printer);
      order.verify(printer).print(contains("//package:name"));
      order.verify(printer).print(find("FAILED.*apple"));
      order.verify(printer).print(find("FAILED.*banana"));
      order.verify(printer).print(find("FAILED.*cranberry"));
    }
  }

  @Test
  public void testCachedResultsFirstInSort() throws Exception {
    TestSummary summaryFailedCached = createTestSummary(BlazeTestStatus.FAILED, CACHED);
    TestSummary summaryFailedNotCached = createTestSummary(BlazeTestStatus.FAILED, NOT_CACHED);
    TestSummary summaryPassedCached = createTestSummary(BlazeTestStatus.PASSED, CACHED);
    TestSummary summaryPassedNotCached = createTestSummary(BlazeTestStatus.PASSED, NOT_CACHED);

    // This way we can make the test independent from the sort order of FAILED
    // and PASSED.
    assertTrue(summaryFailedCached.compareTo(summaryPassedNotCached) < 0);
    assertTrue(summaryPassedCached.compareTo(summaryFailedNotCached) < 0);
  }

  @Test
  public void testCollectingFailedDetails() throws Exception {
    TestCase rootCase = TestCase.newBuilder()
        .setName("tests")
        .setRunDurationMillis(5000L)
        .addChild(newDetail("apple", TestCase.Status.FAILED, 1000L))
        .addChild(newDetail("banana", TestCase.Status.PASSED, 1000L))
        .addChild(newDetail("cherry", TestCase.Status.ERROR, 1000L))
        .build();

    TestSummary summary = getTemplateBuilder()
        .collectFailedTests(rootCase)
        .setStatus(BlazeTestStatus.FAILED)
        .build();

    AnsiTerminalPrinter printer = Mockito.mock(AnsiTerminalPrinter.class);
    TestSummaryPrinter.print(summary, printer, true, true);
    verify(printer).print(contains("//package:name"));
    verify(printer).print(find("FAILED.*apple"));
    verify(printer).print(find("ERROR.*cherry"));
  }

  // Creates a mock ConfiguredTarget whose label is //path:targetName.
  private ConfiguredTarget target(String path, String targetName) throws Exception {
    ConfiguredTarget target = Mockito.mock(ConfiguredTarget.class);
    when(target.getLabel()).thenReturn(Label.create(path, targetName));
    return target;
  }

  private ConfiguredTarget stubTarget() throws Exception {
    return target(PATH, TARGET_NAME);
  }

  private TestSummary createTestSummaryWithDetails(BlazeTestStatus status, List<TestCase> details) {
    TestSummary summary = getTemplateBuilder()
        .setStatus(status)
        .addFailedTestCases(details, FailedTestCasesStatus.FULL)
        .build();
    return summary;
  }

  private TestSummary createTestSummaryWithDetails(
      BlazeTestStatus status, List<TestCase> testCaseList, FailedTestCasesStatus detailsStatus) {
    TestSummary summary = getTemplateBuilder()
        .setStatus(status)
        .addFailedTestCases(testCaseList, detailsStatus)
        .build();
    return summary;
  }

  private static TestSummary createTestSummary(ConfiguredTarget target, BlazeTestStatus status,
      int numCached) {
    ImmutableList<TestCase> emptyList = ImmutableList.of();
    TestSummary summary = TestSummary.newBuilder()
        .setTarget(target)
        .setStatus(status)
        .setNumCached(numCached)
        .setActionRan(true)
        .setRanRemotely(false)
        .setWasUnreportedWrongSize(false)
        .addFailedTestCases(emptyList, FailedTestCasesStatus.FULL)
        .addTestTimes(SMALL_TIMING)
        .build();
    return summary;
  }

  private TestSummary createTestSummary(BlazeTestStatus status, int numCached) {
    TestSummary summary = getTemplateBuilder()
        .setStatus(status)
        .setNumCached(numCached)
        .addTestTimes(SMALL_TIMING)
        .build();
    return summary;
  }
}
// Copyright (c) 2009 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.jetbrains.v8;

import gnu.trove.TIntObjectHashMap;
import gnu.trove.TIntObjectProcedure;
import gnu.trove.TLongObjectHashMap;
import gnu.trove.TObjectProcedure;
import org.chromium.sdk.*;
import org.chromium.sdk.Breakpoint.Target;
import org.chromium.sdk.JavascriptVm.BreakpointCallback;
import org.chromium.sdk.JavascriptVm.ExceptionCatchMode;
import org.chromium.sdk.JavascriptVm.ListBreakpointsCallback;
import org.chromium.sdk.internal.ScriptRegExpBreakpointTarget;
import org.jetbrains.v8.BreakpointImpl.FunctionTarget;
import org.chromium.sdk.internal.v8native.protocol.input.CommandResponse;
import org.chromium.sdk.internal.v8native.protocol.input.CommandResponseBody;
import org.chromium.sdk.internal.v8native.protocol.input.FlagsBody;
import org.chromium.sdk.internal.v8native.protocol.input.FlagsBody.FlagInfo;
import org.chromium.sdk.internal.v8native.protocol.input.data.BreakpointInfo;
import org.chromium.sdk.internal.v8native.protocol.output.ClearBreakpointMessage;
import org.chromium.sdk.internal.v8native.protocol.output.FlagsMessage;
import org.chromium.sdk.internal.v8native.protocol.output.ListBreakpointsMessage;
import org.chromium.sdk.util.GenericCallback;
import org.chromium.sdk.util.RelaySyncCallback;
import org.jetbrains.v8.protocol.Changebreakpoint;
import org.jetbrains.v8.protocol.Setbreakpoint;
import org.jetbrains.v8.protocol.Setbreakpoint;
import org.jetbrains.v8.protocol.SetbreakpointResult;

import java.util.*;

/**
 * Manages the lifecycle of V8 breakpoints for one {@link DebugSession}: creating,
 * clearing, changing and reloading them over the V8 native debug protocol, and
 * keeping a local id-to-breakpoint map in sync with the remote VM.
 * All remote operations are asynchronous; results are delivered via the supplied
 * callbacks and a {@link SyncCallback} that fires when the exchange completes.
 */
public class BreakpointManager {
  /**
   * This map shall contain only breakpoints with valid IDs.
   * Complex operations must be explicitly synchronized on this instance.
   */
  private final TIntObjectHashMap<BreakpointImpl> idToBreakpoint = new TIntObjectHashMap<BreakpointImpl>();

  // Session this manager belongs to; used to send all protocol messages.
  private final DebugSession debugSession;

  public BreakpointManager(DebugSession debugSession) {
    this.debugSession = debugSession;
  }

  DebugSession getDebugSession() {
    return debugSession;
  }

  public BreakpointTypeExtension getBreakpointTypeExtension() {
    return breakpointTypeExtension;
  }

  /**
   * Sets a breakpoint with the default ignore count ({@link Breakpoint#EMPTY_VALUE}).
   * Delegates to the full overload below.
   */
  public RelayOk setBreakpoint(Breakpoint.Target target, int line, int column, boolean enabled,
      String condition, JavascriptVm.BreakpointCallback callback, SyncCallback syncCallback) {
    return setBreakpoint(target, line, column, enabled, condition, Breakpoint.EMPTY_VALUE,
        callback, syncCallback);
  }

  // Maps a breakpoint target to the protocol "type" string expected by the
  // V8 "setbreakpoint" request.
  private static final BreakpointImpl.TargetExtendedVisitor<String> GET_TYPE_VISITOR =
      new BreakpointImpl.TargetExtendedVisitor<String>() {
        @Override
        public String visitFunction(String expression) {
          return "function";
        }
        @Override
        public String visitScriptName(String scriptName) {
          return "script";
        }
        @Override
        public String visitScriptId(Object scriptId) {
          return "scriptId";
        }
        @Override
        public String visitRegExp(String regExp) {
          return "scriptRegExp";
        }
        @Override
        public String visitUnknown(Target target) {
          throw new IllegalArgumentException();
        }
      };

  // Maps a breakpoint target to the protocol "target" string (expression,
  // script name, script id or regexp) for the same request.
  private static final BreakpointImpl.TargetExtendedVisitor<String> GET_TARGET_VISITOR =
      new BreakpointImpl.TargetExtendedVisitor<String>() {
        @Override
        public String visitFunction(String expression) {
          return expression;
        }
        @Override
        public String visitScriptName(String scriptName) {
          return scriptName;
        }
        @Override
        public String visitScriptId(Object scriptIdObj) {
          if (scriptIdObj instanceof Long) {
            return scriptIdObj.toString();
          }
          throw new IllegalStateException("Script id must be of type Long");
        }
        @Override
        public String visitRegExp(String regExp) {
          return regExp;
        }
        @Override
        public String visitUnknown(Target target) {
          throw new IllegalArgumentException();
        }
      };

  /**
   * Sends a "setbreakpoint" request; on success wraps the response in a
   * {@link BreakpointImpl}, registers it in {@link #idToBreakpoint} and reports
   * it to {@code callback}.
   */
  RelayOk setBreakpoint(final Breakpoint.Target target, final int line, int column, final
      boolean enabled, final String condition, int ignoreCount,
      final JavascriptVm.BreakpointCallback callback, SyncCallback syncCallback) {
    return debugSession.getCommandProcessor().sendAsync(
        new Setbreakpoint(target.accept(GET_TYPE_VISITOR), target.accept(GET_TARGET_VISITOR), line)
            .column(column).condition(condition).ignoreCount(ignoreCount).enabled(enabled),
        new V8CommandCallbackWithResponse<SetbreakpointResult, Void>() {
          @Override
          protected Void success(SetbreakpointResult result, CommandResponse.Success response) {
            BreakpointImpl breakpoint =
                new BreakpointImpl(result.breakpoint(), target, line, enabled, condition,
                    BreakpointManager.this);
            // NOTE(review): mutated from an async callback without synchronizing on
            // idToBreakpoint — presumably all callbacks run on one dispatch thread; confirm.
            idToBreakpoint.put(breakpoint.getId(), breakpoint);
            if (callback != null) {
              callback.success(breakpoint);
            }
            return null;
          }

          @Override
          public void onError(String message) {
            if (callback != null) {
              callback.failure(message);
            }
          }
        }, syncCallback);
  }

  public Breakpoint getBreakpoint(int id) {
    return idToBreakpoint.get(id);
  }

  /**
   * Removes the breakpoint locally, then asks the VM to clear it.
   * A breakpoint with {@link Breakpoint#INVALID_ID} was never set remotely,
   * so only the sync callback is completed.
   */
  public RelayOk clearBreakpoint(BreakpointImpl breakpointImpl, final BreakpointCallback callback,
      SyncCallback syncCallback, int originalId) {
    if (originalId == Breakpoint.INVALID_ID) {
      return RelaySyncCallback.finish(syncCallback);
    }
    // Remove from the local map eagerly, before the remote confirms.
    idToBreakpoint.remove(originalId);
    return debugSession.sendMessage(
        new ClearBreakpointMessage(originalId),
        new V8CommandCallbackBase() {
          @Override
          public void success(CommandResponse.Success successResponse) {
            if (callback != null) {
              callback.success(null);
            }
          }

          @Override
          public void failure(String message) {
            if (callback != null) {
              callback.failure(message);
            }
          }
        }, syncCallback);
  }

  /**
   * Pushes the breakpoint's current enabled/condition state to the VM via a
   * "changebreakpoint" request.
   */
  public RelayOk changeBreakpoint(final BreakpointImpl breakpointImpl,
      final BreakpointCallback callback, SyncCallback syncCallback) {
    return debugSession.sendMessage(
        new Changebreakpoint(breakpointImpl.getId())
            .enabled(breakpointImpl.isEnabled()).condition(breakpointImpl.getCondition()),
        new V8CommandCallbackBase() {
          @Override
          public void success(CommandResponse.Success successResponse) {
            if (callback != null) {
              callback.success(breakpointImpl);
            }
          }

          @Override
          public void failure(String message) {
            if (callback != null) {
              callback.failure(message);
            }
          }
        }, syncCallback);
  }

  /**
   * Reads a list of breakpoints from remote and updates local instances and the map.
   * @return a relay token for the asynchronous "listbreakpoints" exchange; the
   *     refreshed (unmodifiable) breakpoint collection is delivered to {@code callback}
   */
  public RelayOk reloadBreakpoints(final ListBreakpointsCallback callback,
      SyncCallback syncCallback) {
    V8CommandCallbackBase v8Callback = new V8CommandCallbackBase() {
      @Override
      public void failure(String message) {
        callback.failure(new Exception(message));
      }

      @Override
      public void success(CommandResponse.Success successResponse) {
        CommandResponseBody body = successResponse.body();
        List<BreakpointInfo> infos = body.asListBreakpointsBody().breakpoints();
        Collection<Breakpoint> updatedBreakpoints;
        try {
          updatedBreakpoints = syncBreakpoints(infos);
        } catch (RuntimeException e) {
          // A malformed response must be reported as a failure, not propagated.
          callback.failure(new Exception("Failed to read server response", e));
          return;
        }
        callback.success(Collections.unmodifiableCollection(updatedBreakpoints));
      }
    };
    return debugSession.sendMessage(new ListBreakpointsMessage(), v8Callback, syncCallback);
  }

  /** Toggles the VM-wide "breakPointsActive" flag (enables/disables all breakpoints). */
  public RelayOk enableBreakpoints(boolean enabled, GenericCallback<Boolean> callback,
      SyncCallback syncCallback) {
    return setRemoteFlag("breakPointsActive", enabled, callback, syncCallback);
  }

  /**
   * Maps {@link ExceptionCatchMode} onto the two VM flags for caught/uncaught
   * exceptions. {@code null} catchMode means "query only" (no flags written).
   * The callback receives the mode the VM actually reports back, or {@code null}
   * for the caught-but-not-uncaught combination that the enum cannot express.
   */
  public RelayOk setBreakOnException(ExceptionCatchMode catchMode,
      final GenericCallback<ExceptionCatchMode> callback, SyncCallback syncCallback) {
    boolean[] flagValues;
    if (catchMode == null) {
      // Empty array: read current flag values without changing them.
      flagValues = new boolean[0];
    } else {
      boolean caughtValue;
      boolean uncaughtValue;
      switch (catchMode) {
        case ALL:
          caughtValue = true;
          uncaughtValue = true;
          break;
        case NONE:
          caughtValue = false;
          uncaughtValue = false;
          break;
        case UNCAUGHT:
          caughtValue = false;
          uncaughtValue = true;
          break;
        default:
          // Unreachable unless a new enum constant is added without updating this switch.
          throw new RuntimeException();
      }
      flagValues = new boolean[]{caughtValue, uncaughtValue};
    }
    GenericCallback<List<Boolean>> wrappedCallback;
    if (callback == null) {
      wrappedCallback = null;
    } else {
      wrappedCallback = new GenericCallback<List<Boolean>>() {
        @Override
        public void success(List<Boolean> values) {
          // Decode the (caught, uncaught) flag pair back into a catch mode.
          ExceptionCatchMode newCatchMode;
          if (values.get(0)) {
            if (values.get(1)) {
              newCatchMode = ExceptionCatchMode.ALL;
            } else {
              // We cannot fit this combination into ExceptionCatchMode.
              newCatchMode = null;
            }
          } else {
            if (values.get(1)) {
              newCatchMode = ExceptionCatchMode.UNCAUGHT;
            } else {
              newCatchMode = ExceptionCatchMode.NONE;
            }
          }
          callback.success(newCatchMode);
        }

        @Override
        public void failure(Exception exception) {
          callback.failure(exception);
        }
      };
    }
    return setRemoteFlags(BREAK_ON_EXCEPTION_FLAG_NAMES, flagValues, wrappedCallback,
        syncCallback);
  }

  // Flag names in the same order as the (caught, uncaught) values above.
  private static final List<String> BREAK_ON_EXCEPTION_FLAG_NAMES =
      Arrays.asList("breakOnCaughtException", "breakOnUncaughtException");

  /** Sets a single VM flag; adapts the single-value callback to the list-based one. */
  private RelayOk setRemoteFlag(String flagName, boolean value,
      final GenericCallback<Boolean> callback, SyncCallback syncCallback) {
    GenericCallback<List<Boolean>> wrappedCallback;
    if (callback == null) {
      wrappedCallback = null;
    } else {
      wrappedCallback = new GenericCallback<List<Boolean>>() {
        @Override
        public void success(List<Boolean> value) {
          callback.success(value.get(0));
        }

        @Override
        public void failure(Exception exception) {
          callback.failure(exception);
        }
      };
    }
    return setRemoteFlags(Collections.singletonList(flagName), new boolean[]{value},
        wrappedCallback, syncCallback);
  }

  /**
   * Sends a "flags" message writing {@code values} (paired positionally with
   * {@code flagNames}) and reports the values echoed back by the VM, in
   * {@code flagNames} order.
   * NOTE(review): iteration is bounded by flagNames.size() while the map is
   * presized with values.length — the caller with an empty values array (query
   * mode in setBreakOnException) relies on this; verify values.length >=
   * flagNames.size() is never assumed elsewhere.
   */
  private RelayOk setRemoteFlags(final List<String> flagNames, boolean[] values,
      final GenericCallback<List<Boolean>> callback, SyncCallback syncCallback) {
    Map<String, Boolean> flagMap = new HashMap<String, Boolean>(values.length);
    for (int i = 0; i < flagNames.size(); i++) {
      flagMap.put(flagNames.get(i), values[i]);
    }
    V8CommandCallback v8Callback;
    if (callback == null) {
      v8Callback = null;
    } else {
      v8Callback = new V8CommandCallbackBase() {
        @Override
        public void success(CommandResponse.Success successResponse) {
          List<FlagInfo> flagList = successResponse.body().asFlagsBody().flags();
          List<Boolean> result = new ArrayList<Boolean>(flagNames.size());
          for (String name : flagNames) {
            FlagsBody.FlagInfo flag;
            // Linear scan of the response for the flag we asked about.
            findCorrectFlag: {
              for (FlagsBody.FlagInfo f : flagList) {
                if (name.equals(f.name())) {
                  flag = f;
                  break findCorrectFlag;
                }
              }
              throw new RuntimeException("Failed to find the correct flag in response");
            }
            result.add(flag.value());
          }
          callback.success(result);
        }

        @Override
        public void failure(String message) {
          callback.failure(new Exception(message));
        }
      };
    }
    return debugSession.sendMessage(new FlagsMessage(flagMap), v8Callback, syncCallback);
  }

  /**
   * Reconciles the local breakpoint map with the authoritative list from the VM:
   * reuses existing {@link BreakpointImpl}s where ids match, drops local entries
   * the VM no longer reports, and registers newly discovered ones.
   * Synchronized on the map per the class invariant above.
   *
   * @return all breakpoints currently known after the sync
   * @throws RuntimeException if the VM reports a duplicated breakpoint number
   */
  private Collection<Breakpoint> syncBreakpoints(List<BreakpointInfo> infoList) {
    synchronized (idToBreakpoint) {
      final ArrayList<Breakpoint> result = new ArrayList<Breakpoint>();
      final TLongObjectHashMap<BreakpointImpl> actualBreakpoints =
          new TLongObjectHashMap<BreakpointImpl>();
      // Wrap all loaded BreakpointInfo as BreakpointImpl, possibly reusing old instances.
      // Also check that all breakpoint id's in loaded list are unique.
      for (BreakpointInfo info : infoList) {
        if (info.type() == BreakpointInfo.Type.FUNCTION) {
          // We don't support function type breakpoints and ignore them.
          continue;
        }
        BreakpointImpl breakpoint = idToBreakpoint.get(info.number());
        if (breakpoint == null) {
          breakpoint = new BreakpointImpl(info, this);
        } else {
          breakpoint.updateFromRemote(info);
        }
        Object conflict = actualBreakpoints.put(info.number(), breakpoint);
        if (conflict != null) {
          throw new RuntimeException("Duplicated breakpoint number " + info.number());
        }
        result.add(breakpoint);
      }

      // Remove all obsolete breakpoints from the map
      idToBreakpoint.retainEntries(new TIntObjectProcedure<BreakpointImpl>() {
        @Override
        public boolean execute(int id, BreakpointImpl b) {
          return actualBreakpoints.containsKey(id);
        }
      });

      // Add breakpoints that are not in the main map yet
      actualBreakpoints.forEachValue(new TObjectProcedure<BreakpointImpl>() {
        @Override
        public boolean execute(BreakpointImpl breakpoint) {
          if (!idToBreakpoint.containsKey(breakpoint.getId())) {
            idToBreakpoint.put(breakpoint.getId(), breakpoint);
            result.add(breakpoint);
          }
          return true;
        }
      });
      return result;
    }
  }

  // Exposes the optional breakpoint-target kinds (function targets always;
  // script-regexp targets only when the connected VM version supports them).
  private final BreakpointTypeExtension breakpointTypeExtension = new BreakpointTypeExtension() {
    @Override
    public FunctionSupport getFunctionSupport() {
      return functionSupport;
    }

    private final FunctionSupport functionSupport = new FunctionSupport() {
      @Override
      public Target createTarget(String expression) {
        return new FunctionTarget(expression);
      }
    };

    @Override
    public ScriptRegExpSupport getScriptRegExpSupport() {
      if (!V8VersionFeatures.isRegExpBreakpointSupported(debugSession.getVmVersion())) {
        return null;
      }
      return scriptRegExpSupport;
    }

    private final ScriptRegExpSupport scriptRegExpSupport = new ScriptRegExpSupport() {
      @Override
      public Target createTarget(String regExp) {
        return new ScriptRegExpBreakpointTarget(regExp);
      }
    };
  };
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/documentai/v1beta3/document_processor_service.proto

package com.google.cloud.documentai.v1beta3;

/**
 * <pre>
 * Request message for fetch processor types.
 * </pre>
 *
 * Protobuf type {@code google.cloud.documentai.v1beta3.FetchProcessorTypesRequest}
 *
 * NOTE(review): machine-generated by protoc — do not hand-edit logic; regenerate
 * from the .proto instead. Uses the legacy parsing-constructor pattern
 * (pre protobuf-java 3.16 generator); regenerating with a newer protoc would
 * replace it — confirm against the project's pinned protobuf version.
 */
public final class FetchProcessorTypesRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.documentai.v1beta3.FetchProcessorTypesRequest)
    FetchProcessorTypesRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use FetchProcessorTypesRequest.newBuilder() to construct.
  private FetchProcessorTypesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private FetchProcessorTypesRequest() {
    parent_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new FetchProcessorTypesRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Legacy wire-format parsing constructor: reads tags until EOF, keeping
  // unrecognized fields in unknownFields.
  private FetchProcessorTypesRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              // Field 1 (parent), wire type 2 (length-delimited string).
              java.lang.String s = input.readStringRequireUtf8();

              parent_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
        .internal_static_google_cloud_documentai_v1beta3_FetchProcessorTypesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
        .internal_static_google_cloud_documentai_v1beta3_FetchProcessorTypesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest.class,
            com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted and cached as String.
  private volatile java.lang.Object parent_;
  /**
   * <pre>
   * Required. The project of processor type to list.
   * Format: projects/{project}/locations/{location}
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String for subsequent calls.
      parent_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. The project of processor type to list.
   * Format: projects/{project}/locations/{location}
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString for subsequent calls.
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest other =
        (com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Request message for fetch processor types.
   * </pre>
   *
   * Protobuf type {@code google.cloud.documentai.v1beta3.FetchProcessorTypesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1beta3.FetchProcessorTypesRequest)
      com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
          .internal_static_google_cloud_documentai_v1beta3_FetchProcessorTypesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
          .internal_static_google_cloud_documentai_v1beta3_FetchProcessorTypesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest.class,
              com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest.Builder.class);
    }

    // Construct using com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      parent_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
          .internal_static_google_cloud_documentai_v1beta3_FetchProcessorTypesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest
        getDefaultInstanceForType() {
      return com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest build() {
      com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest buildPartial() {
      com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest result =
          new com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest(this);
      result.parent_ = parent_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest) {
        return mergeFrom((com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest other) {
      if (other
          == com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow.
        parsedMessage =
            (com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest)
                e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object parent_ = "";
    /**
     * <pre>
     * Required. The project of processor type to list.
     * Format: projects/{project}/locations/{location}
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. The project of processor type to list.
     * Format: projects/{project}/locations/{location}
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. The project of processor type to list.
     * Format: projects/{project}/locations/{location}
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      parent_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The project of processor type to list.
     * Format: projects/{project}/locations/{location}
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {

      parent_ = getDefaultInstance().getParent();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The project of processor type to list.
     * Format: projects/{project}/locations/{location}
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      parent_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1beta3.FetchProcessorTypesRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta3.FetchProcessorTypesRequest)
  private static final com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest();
  }

  public static com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<FetchProcessorTypesRequest> PARSER =
      new com.google.protobuf.AbstractParser<FetchProcessorTypesRequest>() {
        @java.lang.Override
        public FetchProcessorTypesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new FetchProcessorTypesRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<FetchProcessorTypesRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<FetchProcessorTypesRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.documentai.v1beta3.FetchProcessorTypesRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: org/waveprotocol/box/search/search.proto package org.waveprotocol.box.search; public final class SearchProto { private SearchProto() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public static final class SearchRequest extends com.google.protobuf.GeneratedMessage { // Use SearchRequest.newBuilder() to construct. private SearchRequest() { initFields(); } private SearchRequest(boolean noInit) {} private static final SearchRequest defaultInstance; public static SearchRequest getDefaultInstance() { return defaultInstance; } public SearchRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.waveprotocol.box.search.SearchProto.internal_static_search_SearchRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.waveprotocol.box.search.SearchProto.internal_static_search_SearchRequest_fieldAccessorTable; } // required string query = 1; public static final int QUERY_FIELD_NUMBER = 1; private boolean hasQuery; private java.lang.String query_ = ""; public boolean hasQuery() { return hasQuery; } public java.lang.String getQuery() { return query_; } // required int32 index = 2; public static final int INDEX_FIELD_NUMBER = 2; private boolean hasIndex; private int index_ = 0; public boolean hasIndex() { return hasIndex; } public int getIndex() { return index_; } // required int32 numResults = 3; public static final int NUMRESULTS_FIELD_NUMBER = 3; private boolean hasNumResults; private int numResults_ = 0; public boolean hasNumResults() { return hasNumResults; } public int getNumResults() { return numResults_; } private void initFields() { } public final boolean isInitialized() { if (!hasQuery) return false; if (!hasIndex) return false; if (!hasNumResults) return false; return true; } 
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (hasQuery()) { output.writeString(1, getQuery()); } if (hasIndex()) { output.writeInt32(2, getIndex()); } if (hasNumResults()) { output.writeInt32(3, getNumResults()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (hasQuery()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(1, getQuery()); } if (hasIndex()) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(2, getIndex()); } if (hasNumResults()) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(3, getNumResults()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } public static org.waveprotocol.box.search.SearchProto.SearchRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.waveprotocol.box.search.SearchProto.SearchRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.waveprotocol.box.search.SearchProto.SearchRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.waveprotocol.box.search.SearchProto.SearchRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.waveprotocol.box.search.SearchProto.SearchRequest 
parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.waveprotocol.box.search.SearchProto.SearchRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.waveprotocol.box.search.SearchProto.SearchRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.waveprotocol.box.search.SearchProto.SearchRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.waveprotocol.box.search.SearchProto.SearchRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.waveprotocol.box.search.SearchProto.SearchRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.waveprotocol.box.search.SearchProto.SearchRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> { private org.waveprotocol.box.search.SearchProto.SearchRequest result; 
// Construct using org.waveprotocol.box.search.SearchProto.SearchRequest.newBuilder() private Builder() {} private static Builder create() { Builder builder = new Builder(); builder.result = new org.waveprotocol.box.search.SearchProto.SearchRequest(); return builder; } protected org.waveprotocol.box.search.SearchProto.SearchRequest internalGetResult() { return result; } public Builder clear() { if (result == null) { throw new IllegalStateException( "Cannot call clear() after build()."); } result = new org.waveprotocol.box.search.SearchProto.SearchRequest(); return this; } public Builder clone() { return create().mergeFrom(result); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.waveprotocol.box.search.SearchProto.SearchRequest.getDescriptor(); } public org.waveprotocol.box.search.SearchProto.SearchRequest getDefaultInstanceForType() { return org.waveprotocol.box.search.SearchProto.SearchRequest.getDefaultInstance(); } public boolean isInitialized() { return result.isInitialized(); } public org.waveprotocol.box.search.SearchProto.SearchRequest build() { if (result != null && !isInitialized()) { throw newUninitializedMessageException(result); } return buildPartial(); } private org.waveprotocol.box.search.SearchProto.SearchRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { if (!isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return buildPartial(); } public org.waveprotocol.box.search.SearchProto.SearchRequest buildPartial() { if (result == null) { throw new IllegalStateException( "build() has already been called on this Builder."); } org.waveprotocol.box.search.SearchProto.SearchRequest returnMe = result; result = null; return returnMe; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.waveprotocol.box.search.SearchProto.SearchRequest) { return 
mergeFrom((org.waveprotocol.box.search.SearchProto.SearchRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.waveprotocol.box.search.SearchProto.SearchRequest other) { if (other == org.waveprotocol.box.search.SearchProto.SearchRequest.getDefaultInstance()) return this; if (other.hasQuery()) { setQuery(other.getQuery()); } if (other.hasIndex()) { setIndex(other.getIndex()); } if (other.hasNumResults()) { setNumResults(other.getNumResults()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); return this; } break; } case 10: { setQuery(input.readString()); break; } case 16: { setIndex(input.readInt32()); break; } case 24: { setNumResults(input.readInt32()); break; } } } } // required string query = 1; public boolean hasQuery() { return result.hasQuery(); } public java.lang.String getQuery() { return result.getQuery(); } public Builder setQuery(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasQuery = true; result.query_ = value; return this; } public Builder clearQuery() { result.hasQuery = false; result.query_ = getDefaultInstance().getQuery(); return this; } // required int32 index = 2; public boolean hasIndex() { return result.hasIndex(); } public int getIndex() { return result.getIndex(); } public Builder setIndex(int value) { result.hasIndex = true; result.index_ = value; return this; } public Builder clearIndex() { 
result.hasIndex = false; result.index_ = 0; return this; } // required int32 numResults = 3; public boolean hasNumResults() { return result.hasNumResults(); } public int getNumResults() { return result.getNumResults(); } public Builder setNumResults(int value) { result.hasNumResults = true; result.numResults_ = value; return this; } public Builder clearNumResults() { result.hasNumResults = false; result.numResults_ = 0; return this; } // @@protoc_insertion_point(builder_scope:search.SearchRequest) } static { defaultInstance = new SearchRequest(true); org.waveprotocol.box.search.SearchProto.internalForceInit(); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:search.SearchRequest) } public static final class SearchResponse extends com.google.protobuf.GeneratedMessage { // Use SearchResponse.newBuilder() to construct. private SearchResponse() { initFields(); } private SearchResponse(boolean noInit) {} private static final SearchResponse defaultInstance; public static SearchResponse getDefaultInstance() { return defaultInstance; } public SearchResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.waveprotocol.box.search.SearchProto.internal_static_search_SearchResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.waveprotocol.box.search.SearchProto.internal_static_search_SearchResponse_fieldAccessorTable; } public static final class Digest extends com.google.protobuf.GeneratedMessage { // Use Digest.newBuilder() to construct. 
private Digest() { initFields(); } private Digest(boolean noInit) {} private static final Digest defaultInstance; public static Digest getDefaultInstance() { return defaultInstance; } public Digest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.waveprotocol.box.search.SearchProto.internal_static_search_SearchResponse_Digest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.waveprotocol.box.search.SearchProto.internal_static_search_SearchResponse_Digest_fieldAccessorTable; } // required string title = 1; public static final int TITLE_FIELD_NUMBER = 1; private boolean hasTitle; private java.lang.String title_ = ""; public boolean hasTitle() { return hasTitle; } public java.lang.String getTitle() { return title_; } // required string snippet = 2; public static final int SNIPPET_FIELD_NUMBER = 2; private boolean hasSnippet; private java.lang.String snippet_ = ""; public boolean hasSnippet() { return hasSnippet; } public java.lang.String getSnippet() { return snippet_; } // required string waveId = 3; public static final int WAVEID_FIELD_NUMBER = 3; private boolean hasWaveId; private java.lang.String waveId_ = ""; public boolean hasWaveId() { return hasWaveId; } public java.lang.String getWaveId() { return waveId_; } // required int64 lastModified = 4; public static final int LASTMODIFIED_FIELD_NUMBER = 4; private boolean hasLastModified; private long lastModified_ = 0L; public boolean hasLastModified() { return hasLastModified; } public long getLastModified() { return lastModified_; } // required int32 unreadCount = 5; public static final int UNREADCOUNT_FIELD_NUMBER = 5; private boolean hasUnreadCount; private int unreadCount_ = 0; public boolean hasUnreadCount() { return hasUnreadCount; } public int getUnreadCount() { return unreadCount_; } // required int32 blipCount = 6; public static final 
int BLIPCOUNT_FIELD_NUMBER = 6; private boolean hasBlipCount; private int blipCount_ = 0; public boolean hasBlipCount() { return hasBlipCount; } public int getBlipCount() { return blipCount_; } // repeated string participants = 7; public static final int PARTICIPANTS_FIELD_NUMBER = 7; private java.util.List<java.lang.String> participants_ = java.util.Collections.emptyList(); public java.util.List<java.lang.String> getParticipantsList() { return participants_; } public int getParticipantsCount() { return participants_.size(); } public java.lang.String getParticipants(int index) { return participants_.get(index); } // required string author = 8; public static final int AUTHOR_FIELD_NUMBER = 8; private boolean hasAuthor; private java.lang.String author_ = ""; public boolean hasAuthor() { return hasAuthor; } public java.lang.String getAuthor() { return author_; } private void initFields() { } public final boolean isInitialized() { if (!hasTitle) return false; if (!hasSnippet) return false; if (!hasWaveId) return false; if (!hasLastModified) return false; if (!hasUnreadCount) return false; if (!hasBlipCount) return false; if (!hasAuthor) return false; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (hasTitle()) { output.writeString(1, getTitle()); } if (hasSnippet()) { output.writeString(2, getSnippet()); } if (hasWaveId()) { output.writeString(3, getWaveId()); } if (hasLastModified()) { output.writeInt64(4, getLastModified()); } if (hasUnreadCount()) { output.writeInt32(5, getUnreadCount()); } if (hasBlipCount()) { output.writeInt32(6, getBlipCount()); } for (java.lang.String element : getParticipantsList()) { output.writeString(7, element); } if (hasAuthor()) { output.writeString(8, getAuthor()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if 
(hasTitle()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(1, getTitle()); } if (hasSnippet()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(2, getSnippet()); } if (hasWaveId()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(3, getWaveId()); } if (hasLastModified()) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(4, getLastModified()); } if (hasUnreadCount()) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(5, getUnreadCount()); } if (hasBlipCount()) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(6, getBlipCount()); } { int dataSize = 0; for (java.lang.String element : getParticipantsList()) { dataSize += com.google.protobuf.CodedOutputStream .computeStringSizeNoTag(element); } size += dataSize; size += 1 * getParticipantsList().size(); } if (hasAuthor()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(8, getAuthor()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } public static org.waveprotocol.box.search.SearchProto.SearchResponse.Digest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.waveprotocol.box.search.SearchProto.SearchResponse.Digest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.waveprotocol.box.search.SearchProto.SearchResponse.Digest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.waveprotocol.box.search.SearchProto.SearchResponse.Digest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.waveprotocol.box.search.SearchProto.SearchResponse.Digest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.waveprotocol.box.search.SearchProto.SearchResponse.Digest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.waveprotocol.box.search.SearchProto.SearchResponse.Digest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.waveprotocol.box.search.SearchProto.SearchResponse.Digest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.waveprotocol.box.search.SearchProto.SearchResponse.Digest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.waveprotocol.box.search.SearchProto.SearchResponse.Digest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.waveprotocol.box.search.SearchProto.SearchResponse.Digest prototype) { return 
newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> { private org.waveprotocol.box.search.SearchProto.SearchResponse.Digest result; // Construct using org.waveprotocol.box.search.SearchProto.SearchResponse.Digest.newBuilder() private Builder() {} private static Builder create() { Builder builder = new Builder(); builder.result = new org.waveprotocol.box.search.SearchProto.SearchResponse.Digest(); return builder; } protected org.waveprotocol.box.search.SearchProto.SearchResponse.Digest internalGetResult() { return result; } public Builder clear() { if (result == null) { throw new IllegalStateException( "Cannot call clear() after build()."); } result = new org.waveprotocol.box.search.SearchProto.SearchResponse.Digest(); return this; } public Builder clone() { return create().mergeFrom(result); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.waveprotocol.box.search.SearchProto.SearchResponse.Digest.getDescriptor(); } public org.waveprotocol.box.search.SearchProto.SearchResponse.Digest getDefaultInstanceForType() { return org.waveprotocol.box.search.SearchProto.SearchResponse.Digest.getDefaultInstance(); } public boolean isInitialized() { return result.isInitialized(); } public org.waveprotocol.box.search.SearchProto.SearchResponse.Digest build() { if (result != null && !isInitialized()) { throw newUninitializedMessageException(result); } return buildPartial(); } private org.waveprotocol.box.search.SearchProto.SearchResponse.Digest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { if (!isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return buildPartial(); } public org.waveprotocol.box.search.SearchProto.SearchResponse.Digest buildPartial() { if (result == null) { throw new IllegalStateException( "build() has already 
been called on this Builder."); } if (result.participants_ != java.util.Collections.EMPTY_LIST) { result.participants_ = java.util.Collections.unmodifiableList(result.participants_); } org.waveprotocol.box.search.SearchProto.SearchResponse.Digest returnMe = result; result = null; return returnMe; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.waveprotocol.box.search.SearchProto.SearchResponse.Digest) { return mergeFrom((org.waveprotocol.box.search.SearchProto.SearchResponse.Digest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.waveprotocol.box.search.SearchProto.SearchResponse.Digest other) { if (other == org.waveprotocol.box.search.SearchProto.SearchResponse.Digest.getDefaultInstance()) return this; if (other.hasTitle()) { setTitle(other.getTitle()); } if (other.hasSnippet()) { setSnippet(other.getSnippet()); } if (other.hasWaveId()) { setWaveId(other.getWaveId()); } if (other.hasLastModified()) { setLastModified(other.getLastModified()); } if (other.hasUnreadCount()) { setUnreadCount(other.getUnreadCount()); } if (other.hasBlipCount()) { setBlipCount(other.getBlipCount()); } if (!other.participants_.isEmpty()) { if (result.participants_.isEmpty()) { result.participants_ = new java.util.ArrayList<java.lang.String>(); } result.participants_.addAll(other.participants_); } if (other.hasAuthor()) { setAuthor(other.getAuthor()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { 
this.setUnknownFields(unknownFields.build()); return this; } break; } case 10: { setTitle(input.readString()); break; } case 18: { setSnippet(input.readString()); break; } case 26: { setWaveId(input.readString()); break; } case 32: { setLastModified(input.readInt64()); break; } case 40: { setUnreadCount(input.readInt32()); break; } case 48: { setBlipCount(input.readInt32()); break; } case 58: { addParticipants(input.readString()); break; } case 66: { setAuthor(input.readString()); break; } } } } // required string title = 1; public boolean hasTitle() { return result.hasTitle(); } public java.lang.String getTitle() { return result.getTitle(); } public Builder setTitle(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasTitle = true; result.title_ = value; return this; } public Builder clearTitle() { result.hasTitle = false; result.title_ = getDefaultInstance().getTitle(); return this; } // required string snippet = 2; public boolean hasSnippet() { return result.hasSnippet(); } public java.lang.String getSnippet() { return result.getSnippet(); } public Builder setSnippet(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasSnippet = true; result.snippet_ = value; return this; } public Builder clearSnippet() { result.hasSnippet = false; result.snippet_ = getDefaultInstance().getSnippet(); return this; } // required string waveId = 3; public boolean hasWaveId() { return result.hasWaveId(); } public java.lang.String getWaveId() { return result.getWaveId(); } public Builder setWaveId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasWaveId = true; result.waveId_ = value; return this; } public Builder clearWaveId() { result.hasWaveId = false; result.waveId_ = getDefaultInstance().getWaveId(); return this; } // required int64 lastModified = 4; public boolean hasLastModified() { return result.hasLastModified(); } public long getLastModified() { return 
result.getLastModified(); } public Builder setLastModified(long value) { result.hasLastModified = true; result.lastModified_ = value; return this; } public Builder clearLastModified() { result.hasLastModified = false; result.lastModified_ = 0L; return this; } // required int32 unreadCount = 5; public boolean hasUnreadCount() { return result.hasUnreadCount(); } public int getUnreadCount() { return result.getUnreadCount(); } public Builder setUnreadCount(int value) { result.hasUnreadCount = true; result.unreadCount_ = value; return this; } public Builder clearUnreadCount() { result.hasUnreadCount = false; result.unreadCount_ = 0; return this; } // required int32 blipCount = 6; public boolean hasBlipCount() { return result.hasBlipCount(); } public int getBlipCount() { return result.getBlipCount(); } public Builder setBlipCount(int value) { result.hasBlipCount = true; result.blipCount_ = value; return this; } public Builder clearBlipCount() { result.hasBlipCount = false; result.blipCount_ = 0; return this; } // repeated string participants = 7; public java.util.List<java.lang.String> getParticipantsList() { return java.util.Collections.unmodifiableList(result.participants_); } public int getParticipantsCount() { return result.getParticipantsCount(); } public java.lang.String getParticipants(int index) { return result.getParticipants(index); } public Builder setParticipants(int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.participants_.set(index, value); return this; } public Builder addParticipants(java.lang.String value) { if (value == null) { throw new NullPointerException(); } if (result.participants_.isEmpty()) { result.participants_ = new java.util.ArrayList<java.lang.String>(); } result.participants_.add(value); return this; } public Builder addAllParticipants( java.lang.Iterable<? 
extends java.lang.String> values) { if (result.participants_.isEmpty()) { result.participants_ = new java.util.ArrayList<java.lang.String>(); } super.addAll(values, result.participants_); return this; } public Builder clearParticipants() { result.participants_ = java.util.Collections.emptyList(); return this; } // required string author = 8; public boolean hasAuthor() { return result.hasAuthor(); } public java.lang.String getAuthor() { return result.getAuthor(); } public Builder setAuthor(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasAuthor = true; result.author_ = value; return this; } public Builder clearAuthor() { result.hasAuthor = false; result.author_ = getDefaultInstance().getAuthor(); return this; } // @@protoc_insertion_point(builder_scope:search.SearchResponse.Digest) } static { defaultInstance = new Digest(true); org.waveprotocol.box.search.SearchProto.internalForceInit(); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:search.SearchResponse.Digest) } // required string query = 1; public static final int QUERY_FIELD_NUMBER = 1; private boolean hasQuery; private java.lang.String query_ = ""; public boolean hasQuery() { return hasQuery; } public java.lang.String getQuery() { return query_; } // required int32 totalResults = 2; public static final int TOTALRESULTS_FIELD_NUMBER = 2; private boolean hasTotalResults; private int totalResults_ = 0; public boolean hasTotalResults() { return hasTotalResults; } public int getTotalResults() { return totalResults_; } // repeated .search.SearchResponse.Digest digests = 3; public static final int DIGESTS_FIELD_NUMBER = 3; private java.util.List<org.waveprotocol.box.search.SearchProto.SearchResponse.Digest> digests_ = java.util.Collections.emptyList(); public java.util.List<org.waveprotocol.box.search.SearchProto.SearchResponse.Digest> getDigestsList() { return digests_; } public int getDigestsCount() { return digests_.size(); } public 
org.waveprotocol.box.search.SearchProto.SearchResponse.Digest getDigests(int index) {
  return digests_.get(index);
}
private void initFields() {
}
// Generated by protoc (legacy protobuf-java 2.x style; note buildParsed()
// and GeneratedMessage.Builder). Do not hand-edit; regenerate from
// org/waveprotocol/box/search/search.proto instead.
// A SearchResponse is initialized only when both required fields (query,
// totalResults) are set and every repeated Digest is itself initialized.
public final boolean isInitialized() {
  if (!hasQuery) return false;
  if (!hasTotalResults) return false;
  for (org.waveprotocol.box.search.SearchProto.SearchResponse.Digest element : getDigestsList()) {
    if (!element.isInitialized()) return false;
  }
  return true;
}
// Wire format: field 1 = query (string), field 2 = totalResults (int32),
// field 3 = repeated Digest messages, then any preserved unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  getSerializedSize();
  if (hasQuery()) {
    output.writeString(1, getQuery());
  }
  if (hasTotalResults()) {
    output.writeInt32(2, getTotalResults());
  }
  for (org.waveprotocol.box.search.SearchProto.SearchResponse.Digest element : getDigestsList()) {
    output.writeMessage(3, element);
  }
  getUnknownFields().writeTo(output);
}
// Cached serialized size; -1 means "not computed yet".
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;
  size = 0;
  if (hasQuery()) {
    size += com.google.protobuf.CodedOutputStream
      .computeStringSize(1, getQuery());
  }
  if (hasTotalResults()) {
    size += com.google.protobuf.CodedOutputStream
      .computeInt32Size(2, getTotalResults());
  }
  for (org.waveprotocol.box.search.SearchProto.SearchResponse.Digest element : getDigestsList()) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(3, element);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
// ----- Static parse entry points; all delegate to Builder.mergeFrom. -----
public static org.waveprotocol.box.search.SearchProto.SearchResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data).buildParsed();
}
public static org.waveprotocol.box.search.SearchProto.SearchResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data, extensionRegistry)
      .buildParsed();
}
public static org.waveprotocol.box.search.SearchProto.SearchResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data).buildParsed();
}
public static org.waveprotocol.box.search.SearchProto.SearchResponse parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data, extensionRegistry)
      .buildParsed();
}
public static org.waveprotocol.box.search.SearchProto.SearchResponse parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input).buildParsed();
}
public static org.waveprotocol.box.search.SearchProto.SearchResponse parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input, extensionRegistry)
      .buildParsed();
}
// Delimited variant: returns null when mergeDelimitedFrom reports that no
// message could be read from the stream.
public static org.waveprotocol.box.search.SearchProto.SearchResponse parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  Builder builder = newBuilder();
  if (builder.mergeDelimitedFrom(input)) {
    return builder.buildParsed();
  } else {
    return null;
  }
}
public static org.waveprotocol.box.search.SearchProto.SearchResponse parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  Builder builder = newBuilder();
  if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
    return builder.buildParsed();
  } else {
    return null;
  }
}
public static org.waveprotocol.box.search.SearchProto.SearchResponse parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input).buildParsed();
}
public static org.waveprotocol.box.search.SearchProto.SearchResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input, extensionRegistry)
      .buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.waveprotocol.box.search.SearchProto.SearchResponse prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

// Builder mutates a single 'result' instance in place; buildPartial() hands
// that instance out and nulls the field, so each Builder is single-use
// (clear() after build() throws IllegalStateException).
public static final class Builder extends
    com.google.protobuf.GeneratedMessage.Builder<Builder> {
  private org.waveprotocol.box.search.SearchProto.SearchResponse result;

  // Construct using org.waveprotocol.box.search.SearchProto.SearchResponse.newBuilder()
  private Builder() {}

  private static Builder create() {
    Builder builder = new Builder();
    builder.result = new org.waveprotocol.box.search.SearchProto.SearchResponse();
    return builder;
  }

  protected org.waveprotocol.box.search.SearchProto.SearchResponse internalGetResult() {
    return result;
  }

  public Builder clear() {
    if (result == null) {
      throw new IllegalStateException(
        "Cannot call clear() after build().");
    }
    result = new org.waveprotocol.box.search.SearchProto.SearchResponse();
    return this;
  }

  public Builder clone() {
    return create().mergeFrom(result);
  }

  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return org.waveprotocol.box.search.SearchProto.SearchResponse.getDescriptor();
  }

  public org.waveprotocol.box.search.SearchProto.SearchResponse getDefaultInstanceForType() {
    return org.waveprotocol.box.search.SearchProto.SearchResponse.getDefaultInstance();
  }

  public boolean isInitialized() {
    return result.isInitialized();
  }

  // Throws UninitializedMessageException if required fields are missing.
  public org.waveprotocol.box.search.SearchProto.SearchResponse build() {
    if (result != null && !isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return buildPartial();
  }

  // Parse-path variant of build(): converts missing required fields into
  // InvalidProtocolBufferException instead.
  private org.waveprotocol.box.search.SearchProto.SearchResponse buildParsed()
      throws com.google.protobuf.InvalidProtocolBufferException {
    if (!isInitialized()) {
      throw newUninitializedMessageException(
        result).asInvalidProtocolBufferException();
    }
    return buildPartial();
  }

  public org.waveprotocol.box.search.SearchProto.SearchResponse buildPartial() {
    if (result == null) {
      throw new IllegalStateException(
        "build() has already been called on this Builder.");
    }
    // Freeze the repeated field before handing the message out.
    if (result.digests_ != java.util.Collections.EMPTY_LIST) {
      result.digests_ =
        java.util.Collections.unmodifiableList(result.digests_);
    }
    org.waveprotocol.box.search.SearchProto.SearchResponse returnMe = result;
    result = null;
    return returnMe;
  }

  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof org.waveprotocol.box.search.SearchProto.SearchResponse) {
      return mergeFrom((org.waveprotocol.box.search.SearchProto.SearchResponse)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(org.waveprotocol.box.search.SearchProto.SearchResponse other) {
    if (other == org.waveprotocol.box.search.SearchProto.SearchResponse.getDefaultInstance()) return this;
    if (other.hasQuery()) {
      setQuery(other.getQuery());
    }
    if (other.hasTotalResults()) {
      setTotalResults(other.getTotalResults());
    }
    if (!other.digests_.isEmpty()) {
      if (result.digests_.isEmpty()) {
        result.digests_ =
          new java.util.ArrayList<org.waveprotocol.box.search.SearchProto.SearchResponse.Digest>();
      }
      result.digests_.addAll(other.digests_);
    }
    this.mergeUnknownFields(other.getUnknownFields());
    return this;
  }

  // Streaming parse loop: tag 0 = end of stream; unrecognized tags are kept
  // via parseUnknownField; tags 10/16/26 are fields 1/2/3 respectively.
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder(
        this.getUnknownFields());
    while (true) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          this.setUnknownFields(unknownFields.build());
          return this;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            this.setUnknownFields(unknownFields.build());
            return this;
          }
          break;
        }
        case 10: {
          setQuery(input.readString());
          break;
        }
        case 16: {
          setTotalResults(input.readInt32());
          break;
        }
        case 26: {
          org.waveprotocol.box.search.SearchProto.SearchResponse.Digest.Builder subBuilder =
            org.waveprotocol.box.search.SearchProto.SearchResponse.Digest.newBuilder();
          input.readMessage(subBuilder, extensionRegistry);
          addDigests(subBuilder.buildPartial());
          break;
        }
      }
    }
  }

  // required string query = 1;
  public boolean hasQuery() {
    return result.hasQuery();
  }
  public java.lang.String getQuery() {
    return result.getQuery();
  }
  public Builder setQuery(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    result.hasQuery = true;
    result.query_ = value;
    return this;
  }
  public Builder clearQuery() {
    result.hasQuery = false;
    result.query_ = getDefaultInstance().getQuery();
    return this;
  }

  // required int32 totalResults = 2;
  public boolean hasTotalResults() {
    return result.hasTotalResults();
  }
  public int getTotalResults() {
    return result.getTotalResults();
  }
  public Builder setTotalResults(int value) {
    result.hasTotalResults = true;
    result.totalResults_ = value;
    return this;
  }
  public Builder clearTotalResults() {
    result.hasTotalResults = false;
    result.totalResults_ = 0;
    return this;
  }

  // repeated .search.SearchResponse.Digest digests = 3;
  public java.util.List<org.waveprotocol.box.search.SearchProto.SearchResponse.Digest> getDigestsList() {
    return java.util.Collections.unmodifiableList(result.digests_);
  }
  public int getDigestsCount() {
    return result.getDigestsCount();
  }
  public org.waveprotocol.box.search.SearchProto.SearchResponse.Digest getDigests(int index) {
    return result.getDigests(index);
  }
  public Builder setDigests(int index, org.waveprotocol.box.search.SearchProto.SearchResponse.Digest value) {
    if (value == null) {
      throw new NullPointerException();
    }
    result.digests_.set(index, value);
    return this;
  }
  public Builder setDigests(int index, org.waveprotocol.box.search.SearchProto.SearchResponse.Digest.Builder builderForValue) {
    result.digests_.set(index, builderForValue.build());
    return this;
  }
  // Adders lazily swap the shared empty list for a private ArrayList.
  public Builder addDigests(org.waveprotocol.box.search.SearchProto.SearchResponse.Digest value) {
    if (value == null) {
      throw new NullPointerException();
    }
    if (result.digests_.isEmpty()) {
      result.digests_ = new java.util.ArrayList<org.waveprotocol.box.search.SearchProto.SearchResponse.Digest>();
    }
    result.digests_.add(value);
    return this;
  }
  public Builder addDigests(org.waveprotocol.box.search.SearchProto.SearchResponse.Digest.Builder builderForValue) {
    if (result.digests_.isEmpty()) {
      result.digests_ = new java.util.ArrayList<org.waveprotocol.box.search.SearchProto.SearchResponse.Digest>();
    }
    result.digests_.add(builderForValue.build());
    return this;
  }
  public Builder addAllDigests(
      java.lang.Iterable<? extends org.waveprotocol.box.search.SearchProto.SearchResponse.Digest> values) {
    if (result.digests_.isEmpty()) {
      result.digests_ = new java.util.ArrayList<org.waveprotocol.box.search.SearchProto.SearchResponse.Digest>();
    }
    super.addAll(values, result.digests_);
    return this;
  }
  public Builder clearDigests() {
    result.digests_ = java.util.Collections.emptyList();
    return this;
  }

  // @@protoc_insertion_point(builder_scope:search.SearchResponse)
}

static {
  defaultInstance = new SearchResponse(true);
  org.waveprotocol.box.search.SearchProto.internalForceInit();
  defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:search.SearchResponse)
}

// ----- Outer-class descriptor plumbing, populated by the static block. -----
private static com.google.protobuf.Descriptors.Descriptor
  internal_static_search_SearchRequest_descriptor;
private static
  com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internal_static_search_SearchRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
  internal_static_search_SearchResponse_descriptor;
private static
  com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internal_static_search_SearchResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
  internal_static_search_SearchResponse_Digest_descriptor;
private static
  com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internal_static_search_SearchResponse_Digest_fieldAccessorTable;

public static com.google.protobuf.Descriptors.FileDescriptor
    getDescriptor() {
  return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
    descriptor;
static {
  // Serialized FileDescriptorProto for search.proto, octal-escaped.
  // The exact bytes matter; never edit by hand.
  java.lang.String[] descriptorData = {
    "\n(org/waveprotocol/box/search/search.pro" +
    "to\022\006search\"A\n\rSearchRequest\022\r\n\005query\030\001 \002" +
    "(\t\022\r\n\005index\030\002 \002(\005\022\022\n\nnumResults\030\003 \002(\005\"\204\002" +
    "\n\016SearchResponse\022\r\n\005query\030\001 \002(\t\022\024\n\014total" +
    "Results\030\002 \002(\005\022.\n\007digests\030\003 \003(\0132\035.search." +
    "SearchResponse.Digest\032\234\001\n\006Digest\022\r\n\005titl" +
    "e\030\001 \002(\t\022\017\n\007snippet\030\002 \002(\t\022\016\n\006waveId\030\003 \002(\t" +
    "\022\024\n\014lastModified\030\004 \002(\003\022\023\n\013unreadCount\030\005 " +
    "\002(\005\022\021\n\tblipCount\030\006 \002(\005\022\024\n\014participants\030\007" +
    " \003(\t\022\016\n\006author\030\010 \002(\tB*\n\033org.waveprotocol",
    ".box.searchB\013SearchProto"
  };
  com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
    new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
      public com.google.protobuf.ExtensionRegistry assignDescriptors(
          com.google.protobuf.Descriptors.FileDescriptor root) {
        descriptor = root;
        internal_static_search_SearchRequest_descriptor =
          getDescriptor().getMessageTypes().get(0);
        internal_static_search_SearchRequest_fieldAccessorTable = new
          com.google.protobuf.GeneratedMessage.FieldAccessorTable(
            internal_static_search_SearchRequest_descriptor,
            new java.lang.String[] { "Query", "Index", "NumResults", },
            org.waveprotocol.box.search.SearchProto.SearchRequest.class,
            org.waveprotocol.box.search.SearchProto.SearchRequest.Builder.class);
        internal_static_search_SearchResponse_descriptor =
          getDescriptor().getMessageTypes().get(1);
        internal_static_search_SearchResponse_fieldAccessorTable = new
          com.google.protobuf.GeneratedMessage.FieldAccessorTable(
            internal_static_search_SearchResponse_descriptor,
            new java.lang.String[] { "Query", "TotalResults", "Digests", },
            org.waveprotocol.box.search.SearchProto.SearchResponse.class,
            org.waveprotocol.box.search.SearchProto.SearchResponse.Builder.class);
        internal_static_search_SearchResponse_Digest_descriptor =
          internal_static_search_SearchResponse_descriptor.getNestedTypes().get(0);
        internal_static_search_SearchResponse_Digest_fieldAccessorTable = new
          com.google.protobuf.GeneratedMessage.FieldAccessorTable(
            internal_static_search_SearchResponse_Digest_descriptor,
            new java.lang.String[] { "Title", "Snippet", "WaveId", "LastModified", "UnreadCount", "BlipCount", "Participants", "Author", },
            org.waveprotocol.box.search.SearchProto.SearchResponse.Digest.class,
            org.waveprotocol.box.search.SearchProto.SearchResponse.Digest.Builder.class);
        return null;
      }
    };
  com.google.protobuf.Descriptors.FileDescriptor
    .internalBuildGeneratedFileFrom(descriptorData,
      new com.google.protobuf.Descriptors.FileDescriptor[] {
      }, assigner);
}

// No-op hook that forces this class (and its static block) to load.
public static void internalForceInit() {}

// @@protoc_insertion_point(outer_class_scope)
}
/** * Copyright (c) 2013-2019 Contributors to the Eclipse Foundation * * <p> See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.test.basic; import java.awt.image.Raster; import java.awt.image.WritableRaster; import java.io.IOException; import org.apache.commons.math.util.MathUtils; import org.geotools.coverage.CoverageFactoryFinder; import org.geotools.coverage.grid.GridCoverage2D; import org.geotools.coverage.grid.GridCoverageFactory; import org.geotools.geometry.jts.ReferencedEnvelope; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.locationtech.geowave.adapter.raster.RasterUtils; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy; import org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy; import org.locationtech.geowave.core.geotime.ingest.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.store.query.IndexOnlySpatialQuery; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.cli.remote.options.DataStorePluginOptions; import org.locationtech.geowave.test.GeoWaveITRunner; import org.locationtech.geowave.test.TestUtils; import org.locationtech.geowave.test.annotation.GeoWaveTestStore; import 
org.locationtech.geowave.test.annotation.GeoWaveTestStore.GeoWaveStoreType;
import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.GeometryFactory;
import org.opengis.coverage.grid.GridCoverage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Integration test that ingests and queries raster coverages in a custom CRS
 * ({@code TestUtils.CUSTOM_CRS}), exercising the NoData, Summing, and
 * SumAndAveraging tile-merge strategies against every backing store listed in
 * the {@link GeoWaveTestStore} annotation.
 */
@RunWith(GeoWaveITRunner.class)
public class GeoWaveBasicCustomCRSRasterIT extends AbstractGeoWaveIT {
  // Tolerance for comparing merged pixel values against expected doubles.
  private static final double DOUBLE_TOLERANCE = 1E-10d;
  @GeoWaveTestStore(
      value = {
          GeoWaveStoreType.ACCUMULO,
          GeoWaveStoreType.BIGTABLE,
          GeoWaveStoreType.CASSANDRA,
          GeoWaveStoreType.DYNAMODB,
          GeoWaveStoreType.HBASE,
          GeoWaveStoreType.KUDU,
          GeoWaveStoreType.REDIS,
          GeoWaveStoreType.ROCKSDB})
  protected DataStorePluginOptions dataStoreOptions;
  private static final Logger LOGGER =
      LoggerFactory.getLogger(GeoWaveBasicCustomCRSRasterIT.class);
  // Tighter tolerance used in the no-data assertions.
  private static final double DELTA = MathUtils.EPSILON;
  // Wall-clock start; used only for the elapsed-time banner in reportTest().
  private static long startMillis;

  @Override
  protected DataStorePluginOptions getDataStorePluginOptions() {
    return dataStoreOptions;
  }

  @BeforeClass
  public static void startTimer() {
    startMillis = System.currentTimeMillis();
    LOGGER.warn("-----------------------------------------");
    LOGGER.warn("* *");
    LOGGER.warn("* RUNNING GeoWaveBasicCustomCRSRasterIT *");
    LOGGER.warn("* *");
    LOGGER.warn("-----------------------------------------");
  }

  @AfterClass
  public static void reportTest() {
    LOGGER.warn("-----------------------------------------");
    LOGGER.warn("* *");
    LOGGER.warn("* FINISHED GeoWaveBasicCustomCRSRasterIT *");
    LOGGER.warn(
        "* " + ((System.currentTimeMillis() - startMillis) / 1000) + "s elapsed. *");
    LOGGER.warn("* *");
    LOGGER.warn("-----------------------------------------");
  }

  /**
   * Ingests two overlapping rasters with the no-data merge strategy and
   * verifies the merged pixel values, then deletes everything.
   */
  @Test
  public void testNoDataMergeStrategy() throws IOException {
    final String coverageName = "testNoDataMergeStrategy";
    final int maxCellSize =
        TestUtils.getTestEnvironment(dataStoreOptions.getType()).getMaxCellSize();
    // Tile size is capped per backend so a tile fits in one cell/value.
    final int tileSize;
    if (maxCellSize <= (64 * 1024)) {
      tileSize = 24;
    } else {
      tileSize = 64;
      // 256 fails on bigtable exceeding maximum size
      // 128 fails on DynamoDB exceeding maximum size
      // 64 fails on kudu exceeding maximum size
    }
    final double westLon = 0;
    final double eastLon = SpatialDimensionalityTypeProvider.DEFAULT_UNBOUNDED_CRS_INTERVAL / 8;
    final double southLat = 0;
    final double northLat = SpatialDimensionalityTypeProvider.DEFAULT_UNBOUNDED_CRS_INTERVAL / 8;
    ingestAndQueryNoDataMergeStrategy(coverageName, tileSize, westLon, eastLon, southLat, northLat);
    TestUtils.deleteAll(dataStoreOptions);
  }

  /**
   * Ingests three coverages, each with a different merge strategy, into the
   * same store and verifies each is merged independently and correctly.
   */
  @Test
  public void testMultipleMergeStrategies() throws IOException {
    final String noDataCoverageName = "testMultipleMergeStrategies_NoDataMergeStrategy";
    final String summingCoverageName = "testMultipleMergeStrategies_SummingMergeStrategy";
    final String sumAndAveragingCoverageName =
        "testMultipleMergeStrategies_SumAndAveragingMergeStrategy";
    final int maxCellSize =
        TestUtils.getTestEnvironment(dataStoreOptions.getType()).getMaxCellSize();
    final int summingNumBands = 8;
    final int summingNumRasters = 4;
    final int sumAndAveragingNumBands = 12;
    final int sumAndAveragingNumRasters = 15;
    // Tile sizes again capped for backends with small maximum cell sizes.
    final int noDataTileSize;
    final int summingTileSize;
    if (maxCellSize <= (64 * 1024)) {
      noDataTileSize = 24;
      summingTileSize = 24;
    } else {
      noDataTileSize = 64;
      summingTileSize = 32;
    }
    final int sumAndAveragingTileSize = 8;
    final double minX = 0;
    final double maxX = SpatialDimensionalityTypeProvider.DEFAULT_UNBOUNDED_CRS_INTERVAL / 2048;
    final double minY = 0;
    final double maxY = SpatialDimensionalityTypeProvider.DEFAULT_UNBOUNDED_CRS_INTERVAL / 2048;
    ingestGeneralPurpose(
        summingCoverageName,
        summingTileSize,
        minX,
        maxX,
        minY,
        maxY,
        summingNumBands,
        summingNumRasters,
        new GeoWaveBasicRasterIT.SummingMergeStrategy());
    ingestGeneralPurpose(
        sumAndAveragingCoverageName,
        sumAndAveragingTileSize,
        minX,
        maxX,
        minY,
        maxY,
        sumAndAveragingNumBands,
        sumAndAveragingNumRasters,
        new GeoWaveBasicRasterIT.SumAndAveragingMergeStrategy());
    ingestNoDataMergeStrategy(noDataCoverageName, noDataTileSize, minX, maxX, minY, maxY);
    queryGeneralPurpose(
        summingCoverageName,
        summingTileSize,
        minX,
        maxX,
        minY,
        maxY,
        summingNumBands,
        summingNumRasters,
        new GeoWaveBasicRasterIT.SummingExpectedValue());
    queryNoDataMergeStrategy(noDataCoverageName, noDataTileSize);
    queryGeneralPurpose(
        sumAndAveragingCoverageName,
        sumAndAveragingTileSize,
        minX,
        maxX,
        minY,
        maxY,
        sumAndAveragingNumBands,
        sumAndAveragingNumRasters,
        new GeoWaveBasicRasterIT.SumAndAveragingExpectedValue());
    TestUtils.deleteAll(dataStoreOptions);
  }

  // Convenience wrapper: ingest two no-data rasters, then verify the merge.
  private void ingestAndQueryNoDataMergeStrategy(
      final String coverageName,
      final int tileSize,
      final double minX,
      final double maxX,
      final double minY,
      final double maxY) throws IOException {
    ingestNoDataMergeStrategy(coverageName, tileSize, minX, maxX, minY, maxY);
    queryNoDataMergeStrategy(coverageName, tileSize);
  }

  // Queries the single merged tile back and asserts every band/pixel.
  private void queryNoDataMergeStrategy(final String coverageName, final int tileSize)
      throws IOException {
    final DataStore dataStore = dataStoreOptions.createDataStore();
    try (CloseableIterator<?> it =
        dataStore.query(QueryBuilder.newBuilder().addTypeName(coverageName).build())) {
      // the expected outcome is:
      // band 1,2,3,4,5,6 has every value set correctly, band 0 has every
      // even row set correctly and every odd row should be NaN, and band
      // 7 has the upper quadrant as NaN and the rest set
      final GridCoverage coverage = (GridCoverage) it.next();
      final Raster raster = coverage.getRenderedImage().getData();
      Assert.assertEquals(tileSize, raster.getWidth(), DELTA);
      Assert.assertEquals(tileSize, raster.getHeight(), DELTA);
      for (int x = 0; x < tileSize; x++) {
        for (int y = 0; y < tileSize; y++) {
          for (int b = 1; b < 7; b++) {
            Assert.assertEquals(
                "x=" + x + ",y=" + y + ",b=" + b,
                TestUtils.getTileValue(x, y, b, tileSize),
                raster.getSampleDouble(x, y, b),
                DELTA);
          }
          if ((y % 2) == 0) {
            Assert.assertEquals(
                "x=" + x + ",y=" + y + ",b=0",
                TestUtils.getTileValue(x, y, 0, tileSize),
                raster.getSampleDouble(x, y, 0),
                DELTA);
          } else {
            Assert.assertEquals(
                "x=" + x + ",y=" + y + ",b=0",
                Double.NaN,
                raster.getSampleDouble(x, y, 0),
                DELTA);
          }
          if ((x > ((tileSize * 3) / 4)) && (y > ((tileSize * 3) / 4))) {
            Assert.assertEquals(
                "x=" + x + ",y=" + y + ",b=7",
                Double.NaN,
                raster.getSampleDouble(x, y, 7),
                DELTA);
          } else {
            Assert.assertEquals(
                "x=" + x + ",y=" + y + ",b=7",
                TestUtils.getTileValue(x, y, 7, tileSize),
                raster.getSampleDouble(x, y, 7),
                DELTA);
          }
        }
      }
      // there should be exactly one
      Assert.assertFalse(it.hasNext());
    }
  }

  // Writes two overlapping 8-band rasters for the same extent so the
  // NoDataMergeStrategy has something to merge.
  private void ingestNoDataMergeStrategy(
      final String coverageName,
      final int tileSize,
      final double minX,
      final double maxX,
      final double minY,
      final double maxY) throws IOException {
    final int numBands = 8;
    final DataStore dataStore = dataStoreOptions.createDataStore();
    final RasterDataAdapter adapter = RasterUtils.createDataAdapterTypeDouble(
        coverageName, numBands, tileSize, new NoDataMergeStrategy());
    final WritableRaster raster1 = RasterUtils.createRasterTypeDouble(numBands, tileSize);
    final WritableRaster raster2 = RasterUtils.createRasterTypeDouble(numBands, tileSize);
    TestUtils.fillTestRasters(raster1, raster2, tileSize);
    // NOTE(review): the index helper is named createWebMercatorSpatialIndex
    // while coverages/queries use TestUtils.CUSTOM_CRS / CUSTOM_CRSCODE —
    // confirm the helper actually builds the custom-CRS index.
    dataStore.addType(adapter, TestUtils.createWebMercatorSpatialIndex());
    try (Writer writer = dataStore.createWriter(adapter.getTypeName())) {
      writer.write(createCoverageTypeDouble(coverageName, minX, maxX, minY, maxY, raster1));
      writer.write(createCoverageTypeDouble(coverageName, minX, maxX, minY, maxY, raster2));
    }
  }

  // Wraps a raster in a GridCoverage2D georeferenced to the custom CRS.
  private static GridCoverage2D createCoverageTypeDouble(
      final String coverageName,
      final double minX,
      final double maxX,
      final double minY,
      final double maxY,
      final WritableRaster raster) {
    final GridCoverageFactory gcf = CoverageFactoryFinder.getGridCoverageFactory(null);
    final org.opengis.geometry.Envelope mapExtent =
        new ReferencedEnvelope(minX, maxX, minY, maxY, TestUtils.CUSTOM_CRS);
    return gcf.create(coverageName, raster, mapExtent);
  }

  // Ingests numRasters overlapping tiles whose per-pixel values come from
  // TestUtils.getTileValue, using the supplied merge strategy.
  private void ingestGeneralPurpose(
      final String coverageName,
      final int tileSize,
      final double westLon,
      final double eastLon,
      final double southLat,
      final double northLat,
      final int numBands,
      final int numRasters,
      final RasterTileMergeStrategy<?> mergeStrategy) throws IOException {
    // just ingest a number of rasters
    final DataStore dataStore = dataStoreOptions.createDataStore();
    final RasterDataAdapter basicAdapter = RasterUtils.createDataAdapterTypeDouble(
        coverageName, numBands, tileSize, new NoDataMergeStrategy());
    // Wrap the basic adapter so the requested merge strategy is used instead.
    final RasterDataAdapter mergeStrategyOverriddenAdapter =
        new RasterDataAdapter(basicAdapter, coverageName, mergeStrategy);
    basicAdapter.getMetadata().put("test-key", "test-value");
    dataStore.addType(mergeStrategyOverriddenAdapter, TestUtils.createWebMercatorSpatialIndex());
    try (Writer writer = dataStore.createWriter(mergeStrategyOverriddenAdapter.getTypeName())) {
      for (int r = 0; r < numRasters; r++) {
        final WritableRaster raster = RasterUtils.createRasterTypeDouble(numBands, tileSize);
        for (int x = 0; x < tileSize; x++) {
          for (int y = 0; y < tileSize; y++) {
            for (int b = 0; b < numBands; b++) {
              raster.setSample(x, y, b, TestUtils.getTileValue(x, y, b, r, tileSize));
            }
          }
        }
        writer.write(
            createCoverageTypeDouble(coverageName, westLon, eastLon, southLat, northLat, raster));
      }
    }
  }

  // Spatially queries the merged coverage and compares every band/pixel
  // against the strategy-specific expected-value function.
  private void queryGeneralPurpose(
      final String coverageName,
      final int tileSize,
      final double westLon,
      final double eastLon,
      final double southLat,
      final double northLat,
      final int numBands,
      final int numRasters,
      final GeoWaveBasicRasterIT.ExpectedValue expectedValue) throws IOException {
    final DataStore dataStore = dataStoreOptions.createDataStore();
    try (CloseableIterator<?> it =
        dataStore.query(
            QueryBuilder.newBuilder().addTypeName(coverageName).constraints(
                new IndexOnlySpatialQuery(
                    new GeometryFactory().toGeometry(
                        new Envelope(westLon, eastLon, southLat, northLat)),
                    TestUtils.CUSTOM_CRSCODE)).build())) {
      // the expected outcome is:
      // band 1,2,3,4,5,6 has every value set correctly, band 0 has every
      // even row set correctly and every odd row should be NaN, and band
      // 7 has the upper quadrant as NaN and the rest set
      final GridCoverage coverage = (GridCoverage) it.next();
      final Raster raster = coverage.getRenderedImage().getData();
      Assert.assertEquals(tileSize, raster.getWidth());
      Assert.assertEquals(tileSize, raster.getHeight());
      for (int x = 0; x < tileSize; x++) {
        for (int y = 0; y < tileSize; y++) {
          for (int b = 0; b < numBands; b++) {
            Assert.assertEquals(
                "x=" + x + ",y=" + y + ",b=" + b,
                expectedValue.getExpectedValue(x, y, b, numRasters, tileSize),
                raster.getSampleDouble(x, y, b),
                DOUBLE_TOLERANCE);
          }
        }
      }
      // there should be exactly one
      Assert.assertFalse(it.hasNext());
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package net.firejack.platform.core.schedule; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.WebResource; import com.sun.jersey.api.client.config.ClientConfig; import com.sun.jersey.api.client.config.DefaultClientConfig; import com.sun.jersey.core.util.Base64; import com.sun.jersey.spi.inject.SingletonTypeInjectableProvider; import net.firejack.platform.api.OPFEngine; import net.firejack.platform.api.authority.domain.AuthenticationToken; import net.firejack.platform.core.domain.AbstractDTO; import net.firejack.platform.core.exception.BusinessFunctionException; import net.firejack.platform.core.model.registry.HTTPMethod; import net.firejack.platform.core.model.registry.domain.ActionModel; import net.firejack.platform.core.model.registry.schedule.ScheduleHistoryModel; import net.firejack.platform.core.model.registry.schedule.ScheduleModel; import net.firejack.platform.core.model.user.BaseUserModel; import net.firejack.platform.core.response.ServiceResponse; import net.firejack.platform.core.store.registry.IActionStore; import net.firejack.platform.core.store.registry.IScheduleHistoryStore; import net.firejack.platform.core.store.user.IBaseUserStore; import 
net.firejack.platform.core.utils.InstallUtils;
import net.firejack.platform.core.utils.OpenFlame;
import net.firejack.platform.core.utils.OpenFlameSpringContext;
import net.firejack.platform.core.utils.StringUtils;
import net.firejack.platform.provider.XMLProvider;
import net.firejack.platform.web.mina.bean.Status;
import net.firejack.platform.web.mina.bean.StatusType;
import net.firejack.platform.web.security.model.OpenFlameSecurityConstants;
import net.firejack.platform.web.security.x509.KeyUtils;
import org.apache.log4j.Logger;
import org.codehaus.jackson.jaxrs.JacksonJsonProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Cookie;
import javax.ws.rs.core.MediaType;
import java.io.File;
import java.net.InetAddress;
import java.security.KeyPair;
import java.security.cert.X509Certificate;
import java.util.*;

/**
 * Singleton manager that runs scheduled jobs by invoking their associated
 * action over HTTP (Jersey client), records schedule history, and polls a
 * progress endpoint for long-running jobs.
 *
 * NOTE(review): schedulerJobStatusMap is a plain HashMap mutated from both
 * the scheduler thread and Timer callbacks — confirm single-threaded access
 * or switch to a concurrent map.
 */
@Component("scheduleJobManager")
@Scope(BeanDefinition.SCOPE_SINGLETON)
public class ScheduleJobManager {
    // Connect/read timeout for outbound HTTP calls, in milliseconds.
    protected static final int TIMEOUT = 60000;
    protected static final int COUNT_OF_EMPTY_REQUESTS = 3600; // it is about one hour

    private static final Logger logger = Logger.getLogger(ScheduleJobManager.class);

    // In-flight jobs keyed by schedule id; presence means "already running".
    private Map<Long, ScheduleJobStatus> schedulerJobStatusMap = new HashMap<Long, ScheduleJobStatus>();

    @Autowired
    private IActionStore actionStore;
    @Autowired
    private IScheduleHistoryStore scheduleHistoryStore;
    @Autowired
    private IBaseUserStore<BaseUserModel> baseUserStore;

    /**
     * Executes the schedule's action once. If the schedule is already running
     * (present in schedulerJobStatusMap) the call is skipped with a warning.
     * An immediate response finishes the history record right away; a Status
     * response starts a background progress-polling task instead.
     *
     * @param scheduleModel        the schedule whose action should run
     * @param userOriginalCallerId caller to attribute history to; when null,
     *                             falls back to the STS sign-in user
     */
    public void executeJob(ScheduleModel scheduleModel, Long userOriginalCallerId) {
        ActionModel actionModel = scheduleModel.getAction();
        ScheduleJobStatus scheduleJobStatus = schedulerJobStatusMap.get(scheduleModel.getId());
        if (scheduleJobStatus == null) {
            scheduleJobStatus = new ScheduleJobStatus(scheduleModel, 0,
                    "Starting to execute the '" + scheduleModel.getName() + "' schedule job with id: " + scheduleModel.getId());
            schedulerJobStatusMap.put(scheduleModel.getId(), scheduleJobStatus);
            logger.info("[" + scheduleJobStatus.getRequestUID() + "]: Scheduled Job is running for action: '" + actionModel.getLookup() + "'");
            // Reload the action so we operate on a fresh, fully populated model.
            actionModel = actionStore.findById(actionModel.getId());
            scheduleModel.setAction(actionModel);
            try {
                AuthenticationToken authenticationToken = getAuthenticationToken();
                scheduleJobStatus.setToken(authenticationToken.getToken());
                if (userOriginalCallerId == null) {
                    userOriginalCallerId = authenticationToken.getUser().getId();
                }
                BaseUserModel userModel = baseUserStore.findById(userOriginalCallerId);
                startScheduleHistory(scheduleJobStatus, userModel);
                ServiceResponse response = doRequest(actionModel, scheduleJobStatus);
                if (response.isSuccess()) {
                    if (response.getItem() instanceof Status) {
                        // Long-running job: poll the progress endpoint until done.
                        Status status = (Status) response.getItem();
                        logger.info("[" + scheduleJobStatus.getRequestUID() + "]: Progress response: '" + status.getTitle() + "' with percents: " + status.getPercent() + "%");
                        startProgressStatusTask(scheduleJobStatus);
                    } else {
                        // Short job: finished in one round trip.
                        logger.info("[" + scheduleJobStatus.getRequestUID() + "]: Immediate response: " + response.getMessage());
                        finishScheduleHistory(scheduleJobStatus, response);
                        schedulerJobStatusMap.remove(scheduleModel.getId());
                    }
                } else {
                    logger.warn("[" + scheduleJobStatus.getRequestUID() + "]: Response: " + response.getMessage());
                    finishScheduleHistory(scheduleJobStatus, response);
                    schedulerJobStatusMap.remove(scheduleModel.getId());
                }
            } catch (Exception e) {
                logger.error("[" + scheduleJobStatus.getRequestUID() + "]: Exception: " + e.getMessage());
                finishScheduleHistory(scheduleJobStatus, new ServiceResponse(e.getMessage(), false));
                schedulerJobStatusMap.remove(scheduleModel.getId());
            }
        } else {
            logger.warn("Scheduled Job: '" + scheduleModel.getName() + "' with id: " + scheduleModel.getId() + " is working now. Completed only " + scheduleJobStatus.getPercents() + "%");
        }
    }

    /** Returns the in-flight status for a schedule, or null if not running. */
    public ScheduleJobStatus getJobStatus(Long scheduleId) {
        return schedulerJobStatusMap.get(scheduleId);
    }

    /**
     * Performs the action's HTTP call via the Jersey client, propagating the
     * authentication token as a cookie and the Page-UID header.
     * Only GET actions are supported; anything else throws.
     */
    private ServiceResponse doRequest(ActionModel action, ScheduleJobStatus scheduleJobStatus) {
        if (HTTPMethod.GET.equals(action.getMethod())) {
            String url = "http://" + action.getServerName() + ":" + action.getPort() + action.getParentPath() + action.getUrlPath();
            ClientConfig config = new DefaultClientConfig();
            config.getClasses().add(JacksonJsonProvider.class);
            config.getClasses().add(XMLProvider.class);
            config.getSingletons().add(new SingletonTypeInjectableProvider<Context, Class[]>(Class[].class, getBeans()) {});
            WebResource webResource = Client.create(config).resource(url);
            webResource.setProperty(ClientConfig.PROPERTY_CONNECT_TIMEOUT, TIMEOUT);
            webResource.setProperty(ClientConfig.PROPERTY_READ_TIMEOUT, TIMEOUT);
            WebResource.Builder builder = webResource.accept(MediaType.APPLICATION_XML_TYPE).type(MediaType.APPLICATION_XML_TYPE);
            Cookie cookie = new Cookie(OpenFlameSecurityConstants.AUTHENTICATION_TOKEN_ATTRIBUTE, scheduleJobStatus.getToken());
            builder.cookie(cookie);
            builder.header("Page-UID", scheduleJobStatus.getPageUID());
            return builder.get(ServiceResponse.class);
        } else {
            throw new BusinessFunctionException("This action: '" + action.getLookup() + "' doesn't supported.");
        }
    }

    /**
     * Obtains a fresh authentication token via STS certificate sign-in: loads
     * the local key pair, generates a short-lived X.509 certificate, and
     * exchanges it for a token. Throws BusinessFunctionException on any
     * failure (missing key store, failed sign-in, null token).
     */
    private AuthenticationToken getAuthenticationToken() {
        File keyStore = InstallUtils.getKeyStore();
        if (keyStore.exists()) {
            try {
                String hostName = InetAddress.getLocalHost().getHostName();
                KeyPair keyPair = KeyUtils.load(keyStore);
                if (keyPair == null) {
                    throw new IllegalStateException("Key not found");
                }
                X509Certificate certificate = KeyUtils.generateCertificate("", 1, keyPair);
                String cert = new String(Base64.encode(certificate.getEncoded()));
                ServiceResponse<AuthenticationToken> response = OPFEngine.AuthorityService.processSTSCertSignIn(OpenFlame.PACKAGE, hostName, cert);
                if (response.isSuccess()) {
                    AuthenticationToken authenticationToken = response.getItem();
                    if (authenticationToken.getToken() != null) {
                        return authenticationToken;
                    } else {
                        throw new BusinessFunctionException("Could not get authentication token.");
                    }
                } else {
                    throw new BusinessFunctionException("Could not get authentication token.");
                }
            } catch (Exception e) {
                logger.error(e);
                throw new BusinessFunctionException(e.getMessage(), e);
            }
        } else {
            throw new BusinessFunctionException("Could not find key store file.");
        }
    }

    // Collects the concrete classes of every AbstractDTO Spring bean so the
    // Jersey providers can (de)serialize them.
    private Class[] getBeans() {
        Map<String, AbstractDTO> map = OpenFlameSpringContext.getContext().getBeansOfType(AbstractDTO.class);
        Class[] classes = new Class[map.size()];
        int i = 0;
        for (AbstractDTO dto : map.values()) {
            classes[i++] = dto.getClass();
        }
        return classes;
    }

    // Polls the progress endpoint once per second until the job reports it is
    // finished (doProgressStatusRequest returns false), then cancels the timer.
    private void startProgressStatusTask(final ScheduleJobStatus scheduleJobStatus) {
        final Timer timer = new Timer();
        TimerTask task = new TimerTask() {
            public void run() {
                boolean needToContinue = doProgressStatusRequest(scheduleJobStatus);
                if (!needToContinue) {
                    timer.cancel();
                }
            }
        };
        timer.schedule(task, 0, 1000);
    }

    /**
     * Single poll of the action server's /rest/progress/status endpoint.
     * Returns true while polling should continue.
     * (Body continues beyond this chunk of the file.)
     */
    private boolean doProgressStatusRequest(ScheduleJobStatus scheduleJobStatus) {
        boolean needToContinue = false;
        try {
            ClientConfig config = new DefaultClientConfig();
            config.getClasses().add(JacksonJsonProvider.class);
            config.getClasses().add(XMLProvider.class);
            config.getSingletons().add(new SingletonTypeInjectableProvider<Context, Class[]>(Class[].class, getBeans()) {});
            ActionModel action = scheduleJobStatus.getScheduleModel().getAction();
            String url = "http://" + action.getServerName() + ":" + action.getPort() + action.getParentPath() + "/rest/progress/status";
            WebResource webResource = Client.create(config).resource(url);
            webResource.setProperty(ClientConfig.PROPERTY_CONNECT_TIMEOUT, TIMEOUT);
            webResource.setProperty(ClientConfig.PROPERTY_READ_TIMEOUT, TIMEOUT);
            WebResource.Builder builder = webResource.accept(MediaType.APPLICATION_XML_TYPE).type(MediaType.APPLICATION_XML_TYPE);
            Cookie cookie = new
Cookie(OpenFlameSecurityConstants.AUTHENTICATION_TOKEN_ATTRIBUTE, scheduleJobStatus.getToken()); builder.cookie(cookie); builder.header("Page-UID", scheduleJobStatus.getPageUID()); ServiceResponse response = builder.get(ServiceResponse.class); if (response.isSuccess()) { List responseData = response.getData(); if (responseData == null || responseData.isEmpty()) { logger.info("[" + scheduleJobStatus.getRequestUID() + "]: Progress response: 'Progress status list is empty' current percents: " + scheduleJobStatus.getPercents() + "%"); scheduleJobStatus.incrementCountOfEmptyRequests(); if (scheduleJobStatus.getCountOfEmptyRequests() < COUNT_OF_EMPTY_REQUESTS) { needToContinue = true; } else { String message = "'" + scheduleJobStatus.getScheduleModel().getName() + "' has not been responding for long time: " + scheduleJobStatus.getDurationTimeOfEmptyRequests(); logger.warn("[" + scheduleJobStatus.getRequestUID() + "]: Timeout response: " + message); finishScheduleHistory(scheduleJobStatus, new ServiceResponse(message, false)); schedulerJobStatusMap.remove(scheduleJobStatus.getScheduleModel().getId()); } } else { for (Object data : responseData) { AbstractDTO item = ((ServiceResponse) data).getItem(); if (item instanceof Status) { Status status = (Status) item; if (StatusType.ERROR.equals(status.getType())) { logger.warn("[" + scheduleJobStatus.getRequestUID() + "]: Error response: " + status.getTitle()); finishScheduleHistory(scheduleJobStatus, new ServiceResponse(status.getTitle(), false)); schedulerJobStatusMap.remove(scheduleJobStatus.getScheduleModel().getId()); break; } else { logger.info("[" + scheduleJobStatus.getRequestUID() + "]: Progress response: '" + status.getTitle() + "' with percents: " + status.getPercent() + "%"); scheduleJobStatus.setPercents(status.getPercent()); scheduleJobStatus.setMessage(status.getTitle()); if (status.getPercent() < 100) { needToContinue = true; scheduleJobStatus.resetCountOfEmptyRequests(); } else { needToContinue = false; 
finishScheduleHistory(scheduleJobStatus, response); schedulerJobStatusMap.remove(scheduleJobStatus.getScheduleModel().getId()); break; } } } else { needToContinue = false; logger.info("[" + scheduleJobStatus.getRequestUID() + "]: Completed response: " + response.getMessage()); finishScheduleHistory(scheduleJobStatus, response); schedulerJobStatusMap.remove(scheduleJobStatus.getScheduleModel().getId()); break; } } } } else { logger.warn("[" + scheduleJobStatus.getRequestUID() + "]: Failure response: " + response.getMessage()); finishScheduleHistory(scheduleJobStatus, response); schedulerJobStatusMap.remove(scheduleJobStatus.getScheduleModel().getId()); } } catch (Exception e) { logger.error("[" + scheduleJobStatus.getRequestUID() + "]: Exception: " + e.getMessage()); finishScheduleHistory(scheduleJobStatus, new ServiceResponse(e.getMessage(), false)); schedulerJobStatusMap.remove(scheduleJobStatus.getScheduleModel().getId()); } return needToContinue; } private void startScheduleHistory(ScheduleJobStatus scheduleJobStatus, BaseUserModel userModel) { ScheduleHistoryModel scheduleHistoryModel = new ScheduleHistoryModel(); scheduleHistoryModel.setStartTime(new Date()); scheduleHistoryModel.setSchedule(scheduleJobStatus.getScheduleModel()); scheduleHistoryModel.setUser(userModel); scheduleHistoryStore.saveScheduleHistory(scheduleHistoryModel); scheduleJobStatus.setScheduleHistoryModel(scheduleHistoryModel); } private void finishScheduleHistory(ScheduleJobStatus scheduleJobStatus, ServiceResponse response) { ScheduleHistoryModel scheduleHistoryModel = scheduleJobStatus.getScheduleHistoryModel(); scheduleHistoryModel.setEndTime(new Date()); scheduleHistoryModel.setSuccess(response.isSuccess()); String responseMessage = StringUtils.defaultIfEmpty(response.getMessage(), "No message"); String message = StringUtils.cutting(responseMessage, 1000); scheduleHistoryModel.setMessage(message); scheduleHistoryStore.saveScheduleHistory(scheduleHistoryModel); } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.migrationhubrefactorspaces.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for listing the applications in a Migration Hub Refactor Spaces environment.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/migration-hub-refactor-spaces-2021-10-26/ListApplications"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListApplicationsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The ID of the environment whose applications are listed. */
    private String environmentIdentifier;

    /**
     * The maximum number of results to return with a single call; the remaining results are retrieved with another
     * call using the returned <code>nextToken</code> value.
     */
    private Integer maxResults;

    /** The token for the next page of results. */
    private String nextToken;

    /**
     * Sets the ID of the environment.
     *
     * @param environmentIdentifier
     *        The ID of the environment.
     */
    public void setEnvironmentIdentifier(String environmentIdentifier) {
        this.environmentIdentifier = environmentIdentifier;
    }

    /**
     * Returns the ID of the environment.
     *
     * @return The ID of the environment.
     */
    public String getEnvironmentIdentifier() {
        return this.environmentIdentifier;
    }

    /**
     * Fluent setter for the environment ID.
     *
     * @param environmentIdentifier
     *        The ID of the environment.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListApplicationsRequest withEnvironmentIdentifier(String environmentIdentifier) {
        setEnvironmentIdentifier(environmentIdentifier);
        return this;
    }

    /**
     * Sets the maximum number of results to return with a single call. To retrieve the remaining results, make another
     * call with the returned <code>nextToken</code> value.
     *
     * @param maxResults
     *        The maximum number of results to return with a single call.
     */
    public void setMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
    }

    /**
     * Returns the maximum number of results to return with a single call.
     *
     * @return The maximum number of results to return with a single call. To retrieve the remaining results, make
     *         another call with the returned <code>nextToken</code> value.
     */
    public Integer getMaxResults() {
        return this.maxResults;
    }

    /**
     * Fluent setter for the maximum number of results per call.
     *
     * @param maxResults
     *        The maximum number of results to return with a single call.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListApplicationsRequest withMaxResults(Integer maxResults) {
        setMaxResults(maxResults);
        return this;
    }

    /**
     * Sets the token for the next page of results.
     *
     * @param nextToken
     *        The token for the next page of results.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the token for the next page of results.
     *
     * @return The token for the next page of results.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent setter for the pagination token.
     *
     * @param nextToken
     *        The token for the next page of results.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListApplicationsRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getEnvironmentIdentifier() != null) {
            sb.append("EnvironmentIdentifier: ").append(getEnvironmentIdentifier()).append(",");
        }
        if (getMaxResults() != null) {
            sb.append("MaxResults: ").append(getMaxResults()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListApplicationsRequest)) {
            // also covers obj == null
            return false;
        }
        ListApplicationsRequest other = (ListApplicationsRequest) obj;
        return fieldEquals(getEnvironmentIdentifier(), other.getEnvironmentIdentifier())
                && fieldEquals(getMaxResults(), other.getMaxResults())
                && fieldEquals(getNextToken(), other.getNextToken());
    }

    /** Null-safe equality helper used by {@link #equals(Object)}. */
    private static boolean fieldEquals(Object left, Object right) {
        return left == null ? right == null : left.equals(right);
    }

    @Override
    public int hashCode() {
        // Same accumulation (seed 1, multiplier 31, null -> 0) as the classic generated form,
        // so hash values are identical to the previous implementation.
        int result = 1;
        for (Object field : new Object[] { getEnvironmentIdentifier(), getMaxResults(), getNextToken() }) {
            result = 31 * result + (field == null ? 0 : field.hashCode());
        }
        return result;
    }

    @Override
    public ListApplicationsRequest clone() {
        return (ListApplicationsRequest) super.clone();
    }

}
package org.sirix.xquery.node; import com.google.common.base.MoreObjects; import com.google.common.base.Preconditions; import org.brackit.xquery.atomic.Atomic; import org.brackit.xquery.atomic.QNm; import org.brackit.xquery.atomic.Una; import org.brackit.xquery.node.parser.NavigationalSubtreeParser; import org.brackit.xquery.node.parser.SubtreeHandler; import org.brackit.xquery.node.parser.SubtreeParser; import org.brackit.xquery.xdm.DocumentException; import org.brackit.xquery.xdm.Kind; import org.brackit.xquery.xdm.Scope; import org.brackit.xquery.xdm.Stream; import org.brackit.xquery.xdm.node.AbstractTemporalNode; import org.brackit.xquery.xdm.node.Node; import org.brackit.xquery.xdm.node.TemporalNode; import org.brackit.xquery.xdm.type.NodeType; import org.sirix.api.Axis; import org.sirix.api.NodeReadOnlyTrx; import org.sirix.api.xml.XmlNodeReadOnlyTrx; import org.sirix.api.xml.XmlNodeTrx; import org.sirix.api.xml.XmlResourceManager; import org.sirix.axis.*; import org.sirix.axis.temporal.*; import org.sirix.exception.SirixException; import org.sirix.node.SirixDeweyID; import org.sirix.service.InsertPosition; import org.sirix.settings.Fixed; import org.sirix.utils.LogWrapper; import org.sirix.xquery.StructuredDBItem; import org.sirix.xquery.stream.node.SirixNodeStream; import org.sirix.xquery.stream.node.TemporalSirixNodeStream; import org.slf4j.LoggerFactory; import org.checkerframework.checker.nullness.qual.Nullable; import java.util.Collections; import java.util.Objects; /** * A node which is used to provide all XDM functionality as well as temporal functions. * * @author Johannes Lichtenberger * */ public final class XmlDBNode extends AbstractTemporalNode<XmlDBNode> implements StructuredDBItem<XmlNodeReadOnlyTrx> { /** {@link LogWrapper} reference. */ private static final LogWrapper LOGWRAPPER = new LogWrapper(LoggerFactory.getLogger(XmlDBNode.class)); /** Sirix {@link XmlNodeReadOnlyTrx}. */ private final XmlNodeReadOnlyTrx rtx; /** Sirix node key. 
*/ private final long nodeKey; /** Kind of node. */ private final org.sirix.node.NodeKind kind; /** Collection this node is part of. */ private final XmlDBCollection collection; /** Determines if write-transaction is present. */ private final boolean isWtx; /** {@link Scope} of node. */ private SirixScope scope; /** Optional dewey ID. */ private final SirixDeweyID deweyID; /** * Constructor. * * @param rtx {@link XmlNodeReadOnlyTrx} for providing reading access to the underlying node * @param collection {@link XmlDBCollection} reference */ public XmlDBNode(final XmlNodeReadOnlyTrx rtx, final XmlDBCollection collection) { this.collection = Preconditions.checkNotNull(collection); this.rtx = Preconditions.checkNotNull(rtx); isWtx = this.rtx instanceof XmlNodeTrx; nodeKey = this.rtx.getNodeKey(); kind = this.rtx.getKind(); deweyID = this.rtx.getNode().getDeweyID(); } /** * Create a new {@link NodeReadOnlyTrx} and move to node key. */ private void moveRtx() { rtx.moveTo(nodeKey); } /** * Get underlying node. 
* * @return underlying node */ private org.sirix.node.interfaces.immutable.ImmutableNode getImmutableNode() { moveRtx(); return rtx.getNode(); } @Override public boolean isSelfOf(final Node<?> other) { moveRtx(); if (other instanceof XmlDBNode node) { assert node.getNodeClassID() == this.getNodeClassID(); return node.getImmutableNode().getNodeKey() == this.getImmutableNode().getNodeKey(); } return false; } @Override public boolean isParentOf(final Node<?> other) { moveRtx(); if (other instanceof XmlDBNode node) { assert node.getNodeClassID() == this.getNodeClassID(); return node.getImmutableNode().getParentKey() == rtx.getNodeKey(); } return false; } @Override public boolean isChildOf(final Node<?> other) { moveRtx(); if (other instanceof XmlDBNode node) { assert node.getNodeClassID() == this.getNodeClassID(); if (kind != org.sirix.node.NodeKind.ATTRIBUTE && kind != org.sirix.node.NodeKind.NAMESPACE) { return node.getImmutableNode().getNodeKey() == rtx.getParentKey(); } } return false; } @Override public boolean isDescendantOf(final Node<?> other) { moveRtx(); boolean retVal = false; if (other instanceof XmlDBNode node) { assert node.getNodeClassID() == this.getNodeClassID(); moveRtx(); if (kind != org.sirix.node.NodeKind.ATTRIBUTE && kind != org.sirix.node.NodeKind.NAMESPACE) { if (deweyID != null) { return deweyID.isDescendantOf(node.deweyID); } else { for (final Axis axis = new AncestorAxis(rtx); axis.hasNext();) { axis.next(); if (node.getImmutableNode().getNodeKey() == rtx.getNodeKey()) { retVal = true; } } } } } return retVal; } /** * Get the transaction. 
* * @return transaction handle */ @Override public XmlNodeReadOnlyTrx getTrx() { moveRtx(); return rtx; } @Override public boolean isDescendantOrSelfOf(final Node<?> other) { moveRtx(); boolean retVal = false; if (other instanceof XmlDBNode node) { assert node.getNodeClassID() == this.getNodeClassID(); if (isSelfOf(other)) { retVal = true; } else { retVal = isDescendantOf(other); } } return retVal; } @Override public boolean isAncestorOf(final Node<?> other) { moveRtx(); if (other instanceof XmlDBNode node) { assert node.getNodeClassID() == this.getNodeClassID(); if (deweyID != null) { return deweyID.isAncestorOf(node.deweyID); } else { return other.isDescendantOf(this); } } return false; } @Override public boolean isAncestorOrSelfOf(final Node<?> other) { moveRtx(); boolean retVal = false; if (other instanceof XmlDBNode node) { assert node.getNodeClassID() == this.getNodeClassID(); if (deweyID != null) { retVal = deweyID.isAncestorOf(node.deweyID); } else { if (isSelfOf(other)) { retVal = true; } else { retVal = other.isDescendantOf(this); } } } return retVal; } @Override public boolean isSiblingOf(final Node<?> other) { moveRtx(); boolean retVal = false; if (other instanceof XmlDBNode node) { assert node.getNodeClassID() == this.getNodeClassID(); try { if (deweyID != null) { return deweyID.isSiblingOf(node.deweyID); } //noinspection ConstantConditions if (node.getKind() != Kind.NAMESPACE && node.getKind() != Kind.ATTRIBUTE && node.getParent().getImmutableNode().getNodeKey() == ((XmlDBNode) other.getParent()).getImmutableNode() .getNodeKey()) { retVal = true; } } catch (final DocumentException e) { LOGWRAPPER.error(e.getMessage(), e); } } return retVal; } @Override public boolean isPrecedingSiblingOf(final Node<?> other) { if (other instanceof XmlDBNode node) { moveRtx(); if (kind != org.sirix.node.NodeKind.ATTRIBUTE && kind != org.sirix.node.NodeKind.NAMESPACE) { if (deweyID != null) { return deweyID.isPrecedingSiblingOf(node.deweyID); } else { while 
(rtx.hasRightSibling()) { rtx.moveToRightSibling(); if (rtx.getNodeKey() == node.getNodeKey()) { return true; } } } } } return false; } @Override public boolean isFollowingSiblingOf(final Node<?> other) { if (other instanceof XmlDBNode node) { moveRtx(); if (kind != org.sirix.node.NodeKind.ATTRIBUTE && kind != org.sirix.node.NodeKind.NAMESPACE) { if (deweyID != null) { return deweyID.isFollowingSiblingOf(node.deweyID); } else { while (rtx.hasLeftSibling()) { rtx.moveToLeftSibling(); if (rtx.getNodeKey() == node.getNodeKey()) { return true; } } } } } return false; } @Override public boolean isPrecedingOf(final Node<?> other) { if (other instanceof XmlDBNode node) { moveRtx(); if (kind != org.sirix.node.NodeKind.ATTRIBUTE && kind != org.sirix.node.NodeKind.NAMESPACE) { if (deweyID != null) { return deweyID.isPrecedingOf(node.deweyID); } else { for (final Axis axis = new FollowingAxis(rtx); axis.hasNext();) { axis.next(); if (rtx.getNodeKey() == node.getNodeKey()) { return true; } } } } } return false; } @Override public boolean isFollowingOf(final Node<?> other) { if (other instanceof XmlDBNode node) { moveRtx(); if (kind != org.sirix.node.NodeKind.ATTRIBUTE && kind != org.sirix.node.NodeKind.NAMESPACE) { if (deweyID != null) { return deweyID.isFollowingOf(node.deweyID); } else { for (final Axis axis = new PrecedingAxis(rtx); axis.hasNext();) { axis.next(); if (rtx.getNodeKey() == node.getNodeKey()) { return true; } } } } } return false; } @Override public boolean isAttributeOf(final Node<?> other) { moveRtx(); boolean retVal = false; if (other instanceof XmlDBNode node) { assert node.getNodeClassID() == this.getNodeClassID(); try { //noinspection ConstantConditions if (getParent().getImmutableNode().getNodeKey() == node.getImmutableNode().getNodeKey()) { retVal = true; } } catch (final DocumentException e) { LOGWRAPPER.error(e.getMessage(), e); } } return retVal; } @Override public boolean isDocumentOf(final Node<?> other) { moveRtx(); boolean retVal = false; if 
(getKind() == Kind.DOCUMENT && other instanceof XmlDBNode node) { assert node.getNodeClassID() == this.getNodeClassID(); final NodeReadOnlyTrx rtx = node.getTrx(); if (rtx.getRevisionNumber() == this.rtx.getRevisionNumber() && rtx.getResourceManager().getResourceConfig().getID() == this.rtx.getResourceManager() .getResourceConfig() .getID()) { retVal = true; } } return retVal; } @Override public boolean isDocumentRoot() { moveRtx(); return rtx.getParentKey() == Fixed.NULL_NODE_KEY.getStandardProperty(); } @Override public boolean isRoot() { moveRtx(); return rtx.getParentKey() == Fixed.DOCUMENT_NODE_KEY.getStandardProperty(); } @Override public int getNodeClassID() { return 1732483; } @Override public XmlDBCollection getCollection() { return collection; } @Override public Scope getScope() { if (scope == null && kind == org.sirix.node.NodeKind.ELEMENT) { scope = new SirixScope(this); } return scope; } @Override public Kind getKind() { moveRtx(); // $CASES-OMITTED$ return switch (rtx.getKind()) { case XML_DOCUMENT -> Kind.DOCUMENT; case ELEMENT -> Kind.ELEMENT; case TEXT -> Kind.TEXT; case COMMENT -> Kind.COMMENT; case PROCESSING_INSTRUCTION -> Kind.PROCESSING_INSTRUCTION; case NAMESPACE -> Kind.NAMESPACE; case ATTRIBUTE -> Kind.ATTRIBUTE; default -> throw new IllegalStateException("Kind not known!"); }; } @Override public QNm getName() { moveRtx(); return rtx.getName(); } @Override public void setName(final QNm name) throws DocumentException { if (isWtx) { moveRtx(); final XmlNodeTrx wtx = (XmlNodeTrx) rtx; if (wtx.isNameNode()) { try { wtx.setName(name); } catch (final SirixException e) { throw new DocumentException(e); } } else { throw new DocumentException("Node has no name!"); } } else { final XmlNodeTrx wtx = getWtx(); try { wtx.setName(name); } catch (final SirixException e) { wtx.rollback(); wtx.close(); throw new DocumentException(e); } } } @Override public Atomic getValue() { moveRtx(); // $CASES-OMITTED$ final String value = switch (kind) { case 
XML_DOCUMENT, ELEMENT -> expandString(); case ATTRIBUTE, COMMENT, PROCESSING_INSTRUCTION -> emptyIfNull(rtx.getValue()); case TEXT -> rtx.getValue(); default -> ""; }; return new Una(value); } /** * Treat a node value of null as an empty string. * * @param s the node value * @return a zero-length string if s is null, otherwise s */ private static String emptyIfNull(final String s) { return (s == null ? "" : s); } /** * Filter text nodes. * * @return concatenated String of text node values */ private String expandString() { final StringBuilder buffer = new StringBuilder(); final Axis axis = new DescendantAxis(rtx); while (axis.hasNext()) { axis.next(); if (rtx.isText()) { buffer.append(rtx.getValue()); } } return buffer.toString(); } @Override public void setValue(final Atomic value) throws DocumentException { moveRtx(); if (!rtx.isValueNode()) { throw new DocumentException("Node has no value!"); } if (isWtx) { final XmlNodeTrx wtx = (XmlNodeTrx) rtx; try { wtx.setValue(value.stringValue()); } catch (final SirixException e) { throw new DocumentException(e); } } else { final XmlNodeTrx wtx = getWtx(); try { wtx.setValue(value.stringValue()); } catch (final SirixException e) { wtx.rollback(); wtx.close(); throw new DocumentException(e); } } } @Override public XmlDBNode getParent() { moveRtx(); return rtx.hasParent() ? new XmlDBNode(rtx.moveToParent().trx(), collection) : null; } @Override public XmlDBNode getFirstChild() { moveRtx(); return rtx.hasFirstChild() ? new XmlDBNode(rtx.moveToFirstChild().trx(), collection) : null; } @Override public XmlDBNode getLastChild() { moveRtx(); return rtx.hasLastChild() ? new XmlDBNode(rtx.moveToLastChild().trx(), collection) : null; } @Override public Stream<XmlDBNode> getChildren() { moveRtx(); return new SirixNodeStream(new ChildAxis(rtx), collection); } // Returns all nodes in the subtree _including_ the subtree root. 
@Override public Stream<XmlDBNode> getSubtree() { moveRtx(); return new SirixNodeStream(new NonStructuralWrapperAxis(new DescendantAxis(rtx, IncludeSelf.YES)), collection); } @Override public boolean hasChildren() { moveRtx(); return rtx.getChildCount() > 0; } @Override public XmlDBNode getNextSibling() { moveRtx(); return rtx.hasRightSibling() ? new XmlDBNode(rtx.moveToRightSibling().trx(), collection) : null; } @Override public XmlDBNode getPreviousSibling() { moveRtx(); return rtx.hasLeftSibling() ? new XmlDBNode(rtx.moveToLeftSibling().trx(), collection) : null; } @Override public XmlDBNode append(final Kind kind, final QNm name, final Atomic value) { if (isWtx) { moveRtx(); final XmlNodeTrx wtx = (XmlNodeTrx) rtx; try { return append(wtx, kind, name, value); } catch (final SirixException e) { wtx.close(); throw new DocumentException(e); } } else { try (final XmlNodeTrx wtx = getWtx()) { return append(wtx, kind, name, value); } catch (final SirixException e) { throw new DocumentException(e); } } } private XmlDBNode append(final XmlNodeTrx wtx, final Kind kind, final QNm name, final Atomic value) { if (wtx.hasFirstChild()) { wtx.moveToLastChild(); insertNodeAsRightSibling(wtx, kind, name, value); } else { insertNodeAsFirstChild(wtx, kind, name, value); } return new XmlDBNode(wtx, collection); } private void insertNodeAsRightSibling(XmlNodeTrx wtx, Kind kind, QNm name, Atomic value) { switch (kind) { case DOCUMENT: break; case ELEMENT: wtx.insertElementAsRightSibling(name); break; case ATTRIBUTE: wtx.insertAttribute(name, value.asStr().stringValue()); break; case NAMESPACE: wtx.insertNamespace(name); break; case TEXT: wtx.insertTextAsRightSibling(value.asStr().stringValue()); break; case COMMENT: wtx.insertCommentAsRightSibling(value.asStr().stringValue()); break; case PROCESSING_INSTRUCTION: wtx.insertPIAsRightSibling(value.asStr().stringValue(), name.getLocalName()); break; default: throw new AssertionError(); // May not happen. 
} } private void insertNodeAsFirstChild(XmlNodeTrx wtx, Kind kind, QNm name, Atomic value) { switch (kind) { case DOCUMENT: break; case ELEMENT: wtx.insertElementAsFirstChild(name); break; case ATTRIBUTE: wtx.insertAttribute(name, value.asStr().stringValue()); break; case NAMESPACE: wtx.insertNamespace(name); break; case TEXT: wtx.insertTextAsFirstChild(value.asStr().stringValue()); break; case COMMENT: wtx.insertCommentAsFirstChild(value.asStr().stringValue()); break; case PROCESSING_INSTRUCTION: wtx.insertPIAsFirstChild(value.asStr().stringValue(), name.getLocalName()); break; default: throw new AssertionError(); // May not happen. } } @Override public XmlDBNode append(final Node<?> child) { if (isWtx) { moveRtx(); try { return append((XmlNodeTrx) rtx, child); } catch (final DocumentException e) { ((XmlNodeTrx) rtx).rollback(); rtx.close(); throw e; } } else { final XmlNodeTrx wtx = getWtx(); try { return append(wtx, child); } catch (final DocumentException e) { wtx.rollback(); wtx.close(); throw e; } } } private XmlDBNode append(final XmlNodeTrx wtx, final Node<?> child) { try { if (!(child.getKind() == Kind.ELEMENT)) return append(wtx, child.getKind(), child.getName(), child.getValue()); final SubtreeBuilder builder; if (wtx.hasFirstChild()) { wtx.moveToLastChild(); builder = new SubtreeBuilder(collection, wtx, InsertPosition.AS_RIGHT_SIBLING, Collections.emptyList()); } else { builder = new SubtreeBuilder(collection, wtx, InsertPosition.AS_FIRST_CHILD, Collections.emptyList()); } child.parse(builder); wtx.moveTo(builder.getStartNodeKey()); } catch (final SirixException e) { throw new DocumentException(e); } return new XmlDBNode(wtx, collection); } @Override public XmlDBNode append(final SubtreeParser parser) { if (isWtx) { try { moveRtx(); return append(rtx, parser); } catch (final DocumentException e) { ((XmlNodeTrx) rtx).rollback(); rtx.close(); throw e; } } else { final XmlNodeTrx wtx = getWtx(); try { return append(wtx, parser); } catch (final 
DocumentException e) { wtx.rollback(); wtx.close(); throw e; } } } private XmlDBNode append(final XmlNodeReadOnlyTrx rtx, final SubtreeParser parser) { try { if (rtx.hasFirstChild()) { rtx.moveToLastChild(); } parser.parse( new SubtreeBuilder(collection, (XmlNodeTrx) rtx, InsertPosition.AS_RIGHT_SIBLING, Collections.emptyList())); moveRtx(); rtx.moveToFirstChild(); } catch (final SirixException e) { throw new DocumentException(e); } return new XmlDBNode(rtx, collection); } @Override public XmlDBNode prepend(final Kind kind, final QNm name, final Atomic value) { if (isWtx) { try { moveRtx(); return prepend((XmlNodeTrx) rtx, kind, name, value); } catch (final DocumentException e) { ((XmlNodeTrx) rtx).rollback(); rtx.close(); throw e; } } else { final XmlNodeTrx wtx = getWtx(); try { return prepend((XmlNodeTrx) rtx, kind, name, value); } catch (final DocumentException e) { wtx.rollback(); wtx.close(); throw e; } } } private XmlDBNode prepend(final XmlNodeTrx wtx, final Kind kind, final QNm name, final Atomic value) { try { insertNodeAsFirstChild(wtx, kind, name, value); } catch (final SirixException e) { throw new DocumentException(e); } return new XmlDBNode(wtx, collection); } @Override public XmlDBNode prepend(final Node<?> child) { if (isWtx) { try { moveRtx(); return prepend((XmlNodeTrx) rtx, child); } catch (final DocumentException e) { ((XmlNodeTrx) rtx).rollback(); rtx.close(); throw e; } } else { final XmlNodeTrx wtx = getWtx(); try { return prepend(wtx, child); } catch (final DocumentException e) { wtx.rollback(); wtx.close(); throw e; } } } private XmlDBNode prepend(final XmlNodeTrx wtx, final Node<?> child) { try { if (!(child.getKind() == Kind.ELEMENT)) return prepend(wtx, child.getKind(), child.getName(), child.getValue()); SubtreeBuilder builder; if (wtx.hasFirstChild()) { wtx.moveToFirstChild(); builder = new SubtreeBuilder(collection, wtx, InsertPosition.AS_LEFT_SIBLING, Collections.emptyList()); } else { builder = new SubtreeBuilder(collection, wtx, 
InsertPosition.AS_FIRST_CHILD, Collections.emptyList()); } child.parse(builder); wtx.moveTo(builder.getStartNodeKey()); } catch (final SirixException e) { throw new DocumentException(e); } return new XmlDBNode(wtx, collection); } @Override public XmlDBNode prepend(final SubtreeParser parser) { if (isWtx) { try { moveRtx(); return prepend((XmlNodeTrx) rtx, parser); } catch (final DocumentException e) { ((XmlNodeTrx) rtx).rollback(); rtx.close(); throw e; } } else { final XmlNodeTrx wtx = getWtx(); try { return prepend(wtx, parser); } catch (final DocumentException e) { wtx.rollback(); wtx.close(); throw e; } } } private XmlDBNode prepend(final XmlNodeTrx wtx, final SubtreeParser parser) { try { parser.parse(new SubtreeBuilder(collection, wtx, InsertPosition.AS_FIRST_CHILD, Collections.emptyList())); moveRtx(); wtx.moveToFirstChild(); } catch (final SirixException e) { throw new DocumentException(e); } return new XmlDBNode(wtx, collection); } @Override public XmlDBNode insertBefore(final Kind kind, final QNm name, final Atomic value) { if (isWtx) { try { moveRtx(); return insertBefore((XmlNodeTrx) rtx, kind, name, value); } catch (final DocumentException e) { ((XmlNodeTrx) rtx).rollback(); rtx.close(); throw e; } } else { final XmlNodeTrx wtx = getWtx(); try { return insertBefore(wtx, kind, name, value); } catch (final DocumentException e) { wtx.rollback(); wtx.close(); throw e; } } } private XmlDBNode insertBefore(final XmlNodeTrx wtx, final Kind kind, final QNm name, final Atomic value) { try { switch (kind) { case DOCUMENT: break; case ELEMENT: wtx.insertElementAsLeftSibling(name); break; case ATTRIBUTE: wtx.insertAttribute(name, value.asStr().stringValue()); break; case NAMESPACE: wtx.insertNamespace(name); break; case TEXT: wtx.insertTextAsLeftSibling(value.asStr().stringValue()); break; case COMMENT: wtx.insertCommentAsLeftSibling(value.asStr().stringValue()); break; case PROCESSING_INSTRUCTION: wtx.insertPIAsLeftSibling(value.asStr().stringValue(), 
name.getLocalName());
          break;
        default:
          throw new AssertionError(); // Must not happen.
      }
    } catch (final SirixException e) {
      throw new DocumentException(e);
    }

    return new XmlDBNode(wtx, collection);
  }

  /** Inserts an existing node before this node; rolls back + closes the trx on failure. */
  @Override
  public XmlDBNode insertBefore(final Node<?> node) {
    if (isWtx) {
      try {
        moveRtx();
        return insertBefore((XmlNodeTrx) rtx, node);
      } catch (final DocumentException e) {
        ((XmlNodeTrx) rtx).rollback();
        rtx.close();
        throw e;
      }
    } else {
      final XmlNodeTrx wtx = getWtx();
      try {
        return insertBefore(wtx, node);
      } catch (final DocumentException e) {
        wtx.rollback();
        wtx.close();
        throw e;
      }
    }
  }

  // Worker: non-elements delegate to the kind/name/value overload; element subtrees are parsed
  // in as left sibling, leaving the cursor on the inserted subtree root.
  private XmlDBNode insertBefore(final XmlNodeTrx wtx, final Node<?> node) {
    try {
      if (!(node.getKind() == Kind.ELEMENT))
        return insertBefore(wtx, node.getKind(), node.getName(), node.getValue());

      final SubtreeBuilder builder =
          new SubtreeBuilder(collection, wtx, InsertPosition.AS_LEFT_SIBLING, Collections.emptyList());
      node.parse(builder);
      wtx.moveTo(builder.getStartNodeKey());
    } catch (final SirixException e) {
      throw new DocumentException(e);
    }

    return new XmlDBNode(wtx, collection);
  }

  /** Inserts the subtree produced by {@code parser} before this node. */
  @Override
  public XmlDBNode insertBefore(final SubtreeParser parser) {
    if (isWtx) {
      try {
        moveRtx();
        return insertBefore((XmlNodeTrx) rtx, parser);
      } catch (final DocumentException e) {
        ((XmlNodeTrx) rtx).rollback();
        rtx.close();
        throw e;
      }
    } else {
      final XmlNodeTrx wtx = getWtx();
      try {
        return insertBefore(wtx, parser);
      } catch (final DocumentException e) {
        wtx.rollback();
        wtx.close();
        throw e;
      }
    }
  }

  // Worker: parses the subtree in as left sibling and wraps the cursor positioned on it.
  private XmlDBNode insertBefore(final XmlNodeTrx wtx, final SubtreeParser parser) {
    try {
      final SubtreeBuilder builder =
          new SubtreeBuilder(collection, wtx, InsertPosition.AS_LEFT_SIBLING, Collections.emptyList());
      parser.parse(builder);
      return new XmlDBNode(wtx.moveTo(builder.getStartNodeKey()).trx(), collection);
    } catch (final SirixException e) {
      throw new DocumentException(e);
    }
  }

  /** Inserts a single node of {@code kind} as the right sibling of this node. */
  @Override
  public XmlDBNode insertAfter(final Kind kind, final QNm name, final Atomic value) {
    if (isWtx) {
      try {
        moveRtx();
return insertAfter((XmlNodeTrx) rtx, kind, name, value);
      } catch (final SirixException e) {
        ((XmlNodeTrx) rtx).rollback();
        rtx.close();
        throw new DocumentException(e);
      }
    } else {
      final XmlNodeTrx wtx = getWtx();
      try {
        return insertAfter(wtx, kind, name, value);
      } catch (final SirixException e) {
        wtx.rollback();
        wtx.close();
        throw new DocumentException(e);
      }
    }
  }

  // Worker: inserts as right sibling. Unlike the other workers this one propagates
  // SirixException to the caller, which wraps it in a DocumentException.
  private XmlDBNode insertAfter(final XmlNodeTrx wtx, final Kind kind, final QNm name, final Atomic value)
      throws SirixException {
    insertNodeAsRightSibling(wtx, kind, name, value);
    return new XmlDBNode(wtx, collection);
  }

  /** Inserts an existing node after this node; rolls back + closes the trx on failure. */
  @Override
  public XmlDBNode insertAfter(final Node<?> node) {
    if (isWtx) {
      try {
        moveRtx();
        return insertAfter((XmlNodeTrx) rtx, node);
      } catch (final DocumentException e) {
        ((XmlNodeTrx) rtx).rollback();
        rtx.close();
        throw e;
      }
    } else {
      final XmlNodeTrx wtx = getWtx();
      try {
        return insertAfter(wtx, node);
      } catch (final DocumentException e) {
        wtx.rollback();
        wtx.close();
        throw e;
      }
    }
  }

  // Worker: non-elements delegate to the kind/name/value overload; element subtrees are parsed
  // in as right sibling, leaving the cursor on the inserted subtree root.
  private XmlDBNode insertAfter(final XmlNodeTrx wtx, final Node<?> node) {
    try {
      if (!(node.getKind() == Kind.ELEMENT))
        return insertAfter(wtx, node.getKind(), node.getName(), node.getValue());

      final SubtreeBuilder builder =
          new SubtreeBuilder(collection, wtx, InsertPosition.AS_RIGHT_SIBLING, Collections.emptyList());
      node.parse(builder);
      wtx.moveTo(builder.getStartNodeKey());
    } catch (final SirixException e) {
      throw new DocumentException(e);
    }

    return new XmlDBNode(wtx, collection);
  }

  /** Inserts the subtree produced by {@code parser} after this node. */
  @Override
  public XmlDBNode insertAfter(final SubtreeParser parser) {
    if (isWtx) {
      try {
        moveRtx();
        return insertAfter((XmlNodeTrx) rtx, parser);
      } catch (final DocumentException e) {
        ((XmlNodeTrx) rtx).rollback();
        rtx.close();
        throw e;
      }
    } else {
      final XmlNodeTrx wtx = getWtx();
      try {
        return insertAfter(wtx, parser);
      } catch (final DocumentException e) {
        wtx.rollback();
        wtx.close();
        throw e;
      }
    }
  }

  // Worker: parses the subtree in as right sibling and wraps the cursor positioned on it.
  private XmlDBNode insertAfter(final XmlNodeTrx wtx, final SubtreeParser parser) {
    try {
      final SubtreeBuilder builder = new
SubtreeBuilder(collection, wtx, InsertPosition.AS_RIGHT_SIBLING, Collections.emptyList());
      parser.parse(builder);
      return new XmlDBNode(wtx.moveTo(builder.getStartNodeKey()).trx(), collection);
    } catch (final SirixException e) {
      throw new DocumentException(e);
    }
  }

  /** Sets (inserts) an attribute on this element node; rolls back + closes the trx on failure. */
  @Override
  public XmlDBNode setAttribute(final Node<?> attribute) {
    if (isWtx) {
      try {
        moveRtx();
        return setAttribute((XmlNodeTrx) rtx, attribute);
      } catch (final DocumentException e) {
        ((XmlNodeTrx) rtx).rollback();
        rtx.close();
        throw e;
      }
    } else {
      final XmlNodeTrx wtx = getWtx();
      try {
        wtx.moveTo(nodeKey);
        return setAttribute(wtx, attribute);
      } catch (final DocumentException e) {
        wtx.rollback();
        wtx.close();
        throw e;
      }
    }
  }

  // Worker: requires the cursor to be on an element; inserts name/value as an attribute.
  private XmlDBNode setAttribute(final XmlNodeTrx wtx, final Node<?> attribute) {
    if (wtx.isElement()) {
      final String value = attribute.getValue().asStr().stringValue();
      final QNm name = attribute.getName();
      try {
        wtx.insertAttribute(name, value);
      } catch (final SirixException e) {
        throw new DocumentException(e);
      }
      // NOTE(review): wraps the field rtx, not the wtx the attribute was inserted on —
      // confirm both always refer to the same transaction on this path.
      return new XmlDBNode(rtx, collection);
    }
    throw new DocumentException("No element node selected!");
  }

  /** Sets (inserts) an attribute given by name and value on this element node. */
  @Override
  public XmlDBNode setAttribute(final QNm name, final Atomic value) {
    if (isWtx) {
      try {
        moveRtx();
        return setAttribute((XmlNodeTrx) rtx, name, value);
      } catch (final DocumentException e) {
        ((XmlNodeTrx) rtx).rollback();
        rtx.close();
        throw e;
      }
    } else {
      final XmlNodeTrx wtx = getWtx();
      try {
        wtx.moveTo(nodeKey);
        return setAttribute(wtx, name, value);
      } catch (final DocumentException e) {
        wtx.rollback();
        wtx.close();
        throw e;
      }
    }
  }

  // Worker: requires the cursor to be on an element node.
  private XmlDBNode setAttribute(final XmlNodeTrx wtx, final QNm name, final Atomic value) {
    if (wtx.isElement()) {
      try {
        wtx.insertAttribute(name, value.asStr().stringValue());
      } catch (final SirixException e) {
        throw new DocumentException(e);
      }
      return new XmlDBNode(rtx, collection);
    }
    throw new DocumentException("No element node selected!");
  }

  /** Deletes the named attribute of this element node; rolls back + closes the trx on failure. */
  @Override
  public boolean deleteAttribute(final QNm name) {
    if (isWtx) {
      try {
        moveRtx();
        return
deleteAttribute((XmlNodeTrx) rtx, name);
      } catch (final DocumentException e) {
        ((XmlNodeTrx) rtx).rollback();
        rtx.close();
        throw e;
      }
    } else {
      final XmlNodeTrx wtx = getWtx();
      try {
        wtx.moveTo(nodeKey);
        return deleteAttribute(wtx, name);
      } catch (final DocumentException e) {
        wtx.rollback();
        wtx.close();
        throw e;
      }
    }
  }

  // Worker: removes the named attribute; throws if there is no such attribute or if the
  // cursor is not on an element node.
  private static boolean deleteAttribute(final XmlNodeTrx wtx, final QNm name) {
    if (wtx.isElement()) {
      if (wtx.moveToAttributeByName(name).hasMoved()) {
        try {
          wtx.remove();
          return true;
        } catch (final SirixException e) {
          throw new DocumentException(e.getCause());
        }
      }
      throw new DocumentException("No attribute with name " + name + " exists!");
    }
    throw new DocumentException("No element node selected!");
  }

  /** Streams all attributes of this node. */
  @Override
  public Stream<XmlDBNode> getAttributes() throws DocumentException {
    moveRtx();
    return new SirixNodeStream(new AttributeAxis(rtx), collection);
  }

  /**
   * Returns the named attribute of this element.
   *
   * NOTE(review): the error message is also thrown when the node IS an element but the
   * attribute is simply missing — consider a more precise message.
   */
  @Override
  public XmlDBNode getAttribute(final QNm name) {
    moveRtx();
    if (rtx.isElement() && rtx.moveToAttributeByName(name).hasMoved()) {
      return new XmlDBNode(rtx, collection);
    }
    throw new DocumentException("No element selected!");
  }

  /** Replaces this node with the given node; rolls back + closes the trx on failure. */
  @Override
  public XmlDBNode replaceWith(final Node<?> node) {
    if (isWtx) {
      try {
        moveRtx();
        return replaceWith((XmlNodeTrx) rtx, node);
      } catch (final DocumentException e) {
        ((XmlNodeTrx) rtx).rollback();
        rtx.close();
        throw e;
      }
    } else {
      final XmlNodeTrx wtx = getWtx();
      try {
        wtx.moveTo(nodeKey);
        return replaceWith(wtx, node);
      } catch (final DocumentException e) {
        wtx.rollback();
        wtx.close();
        throw e;
      }
    }
  }

  // Worker: sirix-native nodes are replaced directly via replaceNode(); foreign nodes are
  // first built into a sibling subtree and then swapped in via replace().
  private XmlDBNode replaceWith(final XmlNodeTrx wtx, final Node<?> node) {
    if (node instanceof XmlDBNode other) {
      try {
        final XmlNodeReadOnlyTrx rtx = other.getTrx();
        rtx.moveTo(other.getNodeKey());
        wtx.replaceNode(rtx);
      } catch (final SirixException e) {
        throw new DocumentException(e.getCause());
      }
      return new XmlDBNode(wtx, collection);
    } else {
      final SubtreeBuilder builder = createBuilder(wtx);
      node.parse(builder);
      try {
        return replace(builder.getStartNodeKey(), wtx);
      }
catch (final SirixException e) {
        throw new DocumentException(e.getCause());
      }
    }
  }

  /** Replaces this node with the subtree produced by {@code parser}. */
  @Override
  public XmlDBNode replaceWith(final SubtreeParser parser) {
    if (isWtx) {
      try {
        moveRtx();
        return replaceWith((XmlNodeTrx) rtx, parser);
      } catch (final DocumentException e) {
        ((XmlNodeTrx) rtx).rollback();
        rtx.close();
        throw e;
      }
    } else {
      final XmlNodeTrx wtx = getWtx();
      try {
        wtx.moveTo(nodeKey);
        return replaceWith(wtx, parser);
      } catch (final DocumentException e) {
        wtx.rollback();
        wtx.close();
        throw e;
      }
    }
  }

  // Worker: builds the replacement next to this node, then swaps it in via replace().
  private XmlDBNode replaceWith(final XmlNodeTrx wtx, final SubtreeParser parser) {
    final SubtreeBuilder builder = createBuilder(wtx);
    parser.parse(builder);
    try {
      return replace(builder.getStartNodeKey(), wtx);
    } catch (final SirixException e) {
      throw new DocumentException(e);
    }
  }

  /** Replaces this node with a single node described by kind/name/value. */
  @Override
  public XmlDBNode replaceWith(final Kind kind, final @Nullable QNm name, final @Nullable Atomic value) {
    if (isWtx) {
      try {
        moveRtx();
        return replaceWith((XmlNodeTrx) rtx, kind, name, value);
      } catch (final DocumentException e) {
        ((XmlNodeTrx) rtx).rollback();
        rtx.close();
        throw e;
      }
    } else {
      final XmlNodeTrx wtx = getWtx();
      try {
        wtx.moveTo(nodeKey);
        return replaceWith(wtx, kind, name, value);
      } catch (final DocumentException e) {
        wtx.rollback();
        wtx.close();
        throw e;
      }
    }
  }

  // Worker: moves next to the node being replaced (left sibling, or parent when there is
  // none), inserts the replacement after it, then removes the original via replace().
  private XmlDBNode replaceWith(final XmlNodeTrx wtx, final Kind kind, final QNm name, final Atomic value) {
    if (wtx.hasLeftSibling()) {
      wtx.moveToLeftSibling();
    } else {
      wtx.moveToParent();
    }

    try {
      final XmlDBNode node = insertAfter(wtx, kind, name, value);
      return replace(node.getNodeKey(), wtx);
    } catch (final SirixException e) {
      throw new DocumentException(e);
    }
  }

  // Removes the node to the right of the freshly inserted replacement (the original node),
  // then re-positions on the replacement subtree root.
  private XmlDBNode replace(final long nodeKey, final XmlNodeTrx wtx) throws SirixException {
    // Move to original node.
    wtx.moveTo(nodeKey).trx().moveToRightSibling();
    // Remove original node.
    wtx.remove();
    // Move to subtree root of new subtree.
wtx.moveTo(nodeKey);
    // NOTE(review): wraps the field rtx rather than wtx — confirm both reference the same
    // transaction when this is reached via getWtx().
    return new XmlDBNode(rtx, collection);
  }

  // Creates a builder that will place a new subtree next to the current node: as right
  // sibling of the left sibling when one exists, otherwise as first child of the parent.
  private SubtreeBuilder createBuilder(final XmlNodeTrx wtx) {
    SubtreeBuilder builder;
    try {
      if (wtx.hasLeftSibling()) {
        wtx.moveToLeftSibling();
        builder = new SubtreeBuilder(collection, wtx, InsertPosition.AS_RIGHT_SIBLING, Collections.emptyList());
      } else {
        wtx.moveToParent();
        builder = new SubtreeBuilder(collection, wtx, InsertPosition.AS_FIRST_CHILD, Collections.emptyList());
      }
    } catch (final SirixException e) {
      throw new DocumentException(e);
    }

    return builder;
  }

  /**
   * Get the node key.
   *
   * @return node key
   */
  public long getNodeKey() {
    moveRtx();
    return nodeKey;
  }

  @Override
  public boolean hasAttributes() {
    moveRtx();
    return rtx.getAttributeCount() > 0;
  }

  /**
   * Get the sibling position (number of left siblings, i.e. 0-based index among siblings).
   *
   * @return sibling position
   */
  public int getSiblingPosition() {
    moveRtx();
    int index = 0;
    while (rtx.hasLeftSibling()) {
      rtx.moveToLeftSibling();
      index++;
    }
    return index;
  }

  /** Removes this node; rolls back + closes the transaction if removal fails. */
  @Override
  public void delete() {
    if (isWtx) {
      moveRtx();
      final XmlNodeTrx wtx = (XmlNodeTrx) rtx;
      try {
        wtx.remove();
      } catch (final SirixException e) {
        wtx.rollback();
        wtx.close();
        throw new DocumentException(e);
      }
    } else {
      final XmlNodeTrx wtx = getWtx();
      try {
        wtx.remove();
      } catch (final SirixException e) {
        wtx.rollback();
        wtx.close();
        throw new DocumentException(e);
      }
    }
  }

  // Obtains a write transaction positioned at this node: reuses the resource's running one
  // if present, otherwise begins a new one and reverts it to this node's revision.
  private XmlNodeTrx getWtx() {
    final XmlResourceManager resource = rtx.getResourceManager();
    final XmlNodeTrx wtx;
    if (resource.hasRunningNodeWriteTrx() && resource.getNodeTrx().isPresent()) {
      wtx = resource.getNodeTrx().get();
    } else {
      wtx = resource.beginNodeTrx();

      if (rtx.getRevisionNumber() < resource.getMostRecentRevisionNumber())
        wtx.revertTo(rtx.getRevisionNumber());
    }
    wtx.moveTo(nodeKey);
    return wtx;
  }

  @Override
  public void parse(final SubtreeHandler handler) {
    moveRtx();
    final SubtreeParser parser = new NavigationalSubtreeParser(this);
    parser.parse(handler);
  }

  // Total document-order comparison: collection ID, then resource ID, then revision, then
  // node position (dewey-ID shortcut when available, otherwise tree walking).
  @Override
  protected int cmpInternal(final AbstractTemporalNode<XmlDBNode> otherNode) {
    moveRtx();

    // Are they the same node?
    if (this == otherNode) {
      return 0;
    }

    // Compare collection IDs.
    final int firstCollectionID = collection.getID();
    final int secondCollectionID = ((XmlDBCollection) otherNode.getCollection()).getID();
    if (firstCollectionID != secondCollectionID) {
      return firstCollectionID < secondCollectionID ? -1 : 1;
    }

    // Compare document IDs.
    final long firstDocumentID = getTrx().getResourceManager().getResourceConfig().getID();
    final long secondDocumentID = ((XmlDBNode) otherNode).getTrx().getResourceManager().getResourceConfig().getID();
    if (firstDocumentID != secondDocumentID) {
      return firstDocumentID < secondDocumentID ? -1 : 1;
    }

    // Temporal extension: earlier revisions order first.
    final Integer revision = rtx.getRevisionNumber();
    final int otherRevision = ((XmlDBNode) otherNode).rtx.getRevisionNumber();
    if (revision != otherRevision) {
      return revision.compareTo(otherRevision);
    }

    // Then compare node keys.
    if (nodeKey == ((XmlDBNode) otherNode).nodeKey) {
      return 0;
    }

    // If dewey-IDs are present it's simply the comparison of dewey-IDs.
    if (deweyID != null && ((XmlDBNode) otherNode).deweyID != null) {
      return deweyID.compareTo(((XmlDBNode) otherNode).deweyID);
    }

    try {
      final XmlDBNode firstParent = this.getParent();
      if (firstParent == null) {
        // First node is the root.
        return -1;
      }

      final XmlDBNode secondParent = (XmlDBNode) otherNode.getParent();
      if (secondParent == null) {
        // Second node is the root.
        return +1;
      }

      // Do they have the same parent (common case)?
if (firstParent.getNodeKey() == secondParent.getNodeKey()) {
        final int cat1 = nodeCategories(this.getKind());
        final int cat2 = nodeCategories(otherNode.getKind());

        if (cat1 == cat2) {
          final XmlDBNode other = (XmlDBNode) otherNode;
          // Same category under the same parent: namespaces (cat 1) and attributes (cat 2)
          // are ordered by their physical position on the parent.
          if (cat1 == 1) {
            rtx.moveToParent();
            for (int i = 0, nspCount = rtx.getNamespaceCount(); i < nspCount; i++) {
              rtx.moveToNamespace(i);
              if (rtx.getNodeKey() == other.nodeKey) {
                return +1;
              }
              if (rtx.getNodeKey() == this.nodeKey) {
                return -1;
              }
              rtx.moveToParent();
            }
          }
          if (cat1 == 2) {
            rtx.moveToParent();
            for (int i = 0, attCount = rtx.getAttributeCount(); i < attCount; i++) {
              rtx.moveToAttribute(i);
              if (rtx.getNodeKey() == other.nodeKey) {
                return +1;
              }
              if (rtx.getNodeKey() == this.nodeKey) {
                return -1;
              }
              rtx.moveToParent();
            }
          }
          // Ordinary children: compare 0-based sibling positions.
          return this.getSiblingPosition() - ((XmlDBNode) otherNode).getSiblingPosition();
        } else {
          return cat1 - cat2;
        }
      }

      // Find the depths of both nodes in the tree.
      int depth1 = 0;
      int depth2 = 0;
      XmlDBNode p1 = this;
      XmlDBNode p2 = (XmlDBNode) otherNode;
      while (p1 != null) {
        depth1++;
        p1 = p1.getParent();
      }
      while (p2 != null) {
        depth2++;
        p2 = p2.getParent();
      }

      // Move up one branch of the tree so we have two nodes on the same level.
      p1 = this;
      while (depth1 > depth2) {
        p1 = p1.getParent();
        assert p1 != null;
        if (p1.getNodeKey() == ((XmlDBNode) otherNode).getNodeKey()) {
          // The other node is an ancestor of this one.
          return +1;
        }
        depth1--;
      }

      p2 = ((XmlDBNode) otherNode);
      while (depth2 > depth1) {
        p2 = p2.getParent();
        assert p2 != null;
        if (p2.getNodeKey() == this.getNodeKey()) {
          // This node is an ancestor of the other one.
          return -1;
        }
        depth2--;
      }

      // Now move up both branches in sync until we find a common parent.
      while (true) {
        final XmlDBNode par1 = p1.getParent();
        final XmlDBNode par2 = p2.getParent();
        if (par1 == null || par2 == null) {
          throw new NullPointerException("Node order comparison - internal error");
        }
        if (par1.getNodeKey() == par2.getNodeKey()) {
          if (p1.getKind() == Kind.ATTRIBUTE && p2.getKind() != Kind.ATTRIBUTE) {
            return -1; // attributes first
          }
          if (p1.getKind() != Kind.ATTRIBUTE && p2.getKind() == Kind.ATTRIBUTE) {
            return +1; // attributes first
          }
          return p1.getSiblingPosition() - p2.getSiblingPosition();
        }
        p1 = par1;
        p2 = par2;
      }
    } catch (final DocumentException e) {
      // NOTE(review): a failed comparison is logged and reported as "equal" — confirm this
      // best-effort behavior is intended.
      LOGWRAPPER.error(e.getMessage(), e);
    }
    return 0;
  }

  /**
   * Determine node category.
   *
   * @param kind node kind
   * @return category number
   */
  private static int nodeCategories(final Kind kind) {
    return switch (kind) {
      case DOCUMENT -> 0;
      case COMMENT, PROCESSING_INSTRUCTION, TEXT, ELEMENT -> 3;
      case ATTRIBUTE -> 2;
      case NAMESPACE -> 1;
    };
  }

  @Override
  public int hashCode() {
    moveRtx();
    return Objects.hash(rtx.getNodeKey(), rtx.getValue(), rtx.getName());
  }

  @Override
  public String toString() {
    moveRtx();
    return MoreObjects.toStringHelper(this).add("rtx", rtx).toString();
  }

  @Override
  public Stream<?
extends Node<?>> performStep(final org.brackit.xquery.xdm.Axis axis, final NodeType test) {
    // NOTE(review): unimplemented — callers always receive null.
    return null;
  }

  /** Returns this node in the next revision, or {@code null} if there is none. */
  @Override
  public XmlDBNode getNext() {
    moveRtx();
    final AbstractTemporalAxis<XmlNodeReadOnlyTrx, XmlNodeTrx> axis = new NextAxis<>(rtx.getResourceManager(), rtx);
    return moveTemporalAxis(axis);
  }

  // Advances the given temporal axis one step and wraps the result (null when exhausted).
  private XmlDBNode moveTemporalAxis(final AbstractTemporalAxis<XmlNodeReadOnlyTrx, XmlNodeTrx> axis) {
    if (axis.hasNext()) {
      final var rtx = axis.next();
      return new XmlDBNode(rtx, collection);
    }

    return null;
  }

  /** Returns this node in the previous revision, or {@code null} if there is none. */
  @Override
  public XmlDBNode getPrevious() {
    moveRtx();
    final AbstractTemporalAxis<XmlNodeReadOnlyTrx, XmlNodeTrx> axis =
        new PreviousAxis<>(rtx.getResourceManager(), rtx);
    return moveTemporalAxis(axis);
  }

  /** Returns this node in the first revision, or {@code null} if there is none. */
  @Override
  public XmlDBNode getFirst() {
    moveRtx();
    final AbstractTemporalAxis<XmlNodeReadOnlyTrx, XmlNodeTrx> axis = new FirstAxis<>(rtx.getResourceManager(), rtx);
    return moveTemporalAxis(axis);
  }

  /** Returns this node in the last revision, or {@code null} if there is none. */
  @Override
  public XmlDBNode getLast() {
    moveRtx();
    final AbstractTemporalAxis<XmlNodeReadOnlyTrx, XmlNodeTrx> axis = new LastAxis<>(rtx.getResourceManager(), rtx);
    return moveTemporalAxis(axis);
  }

  /** Streams this node in all earlier revisions (optionally including the current one). */
  @Override
  public Stream<AbstractTemporalNode<XmlDBNode>> getEarlier(final boolean includeSelf) {
    moveRtx();
    final IncludeSelf include = includeSelf ? IncludeSelf.YES : IncludeSelf.NO;
    return new TemporalSirixNodeStream(new PastAxis<>(rtx.getResourceManager(), rtx, include), collection);
  }

  /** Streams this node in all later revisions (optionally including the current one). */
  @Override
  public Stream<AbstractTemporalNode<XmlDBNode>> getFuture(final boolean includeSelf) {
    moveRtx();
    final IncludeSelf include = includeSelf ? IncludeSelf.YES : IncludeSelf.NO;
    return new TemporalSirixNodeStream(new FutureAxis<>(rtx.getResourceManager(), rtx, include), collection);
  }

  /** Streams this node across every revision. */
  @Override
  public Stream<AbstractTemporalNode<XmlDBNode>> getAllTime() {
    moveRtx();
    return new TemporalSirixNodeStream(new AllTimeAxis<>(rtx.getResourceManager(), rtx), collection);
  }

  /** True when {@code other} lives exactly one revision after this node. */
  @Override
  public boolean isNextOf(final TemporalNode<?> other) {
    moveRtx();

    if (this == other)
      return false;

    if (!(other instanceof XmlDBNode otherNode))
      return false;

    return otherNode.getTrx().getRevisionNumber() - 1 == this.getTrx().getRevisionNumber();
  }

  /** True when {@code other} lives exactly one revision before this node. */
  @Override
  public boolean isPreviousOf(final TemporalNode<?> other) {
    moveRtx();

    if (this == other)
      return false;

    if (!(other instanceof XmlDBNode otherNode))
      return false;

    return otherNode.getTrx().getRevisionNumber() + 1 == this.getTrx().getRevisionNumber();
  }

  /** True when {@code other} lives in a strictly later revision than this node. */
  @Override
  public boolean isFutureOf(final TemporalNode<?> other) {
    moveRtx();

    if (this == other)
      return false;

    if (!(other instanceof XmlDBNode otherNode))
      return false;

    return otherNode.getTrx().getRevisionNumber() > this.getTrx().getRevisionNumber();
  }

  /** True when {@code other} lives in the same or a later revision than this node. */
  @Override
  public boolean isFutureOrSelfOf(final TemporalNode<?> other) {
    moveRtx();

    if (this == other)
      return true;

    if (!(other instanceof XmlDBNode otherNode))
      return false;

    return otherNode.getTrx().getRevisionNumber() - 1 >= this.getTrx().getRevisionNumber();
  }

  /** True when {@code other} lives in a strictly earlier revision than this node. */
  @Override
  public boolean isEarlierOf(final TemporalNode<?> other) {
    moveRtx();

    if (this == other)
      return false;

    if (!(other instanceof XmlDBNode otherNode))
      return false;

    return otherNode.getTrx().getRevisionNumber() < this.getTrx().getRevisionNumber();
  }

  /** True when {@code other} lives in the same or an earlier revision than this node. */
  @Override
  public boolean isEarlierOrSelfOf(final TemporalNode<?> other) {
    moveRtx();

    if (this == other)
      return true;

    if (!(other instanceof XmlDBNode otherNode))
      return false;

    return otherNode.getTrx().getRevisionNumber() <= this.getTrx().getRevisionNumber();
  }

  /** True when {@code other} belongs to the most recent revision of its resource. */
  @Override
  public boolean isLastOf(final TemporalNode<?> other) {
    moveRtx();

    if (!(other instanceof XmlDBNode otherNode))
      return false;

    final NodeReadOnlyTrx otherTrx = otherNode.getTrx();
    return otherTrx.getResourceManager().getMostRecentRevisionNumber() == otherTrx.getRevisionNumber();
  }

  /** True when {@code other} belongs to the first user-visible revision. */
  @Override
  public boolean isFirstOf(final TemporalNode<?> other) {
    moveRtx();

    if (!(other instanceof XmlDBNode otherNode))
      return false;

    final NodeReadOnlyTrx otherTrx = otherNode.getTrx();
    // Revision 0 is just the bootstrap revision and not accessed over here.
    return otherTrx.getRevisionNumber() == 1;
  }

  /**
   * Get the path class record (PCR).
   *
   * @return the path class record
   * @throws SirixException if Sirix fails to get the path class record
   */
  public long getPCR() {
    return rtx.getPathNodeKey();
  }

  /**
   * Get the DeweyID associated with this node (if any).
   *
   * @return an optional DeweyID (might be absent, depending on the {@link BasicXmlDBStore}
   *         configuration)
   */
  public SirixDeweyID getDeweyID() {
    return rtx.getDeweyID();
  }
}
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index;

import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeStateListener;
import java.util.BitSet;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReferenceArray;

/**
 * Create hollow integer set for sparse non-negative {@literal &} unique integer values referenced by fieldPath in a type based on a predicate.
 */
public class HollowSparseIntegerSet implements HollowTypeStateListener {

    private final HollowReadStateEngine readStateEngine;
    private final String type;
    private final FieldPath fieldPath;
    private final IndexPredicate predicate;
    // Readers snapshot this reference; writers swap it (compaction/resize) or mutate the
    // current instance through its own thread-safe set/clear.
    protected volatile SparseBitSet sparseBitSetVolatile;
    // Delta-update scratch state, populated between beginUpdate() and endUpdate().
    private Set<Integer> valuesToSet;
    private Set<Integer> valuesToClear;
    private int maxValueToSet;

    // Decides which ordinals of the type should contribute values to the set.
    public interface IndexPredicate {
        boolean shouldIndex(int ordinal);
    }

    // Default: index every populated ordinal.
    private final static IndexPredicate DEFAULT_PREDICATE = new IndexPredicate() {
        @Override
        public boolean shouldIndex(int ordinal) {
            return true;
        }
    };

    /**
     * Create a index for integer values pointed by the given field path.
*
     * @param readStateEngine the read state
     * @param type the type name
     * @param fieldPath the field path
     */
    public HollowSparseIntegerSet(HollowReadStateEngine readStateEngine, String type, String fieldPath) {
        this(readStateEngine, type, fieldPath, DEFAULT_PREDICATE);
    }

    /**
     * Create a index for integer values based on the given IndexPredicate.
     *
     * @param readStateEngine HollowReadStateEngine to read data set.
     * @param type type which contains the path to integer values for indexing.
     * @param fieldPath path to the integer values
     * @param predicate implementation of IndexPredicate, indicating if the record passes the condition for indexing.
     */
    public HollowSparseIntegerSet(HollowReadStateEngine readStateEngine, String type, String fieldPath, IndexPredicate predicate) {

        // check arguments
        if (readStateEngine == null)
            throw new IllegalArgumentException("Read state engine cannot be null");
        if (type == null)
            throw new IllegalArgumentException("type cannot be null");
        if (fieldPath == null || fieldPath.isEmpty())
            throw new IllegalArgumentException("fieldPath cannot be null or empty");

        this.readStateEngine = readStateEngine;
        this.type = type;
        this.fieldPath = new FieldPath(readStateEngine, type, fieldPath);
        this.predicate = predicate;
        this.valuesToSet = new HashSet<>();
        this.valuesToClear = new HashSet<>();
        build();
    }

    // Full (re)build: allocate, populate from all current ordinals, then compact.
    protected void build() {

        // initialize an instance of SparseBitSet
        initSet(Integer.MAX_VALUE);

        // iterate through all populated ordinals for the type to set the values based on predicate
        BitSet typeBitSet = readStateEngine.getTypeState(type).getPopulatedOrdinals();
        int ordinal = typeBitSet.nextSetBit(0);
        while (ordinal != -1) {
            set(ordinal);
            ordinal = typeBitSet.nextSetBit(ordinal + 1);
        }

        // run compaction
        compact();
    }

    protected void initSet(int maxValue) {
        sparseBitSetVolatile = new SparseBitSet(maxValue);
    }

    // Adds all field-path values of the given ordinal, if the predicate accepts it.
    protected void set(int ordinal) {
        if (predicate.shouldIndex(ordinal)) {
            Object[] values = fieldPath.findValues(ordinal);
            if (values != null &&
values.length > 0) {
                SparseBitSet bitSet = sparseBitSetVolatile;
                for (Object value : values) {
                    bitSet.set((int) value);
                }
            }
        }
    }

    // Swaps the live set for a compacted copy (drops over-allocated bucket slots).
    protected void compact() {
        SparseBitSet current = sparseBitSetVolatile;
        SparseBitSet compactedSet = SparseBitSet.compact(current);
        sparseBitSetVolatile = compactedSet;
    }

    /**
     * Check if the given value is contained in the set (or if the given value satisfies the predicate condition.)
     *
     * @param i the integer value
     * @return {@code true} if the value is present
     */
    public boolean get(int i) {
        SparseBitSet current;
        boolean result;
        // Re-read until the volatile reference is stable, so a concurrent swap (compact/
        // resize) cannot hand back a result from a half-observed set.
        do {
            current = sparseBitSetVolatile;
            result = current.get(i);
        } while (current != sparseBitSetVolatile);
        return result;
    }

    /**
     * Estimate the total number of bits used to represent the integer set.
     *
     * @return Calculates the total number of bits used by longs in underlying data structure.
     */
    public long size() {
        SparseBitSet current;
        long size;
        do {
            current = sparseBitSetVolatile;
            size = current.estimateBitsUsed();
        } while (current != sparseBitSetVolatile);
        return size;
    }

    /**
     * @return the total number of integers added to the set.
     */
    public int cardinality() {
        SparseBitSet current;
        int cardinality;
        do {
            current = sparseBitSetVolatile;
            cardinality = current.cardinality();
        } while (current != sparseBitSetVolatile);
        return cardinality;
    }

    /**
     * Use this method to keep the index updated with delta changes on the read state engine.
     * Remember to call detachFromDeltaUpdates to stop the delta changes.
     * NOTE: Each delta updates creates a new prefix index and swaps the new with current.
     */
    public void listenForDeltaUpdates() {
        readStateEngine.getTypeState(type).addListener(this);
    }

    /**
     * Stop delta updates for this index.
*/
    public void detachFromDeltaUpdates() {
        readStateEngine.getTypeState(type).removeListener(this);
    }

    // --- HollowTypeStateListener: collect per-delta changes, applied in endUpdate(). ---

    @Override
    public void beginUpdate() {
        // Reset scratch state for the incoming delta.
        valuesToSet.clear();
        valuesToClear.clear();
        maxValueToSet = -1;
    }

    @Override
    public void addedOrdinal(int ordinal) {
        if (predicate.shouldIndex(ordinal)) {
            Object[] values = fieldPath.findValues(ordinal);
            for (Object value : values) {
                valuesToSet.add((int) value);
                // Track the max so endUpdate() knows whether a resize is needed.
                if (maxValueToSet < (int) value)
                    maxValueToSet = (int) value;
            }
        }
    }

    @Override
    public void removedOrdinal(int ordinal) {
        Object[] values = fieldPath.findValues(ordinal);
        for (Object value : values)
            valuesToClear.add((int) value);
    }

    @Override
    public void endUpdate() {
        boolean didSomeWork = false;
        SparseBitSet updated = sparseBitSetVolatile;

        // first check if the max value among the new values to be added is more than the max value of the existing sparse bit set.
        if (valuesToSet.size() > 0 && maxValueToSet > updated.findMaxValue()) {
            updated = SparseBitSet.resize(updated, maxValueToSet);
            didSomeWork = true;
        }

        // Apply all pending additions from this delta.
        for (int value : valuesToSet) {
            updated.set(value);
        }

        // Then clear the values removed by this delta.
        // NOTE(review): a value that appears in BOTH sets ends up cleared because clears run
        // last — confirm that is the intended resolution for set-and-clear collisions.
        for (int value : valuesToClear) {
            updated.clear(value);
        }

        // Publish the resized copy; without a resize, mutations went to the live set directly.
        if (didSomeWork) {
            sparseBitSetVolatile = updated;
        }
    }

    /**
     * This implementation is motivated from several ideas to get a compact sparse set.
     * When using a a bucket of BitSet, problems
     * - smaller sizes of BitSet are not useful, since null references are themselves 64/32 bit references.
     * - larger sizes of BitSet for truly sparse integers, has overhead of too many zeroes in one BitSet.
     * <p>
     * The idea is to only store longs in bb that have non-zero values where bucket sizes are longs. Bucket size of 64 longs are convenient when using mod operations.
     * <p>
     * Each bit in long value in indices array, indicates if a long value is initialized. 64 bits would point to 64 long values ( 1 bucket ).
* Each bucket could contain 1-64 longs, we only hold non-zero long values in bucket.
     */
    static class SparseBitSet {

        // shift used to determine which bucket
        private static final int BUCKET_SHIFT = 12;
        // shift used to determine which Long value to use in bucket.
        private static final int LONG_SHIFT = 6;

        private final int maxValue;
        // One slot per bucket; null slot == empty bucket. CAS on whole Bucket objects makes
        // set/clear lock-free.
        private final AtomicReferenceArray<Bucket> buckets;

        // Immutable-per-CAS pair: idx marks which of the bucket's 64 longs are materialized,
        // longs holds only the materialized (non-zero) words, in ascending order.
        private static class Bucket {
            private long idx;
            private long[] longs;

            private Bucket(long idx, long[] longs) {
                this.idx = idx;
                this.longs = longs;
            }
        }

        SparseBitSet(int maxValue) {
            int totalBuckets = maxValue >>> BUCKET_SHIFT;
            this.maxValue = maxValue;
            this.buckets = new AtomicReferenceArray<>(totalBuckets + 1);
        }

        private SparseBitSet(int maxValue, AtomicReferenceArray<Bucket> buckets) {
            this.maxValue = maxValue;
            this.buckets = buckets;
        }

        private static int getIndex(int i) {
            // logical right shift
            return i >>> BUCKET_SHIFT;
        }

        /**
         * This method returns the number of Longs initialized from LSB to the given bitInIndex.
* For example longAtIndex (64 bits) = 00...1001 and bitInIndex (3rd bit is set) 000...100
         * then this method will return 1 since only one bit is set in longAtIndex to the right of bitInIndex
         *
         * @param longAtIndex
         * @param bitInIndex
         * @return
         */
        private static int getOffset(long longAtIndex, long bitInIndex) {
            // set all bits to one before the bit that is set in bitInIndex
            // example : 000...0100 will become 000...011
            long setAllOnesBeforeBitInIndex = bitInIndex - 1;
            long offset = (longAtIndex & setAllOnesBeforeBitInIndex);
            return Long.bitCount(offset);
        }

        // Returns true iff bit i is set; out-of-range values simply report false.
        boolean get(int i) {
            if (i > maxValue || i < 0)
                return false;

            int index = getIndex(i);
            Bucket currentBucket = buckets.get(index);
            if (currentBucket == null)
                return false;

            long currentLongAtIndex = currentBucket.idx;
            long[] longs = currentBucket.longs;

            // find which bit in index will point to the long in bb
            long whichLong = i >>> LONG_SHIFT;
            long bitInIndex = 1L << whichLong;// whichLong % 64 (Java masks the shift count)
            long isLongInitialized = (currentLongAtIndex & bitInIndex);
            if (isLongInitialized == 0)
                return false;

            int offset = getOffset(currentLongAtIndex, bitInIndex);
            long value = longs[offset];
            long whichBitInLong = 1L << i;// i % 64 (Java masks the shift count)
            return (value & whichBitInLong) != 0;
        }

        // thread-safe: retries with CAS on the bucket until the update is published.
        void set(int i) {
            if (i > maxValue)
                throw new IllegalArgumentException("Max value initialized is " + maxValue + " given value is " + i);
            if (i < 0)
                throw new IllegalArgumentException("Cannot index negative numbers");

            // find which bucket
            int index = getIndex(i);

            // find which bit in index will point to the long in bb
            long whichLong = i >>> LONG_SHIFT;
            long bitInIndex = 1L << whichLong;// whichLong % 64
            long whichBitInLong = 1L << i;// i % 64

            while (true) {
                long longAtIndex = 0;
                long[] longs = null;
                Bucket currentBucket = buckets.get(index);
                if (currentBucket != null) {
                    longAtIndex = currentBucket.idx;
                    // clone so concurrent readers never observe a half-mutated array
                    longs = currentBucket.longs.clone();
                }

                boolean isLongInitialized = (longAtIndex & bitInIndex) != 0;
                if (isLongInitialized) {
                    // if a long value is set, the find the correct offset to determine which long in longs to use.
                    int offset = getOffset(longAtIndex, bitInIndex);
                    longs[offset] |= whichBitInLong;// or preserves previous set operations in this long.
                } else if (longAtIndex == 0) {
                    // first set that bit in idx for that bucket, and assign a new long[]
                    longAtIndex = bitInIndex;
                    longs = new long[]{whichBitInLong};
                } else {
                    // update long value at index
                    longAtIndex |= bitInIndex;
                    // find offset
                    int offset = getOffset(longAtIndex, bitInIndex);
                    int oldLongsLen = longs.length;
                    long[] newLongs = new long[oldLongsLen + 1];
                    // if offset is 2 means 3 longs are needed starting from 0
                    // if current longs length is 2 (0,1) then append third long at end
                    // if current longs length is greater than offset, then insert long 0 -> (offset - 1), new long, offset to (length -1)
                    if (offset >= oldLongsLen) {
                        // append new long at end
                        int it;
                        for (it = 0; it < oldLongsLen; it++)
                            newLongs[it] = longs[it];
                        newLongs[it] = whichBitInLong;
                    } else {
                        // insert new long in between
                        int it;
                        for (it = 0; it < offset; it++)
                            newLongs[it] = longs[it];
                        newLongs[offset] = whichBitInLong;
                        for (it = offset; it < oldLongsLen; it++)
                            newLongs[it + 1] = longs[it];
                    }
                    longs = newLongs;
                }
                Bucket newBucket = new Bucket(longAtIndex, longs);
                if (buckets.compareAndSet(index, currentBucket, newBucket))
                    break;
            }
        }

        // thread-safe: clears bit i, shrinking or emptying the bucket when its last bit goes.
        void clear(int i) {
            if (i > maxValue || i < 0)
                return;

            int index = getIndex(i);
            while (true) {
                Bucket currentBucket = buckets.get(index);
                if (currentBucket == null)
                    return;

                long longAtIndex = currentBucket.idx;
                long[] longs = currentBucket.longs.clone();

                // find which bit in index will point to the long in bb
                long whichLong = i >>> LONG_SHIFT;
                long bitInIndex = 1L << whichLong;// whichLong % 64
                long whichBitInLong = 1L << i;// i % 64

                long isLongInitialized = (longAtIndex & bitInIndex);
                if (isLongInitialized == 0)
                    return;

                int offset = getOffset(longAtIndex, bitInIndex);
                long value = longs[offset];

                // unset whichBitInIndex in value
                // to clear 3rd bit (00100 whichBitInLong) in 00101(value), & with 11011 to get 00001
                long updatedValue = value & ~whichBitInLong;

                boolean isBucketEmpty = false;
                if (updatedValue != 0) {
                    longs[offset] = updatedValue;
                } else {
                    // if updatedValue is 0, then update the bucket removing that long
                    int oldLongsLen = longs.length;
                    // if only one long was initialized in the bucket, then make the reference null, indexAtLong 0
                    if (oldLongsLen == 1) {
                        longs = null;
                        longAtIndex = 0;
                        isBucketEmpty = true;
                    } else {
                        // copy everything over, except the long at the given offset,
                        long[] newLongs = new long[oldLongsLen - 1];
                        int it;
                        for (it = 0; it < offset; it++)
                            newLongs[it] = longs[it];
                        it++;
                        while (it < oldLongsLen) {
                            newLongs[it - 1] = longs[it];
                            it++;
                        }
                        longs = newLongs;
                        longAtIndex &= ~bitInIndex;
                    }
                }
                Bucket updatedBucket = null;
                if (!isBucketEmpty)
                    updatedBucket = new Bucket(longAtIndex, longs);
                if (buckets.compareAndSet(index, currentBucket, updatedBucket))
                    break;
            }
        }

        // Highest value currently set, or -1 when the set is empty.
        int findMaxValue() {
            // find the last index that is initialized
            int index = buckets.length() - 1;
            while (index >= 0) {
                if (buckets.get(index) != null)
                    break;
                index--;
            }

            // if no buckets are initialized, then return -1 ( meaning set is empty)
            if (index < 0)
                return -1;

            // find the highest bit in indexAtLong to see which is last long init in bucket
            int highestBitSetInIndexAtLong = 63 - Long.numberOfLeadingZeros(Long.highestOneBit(buckets.get(index).idx));
            long[] longs = buckets.get(index).longs;
            long value = longs[longs.length - 1];
            long highestBitSetInLong = 63 - Long.numberOfLeadingZeros(Long.highestOneBit(value));
            return (int) ((index << BUCKET_SHIFT) + (highestBitSetInIndexAtLong << 6) + highestBitSetInLong);
        }

        // Total number of set bits across all buckets.
        int cardinality() {
            int cardinality = 0;
            int index = 0;
            while (index < buckets.length()) {
                if (buckets.get(index) != null) {
                    long[] longs = buckets.get(index).longs;
                    for (long value : longs)
                        cardinality += Long.bitCount(value);
                }
                index++;
            }
            return cardinality;
        }

        long estimateBitsUsed() {
            long longsUsed = 0;
            long idxCounts = 
0; int index = 0; while (index < buckets.length()) { if (buckets.get(index) != null) { idxCounts++; longsUsed += buckets.get(index).longs.length; } index++; } // total bits used long bitsUsedByArrayPointers = buckets.length() * 64; long bitsUsedByIdx = idxCounts * 64; long bitsUsedByLongs = longsUsed * 64; return bitsUsedByArrayPointers + bitsUsedByIdx + bitsUsedByLongs; } /** * * Use this method to compact an existing SparseBitSet. Note any attempts to add a new value greater than the max value will result in exception. * * @param sparseBitSet * @return new SparseBitSet that is compact, does not hold null references beyond the max int value added in the given input. */ static SparseBitSet compact(SparseBitSet sparseBitSet) { int maxValueAdded = sparseBitSet.findMaxValue(); // if the given set is empty then compact the sparseBitSet to have only 1 bucket i.e. 64 longs if (maxValueAdded < 0) { maxValueAdded = (1 << BUCKET_SHIFT) - 1; } int indexForMaxValueAdded = getIndex(maxValueAdded); int newLength = indexForMaxValueAdded + 1; return copyWithNewLength(sparseBitSet, newLength, newLength, maxValueAdded); } static SparseBitSet resize(SparseBitSet sparseBitSet, int newMaxValue) { if (sparseBitSet.findMaxValue() < newMaxValue) { int indexForNewMaxValue = getIndex(newMaxValue); int newLength = indexForNewMaxValue + 1; return copyWithNewLength(sparseBitSet, newLength, sparseBitSet.buckets.length(), newMaxValue); } return sparseBitSet; } private static SparseBitSet copyWithNewLength(SparseBitSet sparseBitSet, int newLength, int lengthToClone, int newMaxValue) { AtomicReferenceArray<Bucket> compactBuckets = new AtomicReferenceArray<Bucket>(newLength); for (int i = 0; i < lengthToClone; i++) { if (sparseBitSet.buckets.get(i) != null) compactBuckets.set(i, sparseBitSet.buckets.get(i)); } return new SparseBitSet(newMaxValue, compactBuckets); } } }
/*
 * Written by Doug Lea with assistance from members of JCP JSR-166
 * Expert Group and released to the public domain, as explained at
 * http://creativecommons.org/publicdomain/zero/1.0/
 */

package java.util.concurrent;
import java.util.*;

/**
 * Provides default implementations of {@link ExecutorService}
 * execution methods. This class implements the {@code submit},
 * {@code invokeAny} and {@code invokeAll} methods using a
 * {@link RunnableFuture} returned by {@code newTaskFor}, which defaults
 * to the {@link FutureTask} class provided in this package. For example,
 * the implementation of {@code submit(Runnable)} creates an
 * associated {@code RunnableFuture} that is executed and
 * returned. Subclasses may override the {@code newTaskFor} methods
 * to return {@code RunnableFuture} implementations other than
 * {@code FutureTask}.
 *
 * <p><b>Extension example</b>. Here is a sketch of a class
 * that customizes {@link ThreadPoolExecutor} to use
 * a {@code CustomTask} class instead of the default {@code FutureTask}:
 * <pre> {@code
 * public class CustomThreadPoolExecutor extends ThreadPoolExecutor {
 *
 *   static class CustomTask<V> implements RunnableFuture<V> {...}
 *
 *   protected <V> RunnableFuture<V> newTaskFor(Callable<V> c) {
 *       return new CustomTask<V>(c);
 *   }
 *   protected <V> RunnableFuture<V> newTaskFor(Runnable r, V v) {
 *       return new CustomTask<V>(r, v);
 *   }
 *   // ... add constructors, etc.
 * }}</pre>
 *
 * @since 1.5
 * @author Doug Lea
 */
public abstract class AbstractExecutorService implements ExecutorService {

    /**
     * Returns a {@code RunnableFuture} for the given runnable and default
     * value.
     *
     * @param runnable the runnable task being wrapped
     * @param value the default value for the returned future
     * @return a {@code RunnableFuture} which, when run, will run the
     * underlying runnable and which, as a {@code Future}, will yield
     * the given value as its result and provide for cancellation of
     * the underlying task
     * @since 1.6
     */
    protected <T> RunnableFuture<T> newTaskFor(Runnable runnable, T value) {
        return new FutureTask<T>(runnable, value);
    }

    /**
     * Returns a {@code RunnableFuture} for the given callable task.
     *
     * @param callable the callable task being wrapped
     * @return a {@code RunnableFuture} which, when run, will call the
     * underlying callable and which, as a {@code Future}, will yield
     * the callable's result as its result and provide for
     * cancellation of the underlying task
     * @since 1.6
     */
    protected <T> RunnableFuture<T> newTaskFor(Callable<T> callable) {
        return new FutureTask<T>(callable);
    }

    /**
     * @throws RejectedExecutionException {@inheritDoc}
     * @throws NullPointerException       {@inheritDoc}
     */
    public Future<?> submit(Runnable task) {
        if (task == null) throw new NullPointerException();
        RunnableFuture<Void> ftask = newTaskFor(task, null);
        execute(ftask);
        return ftask;
    }

    /**
     * @throws RejectedExecutionException {@inheritDoc}
     * @throws NullPointerException       {@inheritDoc}
     */
    public <T> Future<T> submit(Runnable task, T result) {
        if (task == null) throw new NullPointerException();
        RunnableFuture<T> ftask = newTaskFor(task, result);
        execute(ftask);
        return ftask;
    }

    /**
     * @throws RejectedExecutionException {@inheritDoc}
     * @throws NullPointerException       {@inheritDoc}
     */
    public <T> Future<T> submit(Callable<T> task) {
        if (task == null) throw new NullPointerException();
        RunnableFuture<T> ftask = newTaskFor(task);
        execute(ftask);
        return ftask;
    }

    /**
     * the main mechanics of invokeAny.
     * Submits tasks through an {@link ExecutorCompletionService} and returns the
     * result of the first task to complete normally; all other tasks are cancelled
     * in the finally block. When {@code timed}, {@code nanos} bounds the total wait.
     */
    private <T> T doInvokeAny(Collection<? extends Callable<T>> tasks,
                              boolean timed, long nanos)
        throws InterruptedException, ExecutionException, TimeoutException {
        if (tasks == null)
            throw new NullPointerException();
        int ntasks = tasks.size();
        if (ntasks == 0)
            throw new IllegalArgumentException();
        ArrayList<Future<T>> futures = new ArrayList<Future<T>>(ntasks);
        ExecutorCompletionService<T> ecs =
            new ExecutorCompletionService<T>(this);

        // For efficiency, especially in executors with limited
        // parallelism, check to see if previously submitted tasks are
        // done before submitting more of them. This interleaving
        // plus the exception mechanics account for messiness of main
        // loop.

        try {
            // Record exceptions so that if we fail to obtain any
            // result, we can throw the last exception we got.
            ExecutionException ee = null;
            final long deadline = timed ? System.nanoTime() + nanos : 0L;
            Iterator<? extends Callable<T>> it = tasks.iterator();

            // Start one task for sure; the rest incrementally
            futures.add(ecs.submit(it.next()));
            --ntasks;
            int active = 1;

            for (;;) {
                // Non-blocking probe first: harvest an already-completed task if any.
                Future<T> f = ecs.poll();
                if (f == null) {
                    if (ntasks > 0) {
                        --ntasks;
                        futures.add(ecs.submit(it.next()));
                        ++active;
                    }
                    else if (active == 0)
                        // All submitted tasks have completed (exceptionally) — give up.
                        break;
                    else if (timed) {
                        f = ecs.poll(nanos, TimeUnit.NANOSECONDS);
                        if (f == null)
                            throw new TimeoutException();
                        // Shrink the remaining budget by however long the poll blocked.
                        nanos = deadline - System.nanoTime();
                    }
                    else
                        f = ecs.take();
                }
                if (f != null) {
                    --active;
                    try {
                        return f.get();
                    } catch (ExecutionException eex) {
                        ee = eex;
                    } catch (RuntimeException rex) {
                        ee = new ExecutionException(rex);
                    }
                }
            }

            if (ee == null)
                ee = new ExecutionException();
            throw ee;

        } finally {
            // Cancel leftovers whether returning a result or propagating an exception.
            for (int i = 0, size = futures.size(); i < size; i++)
                futures.get(i).cancel(true);
        }
    }

    public <T> T invokeAny(Collection<? extends Callable<T>> tasks)
        throws InterruptedException, ExecutionException {
        try {
            return doInvokeAny(tasks, false, 0);
        } catch (TimeoutException cannotHappen) {
            // Untimed invocation never times out.
            assert false;
            return null;
        }
    }

    public <T> T invokeAny(Collection<? extends Callable<T>> tasks,
                           long timeout, TimeUnit unit)
        throws InterruptedException, ExecutionException, TimeoutException {
        return doInvokeAny(tasks, true, unit.toNanos(timeout));
    }

    public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks)
        throws InterruptedException {
        if (tasks == null)
            throw new NullPointerException();
        ArrayList<Future<T>> futures = new ArrayList<Future<T>>(tasks.size());
        boolean done = false;
        try {
            for (Callable<T> t : tasks) {
                RunnableFuture<T> f = newTaskFor(t);
                futures.add(f);
                execute(f);
            }
            for (int i = 0, size = futures.size(); i < size; i++) {
                Future<T> f = futures.get(i);
                if (!f.isDone()) {
                    // Wait for completion; per contract the caller inspects each
                    // Future's state, so task failures are deliberately swallowed here.
                    try {
                        f.get();
                    } catch (CancellationException ignore) {
                    } catch (ExecutionException ignore) {
                    }
                }
            }
            done = true;
            return futures;
        } finally {
            if (!done)
                // An interrupt got us here — cancel everything still running.
                for (int i = 0, size = futures.size(); i < size; i++)
                    futures.get(i).cancel(true);
        }
    }

    public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks,
                                         long timeout, TimeUnit unit)
        throws InterruptedException {
        if (tasks == null)
            throw new NullPointerException();
        long nanos = unit.toNanos(timeout);
        ArrayList<Future<T>> futures = new ArrayList<Future<T>>(tasks.size());
        boolean done = false;
        try {
            for (Callable<T> t : tasks)
                futures.add(newTaskFor(t));

            final long deadline = System.nanoTime() + nanos;
            final int size = futures.size();

            // Interleave time checks and calls to execute in case
            // executor doesn't have any/much parallelism.
            for (int i = 0; i < size; i++) {
                execute((Runnable)futures.get(i));
                nanos = deadline - System.nanoTime();
                if (nanos <= 0L)
                    // Deadline hit mid-submission: return with the not-yet-executed
                    // futures cancelled by the finally block (done is still false...
                    // no — done stays false only on early return paths below; here we
                    // return futures directly, so finally cancels them).
                    return futures;
            }

            for (int i = 0; i < size; i++) {
                Future<T> f = futures.get(i);
                if (!f.isDone()) {
                    if (nanos <= 0L)
                        return futures;
                    try {
                        f.get(nanos, TimeUnit.NANOSECONDS);
                    } catch (CancellationException ignore) {
                    } catch (ExecutionException ignore) {
                    } catch (TimeoutException toe) {
                        return futures;
                    }
                    // Refresh the remaining budget for the next wait.
                    nanos = deadline - System.nanoTime();
                }
            }
            done = true;
            return futures;
        } finally {
            if (!done)
                for (int i = 0, size = futures.size(); i < size; i++)
                    futures.get(i).cancel(true);
        }
    }

}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.plugins.github.tasks;

import com.intellij.credentialStore.CredentialAttributes;
import com.intellij.credentialStore.CredentialAttributesKt;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.tasks.*;
import com.intellij.tasks.impl.BaseRepository;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.xmlb.annotations.Tag;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.github.api.GithubApiRequestExecutor;
import org.jetbrains.plugins.github.api.GithubApiRequests;
import org.jetbrains.plugins.github.api.GithubServerPath;
import org.jetbrains.plugins.github.api.data.GithubIssue;
import org.jetbrains.plugins.github.api.data.GithubIssueBase;
import org.jetbrains.plugins.github.api.data.GithubIssueCommentWithHtml;
import org.jetbrains.plugins.github.api.data.GithubIssueState;
import org.jetbrains.plugins.github.api.util.GithubApiPagesLoader;
import org.jetbrains.plugins.github.exceptions.GithubAuthenticationException;
import org.jetbrains.plugins.github.exceptions.GithubJsonException;
import org.jetbrains.plugins.github.exceptions.GithubRateLimitExceededException;
import org.jetbrains.plugins.github.exceptions.GithubStatusCodeException;
import org.jetbrains.plugins.github.issue.GithubIssuesLoadingHelper;

import javax.swing.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Task repository backed by GitHub issues: loads issues (optionally only those
 * assigned to the current user), maps them to IDE {@link Task}s, and can flip an
 * issue's open/closed state. The stored password is used as an OAuth token.
 */
@Tag("GitHub")
final class GithubRepository extends BaseRepository {

  // "($^)" matches nothing; replaced with a real "<repo>-<number>" pattern in setRepoName().
  private Pattern myPattern = Pattern.compile("($^)");
  @NotNull private String myRepoAuthor = "";
  @NotNull private String myRepoName = "";
  // Login of the authenticated user; lazily resolved via the API when needed.
  @NotNull private String myUser = "";
  private boolean myAssignedIssuesOnly = false;

  // Required by the serializer.
  @SuppressWarnings({"UnusedDeclaration"})
  GithubRepository() {
  }

  // Copy constructor used by clone().
  // NOTE(review): myUser is intentionally(?) not copied here and will be re-resolved lazily — confirm.
  GithubRepository(GithubRepository other) {
    super(other);
    setRepoName(other.myRepoName);
    setRepoAuthor(other.myRepoAuthor);
    setAssignedIssuesOnly(other.myAssignedIssuesOnly);
  }

  GithubRepository(GithubRepositoryType type) {
    super(type);
    setUrl("https://" + GithubServerPath.DEFAULT_HOST);
  }

  /**
   * Connection test: fetches the configured repository once; cancel() aborts via the indicator.
   */
  @NotNull
  @Override
  public CancellableConnection createCancellableConnection() {
    return new CancellableConnection() {
      private final GithubApiRequestExecutor myExecutor = getExecutor();
      private final ProgressIndicator myIndicator = new EmptyProgressIndicator();

      @Override
      protected void doTest() throws Exception {
        try {
          myExecutor.execute(myIndicator, GithubApiRequests.Repos.get(getServer(), getRepoAuthor(), getRepoName()));
        }
        catch (ProcessCanceledException ignore) {
          // Cancellation is not a connection failure.
        }
      }

      @Override
      public void cancel() {
        myIndicator.cancel();
      }
    };
  }

  @Override
  public boolean isConfigured() {
    return super.isConfigured() &&
           !StringUtil.isEmptyOrSpaces(getRepoAuthor()) &&
           !StringUtil.isEmptyOrSpaces(getRepoName()) &&
           !StringUtil.isEmptyOrSpaces(getPassword());
  }

  @Override
  public String getPresentableName() {
    final String name = super.getPresentableName();
    return name +
           (!StringUtil.isEmpty(getRepoAuthor()) ? "/" + getRepoAuthor() : "") +
           (!StringUtil.isEmpty(getRepoName()) ? "/" + getRepoName() : "");
  }

  @Override
  public Task[] getIssues(@Nullable String query, int offset, int limit, boolean withClosed) throws Exception {
    try {
      // Paging is emulated by loading the first offset+limit issues in one go.
      return getIssues(query, offset + limit, withClosed);
    }
    catch (GithubRateLimitExceededException e) {
      // Rate limiting is transient — report "no issues" rather than an error.
      return Task.EMPTY_ARRAY;
    }
    catch (GithubAuthenticationException | GithubStatusCodeException e) {
      throw new Exception(e.getMessage(), e); // Wrap to show error message
    }
    catch (GithubJsonException e) {
      throw new Exception("Bad response format", e);
    }
  }

  @Override
  public Task[] getIssues(@Nullable String query, int offset, int limit, boolean withClosed, @NotNull ProgressIndicator cancelled)
    throws Exception {
    return getIssues(query, offset, limit, withClosed);
  }

  /**
   * Loads up to {@code max} issues (plain listing when query is blank, search API otherwise)
   * and converts each one, together with its comments, into a {@link Task}.
   */
  private Task @NotNull [] getIssues(@Nullable String query, int max, boolean withClosed) throws Exception {
    GithubApiRequestExecutor executor = getExecutor();
    ProgressIndicator indicator = getProgressIndicator();
    GithubServerPath server = getServer();

    String assigned = null;
    if (myAssignedIssuesOnly) {
      if (StringUtil.isEmptyOrSpaces(myUser)) {
        // Resolve and cache the authenticated user's login.
        myUser = executor.execute(indicator, GithubApiRequests.CurrentUser.get(server)).getLogin();
      }
      assigned = myUser;
    }

    List<? extends GithubIssueBase> issues;
    if (StringUtil.isEmptyOrSpaces(query)) {
      // search queries have way smaller request number limit
      issues = GithubIssuesLoadingHelper.load(executor, indicator, server, getRepoAuthor(), getRepoName(), withClosed, max,
                                              assigned);
    }
    else {
      issues = GithubIssuesLoadingHelper.search(executor, indicator, server, getRepoAuthor(), getRepoName(), withClosed, assigned,
                                                query);
    }
    List<Task> tasks = new ArrayList<>();

    for (GithubIssueBase issue : issues) {
      List<GithubIssueCommentWithHtml> comments = GithubApiPagesLoader
        .loadAll(executor, indicator, GithubApiRequests.Repos.Issues.Comments.pages(issue.getCommentsUrl()));
      tasks.add(createTask(issue, comments));
    }

    return tasks.toArray(Task.EMPTY_ARRAY);
  }

  /**
   * Wraps a GitHub issue and its (already loaded) comments into an immutable Task view.
   */
  @NotNull
  private Task createTask(@NotNull GithubIssueBase issue, @NotNull List<GithubIssueCommentWithHtml> comments) {
    return new Task() {
      // Captured eagerly so later repository reconfiguration does not change the task id.
      @NotNull private final String myRepoName = getRepoName();
      private final Comment @NotNull [] myComments =
        ContainerUtil.map2Array(comments, Comment.class,
                                comment -> new GithubComment(comment.getCreatedAt(), comment.getUser().getLogin(),
                                                             comment.getBodyHtml(), comment.getUser().getAvatarUrl(),
                                                             comment.getUser().getHtmlUrl()));

      @Override
      public boolean isIssue() {
        return true;
      }

      @Override
      public String getIssueUrl() {
        return issue.getHtmlUrl();
      }

      @Override
      public @NlsSafe @NotNull String getId() {
        return myRepoName + "-" + issue.getNumber();
      }

      @NotNull
      @Override
      public String getSummary() {
        return issue.getTitle();
      }

      @Override
      public String getDescription() {
        return issue.getBody();
      }

      @Override
      public Comment @NotNull [] getComments() {
        return myComments;
      }

      @NotNull
      @Override
      public Icon getIcon() {
        return AllIcons.Vcs.Vendors.Github;
      }

      @NotNull
      @Override
      public TaskType getType() {
        return TaskType.BUG;
      }

      @Override
      public Date getUpdated() {
        return issue.getUpdatedAt();
      }

      @Override
      public Date getCreated() {
        return issue.getCreatedAt();
      }

      @Override
      public boolean isClosed() {
        return issue.getState() == GithubIssueState.closed;
      }

      @Override
      public TaskRepository getRepository() {
        return GithubRepository.this;
      }

      @Override
      public String getPresentableName() {
        return getId() + ": " + getSummary();
      }
    };
  }

  @Override
  @Nullable
  public String extractId(@NotNull String taskName) {
    Matcher matcher = myPattern.matcher(taskName);
    return matcher.find() ? matcher.group(1) : null;
  }

  /**
   * Resolves a "repo-123"-style id back to a task by fetching issue #123, or null
   * when the id has no numeric suffix or the issue does not exist.
   */
  @Nullable
  @Override
  public Task findTask(@NotNull String id) throws Exception {
    final int index = id.lastIndexOf("-");
    if (index < 0) {
      return null;
    }
    final String numericId = id.substring(index + 1);
    GithubApiRequestExecutor executor = getExecutor();
    ProgressIndicator indicator = getProgressIndicator();
    GithubIssue issue = executor.execute(indicator,
                                         GithubApiRequests.Repos.Issues.get(getServer(), getRepoAuthor(), getRepoName(), numericId));
    if (issue == null) return null;
    List<GithubIssueCommentWithHtml> comments = GithubApiPagesLoader
      .loadAll(executor, indicator, GithubApiRequests.Repos.Issues.Comments.pages(issue.getCommentsUrl()));
    return createTask(issue, comments);
  }

  @Override
  public void setTaskState(@NotNull Task task, @NotNull TaskState state) throws Exception {
    boolean isOpen;
    switch (state) {
      case OPEN:
        isOpen = true;
        break;
      case RESOLVED:
        isOpen = false;
        break;
      default:
        // Only OPEN/RESOLVED are advertised via STATE_UPDATING.
        throw new IllegalStateException("Unknown state: " + state);
    }
    GithubApiRequestExecutor executor = getExecutor();
    GithubServerPath server = getServer();
    String repoAuthor = getRepoAuthor();
    String repoName = getRepoName();
    ProgressIndicator indicator = getProgressIndicator();
    executor.execute(indicator,
                     GithubApiRequests.Repos.Issues.updateState(server, repoAuthor, repoName, task.getNumber(), isOpen));
  }

  @NotNull
  @Override
  public BaseRepository clone() {
    return new GithubRepository(this);
  }

  public @NlsSafe @NotNull String getRepoName() {
    return myRepoName;
  }

  public void setRepoName(@NotNull String repoName) {
    myRepoName = repoName;
    // Task ids look like "<repoName>-<issue number>".
    myPattern = Pattern.compile("(" + StringUtil.escapeToRegexp(repoName) + "\\-\\d+)");
  }

  public @NlsSafe @NotNull String getRepoAuthor() {
    return myRepoAuthor;
  }

  public void setRepoAuthor(@NotNull String repoAuthor) {
    myRepoAuthor = repoAuthor;
  }

  public @NlsSafe @NotNull String getUser() {
    return myUser;
  }

  public void setUser(@NotNull String user) {
    myUser = user;
  }

  /**
   * Stores access token
   */
  @Override
  public void setPassword(String password) {
    super.setPassword(password);
    // A new token may belong to a different account — force re-resolution of the login.
    setUser("");
  }

  public boolean isAssignedIssuesOnly() {
    return myAssignedIssuesOnly;
  }

  public void setAssignedIssuesOnly(boolean value) {
    myAssignedIssuesOnly = value;
  }

  @Override
  @NotNull
  protected CredentialAttributes getAttributes() {
    String serviceName =
      CredentialAttributesKt.generateServiceName("Tasks", getRepositoryType().getName() + " " + getPresentableName());
    return new CredentialAttributes(serviceName, "GitHub OAuth token");
  }

  @NotNull
  private GithubApiRequestExecutor getExecutor() {
    return GithubApiRequestExecutor.Factory.getInstance().create(getPassword(), myUseProxy);
  }

  @NotNull
  private static ProgressIndicator getProgressIndicator() {
    ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
    if (indicator == null) indicator = new EmptyProgressIndicator();
    return indicator;
  }

  @NotNull
  private GithubServerPath getServer() {
    return GithubServerPath.from(getUrl());
  }

  @Override
  public boolean equals(Object o) {
    if (!super.equals(o)) return false;
    if (!(o instanceof GithubRepository)) return false;

    GithubRepository that = (GithubRepository)o;
    if (!Objects.equals(getRepoAuthor(), that.getRepoAuthor())) return false;
    if (!Objects.equals(getRepoName(), that.getRepoName())) return false;
    if (!Comparing.equal(isAssignedIssuesOnly(), that.isAssignedIssuesOnly())) return false;

    return true;
  }

  @Override
  public int hashCode() {
    // Uses a subset of the fields compared in equals(); that keeps the
    // equal-objects-equal-hash contract intact.
    return StringUtil.stringHashCode(getRepoName()) +
           31 * StringUtil.stringHashCode(getRepoAuthor());
  }

  @Override
  protected int getFeatures() {
    return super.getFeatures() | STATE_UPDATING;
  }
}
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package org.openqa.selenium.environment.webserver;

import static com.google.common.net.HttpHeaders.CONTENT_TYPE;
import static com.google.common.net.MediaType.JSON_UTF_8;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.concurrent.TimeUnit.SECONDS;
import static java.util.stream.Collectors.mapping;
import static java.util.stream.Collectors.toList;
import static org.openqa.selenium.remote.http.HttpMethod.GET;
import static org.openqa.selenium.remote.http.HttpMethod.POST;
import static org.openqa.selenium.build.InProject.locate;

import com.google.common.collect.ImmutableMap;
import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpServer;
import org.openqa.selenium.json.Json;
import org.openqa.selenium.net.PortProber;
import org.openqa.selenium.remote.http.HttpClient;
import org.openqa.selenium.remote.http.HttpMethod;
import org.openqa.selenium.remote.http.HttpRequest;
import org.openqa.selenium.remote.http.HttpResponse;

import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Paths;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Predicate;
import java.util.stream.Collectors;

/**
 * A minimal {@link AppServer} built on the JDK's {@link HttpServer}, used by the
 * test environment to serve static content and a handful of special-purpose
 * handlers. Handlers registered later take precedence over earlier ones.
 */
public class JreAppServer implements AppServer {

  private final HttpServer server;
  // Insert order matters: dispatch walks the entries in REVERSE registration
  // order, so later registrations shadow earlier (more generic) ones.
  private final Map<Predicate<HttpRequest>, BiConsumer<HttpRequest, HttpResponse>> mappings =
      new LinkedHashMap<>();

  public JreAppServer() {
    try {
      int port = PortProber.findFreePort();
      server = HttpServer.create(new InetSocketAddress(port), 0);
      // null executor: requests are handled on the server's calling thread.
      server.setExecutor(null);
      server.createContext(
          "/",
          httpExchange -> {
            HttpRequest req = new SunHttpRequest(httpExchange);
            HttpResponse resp = new SunHttpResponse(httpExchange);

            // Most recently registered handler wins.
            List<Predicate<HttpRequest>> reversedKeys = new ArrayList<>(mappings.keySet());
            Collections.reverse(reversedKeys);

            reversedKeys.stream()
                .filter(pred -> pred.test(req))
                .findFirst()
                .map(mappings::get)
                // No handler matched: answer 404 with an empty body.
                .orElseGet(() -> (in, out) -> {
                  out.setStatus(404);
                  out.setContent("".getBytes(UTF_8));
                })
                .accept(req, resp);
          });

      emulateJettyAppServer();
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
  }

  /**
   * Registers the default handlers that mimic the Jetty-based test server:
   * static content plus the endpoints the browser tests rely on.
   */
  protected JreAppServer emulateJettyAppServer() {
    String common = locate("common/src/web").toAbsolutePath().toString();

    // Listed first, so considered last (everything else shadows the catch-all).
    addHandler(
        GET,
        "/",
        new StaticContent(
            path -> Paths.get(common + path)));

    addHandler(GET, "/encoding", new EncodingHandler());
    addHandler(GET, "/page", new PageHandler());
    addHandler(GET, "/redirect", new RedirectHandler(whereIs("/")));
    addHandler(GET, "/sleep", new SleepingHandler());
    addHandler(POST, "/upload", new UploadHandler());

    return this;
  }

  /**
   * Registers {@code handler} for requests whose method matches and whose path
   * starts with {@code url}. Returns {@code this} for chaining.
   */
  public JreAppServer addHandler(
      HttpMethod method,
      String url,
      BiConsumer<HttpRequest, HttpResponse> handler) {
    mappings.put(
        req -> req.getMethod().equals(method) && req.getUri().startsWith(url),
        handler);
    return this;
  }

  @Override
  public void start() {
    server.start();
    // Block until the port is actually accepting connections.
    PortProber.waitForPortUp(server.getAddress().getPort(), 5, SECONDS);
  }

  @Override
  public void stop() {
    server.stop(0);
  }

  @Override
  public String whereIs(String relativeUrl) {
    return createUrl("http", getHostName(), relativeUrl);
  }

  @Override
  public String whereElseIs(String relativeUrl) {
    return createUrl("http", getAlternateHostName(), relativeUrl);
  }

  @Override
  public String whereIsSecure(String relativeUrl) {
    return createUrl("https", getHostName(), relativeUrl);
  }

  @Override
  public String whereIsWithCredentials(String relativeUrl, String user, String password) {
    return String.format
        ("http://%s:%s@%s:%d/%s",
         user,
         password,
         getHostName(),
         server.getAddress().getPort(),
         relativeUrl);
  }

  // Builds an absolute URL for this server, normalizing the leading slash.
  private String createUrl(String protocol, String hostName, String relativeUrl) {
    if (!relativeUrl.startsWith("/")) {
      relativeUrl = "/" + relativeUrl;
    }

    try {
      return new URL(
          protocol,
          hostName,
          server.getAddress().getPort(),
          relativeUrl)
          .toString();
    } catch (MalformedURLException e) {
      throw new UncheckedIOException(e);
    }
  }

  @Override
  public String create(Page page) {
    try {
      byte[] data = new Json()
          .toJson(ImmutableMap.of("content", page.toString()))
          .getBytes(UTF_8);

      HttpClient client = HttpClient.Factory.createDefault().createClient(new URL(whereIs("/")));
      HttpRequest request = new HttpRequest(HttpMethod.POST, "/common/createPage");
      request.setHeader(CONTENT_TYPE, JSON_UTF_8.toString());
      request.setContent(data);
      HttpResponse response = client.execute(request);
      return response.getContentString();
    } catch (IOException ex) {
      throw new RuntimeException(ex);
    }
  }

  @Override
  public String getHostName() {
    return "localhost";
  }

  @Override
  public String getAlternateHostName() {
    throw new UnsupportedOperationException("getAlternateHostName");
  }

  /**
   * Adapts a {@link HttpExchange} to Selenium's {@link HttpRequest}.
   */
  private static class SunHttpRequest extends HttpRequest {

    private final HttpExchange exchange;

    public SunHttpRequest(HttpExchange exchange) {
      super(HttpMethod.valueOf(exchange.getRequestMethod()), exchange.getRequestURI().toString());
      this.exchange = exchange;
    }

    @Override
    public HttpMethod getMethod() {
      return HttpMethod.valueOf(exchange.getRequestMethod());
    }

    @Override
    public String getUri() {
      return exchange.getRequestURI().getPath();
    }

    @Override
    public String getQueryParameter(String name) {
      String query = exchange.getRequestURI().getQuery();
      if (query == null) {
        return null;
      }
      // Parse "k=v&k2=v2" pairs; a key without '=' maps to the empty string.
      HashMap<String, List<String>> params = Arrays.stream(query.split("&"))
          .map(q -> {
            int i = q.indexOf("=");
            if (i == -1) {
              return new AbstractMap.SimpleImmutableEntry<>(q, "");
            }
            return new AbstractMap.SimpleImmutableEntry<>(q.substring(0, i), q.substring(i + 1));
          })
          .collect(Collectors.groupingBy(
              Map.Entry::getKey,
              HashMap::new,
              mapping(Map.Entry::getValue, toList())));

      List<String> values = params.get(name);
      if (values == null || values.isEmpty()) {
        return null;
      }
      return values.get(0);
    }

    @Override
    public Iterable<String> getHeaderNames() {
      return exchange.getRequestHeaders().keySet();
    }

    @Override
    public Iterable<String> getHeaders(String name) {
      return exchange.getRequestHeaders().get(name);
    }

    @Override
    public InputStream consumeContentStream() {
      return exchange.getRequestBody();
    }
  }

  /**
   * Adapts a {@link HttpExchange} to Selenium's {@link HttpResponse}.
   * Made static (was a non-static inner class): it never touches the enclosing
   * JreAppServer, so the implicit outer-instance reference was pure overhead —
   * and this matches its sibling {@link SunHttpRequest}.
   */
  private static class SunHttpResponse extends HttpResponse {

    private final HttpExchange exchange;

    public SunHttpResponse(HttpExchange exchange) {
      this.exchange = exchange;
    }

    @Override
    public void removeHeader(String name) {
      exchange.getResponseHeaders().remove(name);
    }

    @Override
    public void addHeader(String name, String value) {
      exchange.getResponseHeaders().add(name, value);
    }

    @Override
    public void setContent(byte[] data) {
      try {
        setHeader("Content-Length", String.valueOf(data.length));
        // Headers must be flushed before the body is written.
        exchange.sendResponseHeaders(getStatus(), data.length);
        try (OutputStream os = exchange.getResponseBody();
             OutputStream out = new BufferedOutputStream(os)) {
          out.write(data);
        }
      } catch (IOException e) {
        throw new UncheckedIOException(e);
      }
    }
  }

  // Manual smoke test: start the server and print its base URL.
  public static void main(String[] args) {
    JreAppServer server = new JreAppServer();
    server.start();

    System.out.println(server.whereIs("/"));
  }
}
/**
 * Copyright 2013 SmartBear Software, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.smartbear.swagger4j.impl;

import com.smartbear.swagger4j.Api;
import com.smartbear.swagger4j.Operation;
import com.smartbear.swagger4j.Parameter;
import com.smartbear.swagger4j.ResponseMessage;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Default implementation of the Operation interface.
 * <p>
 * Produces/consumes fall back to the owning Api's declaration when no value has
 * been set on the operation itself. Parameter and response-message lists are
 * guarded by synchronized blocks for consistent lookup-then-modify sequences.
 *
 * @see Operation
 */
public class OperationImpl implements Operation {

    private String nickName;
    private Method method;
    private String responseClass;
    private String summary;
    private String notes;
    private final Set<String> produces = new HashSet<String>();
    private final Set<String> consumes = new HashSet<String>();
    private final List<Parameter> parameterList = new ArrayList<Parameter>();
    private final List<ResponseMessage> responseMessages = new ArrayList<ResponseMessage>();
    // The Api that owns this operation; never reassigned, so declared final.
    private final Api api;

    OperationImpl(Api api, String nickName, Method method) {
        this.api = api;
        this.nickName = nickName;
        this.method = method;
    }

    @Override
    public Method getMethod() {
        return method;
    }

    @Override
    public void setMethod(Method method) {
        assert method != null : "method can not be null";
        this.method = method;
    }

    @Override
    public String getNickName() {
        return nickName;
    }

    @Override
    public void setNickName(String nickName) {
        assert nickName != null : "nickName can not be null";
        this.nickName = nickName;
    }

    /**
     * Returns the declared response class, defaulting to "void" when unset.
     */
    @Override
    public String getResponseClass() {
        return responseClass == null ? "void" : responseClass;
    }

    @Override
    public void setResponseClass(String responseClass) {
        this.responseClass = responseClass;
    }

    /**
     * Returns the summary, defaulting to the empty string when unset.
     */
    @Override
    public String getSummary() {
        return summary == null ? "" : summary;
    }

    @Override
    public void setSummary(String summary) {
        this.summary = summary;
    }

    @Override
    public String getNotes() {
        return notes;
    }

    @Override
    public void setNotes(String notes) {
        this.notes = notes;
    }

    /**
     * Returns this operation's produces set, or the enclosing API declaration's
     * produces when none are set here.
     */
    @Override
    public Collection<String> getProduces() {
        if (produces.isEmpty() && getApi() != null && getApi().getApiDeclaration() != null) {
            return getApi().getApiDeclaration().getProduces();
        }

        return Collections.unmodifiableCollection(produces);
    }

    @Override
    public void removeProduces(String produces) {
        this.produces.remove(produces);
    }

    @Override
    public void addProduces(String produces) {
        assert produces != null : "produces can not be null";
        this.produces.add(produces);
    }

    /**
     * Returns this operation's consumes set, or the enclosing API declaration's
     * consumes when none are set here.
     */
    @Override
    public Collection<String> getConsumes() {
        if (consumes.isEmpty() && getApi() != null && getApi().getApiDeclaration() != null) {
            return getApi().getApiDeclaration().getConsumes();
        }

        return Collections.unmodifiableCollection(consumes);
    }

    @Override
    public void removeConsumes(String consumes) {
        // BUG FIX: previously removed from the produces set (copy-paste error),
        // which both left the consumes entry in place and could silently drop a
        // produces entry with the same name.
        this.consumes.remove(consumes);
    }

    @Override
    public void addConsumes(String consumes) {
        assert consumes != null : "consumes can not be null";
        this.consumes.add(consumes);
    }

    @Override
    public List<Parameter> getParameters() {
        return Collections.unmodifiableList(parameterList);
    }

    /**
     * Finds a parameter by name, or null when absent.
     */
    @Override
    public Parameter getParameter(String name) {
        assert name != null : "parameter name can not be null";

        synchronized (parameterList) {
            for (Parameter parameter : parameterList) {
                if (parameter.getName().equals(name)) {
                    return parameter;
                }
            }

            return null;
        }
    }

    @Override
    public void removeParameter(Parameter parameter) {
        assert parameter != null : "parameter can not be null";

        synchronized (parameterList) {
            parameterList.remove(parameter);
        }
    }

    /**
     * Adds a parameter; non-body parameters must be uniquely named.
     */
    @Override
    public Parameter addParameter(String name, Parameter.ParamType type) {
        assert type != null : "parameter must be created with type";
        if (type != Parameter.ParamType.body) {
            // body parameters are the only kind allowed to be nameless
            assert name != null : "parameter that is not a body must have a name";
            assert getParameter(name) == null : "Parameter already exists with name [" + name + "]";
        }

        synchronized (parameterList) {
            ParameterImpl parameter = new ParameterImpl(name, type);
            parameterList.add(parameter);
            return parameter;
        }
    }

    @Override
    public List<ResponseMessage> getResponseMessages() {
        return Collections.unmodifiableList(responseMessages);
    }

    /**
     * Finds a response message by HTTP status code, or null when absent.
     */
    @Override
    public ResponseMessage getResponseMessage(int code) {
        assert code > 0 : "code can not be 0";

        synchronized (responseMessages) {
            for (ResponseMessage responseMessage : responseMessages) {
                if (responseMessage.getCode() == code) {
                    return responseMessage;
                }
            }

            return null;
        }
    }

    @Override
    public void removeResponseMessage(ResponseMessage responseMessage) {
        assert responseMessage != null;

        synchronized (responseMessages) {
            responseMessages.remove(responseMessage);
        }
    }

    /**
     * Adds a response message; codes must be unique per operation.
     */
    @Override
    public ResponseMessage addResponseMessage(int code, String message) {
        assert code > 0 : "code must have a value";
        assert getResponseMessage(code) == null : "Response already exists for code [" + code + "]";

        synchronized (responseMessages) {
            ResponseMessage responseMessage = new ResponseMessageImpl(code, message);
            responseMessages.add(responseMessage);
            return responseMessage;
        }
    }

    public Api getApi() {
        return api;
    }
}
/*
 * Copyright (c) 2006 Pyxis Technologies inc.
 *
 * This is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA,
 * or see the FSF site: http://www.fsf.org.
 */
package com.greenpepper.interpreter.collection;

import com.greenpepper.Example;
import com.greenpepper.interpreter.CollectionHeaderForm;
import com.greenpepper.interpreter.column.Column;
import com.greenpepper.reflect.Fixture;
import com.greenpepper.reflect.PlainOldFixture;
import com.greenpepper.util.Tables;
import junit.framework.TestCase;

import java.util.ArrayList;
import java.util.List;

/**
 * Tests for {@code RowFixtureSplitter}: splitting specification rows into
 * matched, missing and surplus sets against a collection of fixtures.
 * <p>
 * Note: deprecated {@code new Integer(...)}/{@code new String(...)} wrapper
 * constructors were replaced with literals; value semantics are unchanged.
 */
public class RowFixtureSplitterTest extends TestCase {

    /** Rows and fixtures with identical values should all be matched, regardless of order. */
    public void testThatRowsAndFixturesAreMatched() throws Exception {
        ArrayList<Fixture> adapters = new ArrayList<Fixture>();
        adapters.add( new PlainOldFixture(1) );
        adapters.add( new PlainOldFixture(3) );
        adapters.add( new PlainOldFixture(2) );

        Example example = Tables.parse( "[intValue]\n" + "[1]\n" + "[2]\n" + "[3]" ).at( 0, 0 );
        RowFixtureSplitter splitter = new RowFixtureSplitter();
        Column[] result = getHeaderColumns(example);
        splitter.split( example.at( 1 ), adapters, result);

        List<RowFixture> matchees = splitter.getMatch();
        assertEquals( 3, matchees.size() );

        RowFixture match = matchees.get( 0 );
        assertEquals( "1", match.getRow().firstChild().getContent() );
        assertEquals( Integer.valueOf( 1 ), match.getAdapter().check( "intValue" ).send() );

        match = matchees.get( 1 );
        assertEquals( "2", match.getRow().firstChild().getContent() );
        assertEquals( Integer.valueOf( 2 ), match.getAdapter().check( "intValue" ).send() );
    }

    /** Parses the header row of a table into the corresponding columns. */
    private Column[] getHeaderColumns(Example headersRow) throws Exception {
        ArrayList<Column> columns = new ArrayList<Column>();
        final Example headers = headersRow.firstChild();
        for (Example header : headers) {
            columns.add(CollectionHeaderForm.parse(header.getContent()).selectColumn());
        }
        return columns.toArray(new Column[columns.size()]);
    }

    /** Rows without a corresponding fixture end up in the missing set. */
    public void testThatMissingRowsAreProcessed() throws Exception {
        ArrayList<Fixture> adapters = new ArrayList<Fixture>();
        adapters.add( new PlainOldFixture(1) );
        adapters.add( new PlainOldFixture(3) );

        Example example = Tables.parse( "[intValue]\n" + "[1]\n" + "[2]\n" + "[3]" ).at( 0, 0 );
        RowFixtureSplitter splitter = new RowFixtureSplitter();
        Column[] result = getHeaderColumns(example);
        splitter.split( example.at( 1 ), adapters, result );

        List<RowFixture> matchees = splitter.getMatch();
        assertEquals( 2, matchees.size() );
        assertEquals( 1, splitter.getMissing().size() );
        assertEquals( "2", splitter.getMissing().get( 0 ).firstChild().getContent() );
    }

    /** Fixtures without a corresponding row end up in the surplus set. */
    public void testThatSurplusRowsAreProcessed() throws Exception {
        ArrayList<Fixture> adapters = new ArrayList<Fixture>();
        adapters.add( new PlainOldFixture(1) );
        adapters.add( new PlainOldFixture(2) );
        adapters.add( new PlainOldFixture(3) );

        Example example = Tables.parse( "[intValue]\n" + "[1]\n" + "[3]" ).at( 0, 0 );
        RowFixtureSplitter splitter = new RowFixtureSplitter();
        Column[] result = getHeaderColumns(example);
        splitter.split( example.at( 1 ), adapters, result );

        List<RowFixture> matchees = splitter.getMatch();
        assertEquals( 2, matchees.size() );
        assertEquals( 0, splitter.getMissing().size() );
        assertEquals( 1, splitter.getSurplus().size() );
    }

    /** Duplicate values are matched one-to-one; extras become missing/surplus. */
    public void testThatMultipleOccurencesOfItemInSpecification() throws Exception {
        ArrayList<Fixture> adapters = new ArrayList<Fixture>();
        adapters.add( new PlainOldFixture("Big Mac") );
        adapters.add( new PlainOldFixture("Frites") );
        adapters.add( new PlainOldFixture("Coke") );
        adapters.add( new PlainOldFixture("Coke") );

        Example example = Tables.parse(
                "[toString]\n" + "[Big Mac]\n" + "[Big Mac]\n" + "[Frites]\n" + "[Coke]" ).at( 0, 0 );
        RowFixtureSplitter splitter = new RowFixtureSplitter();
        Column[] result = getHeaderColumns(example);
        splitter.split( example.at( 1 ), adapters, result );

        assertEquals( 3, splitter.getMatch().size() );
        assertEquals( 1, splitter.getMissing().size() );
        assertEquals( 1, splitter.getSurplus().size() );
        assertEquals( "Big Mac", splitter.getMissing().get( 0 ).firstChild().getContent() );
        assertEquals( "Coke", splitter.getSurplus().get( 0 ).check( "toString" ).send() );
    }

    /** Matching considers every cell of a row, not just the first one. */
    public void testThatMatchesOnMoreThanOneCellSpecification() throws Exception {
        ArrayList<Fixture> adapters = new ArrayList<Fixture>();
        adapters.add( new PlainOldFixture(new EmployeName( "Lapointe", "Christian" )) );
        adapters.add( new PlainOldFixture(new EmployeName( "Rochambeau", "Fabrice" )) );
        adapters.add( new PlainOldFixture(new EmployeName( "Carrey", "Gilles" )) );

        Example example = Tables.parse(
                "[last][first]\n" + "[Lapointe][Christian]\n" + "[Rochambeau][Fabrice]\n" + "[Carrey][Gilles]" ).at( 0, 0 );
        RowFixtureSplitter splitter = new RowFixtureSplitter();
        Column[] result = getHeaderColumns(example);
        splitter.split( example.at( 1 ), adapters, result );

        assertEquals( 3, splitter.getMatch().size() );
        assertEquals( 0, splitter.getMissing().size() );
        assertEquals( 0, splitter.getSurplus().size() );
        assertEquals( "Lapointe", splitter.getMatch().get( 0 ).getRow().firstChild().getContent() );
        // assertEquals("Lapointe", splitter.getMatch().get(0).getAdapter().getQuery("last").send());
    }

    /** Partial matches on multi-cell rows still produce missing and surplus entries. */
    public void testThatSurplusAndMissingOnThanOneCellSpecification() throws Exception {
        ArrayList<Fixture> adapters = new ArrayList<Fixture>();
        adapters.add( new PlainOldFixture(new EmployeName( "Lapointe", "Jean-Christophe" )) );
        adapters.add( new PlainOldFixture(new EmployeName( "Muller", "Fabrice" )) );
        adapters.add( new PlainOldFixture(new EmployeName( "Carrey", "Gilles" )) );
        adapters.add( new PlainOldFixture(new EmployeName( "Rochambeau", "Benjamin" )) );

        Example example = Tables.parse(
                "[last][first]\n" + "[Lapointe][Christian]\n" + "[Lapointe][Jean-Christophe]\n"
                        + "[Rochambeau][Patrice]\n" + "[Carrey][Gilles]" ).at( 0, 0 );
        RowFixtureSplitter splitter = new RowFixtureSplitter();
        Column[] result = getHeaderColumns(example);
        splitter.split( example.at( 1 ), adapters, result );

        assertEquals( 2, splitter.getMatch().size() );
        assertEquals( 2, splitter.getMissing().size() );
        assertEquals( 2, splitter.getSurplus().size() );
        assertEquals( "Jean-Christophe", splitter.getMatch().get( 0 ).getRow().at( 0, 1 ).getContent() );
        assertEquals( "Christian", splitter.getMissing().get( 0 ).at( 0, 1 ).getContent() );
        assertEquals( "Fabrice", splitter.getSurplus().get( 0 ).check( "first" ).send() );
    }

    /** Expected columns (header ending with '?') must not participate in matching. */
    public void testThatExpectedColumnAreNotConsideredInMatching() throws Exception {
        ArrayList<Fixture> adapters = new ArrayList<Fixture>();
        adapters.add( new PlainOldFixture(new EmployeName( "Lapointe", "Jean-Christophe", true )) );
        adapters.add( new PlainOldFixture(new EmployeName( "Carrey", "Gilles", false )) );

        Example example = Tables.parse(
                "[last][first][is developper][is developper?]\n"
                        + "[Lapointe][Jean-Christophe][true][false]\n" + "[Carrey][Gilles][false][true]" ).at( 0, 0 );
        RowFixtureSplitter splitter = new RowFixtureSplitter();
        Column[] result = getHeaderColumns(example);
        splitter.split( example.at( 1 ), adapters, result );

        assertEquals( 2, splitter.getMatch().size() );
        assertEquals( 0, splitter.getMissing().size() );
        assertEquals( 0, splitter.getSurplus().size() );
        assertEquals( "Jean-Christophe", splitter.getMatch().get( 0 ).getRow().at( 0, 1 ).getContent() );
    }

    /** Simple public-field bean used as a multi-property fixture target. */
    public static class EmployeName {
        public String last;
        public String first;
        public boolean isDevelopper;

        public EmployeName( String last, String first ) {
            this(last, first, false);
        }

        public EmployeName( String last, String first, boolean isDevelopper ) {
            this.isDevelopper = isDevelopper;
            this.last = last;
            this.first = first;
        }
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.internal; import org.apache.lucene.search.Explanation; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.highlight.HighlightField; import org.elasticsearch.search.internal.InternalSearchHits.StreamContext.ShardTargetType; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; import 
java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.lucene.Lucene.readExplanation;
import static org.elasticsearch.common.lucene.Lucene.writeExplanation;
import static org.elasticsearch.search.highlight.HighlightField.readHighlightField;
import static org.elasticsearch.search.internal.InternalSearchHitField.readSearchHitField;

/**
 * Concrete {@link SearchHit}: a single document returned by a search, carrying
 * id/type/score, the (possibly compressed) {@code _source}, stored fields,
 * highlighting, sort values, matched queries and optional inner hits. Supports
 * binary transport ({@code readFrom}/{@code writeTo}) and JSON rendering
 * ({@code toXContent}).
 */
public class InternalSearchHit implements SearchHit {

    private static final Object[] EMPTY_SORT_VALUES = new Object[0];

    // Lucene doc id; transient because it is only meaningful on the shard that produced the hit.
    private transient int docId;

    private float score = Float.NEGATIVE_INFINITY;

    private Text id;
    private Text type;

    private InternalNestedIdentity nestedIdentity;

    private long version = -1;

    // Raw _source bytes; may be compressed until sourceRef() is called.
    private BytesReference source;

    private Map<String, SearchHitField> fields = emptyMap();

    private Map<String, HighlightField> highlightFields = null;

    private Object[] sortValues = EMPTY_SORT_VALUES;

    private String[] matchedQueries = Strings.EMPTY_ARRAY;

    private Explanation explanation;

    @Nullable
    private SearchShardTarget shard;

    // Lazily-computed views of 'source'.
    private Map<String, Object> sourceAsMap;
    private byte[] sourceAsBytes;

    private Map<String, InternalSearchHits> innerHits;

    private InternalSearchHit() {
    }

    public InternalSearchHit(int docId, String id, Text type, Map<String, SearchHitField> fields) {
        this.docId = docId;
        this.id = new Text(id);
        this.type = type;
        this.fields = fields;
    }

    public InternalSearchHit(int nestedTopDocId, String id, Text type, InternalNestedIdentity nestedIdentity,
                             Map<String, SearchHitField> fields) {
        this.docId = nestedTopDocId;
        this.id = new Text(id);
        this.type = type;
        this.nestedIdentity = nestedIdentity;
        this.fields = fields;
    }

    public int docId() {
        return this.docId;
    }

    /** Sets the shard target on this hit and propagates it to any inner hits. */
    public void shardTarget(SearchShardTarget shardTarget) {
        this.shard = shardTarget;
        if (innerHits != null) {
            for (InternalSearchHits searchHits : innerHits.values()) {
                searchHits.shardTarget(shardTarget);
            }
        }
    }

    public void score(float score) {
        this.score = score;
    }

    @Override
    public float score() {
        return this.score;
    }

    @Override
    public float getScore() {
        return score();
    }

    public void version(long version) {
        this.version = version;
    }

    @Override
    public long version() {
        return this.version;
    }

    @Override
    public long getVersion() {
        return this.version;
    }

    @Override
    public String index() {
        return shard.index();
    }

    @Override
    public String getIndex() {
        return index();
    }

    @Override
    public String id() {
        return id.string();
    }

    @Override
    public String getId() {
        return id();
    }

    @Override
    public String type() {
        return type.string();
    }

    @Override
    public String getType() {
        return type();
    }

    @Override
    public NestedIdentity getNestedIdentity() {
        return nestedIdentity;
    }

    /**
     * Returns bytes reference, also un compress the source if needed.
     */
    @Override
    public BytesReference sourceRef() {
        try {
            this.source = CompressorFactory.uncompressIfNeeded(this.source);
            return this.source;
        } catch (IOException e) {
            throw new ElasticsearchParseException("failed to decompress source", e);
        }
    }

    /**
     * Sets representation, might be compressed....
     */
    public InternalSearchHit sourceRef(BytesReference source) {
        this.source = source;
        this.sourceAsBytes = null;
        this.sourceAsMap = null;
        return this;
    }

    @Override
    public BytesReference getSourceRef() {
        return sourceRef();
    }

    /**
     * Internal source representation, might be compressed....
     */
    public BytesReference internalSourceRef() {
        return source;
    }

    @Override
    public byte[] source() {
        if (source == null) {
            return null;
        }
        if (sourceAsBytes != null) {
            return sourceAsBytes;
        }
        this.sourceAsBytes = BytesReference.toBytes(sourceRef());
        return this.sourceAsBytes;
    }

    @Override
    public boolean hasSource() {
        // BUG FIX: previously returned 'source == null', i.e. the opposite of
        // what the method name promises — a hit WITH a source reported false.
        return source != null;
    }

    @Override
    public Map<String, Object> getSource() {
        return sourceAsMap();
    }

    @Override
    public String sourceAsString() {
        if (source == null) {
            return null;
        }
        try {
            return XContentHelper.convertToJson(sourceRef(), false);
        } catch (IOException e) {
            // BUG FIX: preserve the cause instead of silently dropping it.
            throw new ElasticsearchParseException("failed to convert source to a json string", e);
        }
    }

    @Override
    public String getSourceAsString() {
        return sourceAsString();
    }

    @SuppressWarnings({"unchecked"})
    @Override
    public Map<String, Object> sourceAsMap() throws ElasticsearchParseException {
        if (source == null) {
            return null;
        }
        if (sourceAsMap != null) {
            return sourceAsMap;
        }
        sourceAsMap = SourceLookup.sourceAsMap(source);
        return sourceAsMap;
    }

    @Override
    public Iterator<SearchHitField> iterator() {
        return fields.values().iterator();
    }

    @Override
    public SearchHitField field(String fieldName) {
        return fields().get(fieldName);
    }

    @Override
    public Map<String, SearchHitField> fields() {
        return fields == null ? emptyMap() : fields;
    }

    // returns the fields without handling null cases
    public Map<String, SearchHitField> fieldsOrNull() {
        return fields;
    }

    @Override
    public Map<String, SearchHitField> getFields() {
        return fields();
    }

    public void fields(Map<String, SearchHitField> fields) {
        this.fields = fields;
    }

    public Map<String, HighlightField> internalHighlightFields() {
        return highlightFields;
    }

    @Override
    public Map<String, HighlightField> highlightFields() {
        return highlightFields == null ? emptyMap() : highlightFields;
    }

    @Override
    public Map<String, HighlightField> getHighlightFields() {
        return highlightFields();
    }

    public void highlightFields(Map<String, HighlightField> highlightFields) {
        this.highlightFields = highlightFields;
    }

    /** Copies the sort values, rendering BytesRef values through their DocValueFormat. */
    public void sortValues(Object[] sortValues, DocValueFormat[] sortValueFormats) {
        this.sortValues = Arrays.copyOf(sortValues, sortValues.length);
        for (int i = 0; i < sortValues.length; ++i) {
            if (this.sortValues[i] instanceof BytesRef) {
                this.sortValues[i] = sortValueFormats[i].format((BytesRef) sortValues[i]);
            }
        }
    }

    @Override
    public Object[] sortValues() {
        return sortValues;
    }

    @Override
    public Object[] getSortValues() {
        return sortValues();
    }

    @Override
    public Explanation explanation() {
        return explanation;
    }

    @Override
    public Explanation getExplanation() {
        return explanation();
    }

    public void explanation(Explanation explanation) {
        this.explanation = explanation;
    }

    @Override
    public SearchShardTarget shard() {
        return shard;
    }

    @Override
    public SearchShardTarget getShard() {
        return shard();
    }

    public void shard(SearchShardTarget target) {
        this.shard = target;
    }

    public void matchedQueries(String[] matchedQueries) {
        this.matchedQueries = matchedQueries;
    }

    @Override
    public String[] matchedQueries() {
        return this.matchedQueries;
    }

    @Override
    public String[] getMatchedQueries() {
        return this.matchedQueries;
    }

    @Override
    @SuppressWarnings("unchecked")
    public Map<String, SearchHits> getInnerHits() {
        return (Map) innerHits;
    }

    public void setInnerHits(Map<String, InternalSearchHits> innerHits) {
        this.innerHits = innerHits;
    }

    /** JSON field-name constants used by {@link #toXContent}. */
    public static class Fields {
        static final String _INDEX = "_index";
        static final String _TYPE = "_type";
        static final String _ID = "_id";
        static final String _VERSION = "_version";
        static final String _SCORE = "_score";
        static final String FIELDS = "fields";
        static final String HIGHLIGHT = "highlight";
        static final String SORT = "sort";
        static final String MATCHED_QUERIES = "matched_queries";
        static final String _EXPLANATION = "_explanation";
        static final String VALUE = "value";
        static final String DESCRIPTION = "description";
        static final String DETAILS = "details";
        static final String INNER_HITS = "inner_hits";
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // Split stored fields: metadata fields are emitted at the top level,
        // everything else under the "fields" object.
        List<SearchHitField> metaFields = new ArrayList<>();
        List<SearchHitField> otherFields = new ArrayList<>();
        if (fields != null && !fields.isEmpty()) {
            for (SearchHitField field : fields.values()) {
                if (field.values().isEmpty()) {
                    continue;
                }
                if (field.isMetadataField()) {
                    metaFields.add(field);
                } else {
                    otherFields.add(field);
                }
            }
        }

        builder.startObject();
        // For inner_hit hits shard is null and that is ok, because the parent search hit has all this information.
        // Even if this was included in the inner_hit hits this would be the same, so better leave it out.
        if (explanation() != null && shard != null) {
            builder.field("_shard", shard.shardId());
            builder.field("_node", shard.nodeIdText());
        }
        if (nestedIdentity != null) {
            nestedIdentity.toXContent(builder, params);
        } else {
            if (shard != null) {
                builder.field(Fields._INDEX, shard.indexText());
            }
            builder.field(Fields._TYPE, type);
            builder.field(Fields._ID, id);
        }
        if (version != -1) {
            builder.field(Fields._VERSION, version);
        }
        if (Float.isNaN(score)) {
            builder.nullField(Fields._SCORE);
        } else {
            builder.field(Fields._SCORE, score);
        }
        for (SearchHitField field : metaFields) {
            builder.field(field.name(), (Object) field.value());
        }
        if (source != null) {
            XContentHelper.writeRawField("_source", source, builder, params);
        }
        if (!otherFields.isEmpty()) {
            builder.startObject(Fields.FIELDS);
            for (SearchHitField field : otherFields) {
                builder.startArray(field.name());
                for (Object value : field.getValues()) {
                    builder.value(value);
                }
                builder.endArray();
            }
            builder.endObject();
        }
        if (highlightFields != null && !highlightFields.isEmpty()) {
            builder.startObject(Fields.HIGHLIGHT);
            for (HighlightField field : highlightFields.values()) {
                builder.field(field.name());
                if (field.fragments() == null) {
                    builder.nullValue();
                } else {
                    builder.startArray();
                    for (Text fragment : field.fragments()) {
                        builder.value(fragment);
                    }
                    builder.endArray();
                }
            }
            builder.endObject();
        }
        if (sortValues != null && sortValues.length > 0) {
            builder.startArray(Fields.SORT);
            for (Object sortValue : sortValues) {
                builder.value(sortValue);
            }
            builder.endArray();
        }
        if (matchedQueries.length > 0) {
            builder.startArray(Fields.MATCHED_QUERIES);
            for (String matchedFilter : matchedQueries) {
                builder.value(matchedFilter);
            }
            builder.endArray();
        }
        if (explanation() != null) {
            builder.field(Fields._EXPLANATION);
            buildExplanation(builder, explanation());
        }
        if (innerHits != null) {
            builder.startObject(Fields.INNER_HITS);
            for (Map.Entry<String, InternalSearchHits> entry : innerHits.entrySet()) {
                builder.startObject(entry.getKey());
                entry.getValue().toXContent(builder, params);
                builder.endObject();
            }
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }

    /** Recursively renders an {@link Explanation} tree. */
    private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException {
        builder.startObject();
        builder.field(Fields.VALUE, explanation.getValue());
        builder.field(Fields.DESCRIPTION, explanation.getDescription());
        Explanation[] innerExps = explanation.getDetails();
        if (innerExps != null) {
            builder.startArray(Fields.DETAILS);
            for (Explanation exp : innerExps) {
                buildExplanation(builder, exp);
            }
            builder.endArray();
        }
        builder.endObject();
    }

    public static InternalSearchHit readSearchHit(StreamInput in,
                                                  InternalSearchHits.StreamContext context) throws IOException {
        InternalSearchHit hit = new InternalSearchHit();
        hit.readFrom(in, context);
        return hit;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        readFrom(in, InternalSearchHits.streamContext().streamShardTarget(ShardTargetType.STREAM));
    }

    // NOTE: field order here must mirror writeTo(out, context) exactly.
    public void readFrom(StreamInput in, InternalSearchHits.StreamContext context) throws IOException {
        score = in.readFloat();
        id = in.readText();
        type = in.readText();
        nestedIdentity = in.readOptionalStreamable(InternalNestedIdentity::new);
        version = in.readLong();
        source = in.readBytesReference();
        if (source.length() == 0) {
            source = null;
        }
        if (in.readBoolean()) {
            explanation = readExplanation(in);
        }
        int size = in.readVInt();
        if (size == 0) {
            fields = emptyMap();
        } else if (size == 1) {
            SearchHitField hitField = readSearchHitField(in);
            fields = singletonMap(hitField.name(), hitField);
        } else {
            Map<String, SearchHitField> fields = new HashMap<>();
            for (int i = 0; i < size; i++) {
                SearchHitField hitField = readSearchHitField(in);
                fields.put(hitField.name(), hitField);
            }
            this.fields = unmodifiableMap(fields);
        }

        size = in.readVInt();
        if (size == 0) {
            highlightFields = emptyMap();
        } else if (size == 1) {
            HighlightField field = readHighlightField(in);
            highlightFields = singletonMap(field.name(), field);
        } else {
            Map<String, HighlightField> highlightFields = new HashMap<>();
            for (int i = 0; i < size; i++) {
                HighlightField field = readHighlightField(in);
                highlightFields.put(field.name(), field);
            }
            this.highlightFields = unmodifiableMap(highlightFields);
        }

        size = in.readVInt();
        if (size > 0) {
            // Each sort value is tagged with a byte discriminating its type;
            // the tags must match the ones written in writeTo.
            sortValues = new Object[size];
            for (int i = 0; i < sortValues.length; i++) {
                byte type = in.readByte();
                if (type == 0) {
                    sortValues[i] = null;
                } else if (type == 1) {
                    sortValues[i] = in.readString();
                } else if (type == 2) {
                    sortValues[i] = in.readInt();
                } else if (type == 3) {
                    sortValues[i] = in.readLong();
                } else if (type == 4) {
                    sortValues[i] = in.readFloat();
                } else if (type == 5) {
                    sortValues[i] = in.readDouble();
                } else if (type == 6) {
                    sortValues[i] = in.readByte();
                } else if (type == 7) {
                    sortValues[i] = in.readShort();
                } else if (type == 8) {
                    sortValues[i] = in.readBoolean();
                } else {
                    throw new IOException("Can't match type [" + type + "]");
                }
            }
        }

        size = in.readVInt();
        if (size > 0) {
            matchedQueries = new String[size];
            for (int i = 0; i < size; i++) {
                matchedQueries[i] = in.readString();
            }
        }

        if (context.streamShardTarget() == ShardTargetType.STREAM) {
            if (in.readBoolean()) {
                shard = new SearchShardTarget(in);
            }
        } else if (context.streamShardTarget() == ShardTargetType.LOOKUP) {
            int lookupId = in.readVInt();
            if (lookupId > 0) {
                shard = context.handleShardLookup().get(lookupId);
            }
        }

        size = in.readVInt();
        if (size > 0) {
            innerHits = new HashMap<>(size);
            for (int i = 0; i < size; i++) {
                String key = in.readString();
                // Inner hits never stream their own shard target; restore the
                // context's mode afterwards.
                ShardTargetType shardTarget = context.streamShardTarget();
                InternalSearchHits value =
                        InternalSearchHits.readSearchHits(in, context.streamShardTarget(ShardTargetType.NO_STREAM));
                context.streamShardTarget(shardTarget);
                innerHits.put(key, value);
            }
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        writeTo(out, InternalSearchHits.streamContext().streamShardTarget(ShardTargetType.STREAM));
    }

    // NOTE: field order here must mirror readFrom(in, context) exactly.
    public void writeTo(StreamOutput out, InternalSearchHits.StreamContext context) throws IOException {
        out.writeFloat(score);
        out.writeText(id);
        out.writeText(type);
        out.writeOptionalStreamable(nestedIdentity);
        out.writeLong(version);
        out.writeBytesReference(source);
        if (explanation == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            writeExplanation(out, explanation);
        }
        if (fields == null) {
            out.writeVInt(0);
        } else {
            out.writeVInt(fields.size());
            for (SearchHitField hitField : fields().values()) {
                hitField.writeTo(out);
            }
        }
        if (highlightFields == null) {
            out.writeVInt(0);
        } else {
            out.writeVInt(highlightFields.size());
            for (HighlightField highlightField : highlightFields.values()) {
                highlightField.writeTo(out);
            }
        }
        if (sortValues.length == 0) {
            out.writeVInt(0);
        } else {
            out.writeVInt(sortValues.length);
            for (Object sortValue : sortValues) {
                if (sortValue == null) {
                    out.writeByte((byte) 0);
                } else {
                    Class<?> type = sortValue.getClass();
                    if (type == String.class) {
                        out.writeByte((byte) 1);
                        out.writeString((String) sortValue);
                    } else if (type == Integer.class) {
                        out.writeByte((byte) 2);
                        out.writeInt((Integer) sortValue);
                    } else if (type == Long.class) {
                        out.writeByte((byte) 3);
                        out.writeLong((Long) sortValue);
                    } else if (type == Float.class) {
                        out.writeByte((byte) 4);
                        out.writeFloat((Float) sortValue);
                    } else if (type == Double.class) {
                        out.writeByte((byte) 5);
                        out.writeDouble((Double) sortValue);
                    } else if (type == Byte.class) {
                        out.writeByte((byte) 6);
                        out.writeByte((Byte) sortValue);
                    } else if (type == Short.class) {
                        out.writeByte((byte) 7);
                        out.writeShort((Short) sortValue);
                    } else if (type == Boolean.class) {
                        out.writeByte((byte) 8);
                        out.writeBoolean((Boolean) sortValue);
                    } else {
                        throw new IOException("Can't handle sort field value of type [" + type + "]");
                    }
                }
            }
        }

        if (matchedQueries.length == 0) {
            out.writeVInt(0);
        } else {
            out.writeVInt(matchedQueries.length);
            for (String matchedFilter : matchedQueries) {
                out.writeString(matchedFilter);
            }
        }

        if (context.streamShardTarget() == ShardTargetType.STREAM) {
            if (shard == null) {
                out.writeBoolean(false);
            } else {
                out.writeBoolean(true);
                shard.writeTo(out);
            }
        } else if (context.streamShardTarget() == ShardTargetType.LOOKUP) {
            if (shard == null) {
                out.writeVInt(0);
            } else {
                out.writeVInt(context.shardHandleLookup().get(shard));
            }
        }

        if (innerHits == null) {
            out.writeVInt(0);
        } else {
            out.writeVInt(innerHits.size());
            for (Map.Entry<String, InternalSearchHits> entry : innerHits.entrySet()) {
                out.writeString(entry.getKey());
                ShardTargetType shardTarget = context.streamShardTarget();
                entry.getValue().writeTo(out, context.streamShardTarget(ShardTargetType.NO_STREAM));
                context.streamShardTarget(shardTarget);
            }
        }
    }

    /** Identifies the nested-document path/offset chain a nested hit belongs to. */
    public static final class InternalNestedIdentity implements NestedIdentity, Streamable, ToXContent {

        private Text field;
        private int offset;
        private InternalNestedIdentity child;

        public InternalNestedIdentity(String field, int offset, InternalNestedIdentity child) {
            this.field = new Text(field);
            this.offset = offset;
            this.child = child;
        }

        InternalNestedIdentity() {
        }

        @Override
        public Text getField() {
            return field;
        }

        @Override
        public int getOffset() {
            return offset;
        }

        @Override
        public NestedIdentity getChild() {
            return child;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            field = in.readOptionalText();
            offset = in.readInt();
            child = in.readOptionalStreamable(InternalNestedIdentity::new);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeOptionalText(field);
            out.writeInt(offset);
            out.writeOptionalStreamable(child);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject(Fields._NESTED);
            if (field != null) {
                builder.field(Fields._NESTED_FIELD, field);
            }
            if (offset != -1) {
                builder.field(Fields._NESTED_OFFSET, offset);
            }
            if (child != null) {
                builder = child.toXContent(builder, params);
            }
            builder.endObject();
            return builder;
        }

        public static class Fields {
            static final String _NESTED = "_nested";
            static final String _NESTED_FIELD = "field";
            static final String _NESTED_OFFSET = "offset";
        }
    }
}
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.android_webview.test;

import static org.chromium.android_webview.test.OnlyRunIn.ProcessMode.SINGLE_PROCESS;

import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;

import androidx.test.filters.MediumTest;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;

import org.chromium.android_webview.common.AwSwitches;
import org.chromium.android_webview.common.variations.VariationsServiceMetricsHelper;
import org.chromium.android_webview.common.variations.VariationsUtils;
import org.chromium.android_webview.test.services.MockVariationsSeedServer;
import org.chromium.android_webview.test.util.VariationsTestUtils;
import org.chromium.android_webview.variations.VariationsSeedLoader;
import org.chromium.base.ContextUtils;
import org.chromium.base.metrics.RecordHistogram;
import org.chromium.base.test.util.CallbackHelper;
import org.chromium.base.test.util.CommandLineFlags;

import java.io.File;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * Test VariationsSeedLoader.
 *
 * Each test arranges a particular combination of on-disk seed files ("old" seed at
 * VariationsUtils.getSeedFile(), "new" seed at VariationsUtils.getNewSeedFile()), runs the
 * loader once on the UI thread, and asserts whether the loader decided to request a fresh
 * seed from the (mock) seed service.
 */
@RunWith(AwJUnit4ClassRunner.class)
@OnlyRunIn(SINGLE_PROCESS)
public class VariationsSeedLoaderTest {
    // Fixed "now" injected into the loader via TestLoader.getCurrentTimeMillis().
    private static final long CURRENT_TIME_MILLIS = 1234567890;
    // NOTE(review): currently unused — tests express "expired" via setLastModified(0) directly.
    private static final long EXPIRED_TIMESTAMP = 0;
    // Seed-load timeout used by TestLoader; generous so slow bots don't flake.
    private static final long TIMEOUT_MILLIS = 10000;

    // Needed for tests that test histograms, which rely on native code.
    @Rule
    public AwActivityTestRule mActivityTestRule = new AwActivityTestRule();

    /**
     * Helper class to interact with {@link TestLoader}. This can be used to retrieve whether
     * TestLoader requested a seed.
     *
     * The CallbackHelper fires only after BOTH the background work (seed load/request) and the
     * foreground work (finishVariationsInit) have completed.
     */
    public static class TestLoaderResult extends CallbackHelper {
        private volatile boolean mBackgroundWorkFinished;
        private volatile boolean mForegroundWorkFinished;
        private volatile boolean mSeedRequested;

        /** Returns whether the loader requested a seed. Only valid after the callback fired. */
        public boolean wasSeedRequested() {
            assert getCallCount() > 0;
            return mSeedRequested;
        }

        /** Records that the loader asked the service for a new seed. */
        public void markSeedRequested() {
            mSeedRequested = true;
        }

        public void onBackgroundWorkFinished() {
            mBackgroundWorkFinished = true;
            maybeNotifyCalled();
        }

        public void onForegroundWorkFinished() {
            mForegroundWorkFinished = true;
            maybeNotifyCalled();
        }

        // Notify waiters only once both halves of the loader's work are done.
        private void maybeNotifyCalled() {
            if (mBackgroundWorkFinished && mForegroundWorkFinished) {
                notifyCalled();
            }
        }
    }

    /**
     * A {@link VariationsSeedLoader} which is suitable for integration tests. This overrides the
     * default timeout to be suitable for integration tests, allowing the test to call
     * startVariationsInit() immediately before finishVariationsInit(). This also overrides the
     * service Intent to match the test environment.
     */
    public static class TestLoader extends VariationsSeedLoader {
        private TestLoaderResult mResult;

        public TestLoader(TestLoaderResult result) {
            mResult = result;
        }

        // Bind to the MockVariationsSeedServer built in to the instrumentation test app, rather
        // than the real server in the WebView provider.
        @Override
        protected Intent getServerIntent() {
            return new Intent(ContextUtils.getApplicationContext(), MockVariationsSeedServer.class);
        }

        @Override
        protected boolean requestSeedFromService(long oldSeedDate) {
            boolean result = super.requestSeedFromService(oldSeedDate);
            mResult.markSeedRequested();
            return result;
        }

        @Override
        protected void onBackgroundWorkFinished() {
            mResult.onBackgroundWorkFinished();
        }

        @Override
        protected long getSeedLoadTimeoutMillis() {
            return TIMEOUT_MILLIS;
        }

        @Override
        protected long getCurrentTimeMillis() {
            return CURRENT_TIME_MILLIS;
        }
    }

    // Handler bound to the UI (main) looper; loader work must run there.
    private Handler mMainHandler;

    // Create a TestLoader, run it on the UI thread, and block until it's finished. The return value
    // indicates whether the loader decided to request a new seed.
    private boolean runTestLoaderBlocking() throws TimeoutException {
        final TestLoaderResult result = new TestLoaderResult();
        Runnable run = () -> {
            TestLoader loader = new TestLoader(result);
            loader.startVariationsInit();
            loader.finishVariationsInit();
            result.onForegroundWorkFinished();
        };

        // Snapshot the server's request count first so we only wait for requests caused by
        // THIS loader run (matters for tests that run the loader more than once).
        CallbackHelper onRequestReceived = MockVariationsSeedServer.getRequestHelper();
        int requestsReceived = onRequestReceived.getCallCount();
        Assert.assertTrue("Failed to post seed loader Runnable", mMainHandler.post(run));
        result.waitForCallback("Timed out waiting for loader to finish.", 0);
        if (result.wasSeedRequested()) {
            // Also wait for the request to actually reach the mock server before returning.
            onRequestReceived.waitForCallback("Seed requested, but timed out waiting for request"
                            + " to arrive in MockVariationsSeedServer",
                    requestsReceived);
            return true;
        }
        return false;
    }

    @Before
    public void setUp() throws IOException {
        mMainHandler = new Handler(Looper.getMainLooper());
        // Start every test with no seed files on disk.
        VariationsTestUtils.deleteSeeds();
    }

    @After
    public void tearDown() throws IOException {
        VariationsTestUtils.deleteSeeds();
    }

    // Asserts the histogram holds exactly one sample, with exactly the expected value.
    private void assertSingleRecordInHistogram(String histogramName, int expectedValue) {
        Assert.assertEquals(1, RecordHistogram.getHistogramTotalCountForTesting(histogramName));
        Assert.assertEquals(
                1, RecordHistogram.getHistogramValueCountForTesting(histogramName, expectedValue));

        // Check that the value didn't get recorded in the highest bucket. If expectedValue and
        // expectedValue*2 are in the same bucket, we probably messed up the bucket configuration.
        Assert.assertEquals(0,
                RecordHistogram.getHistogramValueCountForTesting(histogramName, expectedValue * 2));
    }

    // Test the case that:
    // VariationsUtils.getSeedFile() - doesn't exist
    // VariationsUtils.getNewSeedFile() - doesn't exist
    @Test
    @MediumTest
    public void testHaveNoSeed() throws Exception {
        try {
            boolean seedRequested = runTestLoaderBlocking();

            // Since there was no seed, another seed should be requested.
            Assert.assertTrue("No seed requested", seedRequested);
        } finally {
            VariationsTestUtils.deleteSeeds();
        }
    }

    // Test the case that:
    // VariationsUtils.getSeedFile() - exists, timestamp = now
    // VariationsUtils.getNewSeedFile() - doesn't exist
    @Test
    @MediumTest
    public void testHaveFreshSeed() throws Exception {
        try {
            File oldFile = VariationsUtils.getSeedFile();
            Assert.assertTrue("Seed file already exists", oldFile.createNewFile());
            VariationsTestUtils.writeMockSeed(oldFile);

            boolean seedRequested = runTestLoaderBlocking();

            // Since there was a fresh seed, we should not request another seed.
            Assert.assertFalse("New seed was requested when it should not have been",
                    seedRequested);
        } finally {
            VariationsTestUtils.deleteSeeds();
        }
    }

    // Test the case that:
    // VariationsUtils.getSeedFile() - exists, timestamp = epoch
    // VariationsUtils.getNewSeedFile() - doesn't exist
    @Test
    @MediumTest
    public void testHaveExpiredSeed() throws Exception {
        try {
            File oldFile = VariationsUtils.getSeedFile();
            Assert.assertTrue("Seed file already exists", oldFile.createNewFile());
            VariationsTestUtils.writeMockSeed(oldFile);
            oldFile.setLastModified(0);

            boolean seedRequested = runTestLoaderBlocking();

            // Since the seed was expired, another seed should be requested.
            Assert.assertTrue("No seed requested", seedRequested);
        } finally {
            VariationsTestUtils.deleteSeeds();
        }
    }

    // Test the case that:
    // VariationsUtils.getSeedFile() - doesn't exist
    // VariationsUtils.getNewSeedFile() - exists, timestamp = now
    @Test
    @MediumTest
    public void testHaveFreshNewSeed() throws Exception {
        try {
            File oldFile = VariationsUtils.getSeedFile();
            File newFile = VariationsUtils.getNewSeedFile();
            Assert.assertTrue("New seed file already exists", newFile.createNewFile());
            VariationsTestUtils.writeMockSeed(newFile);

            boolean seedRequested = runTestLoaderBlocking();

            // The "new" seed should have been renamed to the "old" seed.
            Assert.assertTrue("Old seed not found", oldFile.exists());
            Assert.assertFalse("New seed still exists", newFile.exists());

            // Since the "new" seed was fresh, we should not request another seed.
            Assert.assertFalse("New seed was requested when it should not have been",
                    seedRequested);
        } finally {
            VariationsTestUtils.deleteSeeds();
        }
    }

    // Test the case that:
    // VariationsUtils.getSeedFile() - doesn't exist
    // VariationsUtils.getNewSeedFile() - exists, timestamp = epoch
    @Test
    @MediumTest
    public void testHaveExpiredNewSeed() throws Exception {
        try {
            File oldFile = VariationsUtils.getSeedFile();
            File newFile = VariationsUtils.getNewSeedFile();
            Assert.assertTrue("Seed file already exists", newFile.createNewFile());
            VariationsTestUtils.writeMockSeed(newFile);
            newFile.setLastModified(0);

            boolean seedRequested = runTestLoaderBlocking();

            // The "new" seed should have been renamed to the "old" seed. Another empty "new" seed
            // should have been created as a destination for the request.
            Assert.assertTrue("Old seed not found", oldFile.exists());
            Assert.assertTrue("New seed not found", newFile.exists());
            Assert.assertTrue("New seed is not empty", newFile.length() == 0L);

            // Since the "new" seed was expired, another seed should be requested.
            Assert.assertTrue("No seed requested", seedRequested);
        } finally {
            VariationsTestUtils.deleteSeeds();
        }
    }

    // Test the case that:
    // VariationsUtils.getSeedFile() - doesn't exist
    // VariationsUtils.getNewSeedFile() - exists, empty
    @Test
    @MediumTest
    public void testHaveEmptyNewSeed() throws Exception {
        try {
            File oldFile = VariationsUtils.getSeedFile();
            File newFile = VariationsUtils.getNewSeedFile();
            Assert.assertTrue("Seed file should not already exist", newFile.createNewFile());

            boolean seedRequested = runTestLoaderBlocking();

            // Neither file should have been touched.
            Assert.assertFalse("Old seed file should not exist", oldFile.exists());
            Assert.assertTrue("New seed file not found", newFile.exists());
            Assert.assertEquals("New seed file is not empty", 0L, newFile.length());

            // Since the "new" seed was empty/invalid, another seed should be requested.
            Assert.assertTrue("No seed requested", seedRequested);
        } finally {
            VariationsTestUtils.deleteSeeds();
        }
    }

    // Test the case that:
    // VariationsUtils.getSeedFile() - exists, timestamp = epoch
    // VariationsUtils.getNewSeedFile() - exists, timestamp = epoch + 1 day
    @Test
    @MediumTest
    public void testHaveBothExpiredSeeds() throws Exception {
        try {
            File oldFile = VariationsUtils.getSeedFile();
            Assert.assertTrue("Old seed file already exists", oldFile.createNewFile());
            VariationsTestUtils.writeMockSeed(oldFile);
            oldFile.setLastModified(0);

            File newFile = VariationsUtils.getNewSeedFile();
            Assert.assertTrue("New seed file already exists", newFile.createNewFile());
            VariationsTestUtils.writeMockSeed(newFile);
            newFile.setLastModified(TimeUnit.DAYS.toMillis(1));

            boolean seedRequested = runTestLoaderBlocking();

            // The "new" seed should have been renamed to the "old" seed. Another empty "new" seed
            // should have been created as a destination for the request.
            Assert.assertTrue("Old seed not found", oldFile.exists());
            Assert.assertTrue("New seed not found", newFile.exists());
            Assert.assertTrue("New seed is not empty", newFile.length() == 0L);

            // Since the "new" seed was expired, another seed should be requested.
            Assert.assertTrue("No seed requested", seedRequested);
        } finally {
            VariationsTestUtils.deleteSeeds();
        }
    }

    // Test loading twice. The first load should trigger a request, but the second should not,
    // because requests should be rate-limited.
    // VariationsUtils.getSeedFile() - doesn't exist
    // VariationsUtils.getNewSeedFile() - doesn't exist
    @Test
    @MediumTest
    public void testDoubleLoad() throws Exception {
        try {
            boolean seedRequested = runTestLoaderBlocking();
            Assert.assertTrue("No seed requested", seedRequested);

            seedRequested = runTestLoaderBlocking();
            Assert.assertFalse("New seed was requested when it should not have been",
                    seedRequested);
        } finally {
            VariationsTestUtils.deleteSeeds();
        }
    }

    // Tests that the finch-seed-expiration-age flag works.
    @Test
    @MediumTest
    @CommandLineFlags.Add(AwSwitches.FINCH_SEED_EXPIRATION_AGE + "=0")
    public void testFinchSeedExpirationAgeFlag() throws Exception {
        try {
            // Create a new seed file with a recent timestamp.
            File oldFile = VariationsUtils.getSeedFile();
            VariationsTestUtils.writeMockSeed(oldFile);
            oldFile.setLastModified(CURRENT_TIME_MILLIS);

            boolean seedRequested = runTestLoaderBlocking();

            // With expiration age forced to 0, even a fresh seed counts as expired.
            Assert.assertTrue("Seed file should be requested", seedRequested);
        } finally {
            VariationsTestUtils.deleteSeeds();
        }
    }

    // Tests that the finch-seed-min-update-period flag overrides the seed request throttling.
    @Test
    @MediumTest
    @CommandLineFlags.Add(AwSwitches.FINCH_SEED_MIN_UPDATE_PERIOD + "=0")
    public void testFinchSeedMinUpdatePeriodFlag() throws Exception {
        try {
            // Update the last modified time of the stamp file to simulate having just requested a
            // new seed from the service.
            VariationsUtils.getStampFile().createNewFile();
            VariationsUtils.updateStampTime(CURRENT_TIME_MILLIS);

            boolean seedRequested = runTestLoaderBlocking();

            // With the min update period forced to 0, throttling must not suppress the request.
            Assert.assertTrue("Seed file should be requested", seedRequested);
        } finally {
            VariationsTestUtils.deleteSeeds();
        }
    }

    // Tests that metrics passed from the service get recorded to histograms.
    @Test
    @MediumTest
    public void testRecordMetricsFromService() throws Exception {
        try {
            // NOTE(review): nineMinutesMs is unused below — dead local, candidate for removal.
            long nineMinutesMs = TimeUnit.MINUTES.toMillis(9);
            long twoWeeksMs = TimeUnit.DAYS.toMillis(14);
            long threeWeeksMs = TimeUnit.DAYS.toMillis(21);
            VariationsServiceMetricsHelper metrics =
                    VariationsServiceMetricsHelper.fromBundle(new Bundle());
            metrics.setJobInterval(threeWeeksMs);
            metrics.setJobQueueTime(twoWeeksMs);
            MockVariationsSeedServer.setMetricsBundle(metrics.toBundle());

            runTestLoaderBlocking();

            // Histograms record minutes, so convert before asserting.
            assertSingleRecordInHistogram(VariationsSeedLoader.DOWNLOAD_JOB_INTERVAL_HISTOGRAM_NAME,
                    (int) TimeUnit.MILLISECONDS.toMinutes(threeWeeksMs));
            assertSingleRecordInHistogram(
                    VariationsSeedLoader.DOWNLOAD_JOB_QUEUE_TIME_HISTOGRAM_NAME,
                    (int) TimeUnit.MILLISECONDS.toMinutes(twoWeeksMs));
        } finally {
            MockVariationsSeedServer.setMetricsBundle(null);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.examples.ml; import org.apache.flink.api.java.utils.ParameterTool; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.functions.AssignerWithPunctuatedWatermarks; import org.apache.flink.streaming.api.functions.co.CoMapFunction; import org.apache.flink.streaming.api.functions.source.SourceFunction; import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction; import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows; import org.apache.flink.streaming.api.windowing.time.Time; import org.apache.flink.streaming.api.windowing.windows.TimeWindow; import org.apache.flink.util.Collector; /** * Skeleton for incremental machine learning algorithm consisting of a * pre-computed model, which gets updated for the new inputs and new input data * for which the job provides predictions. * * <p>This may serve as a base of a number of algorithms, e.g. updating an * incremental Alternating Least Squares model while also providing the * predictions. 
* * <p>This example shows how to use: * <ul> * <li>Connected streams * <li>CoFunctions * <li>Tuple data types * </ul> */ public class IncrementalLearningSkeleton { // ************************************************************************* // PROGRAM // ************************************************************************* public static void main(String[] args) throws Exception { // Checking input parameters final ParameterTool params = ParameterTool.fromArgs(args); StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream<Integer> trainingData = env.addSource(new FiniteTrainingDataSource()); DataStream<Integer> newData = env.addSource(new FiniteNewDataSource()); // build new model on every second of new data DataStream<Double[]> model = trainingData .assignTimestampsAndWatermarks(new LinearTimestamp()) .windowAll(TumblingEventTimeWindows.of(Time.milliseconds(5000))) .apply(new PartialModelBuilder()); // use partial model for newData DataStream<Integer> prediction = newData.connect(model).map(new Predictor()); // emit result if (params.has("output")) { prediction.writeAsText(params.get("output")); } else { System.out.println("Printing result to stdout. Use --output to specify output path."); prediction.print(); } // execute program env.execute("Streaming Incremental Learning"); } // ************************************************************************* // USER FUNCTIONS // ************************************************************************* /** * Feeds new data for newData. By default it is implemented as constantly * emitting the Integer 1 in a loop. 
*/ public static class FiniteNewDataSource implements SourceFunction<Integer> { private static final long serialVersionUID = 1L; private int counter; @Override public void run(SourceContext<Integer> ctx) throws Exception { Thread.sleep(15); while (counter < 50) { ctx.collect(getNewData()); } } @Override public void cancel() { // No cleanup needed } private Integer getNewData() throws InterruptedException { Thread.sleep(5); counter++; return 1; } } /** * Feeds new training data for the partial model builder. By default it is * implemented as constantly emitting the Integer 1 in a loop. */ public static class FiniteTrainingDataSource implements SourceFunction<Integer> { private static final long serialVersionUID = 1L; private int counter = 0; @Override public void run(SourceContext<Integer> collector) throws Exception { while (counter < 8200) { collector.collect(getTrainingData()); } } @Override public void cancel() { // No cleanup needed } private Integer getTrainingData() throws InterruptedException { counter++; return 1; } } private static class LinearTimestamp implements AssignerWithPunctuatedWatermarks<Integer> { private static final long serialVersionUID = 1L; private long counter = 0L; @Override public long extractTimestamp(Integer element, long previousElementTimestamp) { return counter += 10L; } @Override public Watermark checkAndGetNextWatermark(Integer lastElement, long extractedTimestamp) { return new Watermark(counter - 1); } } /** * Builds up-to-date partial models on new training data. 
*/ public static class PartialModelBuilder implements AllWindowFunction<Integer, Double[], TimeWindow> { private static final long serialVersionUID = 1L; protected Double[] buildPartialModel(Iterable<Integer> values) { return new Double[]{1.}; } @Override public void apply(TimeWindow window, Iterable<Integer> values, Collector<Double[]> out) throws Exception { out.collect(buildPartialModel(values)); } } /** * Creates newData using the model produced in batch-processing and the * up-to-date partial model. * <p> * By default emits the Integer 0 for every newData and the Integer 1 * for every model update. * </p> */ public static class Predictor implements CoMapFunction<Integer, Double[], Integer> { private static final long serialVersionUID = 1L; Double[] batchModel = null; Double[] partialModel = null; @Override public Integer map1(Integer value) { // Return newData return predict(value); } @Override public Integer map2(Double[] value) { // Update model partialModel = value; batchModel = getBatchModel(); return 1; } // pulls model built with batch-job on the old training data protected Double[] getBatchModel() { return new Double[]{0.}; } // performs newData using the two models protected Integer predict(Integer inTuple) { return 0; } } }
package com.stfl.ui; import com.stfl.Constant; import com.stfl.MainGui; import com.stfl.misc.Config; import com.stfl.misc.UTF8Control; import com.stfl.misc.Util; import com.stfl.network.IServer; import com.stfl.network.NioLocalServer; import com.stfl.network.proxy.IProxy; import com.stfl.network.proxy.ProxyFactory; import com.stfl.ss.CryptFactory; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.fxml.FXML; import javafx.fxml.FXMLLoader; import javafx.scene.Scene; import javafx.scene.control.*; import javafx.scene.image.Image; import javafx.scene.layout.Pane; import javafx.stage.Stage; import java.io.IOException; import java.security.InvalidAlgorithmParameterException; import java.util.ResourceBundle; import java.util.logging.Logger; public class MainLayoutController { @FXML private TextField txtServerIP; @FXML private TextField txtServerPort; @FXML private ComboBox cboCipher; @FXML private TextField txtPassword; @FXML private TextField txtLocalPort; @FXML private ComboBox cboProxyType; @FXML private Button btnStart; @FXML private Button btnStop; @FXML private Button btnLog; @FXML private Button btnClose; private Logger logger = Logger.getLogger(MainLayoutController.class.getName()); private MainGui gui; private IServer server; private Stage logStage; private Config config; @FXML private void initialize() { // set cipher options ObservableList<String> ciphers = FXCollections.observableArrayList(); ciphers.addAll(CryptFactory.getSupportedCiphers()); cboCipher.setItems(ciphers); // set proxy options ObservableList<IProxy.TYPE> proxyTypes = FXCollections.observableArrayList(); proxyTypes.addAll(ProxyFactory.getSupportedProxyTypes()); cboProxyType.setItems(proxyTypes); // prepare configuration config = new Config(); config.loadFromJson(Util.getFileContent(Constant.CONF_FILE)); txtServerIP.setText(config.getRemoteIpAddress()); txtServerPort.setText(String.valueOf(config.getRemotePort())); 
txtLocalPort.setText(String.valueOf(config.getLocalPort())); txtPassword.setText(config.getPassword()); cboCipher.setValue(config.getMethod()); cboProxyType.setValue(config.getProxyType()); // prepare log window Stage stage = new Stage(); try { FXMLLoader logLayoutLoader = new FXMLLoader(MainGui.class.getResource("/resources/ui/LogLayout.fxml")); logLayoutLoader.setResources(ResourceBundle.getBundle("resources.bundle.ui", Constant.LOCALE, new UTF8Control())); Pane logLayout = logLayoutLoader.load(); Scene logScene = new Scene(logLayout); stage.setTitle("Log"); stage.setScene(logScene); stage.setResizable(false); stage.getIcons().add(new Image(MainGui.class.getResource("/resources/image/icon.png").toString())); LogLayoutController controller = logLayoutLoader.getController(); controller.setStage(stage); logStage = stage; } catch (IOException e) { logger.warning("Unable to load ICON: " + e.toString()); } btnStop.setDisable(true); } @FXML private void handleStart() { boolean isValidated = false; do { if (!txtServerIP.getText().matches("[0-9]{1,4}.[0-9]{1,4}.[0-9]{1,4}.[0-9]{1,4}")) { showAlert(Constant.PROG_NAME, "Invalid IP address", Alert.AlertType.ERROR); break; } String ip = txtServerIP.getText(); if (!txtServerPort.getText().matches("[0-9]+")) { showAlert(Constant.PROG_NAME, "Invalid Port", Alert.AlertType.ERROR); break; } int port = Integer.parseInt(txtServerPort.getText()); String method = (String) cboCipher.getValue(); if (txtPassword.getText().length() == 0) { showAlert(Constant.PROG_NAME, "Please specified password", Alert.AlertType.ERROR); break; } String password = txtPassword.getText(); IProxy.TYPE type = (IProxy.TYPE) cboProxyType.getValue(); if (!txtLocalPort.getText().matches("[0-9]+")) { showAlert(Constant.PROG_NAME, "Invalid Port", Alert.AlertType.ERROR); break; } int localPort = Integer.parseInt(txtLocalPort.getText()); // create config config.setRemoteIpAddress(ip); config.setRemotePort(port); config.setLocalIpAddress("0.0.0.0"); 
config.setLocalPort(localPort); config.setMethod(method); config.setPassword(password); config.setProxyType(type); Util.saveFile(Constant.CONF_FILE, config.saveToJson()); isValidated = true; } while (false); if (!isValidated) return; // start start try { server = new NioLocalServer(config); Thread t = new Thread(server); t.setDaemon(true); t.start(); String message = String.format("(Connected) Server %s:%d", config.getRemoteIpAddress(), config.getRemotePort()); gui.setTooltip(message); gui.showNotification(message); } catch (IOException | InvalidAlgorithmParameterException e) { logger.warning("Unable to start server: " + e.toString()); } btnStop.setDisable(false); btnStart.setDisable(true); } @FXML private void handleStop() { if (server != null) { server.close(); String message = String.format("(Disconnected) Server %s:%d", config.getRemoteIpAddress(), config.getRemotePort()); gui.showNotification(message); gui.setTooltip("Not Connected"); } btnStop.setDisable(true); btnStart.setDisable(false); } @FXML private void handleLog() { logStage.show(); } @FXML private void handleClose() { gui.hide(); } public void setMainGui(MainGui gui) { this.gui = gui; } public void closeServer() { handleStop(); } private boolean validationInput(String pattern, String text) { return false; } private void showAlert(String title, String message, Alert.AlertType type) { Alert a = new Alert(type); a.setTitle(title); a.setHeaderText(type.name()); a.setResizable(false); a.setContentText(message); a.showAndWait(); } }
package ca.uhn.fhir.util;

import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;

import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.Date;
import java.util.LinkedList;
import java.util.concurrent.TimeUnit;

/*
 * #%L
 * HAPI FHIR - Core Library
 * %%
 * Copyright (C) 2014 - 2022 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

/**
 * A multipurpose stopwatch which can be used to time tasks and produce
 * human readable output about task duration, throughput, estimated task completion,
 * etc.
 * <p>
 * <p>
 * <b>Thread Safety Note: </b> StopWatch is not intended to be thread safe.
 * </p>
 *
 * @since HAPI FHIR 3.3.0
 */
public class StopWatch {

	// Fixed "now" for unit tests; presumably consulted by now() — TODO confirm (now() not in view)
	private static Long ourNowForUnitTest;
	// Epoch millis at which this stopwatch (re)started
	private long myStarted = now();
	// The sub-task currently being timed, or null if none (see startTask/endCurrentTask)
	private TaskTiming myCurrentTask;
	// Lazily-created list of completed/current sub-tasks (see ensureTasksListExists)
	private LinkedList<TaskTiming> myTasks;

	/**
	 * Constructor
	 */
	public StopWatch() {
		super();
	}

	/**
	 * Constructor
	 *
	 * @param theStart The time to record as the start for this timer
	 */
	public StopWatch(Date theStart) {
		myStarted = theStart.getTime();
	}

	/**
	 * Constructor
	 *
	 * @param theStart The time that the stopwatch was started
	 */
	public StopWatch(long theStart) {
		myStarted = theStart;
	}

	// Appends a newline separator only between entries, never before the first one
	private void addNewlineIfContentExists(StringBuilder theB) {
		if (theB.length() > 0) {
			theB.append("\n");
		}
	}

	/**
	 * Finish the counter on the current task (which was started by calling
	 * {@link #startTask(String)}.
	 * This method has no effect if no task
	 * is currently started so it's ok to call it more than once.
	 */
	public void endCurrentTask() {
		ensureTasksListExists();
		if (myCurrentTask != null) {
			myCurrentTask.setEnd(now());
		}
		myCurrentTask = null;
	}

	// Lazily create the task list so a StopWatch used without sub-tasks allocates nothing extra
	private void ensureTasksListExists() {
		if (myTasks == null) {
			myTasks = new LinkedList<>();
		}
	}

	/**
	 * Returns a nice human-readable display of the time taken per
	 * operation. Note that this may not actually output the number
	 * of milliseconds if the time taken per operation was very long (over
	 * 10 seconds)
	 *
	 * @see #formatMillis(long)
	 */
	public String formatMillisPerOperation(long theNumOperations) {
		// Math.max(1.0, ...) guards against division by zero when theNumOperations is 0
		double millisPerOperation = (((double) getMillis()) / Math.max(1.0, theNumOperations));
		return formatMillis(millisPerOperation);
	}

	/**
	 * Returns a string providing the durations of all tasks collected by {@link #startTask(String)}
	 */
	public String formatTaskDurations() {
		ensureTasksListExists();

		StringBuilder b = new StringBuilder();
		// Report idle time before the first task only if it exceeds 10ms (noise threshold)
		if (myTasks.size() > 0) {
			long delta = myTasks.getFirst().getStart() - myStarted;
			if (delta > 10) {
				addNewlineIfContentExists(b);
				b.append("Before first task");
				b.append(": ");
				b.append(formatMillis(delta));
			}
		} else {
			b.append("No tasks");
		}

		TaskTiming last = null;
		for (TaskTiming nextTask : myTasks) {
			// Report gaps (>10ms) between consecutive tasks
			if (last != null) {
				long delta = nextTask.getStart() - last.getEnd();
				if (delta > 10) {
					addNewlineIfContentExists(b);
					b.append("Between");
					b.append(": ");
					b.append(formatMillis(delta));
				}
			}

			addNewlineIfContentExists(b);
			b.append(nextTask.getTaskName());
			b.append(": ");
			long delta = nextTask.getMillis();
			b.append(formatMillis(delta));

			last = nextTask;
		}

		// Report idle time (>10ms) after the last task ended
		if (myTasks.size() > 0) {
			long delta = now() - myTasks.getLast().getEnd();
			if (delta > 10) {
				addNewlineIfContentExists(b);
				b.append("After last task");
				b.append(": ");
				b.append(formatMillis(delta));
			}
		}

		return b.toString();
	}

	/**
	 * Determine the current throughput per unit of time (specified in theUnit)
	 * assuming that theNumOperations operations have
	 * happened.
	 * <p>
	 * For example, if this stopwatch has 2 seconds elapsed, and this method is
	 * called for theNumOperations=30 and TimeUnit=SECONDS,
	 * this method will return 15
	 * </p>
	 *
	 * @see #getThroughput(long, TimeUnit)
	 */
	public String formatThroughput(long theNumOperations, TimeUnit theUnit) {
		double throughput = getThroughput(theNumOperations, theUnit);
		// NOTE(review): DecimalFormat uses the default locale's decimal separator,
		// so output varies by locale (e.g. "15,0" vs "15.0") — confirm this is intended
		return new DecimalFormat("0.0").format(throughput);
	}

	/**
	 * Given an amount of something completed so far, and a total amount, calculates how long it will take for something to complete
	 *
	 * @param theCompleteToDate The amount so far
	 * @param theTotal          The total (must be higher than theCompleteToDate
	 * @return A formatted amount of time
	 */
	public String getEstimatedTimeRemaining(double theCompleteToDate, double theTotal) {
		double millis = getMillis();
		// NOTE(review): if theCompleteToDate is 0 this divides by zero, yielding
		// Infinity and thus Long.MAX_VALUE after the cast — caller must pass > 0
		long millisRemaining = (long) (((theTotal / theCompleteToDate) * millis) - (millis));
		return formatMillis(millisRemaining);
	}

	/**
	 * Returns elapsed millis between this stopwatch's start and the given time.
	 */
	public long getMillis(Date theNow) {
		return theNow.getTime() - myStarted;
	}

	/**
	 * Returns elapsed millis since this stopwatch started.
	 */
	public long getMillis() {
		long now = now();
		return now - myStarted;
	}

	/**
	 * Returns elapsed millis since start, then restarts the stopwatch from now.
	 */
	public long getMillisAndRestart() {
		long now = now();
		long retVal = now - myStarted;
		myStarted = now;
		return retVal;
	}

	/**
	 * @param theNumOperations Ok for this to be 0, it will be treated as 1
	 */
	public long getMillisPerOperation(long theNumOperations) {
		return (long) (((double) getMillis()) / Math.max(1.0, theNumOperations));
	}

	/**
	 * Returns the time this stopwatch was started as a {@link Date}.
	 */
	public Date getStartedDate() {
		return new Date(myStarted);
	}

	/**
	 * Determine the current throughput per unit of time (specified in theUnit)
	 * assuming that theNumOperations operations have happened.
	 * <p>
	 * For example, if this stopwatch has 2 seconds elapsed, and this method is
	 * called for theNumOperations=30 and TimeUnit=SECONDS,
	 * this method will return 15
	 * </p>
	 *
	 * @see #formatThroughput(long, TimeUnit)
	 */
	public double getThroughput(long theNumOperations, TimeUnit theUnit) {
		if (theNumOperations <= 0) {
			return 0.0f;
		}

		// Clamp elapsed time to at least 1ms to avoid division by zero
		long millisElapsed = Math.max(1, getMillis());
		long periodMillis = theUnit.toMillis(1);

		double denominator = ((double) millisElapsed) / ((double) periodMillis);

		double throughput = (double) theNumOperations / denominator;
		// Cap the result: can't report more than theNumOperations per period
		// (avoids inflated rates when elapsed time is shorter than one period)
		if (throughput > theNumOperations) {
			throughput = theNumOperations;
		}

		return throughput;
	}

	/**
	 * Restarts the stopwatch from the current time.
	 */
	public void restart() {
		myStarted = now();
	}

	/**
	 * Starts a counter for a sub-task
	 * <p>
	 * <b>Thread Safety Note: </b> This method is not threadsafe! Do not use subtasks in a
	 * multithreaded environment.
	 * </p>
	 *
	 * @param theTaskName Note that if theTaskName is blank or empty, no task is started
	 */
	public void startTask(String theTaskName) {
		// endCurrentTask() also guarantees myTasks is non-null (via ensureTasksListExists)
		endCurrentTask();
		Validate.notBlank(theTaskName, "Task name must not be blank");
		myCurrentTask = new TaskTiming()
			.setTaskName(theTaskName)
			.setStart(now());
		myTasks.add(myCurrentTask);
	}

	/**
	 * Formats value in an appropriate format.
See {@link #formatMillis(long)}} * for a description of the format * * @see #formatMillis(long) */ @Override public String toString() { return formatMillis(getMillis()); } private static class TaskTiming { private long myStart; private long myEnd; private String myTaskName; public long getEnd() { if (myEnd == 0) { return now(); } return myEnd; } public TaskTiming setEnd(long theEnd) { myEnd = theEnd; return this; } public long getMillis() { return getEnd() - getStart(); } public long getStart() { return myStart; } public TaskTiming setStart(long theStart) { myStart = theStart; return this; } public String getTaskName() { return myTaskName; } public TaskTiming setTaskName(String theTaskName) { myTaskName = theTaskName; return this; } } private static NumberFormat getDayFormat() { return new DecimalFormat("0.0"); } private static NumberFormat getTenDayFormat() { return new DecimalFormat("0"); } private static NumberFormat getSubMillisecondMillisFormat() { return new DecimalFormat("0.000"); } /** * Append a right-aligned and zero-padded numeric value to a `StringBuilder`. */ static void appendRightAlignedNumber(StringBuilder theStringBuilder, String thePrefix, int theNumberOfDigits, long theValueToAppend) { theStringBuilder.append(thePrefix); if (theNumberOfDigits > 1) { int pad = (theNumberOfDigits - 1); for (long xa = theValueToAppend; xa > 9 && pad > 0; xa /= 10) { pad--; } for (int xa = 0; xa < pad; xa++) { theStringBuilder.append('0'); } } theStringBuilder.append(theValueToAppend); } /** * Formats a number of milliseconds for display (e.g. * in a log file), tailoring the output to how big * the value actually is. * <p> * Example outputs: * </p> * <ul> * <li>133ms</li> * <li>00:00:10.223</li> * <li>1.7 days</li> * <li>64 days</li> * </ul> */ public static String formatMillis(long theMillis) { return formatMillis((double) theMillis); } /** * Formats a number of milliseconds for display (e.g. * in a log file), tailoring the output to how big * the value actually is. 
* <p> * Example outputs: * </p> * <ul> * <li>133ms</li> * <li>00:00:10.223</li> * <li>1.7 days</li> * <li>64 days</li> * </ul> */ public static String formatMillis(double theMillis) { StringBuilder buf = new StringBuilder(20); if (theMillis > 0.0 && theMillis < 1.0) { buf.append(getSubMillisecondMillisFormat().format(theMillis)); buf.append("ms"); } else if (theMillis < (10 * DateUtils.MILLIS_PER_SECOND)) { buf.append((int) theMillis); buf.append("ms"); } else if (theMillis >= DateUtils.MILLIS_PER_DAY) { double days = theMillis / DateUtils.MILLIS_PER_DAY; if (days >= 10) { buf.append(getTenDayFormat().format(days)); buf.append(" days"); } else if (days != 1.0f) { buf.append(getDayFormat().format(days)); buf.append(" days"); } else { buf.append(getDayFormat().format(days)); buf.append(" day"); } } else { long millisAsLong = (long) theMillis; appendRightAlignedNumber(buf, "", 2, ((millisAsLong % DateUtils.MILLIS_PER_DAY) / DateUtils.MILLIS_PER_HOUR)); appendRightAlignedNumber(buf, ":", 2, ((millisAsLong % DateUtils.MILLIS_PER_HOUR) / DateUtils.MILLIS_PER_MINUTE)); appendRightAlignedNumber(buf, ":", 2, ((millisAsLong % DateUtils.MILLIS_PER_MINUTE) / DateUtils.MILLIS_PER_SECOND)); if (theMillis <= DateUtils.MILLIS_PER_MINUTE) { appendRightAlignedNumber(buf, ".", 3, (millisAsLong % DateUtils.MILLIS_PER_SECOND)); } } return buf.toString(); } private static long now() { if (ourNowForUnitTest != null) { return ourNowForUnitTest; } return System.currentTimeMillis(); } @VisibleForTesting static void setNowForUnitTestForUnitTest(Long theNowForUnitTest) { ourNowForUnitTest = theNowForUnitTest; } }
package com.zfgc.dbobj;

import java.util.ArrayList;
import java.util.List;

// MyBatis Generator "example" (criteria) class for building dynamic WHERE clauses
// against the BR_BUDDY_IGNORE_LIST table. Generated code: the @mbggenerated tags
// below drive the generator's merge step, so they must be preserved.
public class BrBuddyIgnoreListDbObjExample {
    /**
     * This field was generated by MyBatis Generator.
     * This field corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    protected String orderByClause;

    /**
     * This field was generated by MyBatis Generator.
     * This field corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    protected boolean distinct;

    /**
     * This field was generated by MyBatis Generator.
     * This field corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    protected List<Criteria> oredCriteria;

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    public BrBuddyIgnoreListDbObjExample() {
        oredCriteria = new ArrayList<Criteria>();
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    public void setOrderByClause(String orderByClause) {
        this.orderByClause = orderByClause;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    public String getOrderByClause() {
        return orderByClause;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    public void setDistinct(boolean distinct) {
        this.distinct = distinct;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    public boolean isDistinct() {
        return distinct;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    public List<Criteria> getOredCriteria() {
        return oredCriteria;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    public void or(Criteria criteria) {
        // Each Criteria in oredCriteria is combined with OR by the mapper XML
        oredCriteria.add(criteria);
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    public Criteria or() {
        Criteria criteria = createCriteriaInternal();
        oredCriteria.add(criteria);
        return criteria;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    public Criteria createCriteria() {
        Criteria criteria = createCriteriaInternal();
        // Only the first createCriteria() call registers itself; later calls
        // return a detached Criteria that the caller must add via or(...)
        if (oredCriteria.size() == 0) {
            oredCriteria.add(criteria);
        }
        return criteria;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    protected Criteria createCriteriaInternal() {
        Criteria criteria = new Criteria();
        return criteria;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    public void clear() {
        // Resets the example to its freshly-constructed state
        oredCriteria.clear();
        orderByClause = null;
        distinct = false;
    }

    /**
     * This class was generated by MyBatis Generator.
     * This class corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    protected abstract static class GeneratedCriteria {
        protected List<Criterion> criteria;

        protected GeneratedCriteria() {
            super();
            criteria = new ArrayList<Criterion>();
        }

        public boolean isValid() {
            return criteria.size() > 0;
        }

        public List<Criterion> getAllCriteria() {
            return criteria;
        }

        public List<Criterion> getCriteria() {
            return criteria;
        }

        // NOTE(review): the generator emits a raw RuntimeException on null input
        // rather than IllegalArgumentException — kept to match generated code.
        protected void addCriterion(String condition) {
            if (condition == null) {
                throw new RuntimeException("Value for condition cannot be null");
            }
            criteria.add(new Criterion(condition));
        }

        protected void addCriterion(String condition, Object value, String property) {
            if (value == null) {
                throw new RuntimeException("Value for " + property + " cannot be null");
            }
            criteria.add(new Criterion(condition, value));
        }

        protected void addCriterion(String condition, Object value1, Object value2, String property) {
            if (value1 == null || value2 == null) {
                throw new RuntimeException("Between values for " + property + " cannot be null");
            }
            criteria.add(new Criterion(condition, value1, value2));
        }

        // ---- Conditions on the USER_A_ID column ----

        public Criteria andUserAIdIsNull() {
            addCriterion("USER_A_ID is null");
            return (Criteria) this;
        }

        public Criteria andUserAIdIsNotNull() {
            addCriterion("USER_A_ID is not null");
            return (Criteria) this;
        }

        public Criteria andUserAIdEqualTo(Integer value) {
            addCriterion("USER_A_ID =", value, "userAId");
            return (Criteria) this;
        }

        public Criteria andUserAIdNotEqualTo(Integer value) {
            addCriterion("USER_A_ID <>", value, "userAId");
            return (Criteria) this;
        }

        public Criteria andUserAIdGreaterThan(Integer value) {
            addCriterion("USER_A_ID >", value, "userAId");
            return (Criteria) this;
        }

        public Criteria andUserAIdGreaterThanOrEqualTo(Integer value) {
            addCriterion("USER_A_ID >=", value, "userAId");
            return (Criteria) this;
        }

        public Criteria andUserAIdLessThan(Integer value) {
            addCriterion("USER_A_ID <", value, "userAId");
            return (Criteria) this;
        }

        public Criteria andUserAIdLessThanOrEqualTo(Integer value) {
            addCriterion("USER_A_ID <=", value, "userAId");
            return (Criteria) this;
        }

        public Criteria andUserAIdIn(List<Integer> values) {
            addCriterion("USER_A_ID in", values, "userAId");
            return (Criteria) this;
        }

        public Criteria andUserAIdNotIn(List<Integer> values) {
            addCriterion("USER_A_ID not in", values, "userAId");
            return (Criteria) this;
        }

        public Criteria andUserAIdBetween(Integer value1, Integer value2) {
            addCriterion("USER_A_ID between", value1, value2, "userAId");
            return (Criteria) this;
        }

        public Criteria andUserAIdNotBetween(Integer value1, Integer value2) {
            addCriterion("USER_A_ID not between", value1, value2, "userAId");
            return (Criteria) this;
        }

        // ---- Conditions on the USER_B_ID column ----

        public Criteria andUserBIdIsNull() {
            addCriterion("USER_B_ID is null");
            return (Criteria) this;
        }

        public Criteria andUserBIdIsNotNull() {
            addCriterion("USER_B_ID is not null");
            return (Criteria) this;
        }

        public Criteria andUserBIdEqualTo(Integer value) {
            addCriterion("USER_B_ID =", value, "userBId");
            return (Criteria) this;
        }

        public Criteria andUserBIdNotEqualTo(Integer value) {
            addCriterion("USER_B_ID <>", value, "userBId");
            return (Criteria) this;
        }

        public Criteria andUserBIdGreaterThan(Integer value) {
            addCriterion("USER_B_ID >", value, "userBId");
            return (Criteria) this;
        }

        public Criteria andUserBIdGreaterThanOrEqualTo(Integer value) {
            addCriterion("USER_B_ID >=", value, "userBId");
            return (Criteria) this;
        }

        public Criteria andUserBIdLessThan(Integer value) {
            addCriterion("USER_B_ID <", value, "userBId");
            return (Criteria) this;
        }

        public Criteria andUserBIdLessThanOrEqualTo(Integer value) {
            addCriterion("USER_B_ID <=", value, "userBId");
            return (Criteria) this;
        }

        public Criteria andUserBIdIn(List<Integer> values) {
            addCriterion("USER_B_ID in", values, "userBId");
            return (Criteria) this;
        }

        public Criteria andUserBIdNotIn(List<Integer> values) {
            addCriterion("USER_B_ID not in", values, "userBId");
            return (Criteria) this;
        }

        public Criteria andUserBIdBetween(Integer value1, Integer value2) {
            addCriterion("USER_B_ID between", value1, value2, "userBId");
            return (Criteria) this;
        }

        public Criteria andUserBIdNotBetween(Integer value1, Integer value2) {
            addCriterion("USER_B_ID not between", value1, value2, "userBId");
            return (Criteria) this;
        }

        // ---- Conditions on the BUDDY_FLAG column ----

        public Criteria andBuddyFlagIsNull() {
            addCriterion("BUDDY_FLAG is null");
            return (Criteria) this;
        }

        public Criteria andBuddyFlagIsNotNull() {
            addCriterion("BUDDY_FLAG is not null");
            return (Criteria) this;
        }

        public Criteria andBuddyFlagEqualTo(Boolean value) {
            addCriterion("BUDDY_FLAG =", value, "buddyFlag");
            return (Criteria) this;
        }

        public Criteria andBuddyFlagNotEqualTo(Boolean value) {
            addCriterion("BUDDY_FLAG <>", value, "buddyFlag");
            return (Criteria) this;
        }

        public Criteria andBuddyFlagGreaterThan(Boolean value) {
            addCriterion("BUDDY_FLAG >", value, "buddyFlag");
            return (Criteria) this;
        }

        public Criteria andBuddyFlagGreaterThanOrEqualTo(Boolean value) {
            addCriterion("BUDDY_FLAG >=", value, "buddyFlag");
            return (Criteria) this;
        }

        public Criteria andBuddyFlagLessThan(Boolean value) {
            addCriterion("BUDDY_FLAG <", value, "buddyFlag");
            return (Criteria) this;
        }

        public Criteria andBuddyFlagLessThanOrEqualTo(Boolean value) {
            addCriterion("BUDDY_FLAG <=", value, "buddyFlag");
            return (Criteria) this;
        }

        public Criteria andBuddyFlagIn(List<Boolean> values) {
            addCriterion("BUDDY_FLAG in", values, "buddyFlag");
            return (Criteria) this;
        }

        public Criteria andBuddyFlagNotIn(List<Boolean> values) {
            addCriterion("BUDDY_FLAG not in", values, "buddyFlag");
            return (Criteria) this;
        }

        public Criteria andBuddyFlagBetween(Boolean value1, Boolean value2) {
            addCriterion("BUDDY_FLAG between", value1, value2, "buddyFlag");
            return (Criteria) this;
        }

        public Criteria andBuddyFlagNotBetween(Boolean value1, Boolean value2) {
            addCriterion("BUDDY_FLAG not between", value1, value2, "buddyFlag");
            return (Criteria) this;
        }

        // ---- Conditions on the IGNORE_FLAG column ----

        public Criteria andIgnoreFlagIsNull() {
            addCriterion("IGNORE_FLAG is null");
            return (Criteria) this;
        }

        public Criteria andIgnoreFlagIsNotNull() {
            addCriterion("IGNORE_FLAG is not null");
            return (Criteria) this;
        }

        public Criteria andIgnoreFlagEqualTo(Boolean value) {
            addCriterion("IGNORE_FLAG =", value, "ignoreFlag");
            return (Criteria) this;
        }

        public Criteria andIgnoreFlagNotEqualTo(Boolean value) {
            addCriterion("IGNORE_FLAG <>", value, "ignoreFlag");
            return (Criteria) this;
        }

        public Criteria andIgnoreFlagGreaterThan(Boolean value) {
            addCriterion("IGNORE_FLAG >", value, "ignoreFlag");
            return (Criteria) this;
        }

        public Criteria andIgnoreFlagGreaterThanOrEqualTo(Boolean value) {
            addCriterion("IGNORE_FLAG >=", value, "ignoreFlag");
            return (Criteria) this;
        }

        public Criteria andIgnoreFlagLessThan(Boolean value) {
            addCriterion("IGNORE_FLAG <", value, "ignoreFlag");
            return (Criteria) this;
        }

        public Criteria andIgnoreFlagLessThanOrEqualTo(Boolean value) {
            addCriterion("IGNORE_FLAG <=", value, "ignoreFlag");
            return (Criteria) this;
        }

        public Criteria andIgnoreFlagIn(List<Boolean> values) {
            addCriterion("IGNORE_FLAG in", values, "ignoreFlag");
            return (Criteria) this;
        }

        public Criteria andIgnoreFlagNotIn(List<Boolean> values) {
            addCriterion("IGNORE_FLAG not in", values, "ignoreFlag");
            return (Criteria) this;
        }

        public Criteria andIgnoreFlagBetween(Boolean value1, Boolean value2) {
            addCriterion("IGNORE_FLAG between", value1, value2, "ignoreFlag");
            return (Criteria) this;
        }

        public Criteria andIgnoreFlagNotBetween(Boolean value1, Boolean value2) {
            addCriterion("IGNORE_FLAG not between", value1, value2, "ignoreFlag");
            return (Criteria) this;
        }
    }

    /**
     * This class was generated by MyBatis Generator.
     * This class corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated Sun Jul 16 16:20:51 EDT 2017
     */
    public static class Criterion {
        private String condition;

        private Object value;

        private Object secondValue;

        // Exactly one of the following mode flags is set by the constructors below
        private boolean noValue;

        private boolean singleValue;

        private boolean betweenValue;

        private boolean listValue;

        private String typeHandler;

        public String getCondition() {
            return condition;
        }

        public Object getValue() {
            return value;
        }

        public Object getSecondValue() {
            return secondValue;
        }

        public boolean isNoValue() {
            return noValue;
        }

        public boolean isSingleValue() {
            return singleValue;
        }

        public boolean isBetweenValue() {
            return betweenValue;
        }

        public boolean isListValue() {
            return listValue;
        }

        public String getTypeHandler() {
            return typeHandler;
        }

        // Condition with no operand, e.g. "COL is null"
        protected Criterion(String condition) {
            super();
            this.condition = condition;
            this.typeHandler = null;
            this.noValue = true;
        }

        // Single-operand condition; a List value is flagged as listValue (for "in")
        protected Criterion(String condition, Object value, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.typeHandler = typeHandler;
            if (value instanceof List<?>) {
                this.listValue = true;
            } else {
                this.singleValue = true;
            }
        }

        protected Criterion(String condition, Object value) {
            this(condition, value, null);
        }

        // Two-operand condition for "between" / "not between"
        protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.secondValue = secondValue;
            this.typeHandler = typeHandler;
            this.betweenValue = true;
        }

        protected Criterion(String condition, Object value, Object secondValue) {
            this(condition, value, secondValue, null);
        }
    }

    /**
     * This class was generated by MyBatis Generator.
     * This class corresponds to the database table BR_BUDDY_IGNORE_LIST
     *
     * @mbggenerated do_not_delete_during_merge Sun Jul 09 00:52:33 EDT 2017
     */
    public static class Criteria extends GeneratedCriteria {

        protected Criteria() {
            super();
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.transforms.display; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.fasterxml.jackson.annotation.JsonGetter; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonValue; import com.google.auto.value.AutoValue; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import java.io.Serializable; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import javax.annotation.Nullable; import org.apache.beam.sdk.transforms.PTransform; import org.joda.time.Duration; import org.joda.time.Instant; import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.ISODateTimeFormat; /** * Static display data associated with a pipeline component. 
Display data is useful for * pipeline runner UIs and diagnostic dashboards to display details about * {@link PTransform PTransforms} that make up a pipeline. * * <p>Components specify their display data by implementing the {@link HasDisplayData} * interface. */ public class DisplayData implements Serializable { private static final DisplayData EMPTY = new DisplayData(Maps.<Identifier, Item>newHashMap()); private static final DateTimeFormatter TIMESTAMP_FORMATTER = ISODateTimeFormat.dateTime(); private final ImmutableMap<Identifier, Item> entries; private DisplayData(Map<Identifier, Item> entries) { this.entries = ImmutableMap.copyOf(entries); } /** * Default empty {@link DisplayData} instance. */ public static DisplayData none() { return EMPTY; } /** * Collect the {@link DisplayData} from a component. This will traverse all subcomponents * specified via {@link Builder#include} in the given component. Data in this component will be in * a namespace derived from the component. */ public static DisplayData from(HasDisplayData component) { checkNotNull(component, "component argument cannot be null"); InternalBuilder builder = new InternalBuilder(); builder.include(Path.root(), component); return builder.build(); } /** * Infer the {@link Type} for the given object. * * <p>Use this method if the type of metadata is not known at compile time. 
For example: * * <pre>{@code * {@literal @}Override * public void populateDisplayData(DisplayData.Builder builder) { * Optional<DisplayData.Type> type = DisplayData.inferType(foo); * if (type.isPresent()) { * builder.add(DisplayData.item("foo", type.get(), foo)); * } * } * } * </pre> * * @return The inferred {@link Type}, or null if the type cannot be inferred, */ @Nullable public static Type inferType(@Nullable Object value) { return Type.tryInferFrom(value); } @JsonValue public Collection<Item> items() { return entries.values(); } public Map<Identifier, Item> asMap() { return entries; } @Override public int hashCode() { return entries.hashCode(); } @Override public boolean equals(Object obj) { if (obj instanceof DisplayData) { DisplayData that = (DisplayData) obj; return Objects.equals(this.entries, that.entries); } return false; } @Override public String toString() { StringBuilder builder = new StringBuilder(); boolean isFirstLine = true; for (Item entry : entries.values()) { if (isFirstLine) { isFirstLine = false; } else { builder.append("\n"); } builder.append(entry); } return builder.toString(); } /** * Utility to build up display data from a component and its included * subcomponents. */ public interface Builder { /** * Register display data from the specified subcomponent at the given path. 
For example, a * {@link PTransform} which delegates to a user-provided function can implement * {@link HasDisplayData} on the function and include it from the {@link PTransform}: * * <pre><code>{@literal @Override} * public void populateDisplayData(DisplayData.Builder builder) { * super.populateDisplayData(builder); * * builder * // To register the class name of the userFn * .add(DisplayData.item("userFn", userFn.getClass())) * // To allow the userFn to register additional display data * .include("userFn", userFn); * } * </code></pre> * * <p>Using {@code include(path, subcomponent)} will associate each of the registered items with * the namespace of the {@code subcomponent} being registered, with the specified path element * relative to the current path. To register display data in the current path and namespace, * such as from a base class implementation, use * {@code subcomponent.populateDisplayData(builder)} instead. * * @see HasDisplayData#populateDisplayData(DisplayData.Builder) */ Builder include(String path, HasDisplayData subComponent); /** * Register display data from the specified component on behalf of the current component. * Display data items will be added with the subcomponent namespace but the current component * path. * * <p>This is useful for components which simply wrap other components and wish to retain the * display data from the wrapped component. Such components should implement * {@code populateDisplayData} as: * * <pre><code>{@literal @Override} * public void populateDisplayData(DisplayData.Builder builder) { * builder.delegate(wrapped); * } * </code></pre> */ Builder delegate(HasDisplayData component); /** * Register the given display item. */ Builder add(ItemSpec<?> item); /** * Register the given display item if the value is not null. */ Builder addIfNotNull(ItemSpec<?> item); /** * Register the given display item if the value is different than the specified default. 
*/ <T> Builder addIfNotDefault(ItemSpec<T> item, @Nullable T defaultValue); } /** * {@link Item Items} are the unit of display data. Each item is identified by a given path, key, * and namespace from the component the display item belongs to. * * <p>{@link Item Items} are registered via {@link DisplayData.Builder#add} * within {@link HasDisplayData#populateDisplayData} implementations. */ @AutoValue public abstract static class Item { /** * The path for the display item within a component hierarchy. */ @Nullable @JsonIgnore public abstract Path getPath(); /** * The namespace for the display item. The namespace defaults to the component which * the display item belongs to. */ @Nullable @JsonGetter("namespace") public abstract Class<?> getNamespace(); /** * The key for the display item. Each display item is created with a key and value * via {@link DisplayData#item}. */ @JsonGetter("key") public abstract String getKey(); /** * Retrieve the {@link DisplayData.Type} of display data. All metadata conforms to a * predefined set of allowed types. */ @JsonGetter("type") public abstract Type getType(); /** * Retrieve the value of the display item. The value is translated from the input to * {@link DisplayData#item} into a format suitable for display. Translation is based on the * item's {@link #getType() type}. */ @JsonGetter("value") public abstract Object getValue(); /** * Return the optional short value for an item, or null if none is provided. * * <p>The short value is an alternative display representation for items having a long display * value. For example, the {@link #getValue() value} for {@link Type#JAVA_CLASS} items contains * the full class name with package, while the short value contains just the class name. * * <p>A {@link #getValue() value} will be provided for each display item, and some types may * also provide a short-value. 
If a short value is provided, display data consumers may * choose to display it instead of or in addition to the {@link #getValue() value}. */ @JsonGetter("shortValue") @JsonInclude(JsonInclude.Include.NON_NULL) @Nullable public abstract Object getShortValue(); /** * Retrieve the optional label for an item. The label is a human-readable description of what * the metadata represents. UIs may choose to display the label instead of the item key. * * <p>If no label was specified, this will return {@code null}. */ @JsonGetter("label") @JsonInclude(JsonInclude.Include.NON_NULL) @Nullable public abstract String getLabel(); /** * Retrieve the optional link URL for an item. The URL points to an address where the reader * can find additional context for the display data. * * <p>If no URL was specified, this will return {@code null}. */ @JsonGetter("linkUrl") @JsonInclude(JsonInclude.Include.NON_NULL) @Nullable public abstract String getLinkUrl(); private static Item create(ItemSpec<?> spec, Path path) { checkNotNull(spec, "spec cannot be null"); checkNotNull(path, "path cannot be null"); Class<?> ns = checkNotNull(spec.getNamespace(), "namespace must be set"); return new AutoValue_DisplayData_Item(path, ns, spec.getKey(), spec.getType(), spec.getValue(), spec.getShortValue(), spec.getLabel(), spec.getLinkUrl()); } @Override public String toString() { return String.format("%s%s:%s=%s", getPath(), getNamespace().getName(), getKey(), getValue()); } } /** * Specifies an {@link Item} to register as display data. Each item is identified by a given * path, key, and namespace from the component the display item belongs to. * * <p>{@link Item Items} are registered via {@link DisplayData.Builder#add} * within {@link HasDisplayData#populateDisplayData} implementations. */ @AutoValue public abstract static class ItemSpec<T> implements Serializable { /** * The namespace for the display item. If unset, defaults to the component which * the display item is registered to. 
*/ @Nullable public abstract Class<?> getNamespace(); /** * The key for the display item. Each display item is created with a key and value * via {@link DisplayData#item}. */ public abstract String getKey(); /** * The {@link DisplayData.Type} of display data. All display data conforms to a predefined set * of allowed types. */ public abstract Type getType(); /** * The value of the display item. The value is translated from the input to * {@link DisplayData#item} into a format suitable for display. Translation is based on the * item's {@link #getType() type}. */ @Nullable public abstract Object getValue(); /** * The optional short value for an item, or {@code null} if none is provided. * * <p>The short value is an alternative display representation for items having a long display * value. For example, the {@link #getValue() value} for {@link Type#JAVA_CLASS} items contains * the full class name with package, while the short value contains just the class name. * * <p>A {@link #getValue() value} will be provided for each display item, and some types may * also provide a short-value. If a short value is provided, display data consumers may * choose to display it instead of or in addition to the {@link #getValue() value}. */ @Nullable public abstract Object getShortValue(); /** * The optional label for an item. The label is a human-readable description of what * the metadata represents. UIs may choose to display the label instead of the item key. */ @Nullable public abstract String getLabel(); /** * The optional link URL for an item. The URL points to an address where the reader * can find additional context for the display data. */ @Nullable public abstract String getLinkUrl(); private static <T> ItemSpec<T> create(String key, Type type, @Nullable T value) { return ItemSpec.<T>builder() .setKey(key) .setType(type) .setRawValue(value) .build(); } /** * Set the item {@link ItemSpec#getNamespace() namespace} from the given {@link Class}. 
* * <p>This method does not alter the current instance, but instead returns a new * {@link ItemSpec} with the namespace set. */ public ItemSpec<T> withNamespace(Class<?> namespace) { checkNotNull(namespace, "namespace argument cannot be null"); return toBuilder() .setNamespace(namespace) .build(); } /** * Set the item {@link Item#getLabel() label}. * * <p>Specifying a null value will clear the label if it was previously defined. * * <p>This method does not alter the current instance, but instead returns a new * {@link ItemSpec} with the label set. */ public ItemSpec<T> withLabel(@Nullable String label) { return toBuilder() .setLabel(label) .build(); } /** * Set the item {@link Item#getLinkUrl() link url}. * * <p>Specifying a null value will clear the link url if it was previously defined. * * <p>This method does not alter the current instance, but instead returns a new * {@link ItemSpec} with the link url set. */ public ItemSpec<T> withLinkUrl(@Nullable String url) { return toBuilder() .setLinkUrl(url) .build(); } /** * Creates a similar item to the current instance but with the specified value. * * <p>This should only be used internally. It is useful to compare the value of a * {@link DisplayData.Item} to the value derived from a specified input. 
*/ private ItemSpec<T> withValue(T value) { return toBuilder() .setRawValue(value) .build(); } @Override public String toString() { return String.format("%s:%s=%s", getNamespace(), getKey(), getValue()); } static <T> ItemSpec.Builder<T> builder() { return new AutoValue_DisplayData_ItemSpec.Builder<>(); } abstract ItemSpec.Builder<T> toBuilder(); @AutoValue.Builder abstract static class Builder<T> { public abstract ItemSpec.Builder<T> setKey(String key); public abstract ItemSpec.Builder<T> setNamespace(@Nullable Class<?> namespace); public abstract ItemSpec.Builder<T> setType(Type type); public abstract ItemSpec.Builder<T> setValue(@Nullable Object longValue); public abstract ItemSpec.Builder<T> setShortValue(@Nullable Object shortValue); public abstract ItemSpec.Builder<T> setLabel(@Nullable String label); public abstract ItemSpec.Builder<T> setLinkUrl(@Nullable String url); public abstract ItemSpec<T> build(); abstract Type getType(); ItemSpec.Builder<T> setRawValue(@Nullable T value) { FormattedItemValue formatted = getType().safeFormat(value); return this .setValue(formatted.getLongValue()) .setShortValue(formatted.getShortValue()); } } } /** * Unique identifier for a display data item within a component. * * <p>Identifiers are composed of: * * <ul> * <li>A {@link #getPath() path} based on the component hierarchy</li> * <li>The {@link #getKey() key} it is registered with</li> * <li>A {@link #getNamespace() namespace} generated from the class of the component which * registered the item.</li> * </ul> * * <p>Display data registered with the same key from different components will have different * namespaces and thus will both be represented in the composed {@link DisplayData}. If a * single component registers multiple metadata items with the same key, only the most recent * item will be retained; previous versions are discarded. 
   */
  @AutoValue
  public abstract static class Identifier {
    public abstract Path getPath();
    public abstract Class<?> getNamespace();
    public abstract String getKey();

    public static Identifier of(Path path, Class<?> namespace, String key) {
      return new AutoValue_DisplayData_Identifier(path, namespace, key);
    }

    @Override
    public String toString() {
      return String.format("%s%s:%s", getPath(), getNamespace(), getKey());
    }
  }

  /**
   * Structured path of registered display data within a component hierarchy.
   *
   * <p>Display data items registered directly by a component will have the {@link Path#root() root}
   * path. If the component {@link Builder#include includes} a sub-component, its display data will
   * be registered at the path specified. Each sub-component path is created by appending a child
   * element to the path of its parent component, forming a hierarchy.
   */
  public static class Path {
    // Immutable list of path elements, top-level child first; empty for root.
    private final ImmutableList<String> components;

    private Path(ImmutableList<String> components) {
      this.components = components;
    }

    /**
     * Path for display data registered by a top-level component.
     */
    public static Path root() {
      return new Path(ImmutableList.<String>of());
    }

    /**
     * Construct a path from an absolute component path hierarchy.
     *
     * <p>For the root path, use {@link Path#root()}.
     *
     * @param firstPath Path of the first sub-component.
     * @param paths Additional path components.
     */
    public static Path absolute(String firstPath, String... paths) {
      ImmutableList.Builder<String> builder = ImmutableList.builder();

      validatePathElement(firstPath);
      builder.add(firstPath);
      for (String path : paths) {
        validatePathElement(path);
        builder.add(path);
      }

      return new Path(builder.build());
    }

    /**
     * Hierarchy list of component paths making up the full path, starting with the top-level child
     * component path. For the {@link #root root} path, returns the empty list.
     */
    public List<String> getComponents() {
      return components;
    }

    /**
     * Extend the path by appending a sub-component path. The new path element is added to the end
     * of the path hierarchy.
     *
     * <p>Returns a new {@link Path} instance; the originating {@link Path} is not modified.
     */
    public Path extend(String path) {
      validatePathElement(path);
      return new Path(ImmutableList.<String>builder()
          .addAll(components.iterator())
          .add(path)
          .build());
    }

    // Path elements must be non-null and non-empty.
    private static void validatePathElement(String path) {
      checkNotNull(path);
      checkArgument(!"".equals(path), "path cannot be empty");
    }

    @Override
    public String toString() {
      StringBuilder b = new StringBuilder().append("[");
      Joiner.on("/").appendTo(b, components);
      b.append("]");
      return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
      return obj instanceof Path
          && Objects.equals(components, ((Path) obj).components);
    }

    @Override
    public int hashCode() {
      return components.hashCode();
    }
  }

  /**
   * Display data type.
   */
  public enum Type {
    STRING {
      @Override
      FormattedItemValue format(Object value) {
        return new FormattedItemValue(checkType(value, String.class, STRING));
      }
    },
    INTEGER {
      @Override
      FormattedItemValue format(Object value) {
        if (value instanceof Integer) {
          // Widen Integer to long and re-dispatch; INTEGER items are stored as Long.
          long l = ((Integer) value).longValue();
          return format(l);
        }
        return new FormattedItemValue(checkType(value, Long.class, INTEGER));
      }
    },
    FLOAT {
      @Override
      FormattedItemValue format(Object value) {
        return new FormattedItemValue(checkType(value, Number.class, FLOAT));
      }
    },
    BOOLEAN() {
      @Override
      FormattedItemValue format(Object value) {
        return new FormattedItemValue(checkType(value, Boolean.class, BOOLEAN));
      }
    },
    TIMESTAMP() {
      @Override
      FormattedItemValue format(Object value) {
        Instant instant = checkType(value, Instant.class, TIMESTAMP);
        return new FormattedItemValue((TIMESTAMP_FORMATTER.print(instant)));
      }
    },
    DURATION {
      @Override
      FormattedItemValue format(Object value) {
        Duration duration = checkType(value, Duration.class, DURATION);
        return new FormattedItemValue(duration.getMillis());
      }
    },
    JAVA_CLASS {
      @Override
      FormattedItemValue format(Object value) {
        Class<?> clazz = checkType(value,
            Class.class, JAVA_CLASS);
        return new FormattedItemValue(clazz.getName(), clazz.getSimpleName());
      }
    };

    // Verifies the runtime class of value against the Type's expected Java class.
    private static <T> T checkType(Object value, Class<T> clazz, DisplayData.Type expectedType) {
      if (!clazz.isAssignableFrom(value.getClass())) {
        throw new ClassCastException(String.format(
            "Value is not valid for DisplayData type %s: %s", expectedType, value));
      }

      @SuppressWarnings("unchecked") // type checked above.
      T typedValue = (T) value;
      return typedValue;
    }

    /**
     * Format the display data value into a long string representation, and optionally
     * a shorter representation for display.
     *
     * <p>Internal-only. Value objects can be safely cast to the expected Java type.
     */
    abstract FormattedItemValue format(Object value);

    /**
     * Safe version of {@link Type#format(Object)}, which checks for null input value and if so
     * returns a {@link FormattedItemValue} with null value properties.
     *
     * @see #format(Object)
     */
    FormattedItemValue safeFormat(@Nullable Object value) {
      if (value == null) {
        return FormattedItemValue.NULL_VALUES;
      }
      return format(value);
    }

    // Best-effort mapping from a runtime value to its display data Type;
    // returns null when the value's class has no corresponding Type.
    @Nullable
    private static Type tryInferFrom(@Nullable Object value) {
      if (value instanceof Integer || value instanceof Long) {
        return INTEGER;
      } else if (value instanceof Double || value instanceof Float) {
        return FLOAT;
      } else if (value instanceof Boolean) {
        return BOOLEAN;
      } else if (value instanceof Instant) {
        return TIMESTAMP;
      } else if (value instanceof Duration) {
        return DURATION;
      } else if (value instanceof Class<?>) {
        return JAVA_CLASS;
      } else if (value instanceof String) {
        return STRING;
      } else {
        return null;
      }
    }
  }

  // Holder pairing a full ("long") formatted value with an optional shorter form.
  static class FormattedItemValue {
    /**
     * Default instance which contains null values.
     */
    private static final FormattedItemValue NULL_VALUES = new FormattedItemValue(null);

    private final Object shortValue;
    private final Object longValue;

    private FormattedItemValue(Object longValue) {
      this(longValue, null);
    }

    private FormattedItemValue(Object longValue, Object shortValue) {
      this.longValue = longValue;
      this.shortValue = shortValue;
    }

    Object getLongValue() {
      return this.longValue;
    }

    Object getShortValue() {
      return this.shortValue;
    }
  }

  // Builder implementation that walks the component hierarchy, collecting items
  // keyed by (path, namespace, key) and breaking cycles via identity tracking.
  private static class InternalBuilder implements Builder {
    private final Map<Identifier, Item> entries;
    private final Set<HasDisplayData> visitedComponents;
    private final Map<Path, HasDisplayData> visitedPathMap;

    // Context of the component currently being populated.
    private Path latestPath;
    private Class<?> latestNs;

    private InternalBuilder() {
      this.entries = Maps.newHashMap();
      // Identity semantics: cycle detection must not depend on components' equals().
      this.visitedComponents = Sets.newIdentityHashSet();
      this.visitedPathMap = Maps.newHashMap();
    }

    @Override
    public Builder include(String path, HasDisplayData subComponent) {
      checkNotNull(subComponent, "subComponent argument cannot be null");
      checkNotNull(path, "path argument cannot be null");

      Path absolutePath = latestPath.extend(path);

      HasDisplayData existingComponent = visitedPathMap.get(absolutePath);
      if (existingComponent != null) {
        throw new IllegalArgumentException(String.format("Specified path '%s' already used for "
            + "subcomponent %s. Subcomponents must be included using unique paths.",
            path, existingComponent));
      }

      return include(absolutePath, subComponent);
    }

    @Override
    public Builder delegate(HasDisplayData component) {
      checkNotNull(component);
      return include(latestPath, component);
    }

    private Builder include(Path path, HasDisplayData subComponent) {
      if (visitedComponents.contains(subComponent)) {
        // Component previously registered; ignore in order to break cyclic dependencies
        return this;
      }

      // New component; add it.
      visitedComponents.add(subComponent);
      visitedPathMap.put(path, subComponent);
      Class<?> namespace = subComponent.getClass();

      // Save the current (path, namespace) context, swap in the sub-component's
      // context while it populates its display data, then restore.
      Path prevPath = latestPath;
      Class<?> prevNs = latestNs;
      latestPath = path;
      latestNs = namespace;

      try {
        subComponent.populateDisplayData(this);
      } catch (PopulateDisplayDataException e) {
        // Don't re-wrap exceptions recursively.
        throw e;
      } catch (Throwable e) {
        String msg = String.format("Error while populating display data for component: %s",
            namespace.getName());
        throw new PopulateDisplayDataException(msg, e);
      }

      latestPath = prevPath;
      latestNs = prevNs;

      return this;
    }

    /**
     * Marker exception class for exceptions encountered while populating display data.
     */
    private static class PopulateDisplayDataException extends RuntimeException {
      PopulateDisplayDataException(String message, Throwable cause) {
        super(message, cause);
      }
    }

    @Override
    public Builder add(ItemSpec<?> item) {
      checkNotNull(item, "Input display item cannot be null");
      return addItemIf(true, item);
    }

    @Override
    public Builder addIfNotNull(ItemSpec<?> item) {
      checkNotNull(item, "Input display item cannot be null");
      return addItemIf(item.getValue() != null, item);
    }

    @Override
    public <T> Builder addIfNotDefault(ItemSpec<T> item, @Nullable T defaultValue) {
      checkNotNull(item, "Input display item cannot be null");
      // Compare against a copy of the item carrying the default value; only add
      // when the actual value differs from the default.
      ItemSpec<T> defaultItem = item.withValue(defaultValue);
      return addItemIf(!Objects.equals(item, defaultItem), item);
    }

    private Builder addItemIf(boolean condition, ItemSpec<?> spec) {
      if (!condition) {
        return this;
      }

      checkNotNull(spec, "Input display item cannot be null");
      checkNotNull(spec.getValue(), "Input display value cannot be null");
      if (spec.getNamespace() == null) {
        // Default the namespace to the component currently being populated.
        spec = spec.withNamespace(latestNs);
      }
      Item item = Item.create(spec, latestPath);

      Identifier id = Identifier.of(item.getPath(), item.getNamespace(), item.getKey());
      checkArgument(!entries.containsKey(id),
          "Display data key (%s) is not unique within the specified path and namespace: %s%s.",
          item.getKey(), item.getPath(), item.getNamespace());

      entries.put(id, item);

      return this;
    }

    private DisplayData build() {
      return new DisplayData(this.entries);
    }
  }

  /**
   * Create a display item for the specified key and string value.
   */
  public static ItemSpec<String> item(String key, @Nullable String value) {
    return item(key, Type.STRING, value);
  }

  /**
   * Create a display item for the specified key and integer value.
   */
  public static ItemSpec<Integer> item(String key, @Nullable Integer value) {
    return item(key, Type.INTEGER, value);
  }

  /**
   * Create a display item for the specified key and integer value.
   */
  public static ItemSpec<Long> item(String key, @Nullable Long value) {
    return item(key, Type.INTEGER, value);
  }

  /**
   * Create a display item for the specified key and floating point value.
   */
  public static ItemSpec<Float> item(String key, @Nullable Float value) {
    return item(key, Type.FLOAT, value);
  }

  /**
   * Create a display item for the specified key and floating point value.
   */
  public static ItemSpec<Double> item(String key, @Nullable Double value) {
    return item(key, Type.FLOAT, value);
  }

  /**
   * Create a display item for the specified key and boolean value.
   */
  public static ItemSpec<Boolean> item(String key, @Nullable Boolean value) {
    return item(key, Type.BOOLEAN, value);
  }

  /**
   * Create a display item for the specified key and timestamp value.
   */
  public static ItemSpec<Instant> item(String key, @Nullable Instant value) {
    return item(key, Type.TIMESTAMP, value);
  }

  /**
   * Create a display item for the specified key and duration value.
   */
  public static ItemSpec<Duration> item(String key, @Nullable Duration value) {
    return item(key, Type.DURATION, value);
  }

  /**
   * Create a display item for the specified key and class value.
   */
  public static <T> ItemSpec<Class<T>> item(String key, @Nullable Class<T> value) {
    return item(key, Type.JAVA_CLASS, value);
  }

  /**
   * Create a display item for the specified key, type, and value.
This method should be used * if the type of the input value can only be determined at runtime. Otherwise, * {@link HasDisplayData} implementors should call one of the typed factory methods, such as * {@link #item(String, String)} or {@link #item(String, Integer)}. * * @throws ClassCastException if the value cannot be formatted as the given type. * * @see Type#inferType(Object) */ public static <T> ItemSpec<T> item(String key, Type type, @Nullable T value) { checkNotNull(key, "key argument cannot be null"); checkNotNull(type, "type argument cannot be null"); return ItemSpec.create(key, type, value); } }
/* * Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.event.simulator.core.internal.ds; import org.apache.axis2.AxisFault; import org.apache.axis2.engine.AxisConfiguration; import org.apache.commons.io.FileUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.databridge.commons.Attribute; import org.wso2.carbon.databridge.commons.AttributeType; import org.wso2.carbon.databridge.commons.StreamDefinition; import org.wso2.carbon.event.simulator.core.*; import org.wso2.carbon.event.stream.core.EventStreamService; import org.wso2.carbon.event.stream.core.exception.EventStreamConfigurationException; import org.wso2.carbon.ndatasource.common.DataSourceException; import org.wso2.carbon.utils.CarbonUtils; import org.wso2.carbon.ndatasource.core.CarbonDataSource; import javax.activation.DataHandler; import javax.sql.DataSource; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import 
javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import java.io.*; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; public class CarbonEventSimulator implements EventSimulator { private static final Log log = LogFactory.getLog(CarbonEventSimulator.class); private HashMap<String, EventStreamProducer> eventProducerMap; private HashMap<Integer, HashMap<String, CSVFileInfo>> tenantSpecificCSVFileInfoMap; private HashMap<Integer, HashMap<String, DataSourceTableAndStreamInfo>> tenantSpecificDataSourceInfoMap; public CarbonEventSimulator() { eventProducerMap = new HashMap<String, EventStreamProducer>(); tenantSpecificCSVFileInfoMap = new HashMap<Integer, HashMap<String, CSVFileInfo>>(); tenantSpecificDataSourceInfoMap = new HashMap<Integer, HashMap<String, DataSourceTableAndStreamInfo>>(); } public Collection<StreamDefinition> getAllEventStreamDefinitions() { try { EventStreamService eventStreamService = EventSimulatorValueHolder.getEventStreamService(); int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); Collection<StreamDefinition> collection = new ArrayList<StreamDefinition>(); for(StreamDefinition streamDefinition: eventStreamService.getAllStreamDefinitions()) { collection.add(streamDefinition); } return collection; } catch (Exception e) { log.error(e); } return null; } @Override public void sendEvent(Event eventDetail) throws AxisFault { EventStreamService eventstreamservice = EventSimulatorValueHolder.getEventStreamService(); StreamDefinition streamDefinition = eventDetail.getStreamDefinition(); String[] attributeValues = eventDetail.getAttributeValues(); Object[] dataObjects = new Object[attributeValues.length]; List<Attribute> streamAttributeList = 
getStreamAttributeList(eventDetail.getStreamDefinition()); if (validateAttributeValues(streamAttributeList, attributeValues)) { for (int i = 0; i < dataObjects.length; i++) { Attribute attribute = streamAttributeList.get(i); String attributeType = attribute.getType().toString(); if (attributeType.equals(EventSimulatorConstant.STRING)) { dataObjects[i] = attributeValues[i]; } else if (attributeType.equals(EventSimulatorConstant.INT)) { try { int val = Integer.parseInt(attributeValues[i]); dataObjects[i] = val; } catch (NumberFormatException e) { throw new AxisFault("Incorrect value types for the attribute - " + attribute.getName() + ", expected " + attribute.getType().toString() + " : " + e.getMessage(), e); } } else if (attributeType.equals(EventSimulatorConstant.LONG)) { try { long val = Long.parseLong(attributeValues[i]); dataObjects[i] = val; } catch (NumberFormatException e) { throw new AxisFault("Incorrect value types for the attribute - " + attribute.getName() + ", expected " + attribute.getType().toString() + " : " + e.getMessage(), e); } } else if (attributeType.equals(EventSimulatorConstant.DOUBLE)) { try { double val = Double.parseDouble(attributeValues[i]); dataObjects[i] = val; } catch (NumberFormatException e) { throw new AxisFault("Incorrect value types for the attribute - " + attribute.getName() + ", expected " + attribute.getType().toString() + " : " + e.getMessage(), e); } } else if (attributeType.equals(EventSimulatorConstant.FLOAT)) { try { float val = Float.parseFloat(attributeValues[i]); dataObjects[i] = val; } catch (NumberFormatException e) { throw new AxisFault("Incorrect value types for the attribute - " + attribute.getName() + ", expected " + attribute.getType().toString() + " : " + e.getMessage(), e); } } else if (attributeType.equals(EventSimulatorConstant.BOOLEAN)) { if (!Boolean.parseBoolean(attributeValues[i])) { throw new AxisFault("Incorrect value types for the attribute - " + attribute.getName() + ", expected " + 
attribute.getType().toString()); } else { boolean val = Boolean.parseBoolean(attributeValues[i]); dataObjects[i] = val; } } } } if (eventProducerMap.get(streamDefinition.getStreamId()) != null) { EventStreamProducer eventProducer = eventProducerMap.get(streamDefinition.getStreamId()); eventProducer.sendData(dataObjects); } else { EventStreamProducer eventStreamProducer = new EventStreamProducer(); eventStreamProducer.setStreamID(streamDefinition.getStreamId()); int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); try { eventstreamservice.subscribe(eventStreamProducer); } catch (EventStreamConfigurationException e) { log.error(e); } eventProducerMap.put(streamDefinition.getStreamId(), eventStreamProducer); eventStreamProducer.sendData(dataObjects); } } @Override public List<CSVFileInfo> getAllCSVFileInfo() { int tenantID = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); HashMap<String, CSVFileInfo> csvFileInfoMap = tenantSpecificCSVFileInfoMap.get(tenantID); if (csvFileInfoMap != null) { return new ArrayList<CSVFileInfo>(csvFileInfoMap.values()); }else { return null; } } public void addCSVFileInfo(CSVFileInfo csvFileInfo) { int tenantID = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); if (tenantSpecificCSVFileInfoMap.containsKey(tenantID)) { HashMap<String, CSVFileInfo> csvFileInfoMap = tenantSpecificCSVFileInfoMap.get(tenantID); csvFileInfoMap.put(csvFileInfo.getFileName(), csvFileInfo); } else { HashMap<String, CSVFileInfo> csvFileMap = new HashMap<String, CSVFileInfo>(); csvFileMap.put(csvFileInfo.getFileName(), csvFileInfo); tenantSpecificCSVFileInfoMap.put(tenantID, csvFileMap); } } public void addEventMappingConfiguration(String fileName, String streamId, String separateChar) { int tenantID = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); HashMap<String, CSVFileInfo> csvFileInfoMap = tenantSpecificCSVFileInfoMap.get(tenantID); CSVFileInfo csvFileInfo = 
                csvFileInfoMap.get(fileName);
        csvFileInfo.setStreamID(streamId);
        csvFileInfo.setSeparateCharacter(separateChar);
    }

    /**
     * Writes a deployment XML describing the CSV-to-stream mapping (file name,
     * stream id, separator) next to the deployed CSV and registers the mapping
     * in memory via {@link #addEventMappingConfiguration}.
     */
    @Override
    public void createConfigurationXML(String fileName, String streamId, String separateChar,
                                       AxisConfiguration axisConfiguration) {
        String repo = axisConfiguration.getRepository().getPath();
        String path = repo + EventSimulatorConstant.DEPLOY_DIRECTORY_PATH;

        try {
            DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
            DocumentBuilder docBuilder = docFactory.newDocumentBuilder();

            Document doc = docBuilder.newDocument();
            Element rootElement = doc.createElement(EventSimulatorConstant.ROOT_ELEMENT_NAME);
            rootElement.setAttribute("type", "csv");
            doc.appendChild(rootElement);

            Element csvFileName = doc.createElement(EventSimulatorConstant.FILE_ELEMENT);
            csvFileName.appendChild(doc.createTextNode(fileName));
            rootElement.appendChild(csvFileName);

            Element streamID = doc.createElement(EventSimulatorConstant.STREAM_ID_ELEMENT);
            streamID.appendChild(doc.createTextNode(streamId));
            rootElement.appendChild(streamID);

            Element separateCharacter = doc.createElement(EventSimulatorConstant.SEPARATE_CHAR_ELEMENT);
            separateCharacter.appendChild(doc.createTextNode(separateChar));
            rootElement.appendChild(separateCharacter);

            TransformerFactory transformerFactory = TransformerFactory.newInstance();
            Transformer transformer = transformerFactory.newTransformer();
            DOMSource source = new DOMSource(doc);
            // XML file shares the CSV's base name (the 4-char ".csv" suffix stripped).
            String absolutePath = path + File.separator +
                    fileName.substring(0, fileName.length() - 4) +
                    EventSimulatorConstant.CONFIGURATION_XML_PREFIX;
            StreamResult result = new StreamResult(new File(absolutePath));
            transformer.transform(source, result);

            addEventMappingConfiguration(fileName, streamId, separateChar);
        } catch (ParserConfigurationException e) {
            log.error(e);
        } catch (TransformerException e) {
            log.error(e);
        }
    }

    /**
     * Deletes the named CSV file and its companion mapping XML from the deploy
     * directory, removing the in-memory record on success.
     *
     * @throws AxisFault if the CSV file could not be deleted
     */
    @Override
    public void deleteFile(String fileName, AxisConfiguration axisConfiguration) throws AxisFault {
        int tenantID =
                PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
        HashMap<String, CSVFileInfo> csvFileInfoMap = tenantSpecificCSVFileInfoMap.get(tenantID);
        CSVFileInfo csvFileInfo = csvFileInfoMap.get(fileName);

        String repo = axisConfiguration.getRepository().getPath();
        String path = repo + EventSimulatorConstant.DEPLOY_DIRECTORY_PATH;
        // Companion XML shares the CSV's base name (".csv" stripped).
        String xmlFileName = csvFileInfo.getFileName().substring(0,
                csvFileInfo.getFileName().length() - 4) + EventSimulatorConstant.CONFIGURATION_XML_PREFIX;
        String xmlFilePath = path + File.separator + xmlFileName;

        File file = new File(csvFileInfo.getFilePath());
        File xmlFile = new File(xmlFilePath);

        if (file.delete()) {
            csvFileInfoMap.remove(fileName);
        } else {
            throw new AxisFault("Failed to delete the file .." + csvFileInfo.getFileName());
        }
        if (xmlFile.exists()) {
            xmlFile.delete();
        }
    }

    /** Streams events from the named CSV file asynchronously on a new thread. */
    @Override
    public void sendEvents(String fileName) throws AxisFault {
        int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
        Thread eventCreator = new Thread(new EventCreation(fileName, tenantId));
        eventCreator.start();
    }

    /**
     * Accepts uploaded CSV files and writes them into the deployment directory,
     * creating the temp and deploy directories as needed.
     *
     * @throws AxisFault on invalid repository, file name or file type, or write failure
     */
    @Override
    public void uploadService(UploadedFileItem[] fileItems, AxisConfiguration axisConfiguration)
            throws AxisFault {
        String repo = axisConfiguration.getRepository().getPath();

        if (CarbonUtils.isURL(repo)) {
            throw new AxisFault("URL Repositories are not supported: " + repo);
        }
        String csvDirectory = repo + EventSimulatorConstant.DEPLOY_DIRECTORY_PATH;
        String csvTemp = CarbonUtils.getCarbonHome() + EventSimulatorConstant.TEMP_DIR_PATH;

        File csvTempDir = new File(csvTemp);
        if (!csvTempDir.exists() && !csvTempDir.mkdirs()) {
            throw new AxisFault("Fail to create the directory: " + csvTempDir.getAbsolutePath());
        }

        File csvDir = new File(csvDirectory);
        if (!csvDir.exists() && !csvDir.mkdirs()) {
            throw new AxisFault("Fail to create the directory: " + csvDir.getAbsolutePath());
        }

        for (UploadedFileItem uploadedFile : fileItems) {
            String fileName = uploadedFile.getFileName();
            if (fileName == null ||
                    fileName.equals("")) {
                throw new AxisFault("Invalid file name. File name is not available");
            }

            if (uploadedFile.getFileType().equals("csv")) {
                try {
                    writeResource(uploadedFile.getDataHandler(), csvTemp, fileName, csvDir);
                } catch (IOException e) {
                    throw new AxisFault("IOError: Writing resource failed.", e);
                }
            } else {
                throw new AxisFault("Invalid file type : " + uploadedFile.getFileType() + " ." +
                        "csv" + " file type is expected");
            }
        }
    }

    /**
     * Copies the uploaded content to a temp file first and then to the deploy
     * directory; the temp file is deleted afterwards (or on JVM exit).
     */
    private void writeResource(DataHandler dataHandler, String destPath, String fileName,
                               File csvDest) throws IOException {
        File tempDestFile = new File(destPath, fileName);
        FileOutputStream fos = null;
        File destFile = new File(csvDest, fileName);
        try {
            fos = new FileOutputStream(tempDestFile);
            /* File stream is copied to a temp directory in order handle hot deployment issue
               occurred in windows */
            dataHandler.writeTo(fos);
            FileUtils.copyFile(tempDestFile, destFile);
        } catch (FileNotFoundException e) {
            log.error("Cannot find the file", e);
            throw e;
        } catch (IOException e) {
            log.error("IO error.");
            throw e;
        } finally {
            close(fos);
        }
        boolean isDeleted = tempDestFile.delete();
        if (!isDeleted) {
            log.warn("temp file: " + tempDestFile.getAbsolutePath() +
                    " deletion failed, scheduled deletion on server exit.");
            tempDestFile.deleteOnExit();
        }
    }

    /** Quietly closes the given stream, logging (not propagating) any failure. */
    public static void close(Closeable c) {
        if (c == null) {
            return;
        }
        try {
            c.close();
        } catch (IOException e) {
            log.warn("Can't close file streams.", e);
        }
    }

    /**
     * Flattens a stream definition's meta, correlation and payload attributes
     * into a single ordered list.
     */
    private List<Attribute> getStreamAttributeList(StreamDefinition streamDefinition) {
        List<Attribute> attributeList = new ArrayList<Attribute>();
        if (streamDefinition != null) {
            // NOTE(review): the i/j/k counters below are incremented but never read.
            int i = 0;
            if (streamDefinition.getMetaData() != null) {
                for (Attribute attribute : streamDefinition.getMetaData()) {
                    attributeList.add(attribute);
                    i++;
                }
            }
            int j = 0;
            if (streamDefinition.getCorrelationData() != null) {
                for (Attribute attribute : streamDefinition.getCorrelationData()) {
                    attributeList.add(attribute);
                    j++;
                }
            }
            int k = 0;
            if (streamDefinition.getPayloadData() !=
                            String[] attributeValueList = eventValues.split(fileInfo.getSeparateCharacter());
                            Event event = new Event();
                            event.setStreamDefinition(streamDefinition);
                            event.setAttributeValues(attributeValueList);
                            sendEvent(event);
                        } catch (Exception e) {
                            // A bad row is logged and skipped; simulation continues.
                            log.error("Error in row " + rowNumber + "-failed to create an event " + e);
                            rowNumber++;
                            continue;
                        }
                        rowNumber++;
                    }
                } catch (IOException e) {
                    log.error(e);
                } finally {
                    try {
                        if (fis != null) {
                            fis.close();
                        }
                        if (bis != null) {
                            bis.close();
                        }
                        if (dis != null) {
                            dis.close();
                        }
                    } catch (IOException ex) {
                        ex.printStackTrace();
                    }
                }
            } finally {
                PrivilegedCarbonContext.endTenantFlow();
            }
        }
    }

    /** Registers a data-source/table/stream mapping for the current tenant. */
    public void addDataSourceTableAndStreamInfo(DataSourceTableAndStreamInfo dataSourceTableAndStreamInfo) {
        int tenantID = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
        if (tenantSpecificDataSourceInfoMap.containsKey(tenantID)) {
            HashMap<String, DataSourceTableAndStreamInfo> dataSourceTableAndStreamInfoMap =
                    tenantSpecificDataSourceInfoMap.get(tenantID);
            dataSourceTableAndStreamInfoMap.put(dataSourceTableAndStreamInfo.getConfigurationName(),
                    dataSourceTableAndStreamInfo);
        } else {
            HashMap<String, DataSourceTableAndStreamInfo> dataSourceTableAndStreamInfoMap =
                    new HashMap<String, DataSourceTableAndStreamInfo>();
            dataSourceTableAndStreamInfoMap.put(dataSourceTableAndStreamInfo.getConfigurationName(),
                    dataSourceTableAndStreamInfo);
            tenantSpecificDataSourceInfoMap.put(tenantID, dataSourceTableAndStreamInfoMap);
        }
    }

    /** Returns the data-source mappings registered for the current tenant, or null if none. */
    @Override
    public List<DataSourceTableAndStreamInfo> getAllDataSourceInfo() {
        int tenantID = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
        HashMap<String, DataSourceTableAndStreamInfo> DataSourceTableAndStreamInfoMap =
                tenantSpecificDataSourceInfoMap.get(tenantID);
        if (DataSourceTableAndStreamInfoMap != null) {
            return new ArrayList<DataSourceTableAndStreamInfo>(DataSourceTableAndStreamInfoMap.values());
        } else {
            return null;
        }
    }

    /**
     * Builds and deploys an XML configuration describing a database-backed
     * simulation (data source, table, target stream and column mappings) from
     * the given JSON description.
     */
    @Override
    public void createConfigurationXMLForDataSource(String
            dataSourceConfigAndEventStreamInfo, AxisConfiguration axisConfiguration) throws AxisFault {
        String repo = axisConfiguration.getRepository().getPath();
        String path = repo + EventSimulatorConstant.DATA_SOURCE_DEPLOY_DIRECTORY_PATH;
        try {
            JSONObject jsonConvertedInfo = new JSONObject(dataSourceConfigAndEventStreamInfo);
            DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
            DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
            // Deployed file is named after the configuration name.
            String fileName = jsonConvertedInfo.getString(EventSimulatorConstant.CONFIGURATION_NAME);

            Document doc = docBuilder.newDocument();
            Element rootElement = doc.createElement(EventSimulatorConstant.ROOT_ELEMENT_NAME);
            rootElement.setAttribute("type", "database");
            rootElement.setAttribute("name",
                    jsonConvertedInfo.getString(EventSimulatorConstant.CONFIGURATION_NAME));
            doc.appendChild(rootElement);

            Element dataSourceName = doc.createElement(EventSimulatorConstant.DATA_SOURCE_NAME);
            dataSourceName.appendChild(doc.createTextNode(
                    jsonConvertedInfo.getString(EventSimulatorConstant.DATA_SOURCE_NAME)));
            rootElement.appendChild(dataSourceName);

            Element tableName = doc.createElement(EventSimulatorConstant.TABLE_NAME);
            tableName.appendChild(doc.createTextNode(
                    jsonConvertedInfo.getString(EventSimulatorConstant.TABLE_NAME)));
            rootElement.appendChild(tableName);

            Element streamNameID = doc.createElement(EventSimulatorConstant.EVENT_STREAM_ID);
            streamNameID.appendChild(doc.createTextNode(
                    jsonConvertedInfo.getString(EventSimulatorConstant.EVENT_STREAM_ID)));
            rootElement.appendChild(streamNameID);

            Element columnMappings = doc.createElement("columnMappings");
            JSONArray databaseColumnAndStreamAttributeInfo1 =
                    jsonConvertedInfo.getJSONArray(EventSimulatorConstant.DATABASE_COLUMNS_AND_STREAM_ATTRIBUTE_INFO);

            for (int i = 0; i < databaseColumnAndStreamAttributeInfo1.length(); i++) {
                JSONObject temp = databaseColumnAndStreamAttributeInfo1.getJSONObject(i);
                Element columnMapping = doc.createElement("columnMapping");
                columnMapping.setAttribute(EventSimulatorConstant.COLUMN_NAME,
                        temp.getString(EventSimulatorConstant.COLUMN_NAME));
                columnMapping.setAttribute(EventSimulatorConstant.STREAM_ATTRIBUTE_NAME,
                        temp.getString(EventSimulatorConstant.STREAM_ATTRIBUTE_NAME));
                columnMappings.appendChild(columnMapping);
            }
            rootElement.appendChild(columnMappings);

            TransformerFactory transformerFactory = TransformerFactory.newInstance();
            Transformer transformer = transformerFactory.newTransformer();
            DOMSource source = new DOMSource(doc);
            String absolutePath = path + File.separator + fileName +
                    EventSimulatorConstant.DATA_SOURCE_CONFIGURATION_XML_PREFIX;
            StreamResult result = new StreamResult(new File(absolutePath));
            // Ensure the deployment directory exists before writing the XML.
            uploadXMLFile(axisConfiguration);
            transformer.transform(source, result);
            //addEventMappingDBConfiguration(fileName, streamId, separateChar);
        } catch (ParserConfigurationException e) {
            log.error(e);
        } catch (TransformerException e) {
            log.error(e);
        } catch (JSONException e) {
            log.error(e);
            e.printStackTrace();
        }
    }

    /** Ensures the deployment directory for configuration XML files exists. */
    //@Override
    public void uploadXMLFile(AxisConfiguration axisConfiguration) throws AxisFault {
        String repo = axisConfiguration.getRepository().getPath();

        if (CarbonUtils.isURL(repo)) {
            throw new AxisFault("URL Repositories are not supported: " + repo);
        }
        String xmlDirectory = repo + EventSimulatorConstant.DEPLOY_DIRECTORY_PATH;

        File csvDir = new File(xmlDirectory);
        if (!csvDir.exists() && !csvDir.mkdirs()) {
            throw new AxisFault("Fail to create the directory: " + csvDir.getAbsolutePath());
        }
       /* for (UploadedFileItem uploadedFile : fileItems) {
            String fileName = uploadedFile.getFileName();
            if (fileName == null || fileName.equals("")) {
                throw new AxisFault("Invalid file name. File name is not available");
            }

            if (uploadedFile.getFileType().equals("csv")) {
                try {
                    writeResource(uploadedFile.getDataHandler(), csvTemp, fileName, csvDir);
                } catch (IOException e) {
                    throw new AxisFault("IOError: Writing resource failed.", e);
                }
            } else {
                throw new AxisFault("Invalid file type : " + uploadedFile.getFileType() + " ." +
                        "csv" + " file type is expected");
            }
        }*/
    }

    /**
     * Deletes the deployed data-source configuration XML identified by fileName
     * and removes its in-memory record.
     */
    @Override
    public void deleteDBConfigFile(String fileName, AxisConfiguration axisConfiguration)
            throws AxisFault {
        int tenantID = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
        HashMap<String, DataSourceTableAndStreamInfo> dataSourceTableAndStreamInfoMap =
                tenantSpecificDataSourceInfoMap.get(tenantID);
        // Map keys are configuration names, so strip the XML suffix first.
        fileName = fileName.replace(EventSimulatorConstant.DATA_SOURCE_CONFIGURATION_XML_PREFIX, "");
        DataSourceTableAndStreamInfo dataSourceTableAndStreamInfo =
                dataSourceTableAndStreamInfoMap.get(fileName);

        String repo = axisConfiguration.getRepository().getPath();
        String path = repo + EventSimulatorConstant.DEPLOY_DIRECTORY_PATH;
        String xmlFilePath = path + File.separator + dataSourceTableAndStreamInfo.getFileName();
        File xmlFile = new File(xmlFilePath);

        if (xmlFile.exists()) {
            dataSourceTableAndStreamInfoMap.remove(fileName);
            xmlFile.delete();
        }
    }

    /**
     * Builds a JSON description of the named data-source configuration: stream
     * id, data source, table, configuration name and the column-to-attribute
     * mappings with their types.
     */
    @Override
    public String getEventStreamInfo(String fileName) throws AxisFault {
        int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantId(tenantId, true);
        fileName = fileName.replace(EventSimulatorConstant.DATA_SOURCE_CONFIGURATION_XML_PREFIX, "");
        HashMap<String, DataSourceTableAndStreamInfo> dataSourceInfoMap =
                tenantSpecificDataSourceInfoMap.get(tenantId);
        DataSourceTableAndStreamInfo dataSourceTableAndStreamInfo = dataSourceInfoMap.get(fileName);

        String jsonFormattedAllInfo = "{\"" + EventSimulatorConstant.EVENT_STREAM_ID + "\":\"" +
                dataSourceTableAndStreamInfo.getEventStreamID() +
"\",\""+EventSimulatorConstant.DATA_SOURCE_NAME+"\":\"" + dataSourceTableAndStreamInfo.getDataSourceName() + "\",\""+EventSimulatorConstant.TABLE_NAME+"\":\"" + dataSourceTableAndStreamInfo.getTableName() + "\", \""+EventSimulatorConstant.CONFIGURATION_NAME+"\":\"" + dataSourceTableAndStreamInfo.getConfigurationName() + "\",\""+EventSimulatorConstant.DATABASE_COLUMNS_AND_STREAM_ATTRIBUTE_INFO+"\":["; String jsonAttribute = ""; String[][] columnAndStreamAttributeNames = dataSourceTableAndStreamInfo.getDataSourceColumnsAndTypes(); StreamDefinition streamDefinition = getStreamDefinition(dataSourceTableAndStreamInfo.getEventStreamID()); List<Attribute> metaAttributeList = streamDefinition.getMetaData(); List<Attribute> correlationAttributeList = streamDefinition.getCorrelationData(); List<Attribute> payloadAttributeList = streamDefinition.getPayloadData(); int q=0, r=0; //columnAndStreamAttributeNames[0] includes column names //columnAndStreamAttributeNames[1] includes mapping attribute names for columns for(int i=0; i<columnAndStreamAttributeNames[0].length; i++){ if(i<metaAttributeList.size()){ for(int j=0; j<columnAndStreamAttributeNames[0].length; j++){ if(metaAttributeList.get(i).getName().equalsIgnoreCase(columnAndStreamAttributeNames[1][j])){ jsonAttribute = jsonAttribute + "{\""+EventSimulatorConstant.STREAM_ATTRIBUTE_NAME+"\":\"" + metaAttributeList.get(i).getName() + "\",\""+EventSimulatorConstant.COLUMN_NAME+"\":\"" + columnAndStreamAttributeNames[0][j] + "\",\""+EventSimulatorConstant.COLUMN_TYPE+"\":\"" + metaAttributeList.get(i).getType() + "\"},"; } } q=0; }else if(i>=metaAttributeList.size() && q<correlationAttributeList.size()){ for(int j=0; j<columnAndStreamAttributeNames[0].length; j++){ if(correlationAttributeList.get(q).getName().equalsIgnoreCase(columnAndStreamAttributeNames[1][j])){ jsonAttribute = jsonAttribute + "{\""+EventSimulatorConstant.STREAM_ATTRIBUTE_NAME+"\":\"" + correlationAttributeList.get(q).getName() + 
"\",\""+EventSimulatorConstant.COLUMN_NAME+"\":\"" + columnAndStreamAttributeNames[0][j] + "\",\""+EventSimulatorConstant.COLUMN_TYPE+"\":\"" + correlationAttributeList.get(q).getType() + "\"},"; } } q++; r=0; }else{ for(int j=0; j<columnAndStreamAttributeNames[0].length; j++){ if(payloadAttributeList.get(r).getName().equalsIgnoreCase(columnAndStreamAttributeNames[1][j])){ jsonAttribute = jsonAttribute + "{\""+EventSimulatorConstant.STREAM_ATTRIBUTE_NAME+"\":\"" + payloadAttributeList.get(r).getName() + "\",\""+EventSimulatorConstant.COLUMN_NAME+"\":\"" + columnAndStreamAttributeNames[0][j] + "\",\""+EventSimulatorConstant.COLUMN_TYPE+"\":\"" + payloadAttributeList.get(r).getType() + "\"},"; } } r++; } } jsonFormattedAllInfo = jsonFormattedAllInfo + jsonAttribute + "]}"; return jsonFormattedAllInfo; } @Override public void sendEventsViaDB(JSONObject allInfo, String getPreparedSelectStatement) throws AxisFault{ int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); Thread eventCreator = new Thread(new EventCreationForDB(tenantId, allInfo, getPreparedSelectStatement)); eventCreator.start(); } private class EventCreationForDB implements Runnable { private ResultSet resultSet = null; private int tenantId; private JSONObject allInfo; private DataSource datasource; private JSONArray columnAndAttributeMapping; public EventCreationForDB(int tenantId, JSONObject allInfo, String getPreparedSelectStatement) throws AxisFault { this.tenantId = tenantId; this.allInfo = allInfo; CarbonDataSource carbonDataSource; String dataSourceName; try { dataSourceName = allInfo.getString(EventSimulatorConstant.DATA_SOURCE_NAME); try { carbonDataSource = EventSimulatorValueHolder.getDataSourceService().getDataSource(dataSourceName); datasource = (DataSource) carbonDataSource.getDSObject(); Connection con; Statement stmt; try { con = datasource.getConnection(); stmt = con.createStatement(); resultSet = stmt.executeQuery(getPreparedSelectStatement); } catch 
(SQLException e) { log.error(EventSimulatorConstant.CONNECTION_STRING_NOT_FOUND + dataSourceName, e); throw new AxisFault(EventSimulatorConstant.CONNECTION_STRING_NOT_FOUND + dataSourceName, e); } } catch (DataSourceException e) { log.error(EventSimulatorConstant.DATA_SOURCE_NOT_FOUND_FOR_DATA_SOURCE_NAME + dataSourceName, e); throw new AxisFault(EventSimulatorConstant.DATA_SOURCE_NOT_FOUND_FOR_DATA_SOURCE_NAME + dataSourceName, e); } } catch (JSONException e) { log.error(EventSimulatorConstant.JSON_EXCEPTION, e); throw new AxisFault(EventSimulatorConstant.JSON_EXCEPTION, e); } } @Override public void run(){ try { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantId(this.tenantId, true); columnAndAttributeMapping = allInfo.getJSONArray(EventSimulatorConstant.DATABASE_COLUMNS_AND_STREAM_ATTRIBUTE_INFO); StreamDefinition streamDefinition = getStreamDefinition(allInfo.getString(EventSimulatorConstant.EVENT_STREAM_ID)); List<Attribute> metaAttributeList = streamDefinition.getMetaData(); List<Attribute> correlationAttributeList = streamDefinition.getCorrelationData(); List<Attribute> payloadAttributeList = streamDefinition.getPayloadData(); //columnAndStreamAttributeNames[0] includes column names //columnAndStreamAttributeNames[1] includes mapping attribute names for columns while (resultSet.next()) { Event event = new Event(); event.setStreamDefinition(streamDefinition); String[] attributeValues = new String[columnAndAttributeMapping.length()]; int noOfAttributes = 0; int j=0; String columnName = columnAndAttributeMapping.getJSONObject(j).getString(EventSimulatorConstant.COLUMN_NAME); if(metaAttributeList != null){ for (int i = 0; i < metaAttributeList.size(); i++) { if (metaAttributeList.get(i).getType() == AttributeType.INT) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getInt(columnName)); } else if (metaAttributeList.get(i).getType() == AttributeType.LONG) { attributeValues[noOfAttributes] = 
String.valueOf(resultSet.getLong(columnName)); } else if (metaAttributeList.get(i).getType() == AttributeType.FLOAT) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getFloat(columnName)); } else if (metaAttributeList.get(i).getType() == AttributeType.DOUBLE) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getDouble(columnName)); } else if (metaAttributeList.get(i).getType() == AttributeType.STRING) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getString(columnName)); } else if (metaAttributeList.get(i).getType() == AttributeType.BOOL) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getBoolean(columnName)); } if(j<columnAndAttributeMapping.length()-1){ noOfAttributes++; j++; columnName = columnAndAttributeMapping.getJSONObject(j).getString(EventSimulatorConstant.COLUMN_NAME); } } } if(correlationAttributeList!=null){//noOfAttributes-metaAttributeList.size()<correlationAttributeList.size()){ for (int i = 0; i< correlationAttributeList.size(); i++) { if (correlationAttributeList.get(i).getType() == AttributeType.INT) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getInt(columnName)); } else if (correlationAttributeList.get(i).getType() == AttributeType.LONG) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getLong(columnName)); } else if (correlationAttributeList.get(i).getType() == AttributeType.FLOAT) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getFloat(columnName)); } else if (correlationAttributeList.get(i).getType() == AttributeType.DOUBLE) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getDouble(columnName)); } else if (correlationAttributeList.get(i).getType() == AttributeType.STRING) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getString(columnName)); } else if (correlationAttributeList.get(i).getType() == AttributeType.BOOL) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getBoolean(columnName)); } 
if(j<columnAndAttributeMapping.length()-1){ noOfAttributes++; j++; columnName = columnAndAttributeMapping.getJSONObject(j).getString(EventSimulatorConstant.COLUMN_NAME); } } } if(payloadAttributeList != null){ //noOfAttributes -metaAttributeList.size()-correlationAttributeList.size()<payloadAttributeList.size()){ for (int i = 0; i< payloadAttributeList.size(); i++) { if (payloadAttributeList.get(i).getType() == AttributeType.INT) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getInt(columnName)); } else if (payloadAttributeList.get(i).getType() == AttributeType.LONG) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getLong(columnName)); } else if (payloadAttributeList.get(i).getType() == AttributeType.FLOAT) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getFloat(columnName)); } else if (payloadAttributeList.get(i).getType() == AttributeType.DOUBLE) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getDouble(columnName)); } else if (payloadAttributeList.get(i).getType() == AttributeType.STRING) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getString(columnName)); } else if (payloadAttributeList.get(i).getType() == AttributeType.BOOL) { attributeValues[noOfAttributes] = String.valueOf(resultSet.getBoolean(columnName)); } if(j<columnAndAttributeMapping.length()-1){ noOfAttributes++; j++; columnName = columnAndAttributeMapping.getJSONObject(j).getString(EventSimulatorConstant.COLUMN_NAME); } } } event.setAttributeValues(attributeValues); sendEvent(event); } } catch (SQLException e) { log.error("database exception occurred: "+ e.getMessage(), e); } catch (JSONException e) { log.error(EventSimulatorConstant.JSON_EXCEPTION, e); //throw new AxisFault(EventSimulatorConstant.JSON_EXCEPTION, e); } catch (AxisFault axisFault) { log.error(axisFault.getMessage(), axisFault); } } } }
package com.planet_ink.coffee_mud.Abilities.Prayers; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.DatabaseEngine; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2000-2010 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/
@SuppressWarnings("unchecked")
/**
 * Resurrection prayer. Normally targets an unworn {@link DeadBody} item in the
 * caster's room; as a special case, a sysop casting with no target can list and
 * restore objects serialized under the "HEAVEN" player-data key in the database.
 */
public class Prayer_Resurrect extends Prayer implements MendingSkill
{
	public String ID() { return "Prayer_Resurrect"; }
	public String name(){ return "Resurrect";}
	public int classificationCode(){return Ability.ACODE_PRAYER|Ability.DOMAIN_DEATHLORE;}
	public int abstractQuality(){ return Ability.QUALITY_INDIFFERENT;}
	public long flags(){return Ability.FLAG_HOLY;}
	protected int canTargetCode(){return Ability.CAN_ITEMS;}

	// This skill "mends" dead bodies (used by the MendingSkill interface).
	public boolean supportsMending(Environmental E) { return (E instanceof DeadBody); }

	/**
	 * Attempts the resurrection.
	 *
	 * @param mob the caster
	 * @param commands remaining command words ("list" or a name, for the sysop path)
	 * @param givenTarget explicit target, if invoked programmatically
	 * @param auto true when cast automatically (no fizzle message variant)
	 * @param asLevel casting level override
	 * @return true if the resurrection succeeded
	 */
	public boolean invoke(MOB mob, Vector commands, Environmental givenTarget, boolean auto, int asLevel)
	{
		Environmental body=null;
		body=getTarget(mob,mob.location(),givenTarget,commands,Item.WORNREQ_UNWORNONLY);
		// nonPlayerData is non-null when the target comes from HEAVEN database records
		// rather than a corpse in the room.
		DatabaseEngine.PlayerData nonPlayerData=null;
		boolean playerCorpse=false;
		// Sysop path: no target in the room — browse everything stored under "HEAVEN".
		if((body==null)&&(CMSecurity.isASysOp(mob)))
		{
			Vector V=CMLib.database().DBReadData("HEAVEN");
			Vector allObjs=new Vector();
			Vector allDataPs=new Vector();
			// Deserialize every HEAVEN record; keep record and object in parallel vectors.
			if((V!=null)&&(V.size()>0))
			for(int v=0;v<V.size();v++)
			{
				DatabaseEngine.PlayerData dataP=(DatabaseEngine.PlayerData)V.elementAt(v);
				String data=dataP.xml;
				Environmental obj=parseHeavenlyData(data);
				if(obj!=null)
				{
					allDataPs.addElement(dataP);
					allObjs.addElement(obj);
				}
			}
			if(allObjs.size()==0) return false;
			String name=CMParms.combine(commands,0);
			// "resurrect list" prints a table of restorable entries and aborts the cast.
			if(name.equalsIgnoreCase("list"))
			{
				mob.tell("^x"+CMStrings.padRight("Guardian",15)
						+CMStrings.padRight("Child name",45)
						+CMStrings.padRight("Birth date",16)+"^?");
				for(int i=0;i<allObjs.size();i++)
				{
					body=(Environmental)allObjs.elementAt(i);
					Ability age=body.fetchEffect("Age");
					mob.tell(CMStrings.padRight(((DatabaseEngine.PlayerData)allDataPs.elementAt(i)).who,15)
							+CMStrings.padRight(body.name(),45)
							+CMStrings.padRight(((age==null)?"":CMLib.time().date2String(CMath.s_long(age.text()))),16)
							+"\n\r"+CMStrings.padRight("",15)+body.description());
				}
				return false;
			}
			// Exact-name match first, then a looser match.
			Environmental E=CMLib.english().fetchEnvironmental(allObjs,name,true);
			if(E==null) E=CMLib.english().fetchEnvironmental(allObjs,name,false);
			if(E==null) return false;
			for(int i=0;i<allObjs.size();i++)
				if(allObjs.elementAt(i)==E)
				{
					nonPlayerData=(DatabaseEngine.PlayerData)allDataPs.elementAt(i);
					body=E;
					break;
				}
		}
		// Room-corpse path: validate the target found by getTarget().
		if(nonPlayerData==null)
		{
			if(body==null) return false;
			if((!(body instanceof DeadBody))
			||(((DeadBody)body).mobName().length()==0))
			{
				mob.tell("You can't resurrect that.");
				return false;
			}
			playerCorpse=((DeadBody)body).playerCorpse();
			if(!playerCorpse)
			{
				// Non-player corpses are only resurrectable when an "Age" effect holds a
				// birth timestamp and a present (non-monster) loved one has a matching
				// HEAVEN record keyed by that timestamp.
				Ability AGE=body.fetchEffect("Age");
				if((AGE!=null)&&(CMath.isLong(AGE.text()))&&(CMath.s_long(AGE.text())>Short.MAX_VALUE))
				{
					MOB M=null;
					for(int i=0;i<mob.location().numInhabitants();i++)
					{
						M=mob.location().fetchInhabitant(i);
						if((M!=null)&&(!M.isMonster()))
						{
							Vector V=CMLib.database().DBReadData(M.Name(),"HEAVEN",M.Name()+"/HEAVEN/"+AGE.text());
							if((V!=null)&&(V.size()>0))
							{
								nonPlayerData=(DatabaseEngine.PlayerData)V.firstElement();
								break;
							}
						}
					}
					if(nonPlayerData==null)
					{
						mob.tell("You can't seem to focus on "+body.Name()+"'s spirit. Perhaps if loved ones were here?");
						return false;
					}
				}
				else
				{
					mob.tell("You can't resurrect "+((DeadBody)body).charStats().himher()+".");
					return false;
				}
			}
		}
		// Standard prayer bookkeeping (mana, position, etc.).
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false;

		boolean success=proficiencyCheck(mob,0,auto);

		if(success)
		{
			// it worked, so build a copy of this ability,
			// and add it to the affects list of the
			// affected MOB.  Then tell everyone else
			// what happened.
			CMMsg msg=CMClass.getMsg(mob,body,this,verbalCastCode(mob,body,auto),auto?"<T-NAME> is resurrected!":"^S<S-NAME> resurrect(s) <T-NAMESELF>!^?");
			if(mob.location().okMessage(mob,msg))
			{
				invoker=mob;
				mob.location().send(mob,msg);
				if(playerCorpse)
					// Player corpses are restored through the standard utility
					// (which also handles the XP cost).
					success = CMLib.utensils().resurrect(mob,mob.location(), (DeadBody)body, super.getXPCOSTLevel(mob));
				else
				if((nonPlayerData!=null) && (body != null))
				{
					// HEAVEN-record path: re-deserialize and bring the object/MOB back.
					String data=nonPlayerData.xml;
					Environmental object=parseHeavenlyData(data);
					if(object==null)
						mob.location().show(mob,body,CMMsg.MSG_OK_VISUAL,"<T-NAME> twitch(es) for a moment, but the spirit is too far gone.");
					else
					if(object instanceof Item)
					{
						body.destroy();
						mob.location().showHappens(CMMsg.MSG_OK_VISUAL,object.Name()+" comes back to life!");
						mob.location().addItem((Item)object);
					}
					else
					{
						MOB rejuvedMOB=(MOB)object;
						rejuvedMOB.recoverCharStats();
						rejuvedMOB.recoverMaxState();
						body.delEffect(body.fetchEffect("Age")); // so misskids doesn't record it
						body.destroy();
						rejuvedMOB.bringToLife(mob.location(),true);
						rejuvedMOB.location().show(rejuvedMOB,null,CMMsg.MSG_NOISYMOVEMENT,"<S-NAME> get(s) up!");
					}
					mob.location().recoverRoomStats();
				}
			}
		}
		else
			beneficialWordsFizzle(mob,body,auto?"":"<S-NAME> attempt(s) to resurrect <T-NAMESELF>, but nothing happens.");

		// return whether it worked
		return success;
	}

	/**
	 * Deserializes a HEAVEN data record. The expected format appears to be
	 * "classID/ability/miscText" — the first two slash-delimited fields are the
	 * CMClass id and the ability score, the remainder is the object's misc text
	 * (assumption based on the parsing below — TODO confirm against the writer).
	 *
	 * @param data the raw record text
	 * @return the reconstructed Item or MOB, or null if the class id is unknown
	 */
	public Environmental parseHeavenlyData(String data)
	{
		String classID=null;
		int ability=0;
		int x=data.indexOf("/");
		if(x>=0)
		{
			classID=data.substring(0,x);
			data=data.substring(x+1);
		}
		x=data.indexOf("/");
		if(x>=0)
		{
			ability=CMath.s_int(data.substring(0,x));
			data=data.substring(x+1);
		}
		// Try the id as an Item first, then as a MOB.
		Environmental object=CMClass.getItem(classID);
		if(object==null) object=CMClass.getMOB(classID);
		if(object==null) return null;
		object.setMiscText(data);
		object.baseEnvStats().setAbility(ability);
		object.recoverEnvStats();
		return object;
	}
}
/* * Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package java.net; import java.io.IOException; import java.io.FileDescriptor; import sun.misc.SharedSecrets; import sun.misc.JavaIOFileDescriptorAccess; /** * This class defines the plain DatagramSocketImpl that is used on * Windows platforms greater than or equal to Windows Vista. These * platforms have a dual layer TCP/IP stack and can handle both IPv4 * and IPV6 through a single file descriptor. * <p> * Note: Multicasting on a dual layer TCP/IP stack is always done with * TwoStacksPlainDatagramSocketImpl. This is to overcome the lack * of behavior defined for multicasting over a dual layer socket by the RFC. 
*
 * @author Chris Hegarty
 */

class DualStackPlainDatagramSocketImpl extends AbstractPlainDatagramSocketImpl
{
    // Accessor for the int fd stored inside java.io.FileDescriptor, shared via
    // SharedSecrets so no reflection is needed.
    static JavaIOFileDescriptorAccess fdAccess = SharedSecrets.getJavaIOFileDescriptorAccess();

    /** Creates the native dual-stack (v4+v6) socket and stores its fd. */
    protected void datagramSocketCreate() throws SocketException {
        if (fd == null)
            throw new SocketException("Socket closed");

        int newfd = socketCreate(false /* v6Only */);

        fdAccess.set(fd, newfd);
    }

    /** Binds to the given local address/port; a port of 0 asks the OS to pick one. */
    protected synchronized void bind0(int lport, InetAddress laddr) throws SocketException {
        int nativefd = checkAndReturnNativeFD();

        if (laddr == null)
            throw new NullPointerException("argument address");

        socketBind(nativefd, laddr, lport);
        if (lport == 0) {
            // Ephemeral port was assigned by the OS; read it back.
            localPort = socketLocalPort(nativefd);
        } else {
            localPort = lport;
        }
    }

    /**
     * Peeks at the next datagram and returns the sender's port.
     * Implemented on top of peekData() with a 1-byte throwaway packet.
     */
    protected synchronized int peek(InetAddress address) throws IOException {
        int nativefd = checkAndReturnNativeFD();  // only used as a closed-socket check here

        if (address == null)
            throw new NullPointerException("Null address in peek()");

        // Use peekData()
        DatagramPacket peekPacket = new DatagramPacket(new byte[1], 1);
        int peekPort = peekData(peekPacket);
        // NOTE(review): this reassigns only the local parameter; the caller's
        // InetAddress is not updated — confirm callers don't rely on it being filled in.
        address = peekPacket.getAddress();
        return peekPort;
    }

    /** Peeks at the next datagram into p without consuming it; returns the sender's port. */
    protected synchronized int peekData(DatagramPacket p) throws IOException {
        int nativefd = checkAndReturnNativeFD();

        if (p == null)
            throw new NullPointerException("packet");
        if (p.getData() == null)
            throw new NullPointerException("packet buffer");

        return socketReceiveOrPeekData(nativefd, p, timeout, connected, true /*peek*/);
    }

    /** Receives the next datagram into p (blocking, subject to the socket timeout). */
    protected synchronized void receive0(DatagramPacket p) throws IOException {
        int nativefd = checkAndReturnNativeFD();

        if (p == null)
            throw new NullPointerException("packet");
        if (p.getData() == null)
            throw new NullPointerException("packet buffer");

        socketReceiveOrPeekData(nativefd, p, timeout, connected, false /*receive*/);
    }

    /** Sends the datagram p to its destination address/port. */
    protected void send(DatagramPacket p) throws IOException {
        int nativefd = checkAndReturnNativeFD();

        if (p == null)
            throw new NullPointerException("null packet");

        if (p.getAddress() == null ||p.getData() ==null)
            throw new NullPointerException("null address || null buffer");

        socketSend(nativefd, p.getData(), p.getOffset(), p.getLength(),
                   p.getAddress(), p.getPort(), connected);
    }

    /** Connects the socket to a fixed remote address/port at the native level. */
    protected void connect0(InetAddress address, int port) throws SocketException {
        int nativefd = checkAndReturnNativeFD();

        if (address == null)
            throw new NullPointerException("address");

        socketConnect(nativefd, address, port);
    }

    /** Disconnects the native socket; a no-op if the fd is already invalid. */
    protected void disconnect0(int family /*unused*/) {
        if (fd == null || !fd.valid())
            return;   // disconnect doesn't throw any exceptions

        socketDisconnect(fdAccess.get(fd));
    }

    /** Closes the native socket and marks the fd invalid (-1). */
    protected void datagramSocketClose() {
        if (fd == null || !fd.valid())
            return;   // close doesn't throw any exceptions

        socketClose(fdAccess.get(fd));
        fdAccess.set(fd, -1);
    }

    /**
     * Sets a socket option; integer-valued and boolean-valued options are
     * normalized to an int before the native call.
     */
    protected void socketSetOption(int opt, Object val) throws SocketException {
        int nativefd = checkAndReturnNativeFD();

        int optionValue = 0;

        switch(opt) {
            case IP_TOS :
            case SO_RCVBUF :
            case SO_SNDBUF :
                optionValue = ((Integer)val).intValue();
                break;
            case SO_REUSEADDR :
            case SO_BROADCAST :
                optionValue = ((Boolean)val).booleanValue() ? 1 : 0;
                break;
            default: /* shouldn't get here */
                throw new SocketException("Option not supported");
        }

        socketSetIntOption(nativefd, opt, optionValue);
    }

    /**
     * Gets a socket option. SO_BINDADDR is answered locally (it is not a real
     * socket option); everything else is fetched as an int and boxed.
     */
    protected Object socketGetOption(int opt) throws SocketException {
        int nativefd = checkAndReturnNativeFD();

        // SO_BINDADDR is not a socket option.
        if (opt == SO_BINDADDR) {
            return socketLocalAddress(nativefd);
        }

        int value = socketGetIntOption(nativefd, opt);
        Object returnValue = null;

        switch (opt) {
            case SO_REUSEADDR :
            case SO_BROADCAST :
                returnValue =  (value == 0) ? Boolean.FALSE : Boolean.TRUE;
                break;
            case IP_TOS :
            case SO_RCVBUF :
            case SO_SNDBUF :
                returnValue = new Integer(value);
                break;
            default: /* shouldn't get here */
                throw new SocketException("Option not supported");
        }

        return returnValue;
    }

    /* Multicast specific methods.
     * Multicasting on a dual layer TCP/IP stack is always done with
     * TwoStacksPlainDatagramSocketImpl. This is to overcome the lack
     * of behavior defined for multicasting over a dual layer socket by the RFC.
     */
    protected void join(InetAddress inetaddr, NetworkInterface netIf) throws IOException {
        throw new IOException("Method not implemented!");
    }

    protected void leave(InetAddress inetaddr, NetworkInterface netIf) throws IOException {
        throw new IOException("Method not implemented!");
    }

    protected void setTimeToLive(int ttl) throws IOException {
        throw new IOException("Method not implemented!");
    }

    protected int getTimeToLive() throws IOException {
        throw new IOException("Method not implemented!");
    }

    protected void setTTL(byte ttl) throws IOException {
        throw new IOException("Method not implemented!");
    }

    protected byte getTTL() throws IOException {
        throw new IOException("Method not implemented!");
    }
    /* END Multicast specific methods */

    /** Returns the native fd as an int, or throws if the socket is closed. */
    private int checkAndReturnNativeFD() throws SocketException {
        if (fd == null || !fd.valid())
            throw new SocketException("Socket closed");

        return fdAccess.get(fd);
    }

    /* Native methods */
    // initIDs is presumably invoked from a static initializer elsewhere / via JNI
    // registration — it is not called anywhere in this class (NOTE: confirm).

    private static native void initIDs();

    private static native int socketCreate(boolean v6Only);

    private static native void socketBind(int fd, InetAddress localAddress, int localport) throws SocketException;

    private static native void socketConnect(int fd, InetAddress address, int port) throws SocketException;

    private static native void socketDisconnect(int fd);

    private static native void socketClose(int fd);

    private static native int socketLocalPort(int fd) throws SocketException;

    private static native Object socketLocalAddress(int fd) throws SocketException;

    private static native int socketReceiveOrPeekData(int fd, DatagramPacket packet, int timeout, boolean connected, boolean peek) throws IOException;

    private static native void socketSend(int fd, byte[] data, int offset, int length, InetAddress address, int port, boolean connected) throws IOException;

    private static native void socketSetIntOption(int fd, int cmd, int optionValue) throws SocketException;

    private static native int socketGetIntOption(int fd, int cmd) throws SocketException;
}
/* * Copyright 2009-2010 WSO2, Inc. (http://wso2.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.developerstudio.eclipse.esb.mediators.impl; import java.util.HashMap; import java.util.List; import java.util.Map; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.w3c.dom.Element; import org.wso2.developerstudio.eclipse.esb.NamespacedProperty; import org.wso2.developerstudio.eclipse.esb.RegistryKeyProperty; import org.wso2.developerstudio.eclipse.esb.core.utils.ESBMediaTypeConstants; import org.wso2.developerstudio.eclipse.esb.impl.MediatorImpl; import org.wso2.developerstudio.eclipse.esb.mediators.EnrichMediator; import org.wso2.developerstudio.eclipse.esb.mediators.EnrichSourceInlineType; import org.wso2.developerstudio.eclipse.esb.mediators.EnrichSourceType; import org.wso2.developerstudio.eclipse.esb.mediators.EnrichTargetAction; import org.wso2.developerstudio.eclipse.esb.mediators.EnrichTargetType; import org.wso2.developerstudio.eclipse.esb.mediators.MediatorsPackage; import org.wso2.developerstudio.eclipse.esb.util.EsbUtils; import org.wso2.developerstudio.eclipse.esb.util.ObjectValidator; import org.wso2.developerstudio.eclipse.platform.core.utils.CSProviderConstants; import 
org.wso2.developerstudio.eclipse.platform.core.utils.DeveloperStudioProviderUtils; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Enrich Mediator</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * <ul> * <li>{@link org.wso2.developerstudio.eclipse.esb.mediators.impl.EnrichMediatorImpl#isCloneSource <em>Clone Source</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.esb.mediators.impl.EnrichMediatorImpl#getSourceType <em>Source Type</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.esb.mediators.impl.EnrichMediatorImpl#getSourceXpath <em>Source Xpath</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.esb.mediators.impl.EnrichMediatorImpl#getSourceProperty <em>Source Property</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.esb.mediators.impl.EnrichMediatorImpl#getSourceXML <em>Source XML</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.esb.mediators.impl.EnrichMediatorImpl#getTargetAction <em>Target Action</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.esb.mediators.impl.EnrichMediatorImpl#getTargetType <em>Target Type</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.esb.mediators.impl.EnrichMediatorImpl#getTargetXpath <em>Target Xpath</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.esb.mediators.impl.EnrichMediatorImpl#getTargetProperty <em>Target Property</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.esb.mediators.impl.EnrichMediatorImpl#getInlineType <em>Inline Type</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.esb.mediators.impl.EnrichMediatorImpl#getInlineRegistryKey <em>Inline Registry Key</em>}</li> * </ul> * </p> * * @generated */ public class EnrichMediatorImpl extends MediatorImpl implements EnrichMediator { /** * The default value of the '{@link #isCloneSource() <em>Clone Source</em>}' attribute. 
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @see #isCloneSource()
 * @generated
 * @ordered
 */
protected static final boolean CLONE_SOURCE_EDEFAULT = false;

/**
 * The cached value of the '{@link #isCloneSource() <em>Clone Source</em>}' attribute.
 * @see #isCloneSource()
 * @generated
 * @ordered
 */
protected boolean cloneSource = CLONE_SOURCE_EDEFAULT;

/**
 * The default value of the '{@link #getSourceType() <em>Source Type</em>}' attribute.
 * @see #getSourceType()
 * @generated
 * @ordered
 */
protected static final EnrichSourceType SOURCE_TYPE_EDEFAULT = EnrichSourceType.CUSTOM;

/**
 * The cached value of the '{@link #getSourceType() <em>Source Type</em>}' attribute.
 * @see #getSourceType()
 * @generated
 * @ordered
 */
protected EnrichSourceType sourceType = SOURCE_TYPE_EDEFAULT;

/**
 * The cached value of the '{@link #getSourceXpath() <em>Source Xpath</em>}' containment reference.
 * @see #getSourceXpath()
 * @generated
 * @ordered
 */
protected NamespacedProperty sourceXpath;

/**
 * The default value of the '{@link #getSourceProperty() <em>Source Property</em>}' attribute.
 * @see #getSourceProperty()
 * @generated
 * @ordered
 */
protected static final String SOURCE_PROPERTY_EDEFAULT = "source_property";

/**
 * The cached value of the '{@link #getSourceProperty() <em>Source Property</em>}' attribute.
 * @see #getSourceProperty()
 * @generated
 * @ordered
 */
protected String sourceProperty = SOURCE_PROPERTY_EDEFAULT;

/**
 * The default value of the '{@link #getSourceXML() <em>Source XML</em>}' attribute.
 * @see #getSourceXML()
 * @generated
 * @ordered
 */
protected static final String SOURCE_XML_EDEFAULT = "<inline/>";

/**
 * The cached value of the '{@link #getSourceXML() <em>Source XML</em>}' attribute.
 * @see #getSourceXML()
 * @generated
 * @ordered
 */
protected String sourceXML = SOURCE_XML_EDEFAULT;

/**
 * The default value of the '{@link #getTargetAction() <em>Target Action</em>}' attribute.
 * @see #getTargetAction()
 * @generated
 * @ordered
 */
protected static final EnrichTargetAction TARGET_ACTION_EDEFAULT = EnrichTargetAction.REPLACE;

/**
 * The cached value of the '{@link #getTargetAction() <em>Target Action</em>}' attribute.
 * @see #getTargetAction()
 * @generated
 * @ordered
 */
protected EnrichTargetAction targetAction = TARGET_ACTION_EDEFAULT;

/**
 * The default value of the '{@link #getTargetType() <em>Target Type</em>}' attribute.
 * @see #getTargetType()
 * @generated
 * @ordered
 */
protected static final EnrichTargetType TARGET_TYPE_EDEFAULT = EnrichTargetType.CUSTOM;

/**
 * The cached value of the '{@link #getTargetType() <em>Target Type</em>}' attribute.
 * @see #getTargetType()
 * @generated
 * @ordered
 */
protected EnrichTargetType targetType = TARGET_TYPE_EDEFAULT;

/**
 * The cached value of the '{@link #getTargetXpath() <em>Target Xpath</em>}' containment reference.
 * @see #getTargetXpath()
 * @generated
 * @ordered
 */
protected NamespacedProperty targetXpath;

/**
 * The default value of the '{@link #getTargetProperty() <em>Target Property</em>}' attribute.
 * @see #getTargetProperty()
 * @generated
 * @ordered
 */
protected static final String TARGET_PROPERTY_EDEFAULT = "target_property";

/**
 * The cached value of the '{@link #getTargetProperty() <em>Target Property</em>}' attribute.
 * @see #getTargetProperty()
 * @generated
 * @ordered
 */
protected String targetProperty = TARGET_PROPERTY_EDEFAULT;

/**
 * The default value of the '{@link #getInlineType() <em>Inline Type</em>}' attribute.
 * @see #getInlineType()
 * @generated
 * @ordered
 */
protected static final EnrichSourceInlineType INLINE_TYPE_EDEFAULT = EnrichSourceInlineType.CONTENT;

/**
 * The cached value of the '{@link #getInlineType() <em>Inline Type</em>}' attribute.
 * @see #getInlineType()
 * @generated
 * @ordered
 */
protected EnrichSourceInlineType inlineType = INLINE_TYPE_EDEFAULT;

/**
 * The cached value of the '{@link #getInlineRegistryKey() <em>Inline Registry Key</em>}' reference.
 * @see #getInlineRegistryKey()
 * @generated
 * @ordered
 */
protected RegistryKeyProperty inlineRegistryKey;

/**
 * Creates the Enrich mediator model object and initialises the defaults the
 * generated field initialisers cannot express: the source/target XPath
 * properties and the inline registry key.
 */
protected EnrichMediatorImpl() {
    super();

    // Source xpath.
    // NOTE: this local intentionally shadows the generated field; the value is
    // installed through the setter so change notification fires.
    NamespacedProperty sourceXpath = getEsbFactory().createNamespacedProperty();
    sourceXpath.setPrettyName("Source XPath");
    sourceXpath.setPropertyName("xpath");
    sourceXpath.setPropertyValue(DEFAULT_XPATH_PROPERTY_VALUE);
    setSourceXpath(sourceXpath);

    // Target xpath.
    NamespacedProperty targetXpath = getEsbFactory().createNamespacedProperty();
    targetXpath.setPrettyName("Target XPath");
    targetXpath.setPropertyName("xpath");
    targetXpath.setPropertyValue(DEFAULT_XPATH_PROPERTY_VALUE);
    setTargetXpath(targetXpath);

    // Inline Registry Key: defaults to a sequence reference; the filter limits
    // the registry browser to sequence artifacts.
    inlineRegistryKey = getEsbFactory().createRegistryKeyProperty();
    DeveloperStudioProviderUtils.addFilter(
            (Map<String, List<String>>) inlineRegistryKey.getFilters(),
            CSProviderConstants.FILTER_MEDIA_TYPE,
            ESBMediaTypeConstants.MEDIA_TYPE_SEQUENCE);
    inlineRegistryKey.setPrettyName("InlineKey");
    inlineRegistryKey.setKeyName("key");
    inlineRegistryKey.setKeyValue(DEFAULT_SEQUENCE_REFERENCE_REGISTRY_KEY);
    setInlineRegistryKey(inlineRegistryKey);
}

/**
 * {@inheritDoc}
 *
 * Deserialises this mediator from its {@code <enrich>} DOM element: reads the
 * mandatory {@code <source>} and {@code <target>} children and populates only
 * the model features relevant to the configured source/target types.
 *
 * @throws Exception if the mandatory source or target child element is missing
 */
protected void doLoad(Element self) throws Exception {
    Element sourceElem = getChildElement(self, "source");
    if (null == sourceElem) {
        throw new Exception("Expected source element.");
    }

    // Clone: absent attribute means "false" (the field default).
    if (sourceElem.hasAttribute("clone")) {
        if (sourceElem.getAttribute("clone").equals("true")) {
            setCloneSource(true);
        }
    }

    // Source type: unknown/missing literal leaves the default (CUSTOM).
    EnrichSourceType sourceType = EnrichSourceType.get(sourceElem.getAttribute("type"));
    if (null != sourceType) {
        setSourceType(EnrichSourceType.get(sourceElem.getAttribute("type")));
    }

    // Source XPath.
    if (getSourceType().equals(EnrichSourceType.CUSTOM)) {
        getSourceXpath().load(sourceElem);
    }

    // Source property.
    if (getSourceType().equals(EnrichSourceType.PROPERTY)) {
        setSourceProperty(sourceElem.getAttribute("property"));
    }

    // Source inline XML & Registry Key.
    if (getSourceType().equals(EnrichSourceType.INLINE)) {
        List<Element> children = getChildElements(sourceElem);
        if (!children.isEmpty()) {
            setInlineType(EnrichSourceInlineType.CONTENT);
            // Question: Do we have to consider all the child elements?
            setSourceXML(EsbUtils.renderElement(children.get(0), true));
        }
        // A "key" attribute wins over inline content.
        if (sourceElem.hasAttribute("key")) {
            setInlineType(EnrichSourceInlineType.KEY);
            String inlineKey = sourceElem.getAttribute("key");
            if (null != inlineKey) {
                getInlineRegistryKey().setKeyValue(inlineKey);
            }
            else {
                getInlineRegistryKey().setKeyValue("");
            }
        }
    }

    Element targetElem = getChildElement(self, "target");
    if (null == targetElem) {
        throw new Exception("Target element expected.");
    }

    // Target action.
    EnrichTargetAction targetAction = EnrichTargetAction.get(targetElem.getAttribute("action"));
    if (null != targetAction) {
        setTargetAction(targetAction);
    }

    // Target type.
    EnrichTargetType targetType = EnrichTargetType.get(targetElem.getAttribute("type"));
    if (null != targetType) {
        setTargetType(targetType);
    }

    // Target XPath.
    if (getTargetType().equals(EnrichTargetType.CUSTOM)) {
        getTargetXpath().load(targetElem);
    }

    // Target property.
    if (getTargetType().equals(EnrichTargetType.PROPERTY)) {
        setTargetProperty(targetElem.getAttribute("property"));
    }

    super.doLoad(self);
}

/**
 * {@inheritDoc}
 *
 * Serialises this mediator to an {@code <enrich>} element under {@code parent};
 * the layout depends on the current ESB runtime version.
 */
protected Element doSave(Element parent) throws Exception {
    Element self = createChildElement(parent, "enrich");

    Element sourceElem = null;
    Element target = null;
    switch (getCurrentEsbVersion()) {
    case ESB301:
        sourceElem = createChildElement(self, "source");

        // Clone source.
        if (isCloneSource()) {
            sourceElem.setAttribute("clone", "true");
        }
        else {
            sourceElem.setAttribute("clone", "false");
        }

        // Source type.
        sourceElem.setAttribute("type", getSourceType().getLiteral());

        // Source XPath.
        if (getSourceType().equals(EnrichSourceType.CUSTOM)) {
            getSourceXpath().save(sourceElem);
        }

        // Source property.
        if (getSourceType().equals(EnrichSourceType.PROPERTY)) {
            sourceElem.setAttribute("property", getSourceProperty());
        }

        // Source inline XML & Registry Key.
        if (getSourceType().equals(EnrichSourceType.INLINE)) {
            Element inlineElem = null;
            try {
                inlineElem = EsbUtils.parseElement(getSourceXML());
                inlineElem = (Element) sourceElem.getOwnerDocument()
                        .importNode(inlineElem, true);
                sourceElem.appendChild(inlineElem);
            } catch (Exception ex) {
                // TODO: Log the error.
                // This is a workaround: if the stored XML does not parse, emit an
                // empty <inline/> element and reset the model to the default XML.
                inlineElem = createChildElement(sourceElem, "inline");
                setSourceXML(SOURCE_XML_EDEFAULT);
            }
        }

        target = createChildElement(self, "target");

        // Target action.
        target.setAttribute("action", getTargetAction().getLiteral());

        // Target type.
        target.setAttribute("type", getTargetType().getLiteral());

        // Target XPath.
        if (getTargetType().equals(EnrichTargetType.CUSTOM)) {
            getTargetXpath().save(target);
        }

        // Target property.
        if (getTargetType().equals(EnrichTargetType.PROPERTY)) {
            target.setAttribute("property", getTargetProperty());
        }
        break;
    case ESB400:
        sourceElem = createChildElement(self, "source");

        // Clone source.
        if (isCloneSource()) {
            sourceElem.setAttribute("clone", "true");
        }
        else {
            sourceElem.setAttribute("clone", "false");
        }

        // Source type.
        sourceElem.setAttribute("type", getSourceType().getLiteral());

        // Source XPath.
        if (getSourceType().equals(EnrichSourceType.CUSTOM)) {
            getSourceXpath().save(sourceElem);
        }

        // Source property.
        if (getSourceType().equals(EnrichSourceType.PROPERTY)) {
            sourceElem.setAttribute("property", getSourceProperty());
        }

        // Source inline XML & Registry Key.
        // ESB 4.0.0 distinguishes inline content from a registry-key reference.
        if (getSourceType().equals(EnrichSourceType.INLINE)) {
            switch (getInlineType()) {
            case CONTENT:
                Element inlineElem = null;
                try {
                    inlineElem = EsbUtils.parseElement(getSourceXML());
                    inlineElem = (Element) sourceElem.getOwnerDocument()
                            .importNode(inlineElem, true);
                    sourceElem.appendChild(inlineElem);
                } catch (Exception ex) {
                    // TODO: Log the error.
                    // This is a workaround.
                    inlineElem = createChildElement(sourceElem, "inline");
                    setSourceXML(SOURCE_XML_EDEFAULT);
                }
                break;
            case KEY:
                getInlineRegistryKey().save(sourceElem);
            }
        }

        target = createChildElement(self, "target");

        // Target action.
        target.setAttribute("action", getTargetAction().getLiteral());

        // Target type.
        target.setAttribute("type", getTargetType().getLiteral());

        // Target XPath.
        if (getTargetType().equals(EnrichTargetType.CUSTOM)) {
            getTargetXpath().save(target);
        }

        // Target property.
        if (getTargetType().equals(EnrichTargetType.PROPERTY)) {
            target.setAttribute("property", getTargetProperty());
        }
        break;
    }
    if (description != null)
        description.save(self);
    addComments(self);
    return self;
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
@Override
protected EClass eStaticClass() {
    return MediatorsPackage.Literals.ENRICH_MEDIATOR;
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public boolean isCloneSource() {
    return cloneSource;
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public void setCloneSource(boolean newCloneSource) {
    boolean oldCloneSource = cloneSource;
    cloneSource = newCloneSource;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, MediatorsPackage.ENRICH_MEDIATOR__CLONE_SOURCE, oldCloneSource, cloneSource));
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public EnrichSourceType getSourceType() {
    return sourceType;
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public void setSourceType(EnrichSourceType newSourceType) {
    EnrichSourceType oldSourceType = sourceType;
    sourceType = newSourceType == null ?
SOURCE_TYPE_EDEFAULT : newSourceType;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, MediatorsPackage.ENRICH_MEDIATOR__SOURCE_TYPE, oldSourceType, sourceType));
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public NamespacedProperty getSourceXpath() {
    return sourceXpath;
}

/**
 * Basic setter for the containment reference; does not handle inverse
 * references itself, it only accumulates notifications on {@code msgs}.
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetSourceXpath(NamespacedProperty newSourceXpath, NotificationChain msgs) {
    NamespacedProperty oldSourceXpath = sourceXpath;
    sourceXpath = newSourceXpath;
    if (eNotificationRequired()) {
        ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, MediatorsPackage.ENRICH_MEDIATOR__SOURCE_XPATH, oldSourceXpath, newSourceXpath);
        if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public void setSourceXpath(NamespacedProperty newSourceXpath) {
    if (newSourceXpath != sourceXpath) {
        NotificationChain msgs = null;
        if (sourceXpath != null)
            msgs = ((InternalEObject)sourceXpath).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - MediatorsPackage.ENRICH_MEDIATOR__SOURCE_XPATH, null, msgs);
        if (newSourceXpath != null)
            msgs = ((InternalEObject)newSourceXpath).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - MediatorsPackage.ENRICH_MEDIATOR__SOURCE_XPATH, null, msgs);
        msgs = basicSetSourceXpath(newSourceXpath, msgs);
        if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, MediatorsPackage.ENRICH_MEDIATOR__SOURCE_XPATH, newSourceXpath, newSourceXpath));
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public String getSourceProperty() {
    return sourceProperty;
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public void setSourceProperty(String newSourceProperty) {
    String oldSourceProperty = sourceProperty;
    sourceProperty = newSourceProperty;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, MediatorsPackage.ENRICH_MEDIATOR__SOURCE_PROPERTY, oldSourceProperty, sourceProperty));
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public String getSourceXML() {
    return sourceXML;
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public void setSourceXML(String newSourceXML) {
    String oldSourceXML = sourceXML;
    sourceXML = newSourceXML;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, MediatorsPackage.ENRICH_MEDIATOR__SOURCE_XML, oldSourceXML, sourceXML));
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public EnrichTargetAction getTargetAction() {
    return targetAction;
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public void setTargetAction(EnrichTargetAction newTargetAction) {
    EnrichTargetAction oldTargetAction = targetAction;
    targetAction = newTargetAction == null ? TARGET_ACTION_EDEFAULT : newTargetAction;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, MediatorsPackage.ENRICH_MEDIATOR__TARGET_ACTION, oldTargetAction, targetAction));
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public EnrichTargetType getTargetType() {
    return targetType;
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public void setTargetType(EnrichTargetType newTargetType) {
    EnrichTargetType oldTargetType = targetType;
    targetType = newTargetType == null ? TARGET_TYPE_EDEFAULT : newTargetType;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, MediatorsPackage.ENRICH_MEDIATOR__TARGET_TYPE, oldTargetType, targetType));
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public NamespacedProperty getTargetXpath() {
    return targetXpath;
}

/**
 * Basic setter for the containment reference; see {@link #basicSetSourceXpath}.
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetTargetXpath(NamespacedProperty newTargetXpath, NotificationChain msgs) {
    NamespacedProperty oldTargetXpath = targetXpath;
    targetXpath = newTargetXpath;
    if (eNotificationRequired()) {
        ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, MediatorsPackage.ENRICH_MEDIATOR__TARGET_XPATH, oldTargetXpath, newTargetXpath);
        if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public void setTargetXpath(NamespacedProperty newTargetXpath) {
    if (newTargetXpath != targetXpath) {
        NotificationChain msgs = null;
        if (targetXpath != null)
            msgs = ((InternalEObject)targetXpath).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - MediatorsPackage.ENRICH_MEDIATOR__TARGET_XPATH, null, msgs);
        if (newTargetXpath != null)
            msgs = ((InternalEObject)newTargetXpath).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - MediatorsPackage.ENRICH_MEDIATOR__TARGET_XPATH, null, msgs);
        msgs = basicSetTargetXpath(newTargetXpath, msgs);
        if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, MediatorsPackage.ENRICH_MEDIATOR__TARGET_XPATH, newTargetXpath, newTargetXpath));
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public String getTargetProperty() {
    return targetProperty;
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public void setTargetProperty(String newTargetProperty) {
    String oldTargetProperty = targetProperty;
    targetProperty = newTargetProperty;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, MediatorsPackage.ENRICH_MEDIATOR__TARGET_PROPERTY, oldTargetProperty, targetProperty));
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public EnrichSourceInlineType getInlineType() {
    return inlineType;
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public void setInlineType(EnrichSourceInlineType newInlineType) {
    EnrichSourceInlineType oldInlineType = inlineType;
    inlineType = newInlineType == null ? INLINE_TYPE_EDEFAULT : newInlineType;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, MediatorsPackage.ENRICH_MEDIATOR__INLINE_TYPE, oldInlineType, inlineType));
}

/**
 * Resolves the (possibly proxied) cross-document reference before returning it.
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public RegistryKeyProperty getInlineRegistryKey() {
    if (inlineRegistryKey != null && inlineRegistryKey.eIsProxy()) {
        InternalEObject oldInlineRegistryKey = (InternalEObject)inlineRegistryKey;
        inlineRegistryKey = (RegistryKeyProperty)eResolveProxy(oldInlineRegistryKey);
        if (inlineRegistryKey != oldInlineRegistryKey) {
            if (eNotificationRequired())
                eNotify(new ENotificationImpl(this, Notification.RESOLVE, MediatorsPackage.ENRICH_MEDIATOR__INLINE_REGISTRY_KEY, oldInlineRegistryKey, inlineRegistryKey));
        }
    }
    return inlineRegistryKey;
}

/**
 * Returns the raw reference without proxy resolution.
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public RegistryKeyProperty basicGetInlineRegistryKey() {
    return inlineRegistryKey;
}

/**
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
public void setInlineRegistryKey(RegistryKeyProperty newInlineRegistryKey) {
    RegistryKeyProperty oldInlineRegistryKey = inlineRegistryKey;
    inlineRegistryKey = newInlineRegistryKey;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, MediatorsPackage.ENRICH_MEDIATOR__INLINE_REGISTRY_KEY, oldInlineRegistryKey, inlineRegistryKey));
}

/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc
 -->
 * @generated
 */
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
    switch (featureID) {
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_XPATH:
        return basicSetSourceXpath(null, msgs);
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_XPATH:
        return basicSetTargetXpath(null, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
}

/**
 * Reflective feature getter used by the EMF framework.
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
    switch (featureID) {
    case MediatorsPackage.ENRICH_MEDIATOR__CLONE_SOURCE:
        return isCloneSource();
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_TYPE:
        return getSourceType();
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_XPATH:
        return getSourceXpath();
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_PROPERTY:
        return getSourceProperty();
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_XML:
        return getSourceXML();
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_ACTION:
        return getTargetAction();
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_TYPE:
        return getTargetType();
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_XPATH:
        return getTargetXpath();
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_PROPERTY:
        return getTargetProperty();
    case MediatorsPackage.ENRICH_MEDIATOR__INLINE_TYPE:
        return getInlineType();
    case MediatorsPackage.ENRICH_MEDIATOR__INLINE_REGISTRY_KEY:
        if (resolve) return getInlineRegistryKey();
        return basicGetInlineRegistryKey();
    }
    return super.eGet(featureID, resolve, coreType);
}

/**
 * Reflective feature setter used by the EMF framework.
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
@Override
public void eSet(int featureID, Object newValue) {
    switch (featureID) {
    case MediatorsPackage.ENRICH_MEDIATOR__CLONE_SOURCE:
        setCloneSource((Boolean)newValue);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_TYPE:
        setSourceType((EnrichSourceType)newValue);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_XPATH:
        setSourceXpath((NamespacedProperty)newValue);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_PROPERTY:
        setSourceProperty((String)newValue);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_XML:
        setSourceXML((String)newValue);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_ACTION:
        setTargetAction((EnrichTargetAction)newValue);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_TYPE:
        setTargetType((EnrichTargetType)newValue);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_XPATH:
        setTargetXpath((NamespacedProperty)newValue);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_PROPERTY:
        setTargetProperty((String)newValue);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__INLINE_TYPE:
        setInlineType((EnrichSourceInlineType)newValue);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__INLINE_REGISTRY_KEY:
        setInlineRegistryKey((RegistryKeyProperty)newValue);
        return;
    }
    super.eSet(featureID, newValue);
}

/**
 * Reflective feature reset used by the EMF framework.
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
@Override
public void eUnset(int featureID) {
    switch (featureID) {
    case MediatorsPackage.ENRICH_MEDIATOR__CLONE_SOURCE:
        setCloneSource(CLONE_SOURCE_EDEFAULT);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_TYPE:
        setSourceType(SOURCE_TYPE_EDEFAULT);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_XPATH:
        setSourceXpath((NamespacedProperty)null);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_PROPERTY:
        setSourceProperty(SOURCE_PROPERTY_EDEFAULT);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_XML:
        setSourceXML(SOURCE_XML_EDEFAULT);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_ACTION:
        setTargetAction(TARGET_ACTION_EDEFAULT);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_TYPE:
        setTargetType(TARGET_TYPE_EDEFAULT);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_XPATH:
        setTargetXpath((NamespacedProperty)null);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_PROPERTY:
        setTargetProperty(TARGET_PROPERTY_EDEFAULT);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__INLINE_TYPE:
        setInlineType(INLINE_TYPE_EDEFAULT);
        return;
    case MediatorsPackage.ENRICH_MEDIATOR__INLINE_REGISTRY_KEY:
        setInlineRegistryKey((RegistryKeyProperty)null);
        return;
    }
    super.eUnset(featureID);
}

/**
 * Reflective "is set" check: a feature is set when it differs from its default.
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
@Override
public boolean eIsSet(int featureID) {
    switch (featureID) {
    case MediatorsPackage.ENRICH_MEDIATOR__CLONE_SOURCE:
        return cloneSource != CLONE_SOURCE_EDEFAULT;
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_TYPE:
        return sourceType != SOURCE_TYPE_EDEFAULT;
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_XPATH:
        return sourceXpath != null;
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_PROPERTY:
        return SOURCE_PROPERTY_EDEFAULT == null ? sourceProperty != null : !SOURCE_PROPERTY_EDEFAULT.equals(sourceProperty);
    case MediatorsPackage.ENRICH_MEDIATOR__SOURCE_XML:
        return SOURCE_XML_EDEFAULT == null ? sourceXML != null : !SOURCE_XML_EDEFAULT.equals(sourceXML);
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_ACTION:
        return targetAction != TARGET_ACTION_EDEFAULT;
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_TYPE:
        return targetType != TARGET_TYPE_EDEFAULT;
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_XPATH:
        return targetXpath != null;
    case MediatorsPackage.ENRICH_MEDIATOR__TARGET_PROPERTY:
        return TARGET_PROPERTY_EDEFAULT == null ?
targetProperty != null : !TARGET_PROPERTY_EDEFAULT.equals(targetProperty); case MediatorsPackage.ENRICH_MEDIATOR__INLINE_TYPE: return inlineType != INLINE_TYPE_EDEFAULT; case MediatorsPackage.ENRICH_MEDIATOR__INLINE_REGISTRY_KEY: return inlineRegistryKey != null; } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() { if (eIsProxy()) return super.toString(); StringBuffer result = new StringBuffer(super.toString()); result.append(" (cloneSource: "); result.append(cloneSource); result.append(", sourceType: "); result.append(sourceType); result.append(", sourceProperty: "); result.append(sourceProperty); result.append(", sourceXML: "); result.append(sourceXML); result.append(", targetAction: "); result.append(targetAction); result.append(", targetType: "); result.append(targetType); result.append(", targetProperty: "); result.append(targetProperty); result.append(", inlineType: "); result.append(inlineType); result.append(')'); return result.toString(); } public Map<String, ObjectValidator> validate() { ObjectValidator objectValidator = new ObjectValidator(); Map<String, String> validateMap = new HashMap<String, String>(); Map<String, ObjectValidator> mediatorValidateMap = new HashMap<String, ObjectValidator>(); switch (getCurrentEsbVersion()) { case ESB301: // Source XPath. if (getSourceType().equals(EnrichSourceType.CUSTOM)) { if(null==getSourceXpath().getPropertyValue() || getSourceXpath().getPropertyValue().trim().isEmpty()){ validateMap.put("Property Expression", "Source Expression is empty"); } } // Source property. if (getSourceType().equals(EnrichSourceType.PROPERTY)) { if(null==getSourceProperty() || getSourceProperty().trim().isEmpty()){ validateMap.put("Property Value", "Source Property Value is empty"); } } // Source inline XML & Registry Key. 
if (getSourceType().equals(EnrichSourceType.INLINE)) { try { Element inlineElem = EsbUtils.parseElement(getSourceXML()); } catch (Exception ex) { validateMap.put("Source XML", "Property Value is empty"); } } // Target XPath. if (getTargetType().equals(EnrichTargetType.CUSTOM)) { if(null==getTargetXpath().getPropertyValue() || getTargetXpath().getPropertyValue().trim().isEmpty()){ validateMap.put("Property Expression", "Target Expression is empty"); } } // Target property. if (getTargetType().equals(EnrichTargetType.PROPERTY)) { if(null==getTargetProperty() || getTargetProperty().trim().isEmpty()){ validateMap.put("Property Value", "Target Property Value is empty"); } } break; case ESB400: // Source XPath. if (getSourceType().equals(EnrichSourceType.CUSTOM)) { if(null==getSourceXpath().getPropertyValue() || getSourceXpath().getPropertyValue().trim().isEmpty()){ validateMap.put("Property Expression", "Source Expression is empty"); } } // Source property. if (getSourceType().equals(EnrichSourceType.PROPERTY)) { validateMap.put("Property Value", "Property Value is empty"); } // Source inline XML & Registry Key. if (getSourceType().equals(EnrichSourceType.INLINE)) { switch (getInlineType()) { case CONTENT: try { Element inlineElem = EsbUtils.parseElement(getSourceXML()); } catch (Exception ex) { validateMap.put("Source XML", "Property Value is empty"); } break; case KEY: break; } } // Target XPath. if (getTargetType().equals(EnrichTargetType.CUSTOM)) { if(null==getTargetXpath().getPropertyValue() || getTargetXpath().getPropertyValue().trim().isEmpty()){ validateMap.put("Property Expression", "Target Expression is empty"); } } // Target property. 
if (getTargetType().equals(EnrichTargetType.PROPERTY)) { if(null==getTargetProperty() || getTargetProperty().trim().isEmpty()){ validateMap.put("Property Value", "Target Property Value is empty"); } } break; } objectValidator.setMediatorErrorMap(validateMap); mediatorValidateMap.put("Enrich Mediator", objectValidator); return mediatorValidateMap; } } //EnrichMediatorImpl
/* Copyright 2004, 2005, 2006 Acegi Technology Pty Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.authentication.jaas; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.core.io.Resource; import org.springframework.security.authentication.AuthenticationProvider; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.authentication.jaas.event.JaasAuthenticationFailedEvent; import org.springframework.security.core.AuthenticationException; import org.springframework.security.core.GrantedAuthority; import org.springframework.util.Assert; import javax.security.auth.callback.CallbackHandler; import javax.security.auth.login.Configuration; import javax.security.auth.login.LoginContext; import javax.security.auth.login.LoginException; import java.io.File; import java.io.IOException; import java.net.URL; import java.security.Security; /** * An {@link AuthenticationProvider} implementation that retrieves user details from a * JAAS login configuration. * * <p> * This <code>AuthenticationProvider</code> is capable of validating * {@link org.springframework.security.authentication.UsernamePasswordAuthenticationToken} * requests contain the correct username and password. 
* </p> * <p> * This implementation is backed by a <a * href="http://java.sun.com/j2se/1.5.0/docs/guide/security/jaas/JAASRefGuide.html" * >JAAS</a> configuration. The loginConfig property must be set to a given JAAS * configuration file. This setter accepts a Spring * {@link org.springframework.core.io.Resource} instance. It should point to a JAAS * configuration file containing an index matching the * {@link #setLoginContextName(java.lang.String) loginContextName} property. * </p> * <p> * For example: If this JaasAuthenticationProvider were configured in a Spring * WebApplicationContext the xml to set the loginConfiguration could be as follows... * * <pre> * &lt;property name="loginConfig"&gt; * &lt;value&gt;/WEB-INF/login.conf&lt;/value&gt; * &lt;/property&gt; * </pre> * * </p> * <p> * The loginContextName should coincide with a given index in the loginConfig specifed. * The loginConfig file used in the JUnit tests appears as the following... * * <pre> * JAASTest { * org.springframework.security.authentication.jaas.TestLoginModule required; * }; * </pre> * * Using the example login configuration above, the loginContextName property would be set * as <i>JAASTest</i>... * * <pre> * &lt;property name="loginContextName"&gt; &lt;value&gt;JAASTest&lt;/value&gt; &lt;/property&gt; * </pre> * * </p> * <p> * When using JAAS login modules as the authentication source, sometimes the <a href= * "http://java.sun.com/j2se/1.5.0/docs/api/javax/security/auth/login/LoginContext.html" * >LoginContext</a> will require <i>CallbackHandler</i>s. The JaasAuthenticationProvider * uses an internal <a href= * "http://java.sun.com/j2se/1.5.0/docs/api/javax/security/auth/callback/CallbackHandler.html" * >CallbackHandler </a> to wrap the {@link JaasAuthenticationCallbackHandler}s configured * in the ApplicationContext. When the LoginContext calls the internal CallbackHandler, * control is passed to each {@link JaasAuthenticationCallbackHandler} for each Callback * passed. 
 * </p>
 * <p>
 * {@link JaasAuthenticationCallbackHandler}s are passed to the JaasAuthenticationProvider
 * through the
 * {@link #setCallbackHandlers(org.springframework.security.authentication.jaas.JaasAuthenticationCallbackHandler[])
 * callbackHandlers} property.
 *
 * <pre>
 * &lt;property name="callbackHandlers"&gt;
 * &lt;list&gt;
 * &lt;bean class="org.springframework.security.authentication.jaas.TestCallbackHandler"/&gt;
 * &lt;bean class="{@link JaasNameCallbackHandler org.springframework.security.authentication.jaas.JaasNameCallbackHandler}"/&gt;
 * &lt;bean class="{@link JaasPasswordCallbackHandler org.springframework.security.authentication.jaas.JaasPasswordCallbackHandler}"/&gt;
 * &lt;/list&gt;
 * &lt;/property&gt;
 * </pre>
 *
 * </p>
 * <p>
 * After calling LoginContext.login(), the JaasAuthenticationProvider will retrieve the
 * returned Principals from the Subject (LoginContext.getSubject().getPrincipals). Each
 * returned principal is then passed to the configured {@link AuthorityGranter}s. An
 * AuthorityGranter is a mapping between a returned Principal, and a role name. If an
 * AuthorityGranter wishes to grant an Authorization a role, it returns that role name
 * from its {@link AuthorityGranter#grant(java.security.Principal)} method. The returned
 * role will be applied to the Authorization object as a {@link GrantedAuthority}.
 * </p>
 * <p>
 * AuthorityGranters are configured in spring xml as follows...
 *
 * <pre>
 * &lt;property name="authorityGranters"&gt;
 * &lt;list&gt;
 * &lt;bean class="org.springframework.security.authentication.jaas.TestAuthorityGranter"/&gt;
 * &lt;/list&gt;
 * &lt;/property&gt;
 * </pre>
 *
 * A configuration note: The JaasAuthenticationProvider uses the security properties
 * &quot;login.config.url.X&quot; to configure JAAS. If you would like to customize the
 * way JAAS gets configured, create a subclass of this and override the
 * {@link #configureJaas(Resource)} method.
 * </p>
 *
 * @author Ray Krueger
 * @author Rob Winch
 */
public class JaasAuthenticationProvider extends AbstractJaasAuthenticationProvider {
    // ~ Static fields/initializers
    // =====================================================================================

    // exists for passivity
    protected static final Log log = LogFactory.getLog(JaasAuthenticationProvider.class);

    // ~ Instance fields
    // ================================================================================================

    // Spring resource pointing at the JAAS login configuration file.
    private Resource loginConfig;

    // When true, Configuration.refresh() is invoked after the login.config.url.N
    // security property has been installed (see setRefreshConfigurationOnStartup).
    private boolean refreshConfigurationOnStartup = true;

    // ~ Methods
    // ========================================================================================================

    // Validates the required properties, installs the JAAS configuration, and
    // verifies the JRE actually exposes a Configuration afterwards.
    public void afterPropertiesSet() throws Exception {
        // the superclass is not called because it does additional checks that are
        // non-passive
        Assert.hasLength(getLoginContextName(), "loginContextName must be set on "
                + getClass());
        Assert.notNull(loginConfig, "loginConfig must be set on " + getClass());
        configureJaas(loginConfig);

        Assert.notNull(
                Configuration.getConfiguration(),
                "As per http://java.sun.com/j2se/1.5.0/docs/api/javax/security/auth/login/Configuration.html "
                        + "\"If a Configuration object was set via the Configuration.setConfiguration method, then that object is "
                        + "returned. Otherwise, a default Configuration object is returned\". Your JRE returned null to "
                        + "Configuration.getConfiguration().");
    }

    @Override
    protected LoginContext createLoginContext(CallbackHandler handler)
            throws LoginException {
        return new LoginContext(getLoginContextName(), handler);
    }

    /**
     * Hook method for configuring Jaas.
     *
     * @param loginConfig URL to Jaas login configuration
     *
     * @throws IOException if there is a problem reading the config resource.
     */
    protected void configureJaas(Resource loginConfig) throws IOException {
        configureJaasUsingLoop();

        if (refreshConfigurationOnStartup) {
            // Overcome issue in SEC-760
            Configuration.getConfiguration().refresh();
        }
    }

    /**
     * Loops through the login.config.url.1,login.config.url.2 properties looking for the
     * login configuration. If it is not set, it will be set to the last available
     * login.config.url.X property.
     */
    private void configureJaasUsingLoop() throws IOException {
        String loginConfigUrl = convertLoginConfigToUrl();
        boolean alreadySet = false;

        int n = 1;
        final String prefix = "login.config.url.";
        String existing;

        // Scan existing login.config.url.N properties; stop if ours is already there.
        while ((existing = Security.getProperty(prefix + n)) != null) {
            alreadySet = existing.equals(loginConfigUrl);

            if (alreadySet) {
                break;
            }

            n++;
        }

        // Not present: append at the first free slot.
        if (!alreadySet) {
            String key = prefix + n;
            log.debug("Setting security property [" + key + "] to: " + loginConfigUrl);
            Security.setProperty(key, loginConfigUrl);
        }
    }

    // Converts the configured resource to a URL string; falls back to
    // Resource.getURL() when the resource is not a plain file (SEC-1700).
    private String convertLoginConfigToUrl() throws IOException {
        String loginConfigPath;
        try {
            loginConfigPath = loginConfig.getFile().getAbsolutePath()
                    .replace(File.separatorChar, '/');
            if (!loginConfigPath.startsWith("/")) {
                loginConfigPath = "/" + loginConfigPath;
            }
            return new URL("file", "", loginConfigPath).toString();
        }
        catch (IOException e) {
            // SEC-1700: May be inside a jar
            return loginConfig.getURL().toString();
        }
    }

    /**
     * Publishes the {@link JaasAuthenticationFailedEvent}. Can be overridden by
     * subclasses for different functionality.
     *
     * @param token The authentication token being processed
     * @param ase The exception that caused the authentication failure
     */
    protected void publishFailureEvent(UsernamePasswordAuthenticationToken token,
            AuthenticationException ase) {
        // exists for passivity (the superclass does a null check before publishing)
        getApplicationEventPublisher().publishEvent(
                new JaasAuthenticationFailedEvent(token, ase));
    }

    public Resource getLoginConfig() {
        return loginConfig;
    }

    /**
     * Set the JAAS login configuration file.
     *
     * @param loginConfig
     *
     * @see <a
     * href="http://java.sun.com/j2se/1.5.0/docs/guide/security/jaas/JAASRefGuide.html">JAAS
     * Reference</a>
     */
    public void setLoginConfig(Resource loginConfig) {
        this.loginConfig = loginConfig;
    }

    /**
     * If set, a call to {@code Configuration#refresh()} will be made by
     * {@code #configureJaas(Resource) } method. Defaults to {@code true}.
     *
     * @see <a href="https://jira.springsource.org/browse/SEC-1320">SEC-1320</a>
     *
     * @param refresh set to {@code false} to disable reloading of the configuration. May
     * be useful in some environments.
     */
    public void setRefreshConfigurationOnStartup(boolean refresh) {
        this.refreshConfigurationOnStartup = refresh;
    }
}
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.thoughtworks.selenium; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.fail; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.spy; import org.junit.Test; import java.io.IOException; import java.io.Reader; import java.io.StringReader; import java.io.StringWriter; import java.io.Writer; import java.net.HttpURLConnection; import java.net.URL; /** * {@link com.thoughtworks.selenium.HttpCommandProcessor} unit test class. 
*/ public class HttpCommandProcessorUnitTest { @Test public void testCanStopTheSeleneseSessionEvenIfThereIsNoCurrentSession() { final HttpCommandProcessor processor; processor = new HttpCommandProcessor("a Server", 1234, "", "a url"); processor.stop(); } @Test public void testCanStopTheSeleneseSessionWhenASessionIsInProgress() { final HttpCommandProcessor processor; processor = new HttpCommandProcessor("a Server", 1234, "", "a url") { @Override public String doCommand(String commandName, String[] args) { assertEquals("testComplete", commandName); assertNull(args); return null; } }; processor.setSessionInProgress("123456789"); processor.stop(); } @Test public void testResourcesClosedWhenIoeOnGetConnection() { IOEThrowingHttpCommandProcessor cmdProc = new IOEThrowingHttpCommandProcessor( "localhost", 4444, "*chrome", "http://www.google.com"); cmdProc.throwIoeOnGetConnection = true; try { cmdProc.getCommandResponseAsString("testCommand"); fail(); } catch (IOException ioe) { cmdProc.verifyClosedResources(false, false, false); } } @Test public void testResourcesClosedWhenIoeOnGetOutputStream() { IOEThrowingHttpCommandProcessor cmdProc = new IOEThrowingHttpCommandProcessor( "localhost", 4444, "*chrome", "http://www.google.com"); cmdProc.throwIoeOnGetOutputStream = true; try { cmdProc.getCommandResponseAsString("testCommand"); fail(); } catch (IOException ioe) { cmdProc.verifyClosedResources(true, false, false); } } @Test public void testResourcesClosedWhenIoeOnGetInputStream() { IOEThrowingHttpCommandProcessor cmdProc = new IOEThrowingHttpCommandProcessor( "localhost", 4444, "*chrome", "http://www.google.com"); cmdProc.throwIoeOnGetInputStream = true; try { cmdProc.getCommandResponseAsString("testCommand"); fail(); } catch (IOException ioe) { cmdProc.verifyClosedResources(true, true, false); } } @Test public void testResourcesClosedWhenNoIoes() { IOEThrowingHttpCommandProcessor cmdProc = new IOEThrowingHttpCommandProcessor( "localhost", 4444, "*chrome", 
"http://www.google.com"); try { cmdProc.getCommandResponseAsString("testCommand"); cmdProc.verifyClosedResources(true, true, true); } catch (IOException ioe) { fail(); } } /** * Inner class to help mock out the network and pipe connections to verify that they are closed * regardless of where IOExceptions occur. * * @author jbevan@google.com (Jennifer Bevan) */ private class IOEThrowingHttpCommandProcessor extends HttpCommandProcessor { private HttpURLConnection closedConn; private Writer closedWriter; private Reader closedReader; protected String responseString = "normal response"; protected boolean throwIoeOnGetConnection = false; protected boolean throwIoeOnGetInputStream = false; protected boolean throwIoeOnGetOutputStream = false; public IOEThrowingHttpCommandProcessor(String serverHost, int serverPort, String browserStartCommand, String browserURL) { super(serverHost, serverPort, browserStartCommand, browserURL); } @Override protected HttpURLConnection getHttpUrlConnection(URL urlForServlet) throws IOException { if (throwIoeOnGetConnection) { throw new IOException("injected exception"); } return super.getHttpUrlConnection(urlForServlet); } @Override protected Writer getOutputStreamWriter(HttpURLConnection conn) throws IOException { if (throwIoeOnGetOutputStream) { throw new IOException("injected exception"); } return new StringWriter(1024); } @Override protected Reader getInputStreamReader(HttpURLConnection conn) throws IOException { if (throwIoeOnGetInputStream) { throw new IOException("injected exception"); } return new StringReader(responseString); } @Override protected int getResponseCode(HttpURLConnection conn) throws IOException { return HttpURLConnection.HTTP_OK; } @Override protected void closeResources(HttpURLConnection conn, Writer wr, Reader rdr) { closedConn = conn; closedWriter = wr; closedReader = rdr; super.closeResources(conn, wr, rdr); } protected boolean verifyClosedResources(boolean connNotNull, boolean writerNotNull, boolean readerNotNull) 
{ return ((connNotNull && (null != closedConn)) && (writerNotNull && (null != closedWriter)) && (readerNotNull && (null != closedReader))); } } @Test public void testGetBooleanArray() throws Exception { HttpCommandProcessor processor = new HttpCommandProcessor("localhost", 4444, "*chrome", "http://www.openqa.org"); processor = spy(processor); String[] cmdArgs = new String[] {"1", "2"}; String[] cmdResults = new String[] {"true", "false"}; boolean[] boolCmdResults = new boolean[] {true, false}; doReturn(cmdResults).when(processor).getStringArray("command", cmdArgs); boolean[] methodResults = processor.getBooleanArray("command", cmdArgs); assertEquals(boolCmdResults[0], methodResults[0]); assertEquals(boolCmdResults[1], methodResults[1]); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.any23.validator; import org.apache.any23.extractor.html.DomUtils; import org.w3c.dom.Node; import java.io.Serializable; import java.util.List; /** * This class contains the report of a validation performed by * the {@link Validator} class. * * @see Validator * @see ValidationReportBuilder * @author Michele Mostarda (mostarda@fbk.eu) * @author Davide Palmisano (palmisano@fbk.eu) */ // TODO: merge with ErrorReporter public interface ValidationReport extends Serializable { /** * Defines the different issue levels. */ enum IssueLevel { error, warning, info } /** * Returns the list of detected issues. * * @return list of detected issues. */ List<Issue> getIssues(); /** * Returns the list of activated rules. * * @return list of activated rules. */ List<RuleActivation> getRuleActivations(); /** * Returns the list of detected errors. * * @return list of detected errors. */ List<Error> getErrors(); /** * An issue found during the validation process. 
*/ class Issue implements Serializable { private final IssueLevel level; private final String message; private final Node origin; public Issue(IssueLevel level, String message, Node origin) { if(level == null) { throw new NullPointerException("level cannot be null."); } if(message == null) { throw new NullPointerException("message cannot be null."); } if(origin == null) { throw new NullPointerException("origin cannot be null."); } this.level = level; this.message = message; this.origin = origin; } public String getMessage() { return message; } public IssueLevel getLevel() { return level; } public Node getOrigin() { return origin; } @Override public String toString() { return String.format( "Issue %s '%s' %s", level, message, DomUtils.getXPathForNode(origin) ); } } /** * This class describes the activation of a rule. */ class RuleActivation implements Serializable { private final String ruleStr; public RuleActivation(Rule r) { if(r == null) { throw new NullPointerException("rule cannot be null."); } ruleStr = r.getHRName(); } public String getRuleStr() { return ruleStr; } @Override public String toString() { return ruleStr; } } /** * An error occurred while performing the validation process. */ abstract class Error implements Serializable { private final Exception cause; private final String message; public Error(Exception e, String msg) { if(e == null) { throw new NullPointerException("exception cannot be null."); } if(msg == null) { throw new NullPointerException("message cannot be null."); } cause = e; message = msg; } public Exception getCause() { return cause; } public String getMessage() { return message; } @Override public String toString() { return String.format("%s %s %s", this.getClass().getName(), cause, message); } } /** * An error occurred while executing a rule. 
*/ class RuleError extends Error { private final Rule origin; public RuleError(Rule r, Exception e, String msg) { super(e, msg); if(r == null) { throw new NullPointerException("rule cannot be null."); } origin = r; } public Rule getOrigin() { return origin; } @Override public String toString() { return String.format("%s - %s", super.toString(), origin.getHRName()); } } /** * An error occurred while executing a fix. */ class FixError extends Error { private final Fix origin; public FixError(Fix f, Exception e, String msg) { super(e, msg); origin = f; } public Fix getOrigin() { return origin; } @Override public String toString() { return String.format("%s - %s", super.toString(), origin.getHRName()); } } }
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.ads.googleads.v9.services; import com.google.ads.googleads.v9.resources.HotelGroupView; import com.google.ads.googleads.v9.resources.HotelGroupViewName; import com.google.ads.googleads.v9.services.stub.HotelGroupViewServiceStub; import com.google.ads.googleads.v9.services.stub.HotelGroupViewServiceStubSettings; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.UnaryCallable; import java.io.IOException; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: Service to manage Hotel Group Views. * * <p>This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * * <pre>{@code * try (HotelGroupViewServiceClient hotelGroupViewServiceClient = * HotelGroupViewServiceClient.create()) { * HotelGroupViewName resourceName = * HotelGroupViewName.of("[CUSTOMER_ID]", "[AD_GROUP_ID]", "[CRITERION_ID]"); * HotelGroupView response = hotelGroupViewServiceClient.getHotelGroupView(resourceName); * } * }</pre> * * <p>Note: close() needs to be called on the HotelGroupViewServiceClient object to clean up * resources such as threads. In the example above, try-with-resources is used, which automatically * calls close(). 
* * <p>The surface of this class includes several types of Java methods for each of the API's * methods: * * <ol> * <li> A "flattened" method. With this type of method, the fields of the request type have been * converted into function parameters. It may be the case that not all fields are available as * parameters, and not every API method will have a flattened method entry point. * <li> A "request object" method. This type of method only takes one parameter, a request object, * which must be constructed before the call. Not every API method will have a request object * method. * <li> A "callable" method. This type of method takes no parameters and returns an immutable API * callable object, which can be used to initiate calls to the service. * </ol> * * <p>See the individual methods for example code. * * <p>Many parameters require resource names to be formatted in a particular way. To assist with * these names, this class includes a format method for each type of name, and additionally a parse * method to extract the individual identifiers contained within names that are returned. * * <p>This class can be customized by passing in a custom instance of HotelGroupViewServiceSettings * to create(). 
For example: * * <p>To customize credentials: * * <pre>{@code * HotelGroupViewServiceSettings hotelGroupViewServiceSettings = * HotelGroupViewServiceSettings.newBuilder() * .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials)) * .build(); * HotelGroupViewServiceClient hotelGroupViewServiceClient = * HotelGroupViewServiceClient.create(hotelGroupViewServiceSettings); * }</pre> * * <p>To customize the endpoint: * * <pre>{@code * HotelGroupViewServiceSettings hotelGroupViewServiceSettings = * HotelGroupViewServiceSettings.newBuilder().setEndpoint(myEndpoint).build(); * HotelGroupViewServiceClient hotelGroupViewServiceClient = * HotelGroupViewServiceClient.create(hotelGroupViewServiceSettings); * }</pre> * * <p>Please refer to the GitHub repository's samples for more quickstart code snippets. */ @Generated("by gapic-generator-java") public class HotelGroupViewServiceClient implements BackgroundResource { private final HotelGroupViewServiceSettings settings; private final HotelGroupViewServiceStub stub; /** Constructs an instance of HotelGroupViewServiceClient with default settings. */ public static final HotelGroupViewServiceClient create() throws IOException { return create(HotelGroupViewServiceSettings.newBuilder().build()); } /** * Constructs an instance of HotelGroupViewServiceClient, using the given settings. The channels * are created based on the settings passed in, or defaults for any settings that are not set. */ public static final HotelGroupViewServiceClient create(HotelGroupViewServiceSettings settings) throws IOException { return new HotelGroupViewServiceClient(settings); } /** * Constructs an instance of HotelGroupViewServiceClient, using the given stub for making calls. * This is for advanced usage - prefer using create(HotelGroupViewServiceSettings). 
*/ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final HotelGroupViewServiceClient create(HotelGroupViewServiceStub stub) { return new HotelGroupViewServiceClient(stub); } /** * Constructs an instance of HotelGroupViewServiceClient, using the given settings. This is * protected so that it is easy to make a subclass, but otherwise, the static factory methods * should be preferred. */ protected HotelGroupViewServiceClient(HotelGroupViewServiceSettings settings) throws IOException { this.settings = settings; this.stub = ((HotelGroupViewServiceStubSettings) settings.getStubSettings()).createStub(); } @BetaApi("A restructuring of stub classes is planned, so this may break in the future") protected HotelGroupViewServiceClient(HotelGroupViewServiceStub stub) { this.settings = null; this.stub = stub; } public final HotelGroupViewServiceSettings getSettings() { return settings; } @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public HotelGroupViewServiceStub getStub() { return stub; } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the requested Hotel Group View in full detail. * * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() * [InternalError]() [QuotaError]() [RequestError]() * * <p>Sample code: * * <pre>{@code * try (HotelGroupViewServiceClient hotelGroupViewServiceClient = * HotelGroupViewServiceClient.create()) { * HotelGroupViewName resourceName = * HotelGroupViewName.of("[CUSTOMER_ID]", "[AD_GROUP_ID]", "[CRITERION_ID]"); * HotelGroupView response = hotelGroupViewServiceClient.getHotelGroupView(resourceName); * } * }</pre> * * @param resourceName Required. Resource name of the Hotel Group View to fetch. 
* @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final HotelGroupView getHotelGroupView(HotelGroupViewName resourceName) { GetHotelGroupViewRequest request = GetHotelGroupViewRequest.newBuilder() .setResourceName(resourceName == null ? null : resourceName.toString()) .build(); return getHotelGroupView(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the requested Hotel Group View in full detail. * * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() * [InternalError]() [QuotaError]() [RequestError]() * * <p>Sample code: * * <pre>{@code * try (HotelGroupViewServiceClient hotelGroupViewServiceClient = * HotelGroupViewServiceClient.create()) { * String resourceName = * HotelGroupViewName.of("[CUSTOMER_ID]", "[AD_GROUP_ID]", "[CRITERION_ID]").toString(); * HotelGroupView response = hotelGroupViewServiceClient.getHotelGroupView(resourceName); * } * }</pre> * * @param resourceName Required. Resource name of the Hotel Group View to fetch. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final HotelGroupView getHotelGroupView(String resourceName) { GetHotelGroupViewRequest request = GetHotelGroupViewRequest.newBuilder().setResourceName(resourceName).build(); return getHotelGroupView(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the requested Hotel Group View in full detail. 
* * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() * [InternalError]() [QuotaError]() [RequestError]() * * <p>Sample code: * * <pre>{@code * try (HotelGroupViewServiceClient hotelGroupViewServiceClient = * HotelGroupViewServiceClient.create()) { * GetHotelGroupViewRequest request = * GetHotelGroupViewRequest.newBuilder() * .setResourceName( * HotelGroupViewName.of("[CUSTOMER_ID]", "[AD_GROUP_ID]", "[CRITERION_ID]") * .toString()) * .build(); * HotelGroupView response = hotelGroupViewServiceClient.getHotelGroupView(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final HotelGroupView getHotelGroupView(GetHotelGroupViewRequest request) { return getHotelGroupViewCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the requested Hotel Group View in full detail. * * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() * [InternalError]() [QuotaError]() [RequestError]() * * <p>Sample code: * * <pre>{@code * try (HotelGroupViewServiceClient hotelGroupViewServiceClient = * HotelGroupViewServiceClient.create()) { * GetHotelGroupViewRequest request = * GetHotelGroupViewRequest.newBuilder() * .setResourceName( * HotelGroupViewName.of("[CUSTOMER_ID]", "[AD_GROUP_ID]", "[CRITERION_ID]") * .toString()) * .build(); * ApiFuture<HotelGroupView> future = * hotelGroupViewServiceClient.getHotelGroupViewCallable().futureCall(request); * // Do something. 
* HotelGroupView response = future.get(); * } * }</pre> */ public final UnaryCallable<GetHotelGroupViewRequest, HotelGroupView> getHotelGroupViewCallable() { return stub.getHotelGroupViewCallable(); } @Override public final void close() { stub.close(); } @Override public void shutdown() { stub.shutdown(); } @Override public boolean isShutdown() { return stub.isShutdown(); } @Override public boolean isTerminated() { return stub.isTerminated(); } @Override public void shutdownNow() { stub.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return stub.awaitTermination(duration, unit); } }
package plugin.google.maps; import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.app.Activity; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.IntentSender.SendIntentException; import android.content.pm.ApplicationInfo; import android.content.pm.PackageManager; import android.content.pm.PackageManager.NameNotFoundException; import android.content.res.Configuration; import android.content.res.Resources; import android.graphics.Color; import android.graphics.RectF; import android.location.Location; import android.location.LocationManager; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.provider.Settings; import android.support.annotation.NonNull; import android.util.Log; import android.view.View; import android.view.ViewGroup; import android.view.ViewTreeObserver; import android.widget.Toast; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.GooglePlayServicesUtil; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.common.api.PendingResult; import com.google.android.gms.common.api.ResultCallback; import com.google.android.gms.common.api.Status; import com.google.android.gms.location.LocationListener; import com.google.android.gms.location.LocationRequest; import com.google.android.gms.location.LocationServices; import com.google.android.gms.location.LocationSettingsRequest; import com.google.android.gms.location.LocationSettingsResult; import com.google.android.gms.location.LocationSettingsStatusCodes; import com.google.android.gms.maps.MapsInitializer; import org.apache.cordova.CallbackContext; import org.apache.cordova.CordovaInterface; import org.apache.cordova.CordovaPlugin; import org.apache.cordova.CordovaWebView; import org.apache.cordova.LOG; import 
org.apache.cordova.PluginEntry; import org.apache.cordova.PluginManager; import org.apache.cordova.PluginResult; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Set; @SuppressWarnings("deprecation") public class CordovaGoogleMaps extends CordovaPlugin implements ViewTreeObserver.OnScrollChangedListener{ private final String TAG = "GoogleMapsPlugin"; private HashMap<String, Bundle> bufferForLocationDialog = new HashMap<String, Bundle>(); private final int ACTIVITY_LOCATION_DIALOG = 0x7f999900; // Invite the location dialog using Google Play Services private final int ACTIVITY_LOCATION_PAGE = 0x7f999901; // Open the location settings page private Activity activity; public ViewGroup root; public MyPluginLayout mPluginLayout = null; private GoogleApiClient googleApiClient = null; public boolean initialized = false; public PluginManager pluginManager; public static String CURRENT_URL; public static final HashMap<String, String> semaphore = new HashMap<String, String>(); @SuppressLint("NewApi") @Override public void initialize(final CordovaInterface cordova, final CordovaWebView webView) { super.initialize(cordova, webView); if (root != null) { return; } LOG.setLogLevel(LOG.ERROR); activity = cordova.getActivity(); final View view = webView.getView(); view.getViewTreeObserver().addOnScrollChangedListener(CordovaGoogleMaps.this); root = (ViewGroup) view.getParent(); pluginManager = webView.getPluginManager(); cordova.getActivity().runOnUiThread(new Runnable() { @SuppressLint("NewApi") public void run() { CURRENT_URL = webView.getUrl(); // Enable this, webView makes draw cache on the Android action bar issue. 
//View view = webView.getView(); //if (Build.VERSION.SDK_INT >= 21 || "org.xwalk.core.XWalkView".equals(view.getClass().getName())){ // view.setLayerType(View.LAYER_TYPE_HARDWARE, null); // Log.d("Layout", "--> view =" + view.isHardwareAccelerated()); //always false //} // ------------------------------ // Check of Google Play Services // ------------------------------ int checkGooglePlayServices = GooglePlayServicesUtil.isGooglePlayServicesAvailable(activity); Log.d(TAG, "----> checkGooglePlayServices = " + (ConnectionResult.SUCCESS == checkGooglePlayServices)); if (checkGooglePlayServices != ConnectionResult.SUCCESS) { // google play services is missing!!!! /* * Returns status code indicating whether there was an error. Can be one * of following in ConnectionResult: SUCCESS, SERVICE_MISSING, * SERVICE_VERSION_UPDATE_REQUIRED, SERVICE_DISABLED, SERVICE_INVALID. */ Log.e(TAG, "---Google Play Services is not available: " + GooglePlayServicesUtil.getErrorString(checkGooglePlayServices)); boolean isNeedToUpdate = false; String errorMsg = "Google Maps Android API v2 is not available for some reason on this device. Do you install the latest Google Play Services from Google Play Store?"; switch (checkGooglePlayServices) { case ConnectionResult.DEVELOPER_ERROR: errorMsg = "The application is misconfigured. This error is not recoverable and will be treated as fatal. The developer should look at the logs after this to determine more actionable information."; break; case ConnectionResult.INTERNAL_ERROR: errorMsg = "An internal error of Google Play Services occurred. Please retry, and it should resolve the problem."; break; case ConnectionResult.INVALID_ACCOUNT: errorMsg = "You attempted to connect to the service with an invalid account name specified."; break; case ConnectionResult.LICENSE_CHECK_FAILED: errorMsg = "The application is not licensed to the user. 
This error is not recoverable and will be treated as fatal."; break; case ConnectionResult.NETWORK_ERROR: errorMsg = "A network error occurred. Please retry, and it should resolve the problem."; break; case ConnectionResult.SERVICE_DISABLED: errorMsg = "The installed version of Google Play services has been disabled on this device. Please turn on Google Play Services."; break; case ConnectionResult.SERVICE_INVALID: errorMsg = "The version of the Google Play services installed on this device is not authentic. Please update the Google Play Services from Google Play Store."; isNeedToUpdate = true; break; case ConnectionResult.SERVICE_MISSING: errorMsg = "Google Play services is missing on this device. Please install the Google Play Services."; isNeedToUpdate = true; break; case ConnectionResult.SERVICE_VERSION_UPDATE_REQUIRED: errorMsg = "The installed version of Google Play services is out of date. Please update the Google Play Services from Google Play Store."; isNeedToUpdate = true; break; case ConnectionResult.SIGN_IN_REQUIRED: errorMsg = "You attempted to connect to the service but you are not signed in. 
Please check the Google Play Services configuration"; break; default: isNeedToUpdate = true; break; } final boolean finalIsNeedToUpdate = isNeedToUpdate; AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(activity); alertDialogBuilder .setMessage(errorMsg) .setCancelable(false) .setPositiveButton("Close", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog,int id) { dialog.dismiss(); if (finalIsNeedToUpdate) { try { activity.startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("market://details?id=com.google.android.gms"))); } catch (android.content.ActivityNotFoundException anfe) { activity.startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("http://play.google.com/store/apps/details?id=appPackageName"))); } } } }); AlertDialog alertDialog = alertDialogBuilder.create(); // show it alertDialog.show(); Log.e(TAG, "Google Play Services is not available."); return; } webView.getView().setBackgroundColor(Color.TRANSPARENT); webView.getView().setOverScrollMode(View.OVER_SCROLL_NEVER); mPluginLayout = new MyPluginLayout(webView, activity); mPluginLayout.isSuspended = true; // Check the API key ApplicationInfo appliInfo = null; try { appliInfo = activity.getPackageManager().getApplicationInfo(activity.getPackageName(), PackageManager.GET_META_DATA); } catch (NameNotFoundException e) {} String API_KEY = appliInfo.metaData.getString("com.google.android.maps.v2.API_KEY"); if ("API_KEY_FOR_ANDROID".equals(API_KEY)) { AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(activity); alertDialogBuilder .setMessage("Please replace 'API_KEY_FOR_ANDROID' in the platforms/android/AndroidManifest.xml with your API Key!") .setCancelable(false) .setPositiveButton("Close", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog,int id) { dialog.dismiss(); } }); AlertDialog alertDialog = alertDialogBuilder.create(); // show it alertDialog.show(); } CURRENT_URL = webView.getUrl(); 
//------------------------------ // Initialize Google Maps SDK //------------------------------ if (!initialized) { try { MapsInitializer.initialize(cordova.getActivity()); initialized = true; } catch (Exception e) { e.printStackTrace(); } } } }); } @Override public boolean onOverrideUrlLoading(String url) { mPluginLayout.isSuspended = true; /* this.activity.runOnUiThread(new Runnable() { @Override public void run() { webView.loadUrl("javascript:if(window.cordova){cordova.fireDocumentEvent('plugin_url_changed', {});}"); } }); */ CURRENT_URL = url; return false; } @Override public void onScrollChanged() { if (mPluginLayout == null) { return; } View view = webView.getView(); int scrollX = view.getScrollX(); int scrollY = view.getScrollY(); mPluginLayout.scrollTo(scrollX, scrollY); } @Override public boolean execute(final String action, final JSONArray args, final CallbackContext callbackContext) throws JSONException { cordova.getThreadPool().submit(new Runnable() { @Override public void run() { try { if (action.equals("putHtmlElements")) { CordovaGoogleMaps.this.putHtmlElements(args, callbackContext); } else if ("clearHtmlElements".equals(action)) { CordovaGoogleMaps.this.clearHtmlElements(args, callbackContext); } else if ("pause".equals(action)) { CordovaGoogleMaps.this.pause(args, callbackContext); } else if ("resume".equals(action)) { CordovaGoogleMaps.this.resume(args, callbackContext); } else if ("getMyLocation".equals(action)) { CordovaGoogleMaps.this.getMyLocation(args, callbackContext); } else if ("getMap".equals(action)) { CordovaGoogleMaps.this.getMap(args, callbackContext); } else if ("removeMap".equals(action)) { CordovaGoogleMaps.this.removeMap(args, callbackContext); } else if ("backHistory".equals(action)) { CordovaGoogleMaps.this.backHistory(args, callbackContext); } else if ("resumeResizeTimer".equals(action)) { CordovaGoogleMaps.this.resumeResizeTimer(args, callbackContext); } else if ("pauseResizeTimer".equals(action)) { 
CordovaGoogleMaps.this.pauseResizeTimer(args, callbackContext); } else if ("updateMapPositionOnly".equals(action)) { CordovaGoogleMaps.this.updateMapPositionOnly(args, callbackContext); } } catch (JSONException e) { e.printStackTrace(); } } }); return true; } public void resumeResizeTimer(final JSONArray args, final CallbackContext callbackContext) throws JSONException { if (mPluginLayout.isWaiting) { mPluginLayout.pauseResize = false; synchronized (mPluginLayout.timerLock) { mPluginLayout.timerLock.notify(); } } callbackContext.success(); } public void updateMapPositionOnly(final JSONArray args, final CallbackContext callbackContext) throws JSONException { final JSONObject elements = args.getJSONObject(0); Bundle elementsBundle = PluginUtil.Json2Bundle(elements); float zoomScale = Resources.getSystem().getDisplayMetrics().density; Iterator<String> domIDs = elementsBundle.keySet().iterator(); String domId; Bundle domInfo, size; while (domIDs.hasNext()) { domId = domIDs.next(); domInfo = elementsBundle.getBundle(domId); size = domInfo.getBundle("size"); RectF rectF = new RectF(); rectF.left = (float)(Double.parseDouble(size.get("left") + "") * zoomScale); rectF.top = (float)(Double.parseDouble(size.get("top") + "") * zoomScale); rectF.right = rectF.left + (float)(Double.parseDouble(size.get("width") + "") * zoomScale); rectF.bottom = rectF.top + (float)(Double.parseDouble(size.get("height") + "") * zoomScale); mPluginLayout.HTMLNodeRectFs.put(domId, rectF); domInfo.remove("size"); mPluginLayout.HTMLNodes.put(domId, domInfo); } mPluginLayout.pauseResize = true; if (mPluginLayout.isWaiting) { mPluginLayout.pauseResize = false; synchronized (mPluginLayout.timerLock) { mPluginLayout.timerLock.notify(); } } callbackContext.success(); } public void pauseResizeTimer(final JSONArray args, final CallbackContext callbackContext) throws JSONException { mPluginLayout.pauseResize = true; callbackContext.success(); } public void backHistory(final JSONArray args, final 
CallbackContext callbackContext) throws JSONException { cordova.getActivity().runOnUiThread(new Runnable() { @Override public void run() { if (!webView.backHistory()) { // If no more history back, exit the app cordova.getActivity().finish(); } } }); } public void onRequestPermissionResult(int requestCode, String[] permissions, int[] grantResults) throws JSONException { synchronized (CordovaGoogleMaps.semaphore) { semaphore.notify(); } } public void pause(final JSONArray args, final CallbackContext callbackContext) throws JSONException { if (mPluginLayout == null) { callbackContext.success(); return; } mPluginLayout.isSuspended = true; callbackContext.success(); } public void resume(final JSONArray args, final CallbackContext callbackContext) throws JSONException { if (mPluginLayout == null) { callbackContext.success(); return; } if (mPluginLayout.isSuspended) { mPluginLayout.isSuspended = false; synchronized (mPluginLayout.timerLock) { mPluginLayout.timerLock.notify(); } } callbackContext.success(); } public void clearHtmlElements(final JSONArray args, final CallbackContext callbackContext) throws JSONException { if (mPluginLayout == null) { callbackContext.success(); return; } mPluginLayout.clearHtmlElements(); callbackContext.success(); } public void putHtmlElements(final JSONArray args, final CallbackContext callbackContext) throws JSONException { final JSONObject elements = args.getJSONObject(0); if (mPluginLayout == null) { callbackContext.success(); return; } if (!mPluginLayout.stopFlag || mPluginLayout.needUpdatePosition) { mPluginLayout.putHTMLElements(elements); } callbackContext.success(); } @SuppressWarnings("unused") public void getMyLocation(final JSONArray args, final CallbackContext callbackContext) throws JSONException { // enableHighAccuracy = true -> PRIORITY_HIGH_ACCURACY // enableHighAccuracy = false -> PRIORITY_BALANCED_POWER_ACCURACY JSONObject params = args.getJSONObject(0); boolean isHighLocal = false; if (params.has("enableHighAccuracy")) { 
isHighLocal = params.getBoolean("enableHighAccuracy"); } final boolean isHigh = isHighLocal; // Request geolocation permission. boolean locationPermission = cordova.hasPermission("android.permission.ACCESS_COARSE_LOCATION"); if (!locationPermission) { //_saveArgs = args; //_saveCallbackContext = callbackContext; synchronized (semaphore) { cordova.requestPermissions(this, callbackContext.hashCode(), new String[]{"android.permission.ACCESS_FINE_LOCATION", "android.permission.ACCESS_COARSE_LOCATION"}); try { semaphore.wait(); } catch (InterruptedException e) { e.printStackTrace(); } } locationPermission = cordova.hasPermission("android.permission.ACCESS_COARSE_LOCATION"); if (!locationPermission) { callbackContext.error("Geolocation permission request was denied."); return; } } if (googleApiClient == null) { googleApiClient = new GoogleApiClient.Builder(activity) .addApi(LocationServices.API) .addConnectionCallbacks(new com.google.android.gms.common.api.GoogleApiClient.ConnectionCallbacks() { @Override public void onConnected(Bundle connectionHint) { Log.e(TAG, "===> onConnected"); CordovaGoogleMaps.this.sendNoResult(callbackContext); _checkLocationSettings(isHigh, callbackContext); } @Override public void onConnectionSuspended(int cause) { Log.e(TAG, "===> onConnectionSuspended"); } }) .addOnConnectionFailedListener(new com.google.android.gms.common.api.GoogleApiClient.OnConnectionFailedListener() { @Override public void onConnectionFailed(@NonNull ConnectionResult result) { Log.e(TAG, "===> onConnectionFailed"); PluginResult tmpResult = new PluginResult(PluginResult.Status.ERROR, result.toString()); tmpResult.setKeepCallback(false); callbackContext.sendPluginResult(tmpResult); googleApiClient.disconnect(); } }) .build(); googleApiClient.connect(); } else if (googleApiClient.isConnected()) { _checkLocationSettings(isHigh, callbackContext); } } private void _checkLocationSettings(final boolean enableHighAccuracy, final CallbackContext callbackContext) { 
LocationSettingsRequest.Builder builder = new LocationSettingsRequest.Builder().setAlwaysShow(true); LocationRequest locationRequest; locationRequest = LocationRequest.create() .setPriority(LocationRequest.PRIORITY_BALANCED_POWER_ACCURACY); builder.addLocationRequest(locationRequest); if (enableHighAccuracy) { locationRequest = LocationRequest.create() .setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY); builder.addLocationRequest(locationRequest); } PendingResult<LocationSettingsResult> locationSettingsResult = LocationServices.SettingsApi.checkLocationSettings(googleApiClient, builder.build()); locationSettingsResult.setResultCallback(new ResultCallback<LocationSettingsResult>() { @Override public void onResult(@NonNull LocationSettingsResult result) { final Status status = result.getStatus(); switch (status.getStatusCode()) { case LocationSettingsStatusCodes.SUCCESS: _requestLocationUpdate(false, enableHighAccuracy, callbackContext); break; case LocationSettingsStatusCodes.RESOLUTION_REQUIRED: // Location settings are not satisfied. But could be fixed by showing the user // a dialog. try { //Keep the callback id Bundle bundle = new Bundle(); bundle.putInt("type", ACTIVITY_LOCATION_DIALOG); bundle.putString("callbackId", callbackContext.getCallbackId()); bundle.putBoolean("enableHighAccuracy", enableHighAccuracy); int hashCode = bundle.hashCode(); bufferForLocationDialog.put("bundle_" + hashCode, bundle); CordovaGoogleMaps.this.sendNoResult(callbackContext); // Show the dialog by calling startResolutionForResult(), // and check the result in onActivityResult(). cordova.setActivityResultCallback(CordovaGoogleMaps.this); status.startResolutionForResult(cordova.getActivity(), hashCode); } catch (SendIntentException e) { // Show the dialog that is original version of this plugin. _showLocationSettingsPage(enableHighAccuracy, callbackContext); } break; case LocationSettingsStatusCodes.SETTINGS_CHANGE_UNAVAILABLE: // Location settings are not satisfied. 
However, we have no way to fix the // settings so we won't show the dialog. JSONObject jsResult = new JSONObject(); try { jsResult.put("status", false); jsResult.put("error_code", "service_not_available"); jsResult.put("error_message", "This app has been rejected to use Location Services."); } catch (JSONException e) { e.printStackTrace(); } callbackContext.error(jsResult); break; } } }); } private void _showLocationSettingsPage(final boolean enableHighAccuracy, final CallbackContext callbackContext) { //Ask the user to turn on the location services. AlertDialog.Builder builder = new AlertDialog.Builder(this.activity); builder.setTitle("Improve location accuracy"); builder.setMessage("To enhance your Maps experience:\n\n" + " - Enable Google apps location access\n\n" + " - Turn on GPS and mobile network location"); builder.setPositiveButton("Settings", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { //Keep the callback id Bundle bundle = new Bundle(); bundle.putInt("type", ACTIVITY_LOCATION_PAGE); bundle.putString("callbackId", callbackContext.getCallbackId()); bundle.putBoolean("enableHighAccuracy", enableHighAccuracy); int hashCode = bundle.hashCode(); bufferForLocationDialog.put("bundle_" + hashCode, bundle); CordovaGoogleMaps.this.sendNoResult(callbackContext); //Launch settings, allowing user to make a change cordova.setActivityResultCallback(CordovaGoogleMaps.this); Intent intent = new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS); activity.startActivityForResult(intent, hashCode); } }); builder.setNegativeButton("Skip", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { //No location service, no Activity dialog.dismiss(); JSONObject result = new JSONObject(); try { result.put("status", false); result.put("error_code", "service_denied"); result.put("error_message", "This app has been rejected to use Location Services."); } catch (JSONException 
e) { e.printStackTrace(); } callbackContext.error(result); } }); builder.create().show(); } @SuppressWarnings("MissingPermission") private void _requestLocationUpdate(final boolean isRetry, final boolean enableHighAccuracy, final CallbackContext callbackContext) { int priority = LocationRequest.PRIORITY_BALANCED_POWER_ACCURACY; if (enableHighAccuracy) { priority = LocationRequest.PRIORITY_HIGH_ACCURACY; } LocationRequest locationRequest= LocationRequest.create() .setExpirationTime(5000) .setNumUpdates(2) .setSmallestDisplacement(0) .setPriority(priority) .setInterval(5000); final PendingResult<Status> result = LocationServices.FusedLocationApi.requestLocationUpdates( googleApiClient, locationRequest, new LocationListener() { @Override public void onLocationChanged(Location location) { /* if (callbackContext.isFinished()) { return; } */ JSONObject result; try { result = PluginUtil.location2Json(location); result.put("status", true); callbackContext.success(result); } catch (JSONException e) { e.printStackTrace(); } googleApiClient.disconnect(); } }); result.setResultCallback(new ResultCallback<Status>() { public void onResult(Status status) { if (!status.isSuccess()) { String errorMsg = status.getStatusMessage(); PluginResult result = new PluginResult(PluginResult.Status.ERROR, errorMsg); callbackContext.sendPluginResult(result); } else { // no update location Location location = LocationServices.FusedLocationApi.getLastLocation(googleApiClient); if (location != null) { try { JSONObject result = PluginUtil.location2Json(location); result.put("status", true); callbackContext.success(result); } catch (JSONException e) { e.printStackTrace(); } } else { if (!isRetry) { Toast.makeText(activity, "Waiting for location...", Toast.LENGTH_SHORT).show(); CordovaGoogleMaps.this.sendNoResult(callbackContext); // Retry Handler handler = new Handler(); handler.postDelayed(new Runnable() { @Override public void run() { _requestLocationUpdate(true, enableHighAccuracy, 
callbackContext); } }, 3000); } else { // Send back the error result JSONObject result = new JSONObject(); try { result.put("status", false); result.put("error_code", "cannot_detect"); result.put("error_message", "Can not detect your location. Try again."); } catch (JSONException e) { e.printStackTrace(); } callbackContext.error(result); } } } } }); } @Override public void onReset() { super.onReset(); if (mPluginLayout == null || mPluginLayout.pluginMaps == null) { return; } cordova.getActivity().runOnUiThread(new Runnable() { @Override public void run() { CURRENT_URL = webView.getUrl(); mPluginLayout.setBackgroundColor(Color.WHITE); Set<String> mapIds = mPluginLayout.pluginMaps.keySet(); PluginMap pluginMap; // prevent the ConcurrentModificationException error. String[] mapIdArray= mapIds.toArray(new String[mapIds.size()]); for (String mapId : mapIdArray) { if (mPluginLayout.pluginMaps.containsKey(mapId)) { pluginMap = mPluginLayout.removePluginMap(mapId); pluginMap.remove(null, null); pluginMap.onDestroy(); mPluginLayout.HTMLNodes.remove(mapId); } } mPluginLayout.HTMLNodes.clear(); mPluginLayout.pluginMaps.clear(); System.gc(); Runtime.getRuntime().gc(); } }); } public void removeMap(final JSONArray args, final CallbackContext callbackContext) throws JSONException { String mapId = args.getString(0); if (mPluginLayout.pluginMaps.containsKey(mapId)) { PluginMap pluginMap = mPluginLayout.removePluginMap(mapId); if (pluginMap != null) { pluginMap.remove(null, null); pluginMap.onDestroy(); pluginMap.objects.clear(); pluginMap.objects.destroy(); mPluginLayout.HTMLNodes.remove(mapId); pluginMap = null; } try { Field pluginMapField = pluginManager.getClass().getDeclaredField("pluginMap"); pluginMapField.setAccessible(true); LinkedHashMap<String, CordovaPlugin> pluginMapInstance = (LinkedHashMap<String, CordovaPlugin>) pluginMapField.get(pluginManager); pluginMapInstance.remove(mapId); Field entryMapField = pluginManager.getClass().getDeclaredField("entryMap"); 
entryMapField.setAccessible(true); LinkedHashMap<String, PluginEntry> entryMapInstance = (LinkedHashMap<String, PluginEntry>) entryMapField.get(pluginManager); entryMapInstance.remove(mapId); } catch (Exception e) { e.printStackTrace(); } } System.gc(); Runtime.getRuntime().gc(); callbackContext.success(); } @TargetApi(Build.VERSION_CODES.HONEYCOMB) public void getMap(final JSONArray args, final CallbackContext callbackContext) throws JSONException { //------------------------------------------ // Create an instance of PluginMap class. //------------------------------------------ String mapId = args.getString(0); PluginMap pluginMap = new PluginMap(); pluginMap.privateInitialize(mapId, cordova, webView, null); pluginMap.initialize(cordova, webView); pluginMap.mapCtrl = CordovaGoogleMaps.this; pluginMap.self = pluginMap; ((MyPlugin)pluginMap).CURRENT_PAGE_URL = CURRENT_URL; PluginEntry pluginEntry = new PluginEntry(mapId, pluginMap); pluginManager.addService(pluginEntry); if (mPluginLayout.isSuspended) { mPluginLayout.isSuspended = false; synchronized (mPluginLayout.timerLock) { mPluginLayout.timerLock.notify(); } } pluginMap.getMap(args, callbackContext); } @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (!bufferForLocationDialog.containsKey("bundle_" + requestCode)) { Log.e(TAG, "no key"); return; } Bundle query = bufferForLocationDialog.get("bundle_" + requestCode); Log.d(TAG, "====> onActivityResult (" + resultCode + ")"); switch (query.getInt("type")) { case ACTIVITY_LOCATION_DIALOG: // User was asked to enable the location setting. 
switch (resultCode) { case Activity.RESULT_OK: // All required changes were successfully made _inviteLocationUpdateAfterActivityResult(query); break; case Activity.RESULT_CANCELED: // The user was asked to change settings, but chose not to _userRefusedToUseLocationAfterActivityResult(query); break; default: break; } break; case ACTIVITY_LOCATION_PAGE: _onActivityResultLocationPage(query); break; } } private void _onActivityResultLocationPage(Bundle bundle) { String callbackId = bundle.getString("callbackId"); CallbackContext callbackContext = new CallbackContext(callbackId, this.webView); LocationManager locationManager = (LocationManager) this.activity.getSystemService(Context.LOCATION_SERVICE); List<String> providers = locationManager.getAllProviders(); int availableProviders = 0; if (mPluginLayout != null && mPluginLayout.isDebug) { Log.d(TAG, "---debug at getMyLocation(available providers)--"); } Iterator<String> iterator = providers.iterator(); String provider; boolean isAvailable; while(iterator.hasNext()) { provider = iterator.next(); isAvailable = locationManager.isProviderEnabled(provider); if (isAvailable) { availableProviders++; } if (mPluginLayout != null && mPluginLayout.isDebug) { Log.d(TAG, " " + provider + " = " + (isAvailable ? 
"" : "not ") + "available"); } } if (availableProviders == 0) { JSONObject result = new JSONObject(); try { result.put("status", false); result.put("error_code", "not_available"); result.put("error_message", "Since this device does not have any location provider, this app can not detect your location."); } catch (JSONException e) { e.printStackTrace(); } callbackContext.error(result); return; } _inviteLocationUpdateAfterActivityResult(bundle); } private void _inviteLocationUpdateAfterActivityResult(Bundle bundle) { boolean enableHighAccuracy = bundle.getBoolean("enableHighAccuracy"); String callbackId = bundle.getString("callbackId"); CallbackContext callbackContext = new CallbackContext(callbackId, this.webView); this._requestLocationUpdate(false, enableHighAccuracy, callbackContext); } private void _userRefusedToUseLocationAfterActivityResult(Bundle bundle) { String callbackId = bundle.getString("callbackId"); CallbackContext callbackContext = new CallbackContext(callbackId, this.webView); JSONObject result = new JSONObject(); try { result.put("status", false); result.put("error_code", "service_denied"); result.put("error_message", "This app has been rejected to use Location Services."); } catch (JSONException e) { e.printStackTrace(); } callbackContext.error(result); } @Override public void onPause(boolean multitasking) { super.onPause(multitasking); cordova.getThreadPool().submit(new Runnable() { @Override public void run() { Set<String> mapIds = mPluginLayout.pluginMaps.keySet(); PluginMap pluginMap; // prevent the ConcurrentModificationException error. 
String[] mapIdArray= mapIds.toArray(new String[mapIds.size()]); for (String mapId : mapIdArray) { if (mPluginLayout.pluginMaps.containsKey(mapId)) { pluginMap = mPluginLayout.pluginMaps.get(mapId); pluginMap.mapView.onPause(); } } } }); } @Override public void onResume(boolean multitasking) { super.onResume(multitasking); if (mPluginLayout != null) { mPluginLayout.isSuspended = false; if (mPluginLayout.pluginMaps.size() > 0) { this.activity.runOnUiThread(new Runnable() { @Override public void run() { CURRENT_URL = webView.getUrl(); webView.loadUrl("javascript:if(window.cordova){cordova.fireDocumentEvent('plugin_touch', {});}"); } }); } } cordova.getThreadPool().submit(new Runnable() { @Override public void run() { Set<String> mapIds = mPluginLayout.pluginMaps.keySet(); PluginMap pluginMap; // prevent the ConcurrentModificationException error. String[] mapIdArray= mapIds.toArray(new String[mapIds.size()]); for (String mapId : mapIdArray) { if (mPluginLayout.pluginMaps.containsKey(mapId)) { pluginMap = mPluginLayout.pluginMaps.get(mapId); pluginMap.mapView.onResume(); } } } }); } @Override public void onDestroy() { super.onDestroy(); cordova.getThreadPool().submit(new Runnable() { @Override public void run() { Set<String> mapIds = mPluginLayout.pluginMaps.keySet(); PluginMap pluginMap; // prevent the ConcurrentModificationException error. String[] mapIdArray= mapIds.toArray(new String[mapIds.size()]); for (String mapId : mapIdArray) { if (mPluginLayout.pluginMaps.containsKey(mapId)) { pluginMap = mPluginLayout.pluginMaps.get(mapId); pluginMap.mapView.onDestroy(); } } } }); } protected void sendNoResult(CallbackContext callbackContext) { PluginResult pluginResult = new PluginResult(PluginResult.Status.NO_RESULT); pluginResult.setKeepCallback(true); callbackContext.sendPluginResult(pluginResult); } /** * Called by the system when the device configuration changes while your activity is running. 
* * @param newConfig The new device configuration */ public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); Handler handler = new Handler(); handler.postDelayed(new Runnable() { @Override public void run() { PluginMap pluginMap; Collection<PluginEntry> collection = pluginManager.getPluginEntries(); for (PluginEntry entry: collection) { if ("plugin.google.maps.PluginMap".equals(entry.pluginClass) && entry.plugin != null) { pluginMap = (PluginMap)entry.plugin; if (pluginMap.map != null) { // Trigger the CAMERA_MOVE_END mandatory pluginMap.onCameraIdle(); } } } } }, 500); /* // Checks the orientation of the screen if (newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) { Toast.makeText(activity, "landscape", Toast.LENGTH_SHORT).show(); } else if (newConfig.orientation == Configuration.ORIENTATION_PORTRAIT){ Toast.makeText(activity, "portrait", Toast.LENGTH_SHORT).show(); } */ } }
/* * The MIT License * * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package hudson.model; import hudson.XmlFile; import hudson.BulkChange; import hudson.Util; import static hudson.Util.singleQuote; import hudson.scm.CVSSCM; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.Stapler; import org.kohsuke.stapler.StaplerResponse; import org.kohsuke.stapler.Ancestor; import org.kohsuke.stapler.HttpResponse; import org.springframework.util.StringUtils; import org.jvnet.tiger_types.Types; import org.apache.commons.io.IOUtils; import javax.servlet.http.HttpServletRequest; import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND; import javax.servlet.ServletException; import javax.servlet.RequestDispatcher; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.HashMap; import java.util.Locale; import java.util.Arrays; import java.util.Collections; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Level; import java.util.logging.Logger; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.lang.reflect.Type; import java.lang.reflect.Field; import java.lang.reflect.ParameterizedType; import java.beans.Introspector; /** * Metadata about a configurable instance. * * <p> * {@link Descriptor} is an object that has metadata about a {@link Describable} * object, and also serves as a factory (in a way this relationship is similar * to {@link Object}/{@link Class} relationship. * * A {@link Descriptor}/{@link Describable} * combination is used throughout in Hudson to implement a * configuration/extensibility mechanism. * * <p> * For example, Take the CVS support as an example, which is implemented * in {@link CVSSCM} class. 
Whenever a job is configured with CVS, a new
 * {@link CVSSCM} instance is created with the per-job configuration
 * information. This instance gets serialized to XML, and this instance
 * will be called to perform CVS operations for that job. This is the job
 * of {@link Describable} &mdash; each instance represents a specific
 * configuration of the CVS support (branch, CVSROOT, etc.)
 *
 * <p>
 * For Hudson to create such configured {@link CVSSCM} instance, Hudson
 * needs another object that captures the metadata of {@link CVSSCM},
 * and that is what a {@link Descriptor} is for. {@link CVSSCM} class
 * has a singleton descriptor, and this descriptor helps render
 * the configuration form, remember system-wide configuration (such as
 * where <tt>cvs.exe</tt> is), and works as a factory.
 *
 * <p>
 * {@link Descriptor} also usually have its associated views.
 *
 *
 * <h2>Persistence</h2>
 * <p>
 * {@link Descriptor} can persist data just by storing them in fields.
 * However, it is the responsibility of the derived type to properly
 * invoke {@link #save()} and {@link #load()}.
 *
 * <h2>Reflection Enhancement</h2>
 * {@link Descriptor} defines addition to the standard Java reflection
 * and provides reflective information about its corresponding {@link Describable}.
 * These are primarily used by tag libraries to
 * keep the Jelly scripts concise.
 *
 * @author Kohsuke Kawaguchi
 * @see Describable
 */
public abstract class Descriptor<T extends Describable<T>> implements Saveable {
    /**
     * Up to Hudson 1.61 this was used as the primary persistence mechanism.
     * Going forward Hudson simply persists all the non-transient fields
     * of {@link Descriptor}, just like others, so this is pointless.
     *
     * @deprecated
     */
    @Deprecated
    private transient Map<String,Object> properties;

    /**
     * The class being described by this descriptor.
     */
    public transient final Class<? extends T> clazz;

    // Cache of "doCheckXyz" form-validation methods, keyed by field name.
    // The NONE sentinel (declared elsewhere in this file) marks "looked up,
    // not found" so absent methods are not re-scanned on every request.
    private transient final Map<String,Method> checkMethods = new ConcurrentHashMap<String,Method>();

    /**
     * Lazily computed list of properties on {@link #clazz}.
     */
    private transient volatile Map<String, PropertyType> propertyTypes;

    /**
     * Represents a readable property on {@link Describable}.
     */
    public static final class PropertyType {
        public final Class clazz;
        public final Type type;
        // Lazily computed by getItemType(); volatile for safe publication.
        private volatile Class itemType;

        PropertyType(Class clazz, Type type) {
            this.clazz = clazz;
            this.type = type;
        }

        PropertyType(Field f) {
            this(f.getType(),f.getGenericType());
        }

        PropertyType(Method getter) {
            this(getter.getReturnType(),getter.getGenericReturnType());
        }

        public Enum[] getEnumConstants() {
            return (Enum[])clazz.getEnumConstants();
        }

        /**
         * If the property is a collection/array type, what is an item type?
         * Returns null when the property is neither an array nor a collection.
         */
        public Class getItemType() {
            if(itemType==null)
                itemType = computeItemType();
            return itemType;
        }

        private Class computeItemType() {
            if(clazz.isArray()) {
                return clazz.getComponentType();
            }
            if(Collection.class.isAssignableFrom(clazz)) {
                Type col = Types.getBaseClass(type, Collection.class);

                if (col instanceof ParameterizedType)
                    return Types.erasure(Types.getTypeArgument(col,0));
                else
                    // Raw Collection: the element type is unknown.
                    return Object.class;
            }
            return null;
        }

        /**
         * Returns {@link Descriptor} whose 'clazz' is the same as {@link #getItemType() the item type}.
         */
        public Descriptor getItemTypeDescriptor() {
            Class itemType = getItemType();
            for( Descriptor d : Hudson.getInstance().getExtensionList(Descriptor.class) )
                if(d.clazz==itemType)
                    return d;
            return null;

        }
    }

    protected Descriptor(Class<? extends T> clazz) {
        this.clazz = clazz;
        // doing this turns out to be very error prone,
        // as field initializers in derived types will override values.
        // load();
    }

    /**
     * Infers the type of the corresponding {@link Describable} from the outer class.
     * This version works when you follow the common convention, where a descriptor
     * is written as the static nested class of the describable class.
     *
     * @since 1.278
     */
    protected Descriptor() {
        this.clazz = (Class<T>)getClass().getEnclosingClass();
        if(clazz==null)
            throw new AssertionError(getClass()+" doesn't have an outer class. Use the constructor that takes the Class object explicitly.");

        // detect an type error
        Type bt = Types.getBaseClass(getClass(), Descriptor.class);
        if (bt instanceof ParameterizedType) {
            ParameterizedType pt = (ParameterizedType) bt;
            // this 't' is the closest approximation of T of Descriptor<T>.
            Class t = Types.erasure(pt.getActualTypeArguments()[0]);
            if(!t.isAssignableFrom(clazz))
                throw new AssertionError("Outer class "+clazz+" of "+getClass()+" is not assignable to "+t+". Perhaps wrong outer class?");
        }

        // detect a type error. this Descriptor is supposed to be returned from getDescriptor(), so make sure its type match up.
        // this prevents a bug like http://www.nabble.com/Creating-a-new-parameter-Type-%3A-Masked-Parameter-td24786554.html
        try {
            Method getd = clazz.getMethod("getDescriptor");
            if(!getd.getReturnType().isAssignableFrom(getClass())) {
                throw new AssertionError(getClass()+" must be assignable to "+getd.getReturnType());
            }
        } catch (NoSuchMethodException e) {
            throw new AssertionError(getClass()+" is missing getDescriptor method.");
        }
    }

    /**
     * Human readable name of this kind of configurable object.
     */
    public abstract String getDisplayName();

    /**
     * If the field "xyz" of a {@link Describable} has the corresponding "doCheckXyz" method,
     * return the form-field validation string. Otherwise null.
     * <p>
     * This method is used to hook up the form validation method to the
     * corresponding HTML form field (the returned fragment is a JavaScript
     * expression that builds the AJAX check URL).
     */
    public String getCheckUrl(String fieldName) {
        String capitalizedFieldName = StringUtils.capitalize(fieldName);

        Method method = checkMethods.get(fieldName);
        if(method==null) {
            // Not cached yet: scan this descriptor's public methods once and
            // remember the result (NONE when no doCheckXyz method exists).
            method = NONE;
            String methodName = "doCheck"+ capitalizedFieldName;
            for( Method m : getClass().getMethods() ) {
                if(m.getName().equals(methodName)) {
                    method = m;
                    break;
                }
            }
            checkMethods.put(fieldName,method);
        }

        if(method==NONE)
            return null;

        StaplerRequest req = Stapler.getCurrentRequest();
        Ancestor a = req.findAncestor(DescriptorByNameOwner.class);
        // a is always non-null because we already have Hudson as the sentinel
        return singleQuote(a.getUrl()+"/descriptorByName/"+clazz.getName()+"/check"+capitalizedFieldName+"?value=")+"+toValue(this)";
    }

    /**
     * Obtains the property type of the given field of {@link #clazz}.
     * The full property map is computed lazily on first call (public fields
     * plus JavaBeans-style getters) and cached in {@link #propertyTypes}.
     */
    public PropertyType getPropertyType(String field) {
        if(propertyTypes ==null) {
            // Benign race: concurrent callers may each build the map, but the
            // result is identical and assignment to the volatile is atomic.
            Map<String, PropertyType> r = new HashMap<String, PropertyType>();
            for (Field f : clazz.getFields())
                r.put(f.getName(),new PropertyType(f));

            for (Method m : clazz.getMethods())
                if(m.getName().startsWith("get"))
                    r.put(Introspector.decapitalize(m.getName().substring(3)),new PropertyType(m));

            propertyTypes = r;
        }
        return propertyTypes.get(field);
    }

    /**
     * Gets the class name nicely escaped to be usable as a key in the structured form submission.
     */
    public final String getJsonSafeClassName() {
        return clazz.getName().replace('.','-');
    }

    /**
     * @deprecated
     *      Implement {@link #newInstance(StaplerRequest, JSONObject)} method instead.
     *      Deprecated as of 1.145.
     */
    public T newInstance(StaplerRequest req) throws FormException {
        throw new UnsupportedOperationException(getClass()+" should implement newInstance(StaplerRequest,JSONObject)");
    }

    /**
     * Creates a configured instance from the submitted form.
     *
     * <p>
     * Hudson only invokes this method when the user wants an instance of <tt>T</tt>.
     * So there's no need to check that in the implementation.
     *
     * <p>
     * Starting 1.206, the default implementation of this method does the following:
     * <pre>
     * req.bindJSON(clazz,formData);
     * </pre>
     * <p>
     * ... which performs the databinding on the constructor of {@link #clazz}.
     *
     * @param req
     *      Always non-null. This object includes represents the entire submisison.
     * @param formData
     *      The JSON object that captures the configuration data for this {@link Descriptor}.
     *      See http://hudson.gotdns.com/wiki/display/HUDSON/Structured+Form+Submission
     *
     * @throws FormException
     *      Signals a problem in the submitted form.
     * @since 1.145
     */
    public T newInstance(StaplerRequest req, JSONObject formData) throws FormException {
        try {
            Method m = getClass().getMethod("newInstance", StaplerRequest.class);

            if(!Modifier.isAbstract(m.getDeclaringClass().getModifiers())) {
                // this class overrides newInstance(StaplerRequest).
                // maintain the backward compatible behavior
                return newInstance(req);
            } else {
                // new behavior as of 1.206
                return req.bindJSON(clazz,formData);
            }
        } catch (NoSuchMethodException e) {
            throw new AssertionError(e); // impossible
        }
    }

    /**
     * Returns the resource path to the help screen HTML, if any.
     *
     * <p>
     * Starting 1.282, this method uses "convention over configuration" &mdash; you should
     * just put the "help.html" (and its localized versions, if any) in the same directory
     * you put your Jelly view files, and this method will automatically does the right thing.
     *
     * <p>
     * This value is relative to the context root of Hudson, so normally
     * the values are something like <tt>"/plugin/emma/help.html"</tt> to
     * refer to static resource files in a plugin, or <tt>"/publisher/EmmaPublisher/abc"</tt>
     * to refer to Jelly script <tt>abc.jelly</tt> or a method <tt>EmmaPublisher.doAbc()</tt>.
     *
     * @return
     *      null to indicate that there's no help.
     */
    public String getHelpFile() {
        return getHelpFile(null);
    }

    /**
     * Returns the path to the help screen HTML for the given field.
     *
     * <p>
     * The help files are assumed to be at "help/FIELDNAME.html" with possible
     * locale variations.
     */
    public String getHelpFile(final String fieldName) {
        // Walk up the class hierarchy looking for a help view or resource.
        // Note: 'page' deliberately uses the leaf class (clazz), not the loop
        // variable c — the URL always points at this descriptor's own help path.
        for(Class c=clazz; c!=null; c=c.getSuperclass()) {
            String page = "/descriptor/" + clazz.getName() + "/help";
            String suffix;
            if(fieldName==null) {
                suffix="";
            } else {
                page += '/'+fieldName;
                suffix='-'+fieldName;
            }

            try {
                if(Stapler.getCurrentRequest().getView(c,"help"+suffix)!=null)
                    return page;
            } catch (IOException e) {
                throw new Error(e);
            }

            // Only the existence of the resource matters here, so the stream
            // is closed immediately after the lookup.
            InputStream in = getHelpStream(c,suffix);
            IOUtils.closeQuietly(in);
            if(in!=null)
                return page;
        }
        return null;
    }

    /**
     * Checks if the given object is created from this {@link Descriptor}.
     */
    public final boolean isInstance( T instance ) {
        return clazz.isInstance(instance);
    }

    /**
     * @deprecated
     *      As of 1.64. Use {@link #configure(StaplerRequest)}.
     */
    @Deprecated
    public boolean configure( HttpServletRequest req ) throws FormException {
        return true;
    }

    /**
     * @deprecated
     *      As of 1.239, use {@link #configure(StaplerRequest, JSONObject)}.
     */
    public boolean configure( StaplerRequest req ) throws FormException {
        // compatibility
        return configure( (HttpServletRequest) req );
    }

    /**
     * Invoked when the global configuration page is submitted.
     *
     * Can be overriden to store descriptor-specific information.
     *
     * @param json
     *      The JSON object that captures the configuration data for this {@link Descriptor}.
     *      See http://hudson.gotdns.com/wiki/display/HUDSON/Structured+Form+Submission
     * @return false
     *      to keep the client in the same config page.
*/ public boolean configure( StaplerRequest req, JSONObject json ) throws FormException { // compatibility return configure(req); } public String getConfigPage() { return getViewPage(clazz, "config.jelly"); } public String getGlobalConfigPage() { return getViewPage(clazz, "global.jelly"); } protected final String getViewPage(Class<?> clazz, String pageName) { while(clazz!=Object.class) { String name = clazz.getName().replace('.', '/').replace('$', '/') + "/" + pageName; if(clazz.getClassLoader().getResource(name)!=null) return '/'+name; clazz = clazz.getSuperclass(); } // We didn't find the configuration page. // Either this is non-fatal, in which case it doesn't matter what string we return so long as // it doesn't exist. // Or this error is fatal, in which case we want the developer to see what page he's missing. // so we put the page name. return pageName; } /** * Saves the configuration info to the disk. */ public synchronized void save() { if(BulkChange.contains(this)) return; try { getConfigFile().write(this); } catch (IOException e) { LOGGER.log(Level.WARNING, "Failed to save "+getConfigFile(),e); } } /** * Loads the data from the disk into this object. * * <p> * The constructor of the derived class must call this method. * (If we do that in the base class, the derived class won't * get a chance to set default values.) */ public synchronized void load() { XmlFile file = getConfigFile(); if(!file.exists()) return; try { Object o = file.unmarshal(this); if(o instanceof Map) { // legacy format @SuppressWarnings("unchecked") Map<String,Object> _o = (Map) o; convert(_o); save(); // convert to the new format } } catch (IOException e) { LOGGER.log(Level.WARNING, "Failed to load "+file, e); } } /** * {@link Descriptor}s that has existed &lt;= 1.61 needs to * be able to read in the old configuration in a property bag * and reflect that into the new layout. 
*/ protected void convert(Map<String, Object> oldPropertyBag) { } private XmlFile getConfigFile() { return new XmlFile(new File(Hudson.getInstance().getRootDir(),clazz.getName()+".xml")); } /** * Serves <tt>help.html</tt> from the resource of {@link #clazz}. */ public void doHelp(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { String path = req.getRestOfPath(); if(path.contains("..")) throw new ServletException("Illegal path: "+path); path = path.replace('/','-'); for (Class c=clazz; c!=null; c=c.getSuperclass()) { RequestDispatcher rd = Stapler.getCurrentRequest().getView(c, "help"+path); if(rd!=null) {// Jelly-generated help page rd.forward(req,rsp); return; } InputStream in = getHelpStream(c,path); if(in!=null) { // TODO: generalize macro expansion and perhaps even support JEXL rsp.setContentType("text/html;charset=UTF-8"); String literal = IOUtils.toString(in,"UTF-8"); rsp.getWriter().println(Util.replaceMacro(literal, Collections.singletonMap("rootURL",req.getContextPath()))); in.close(); return; } } rsp.sendError(SC_NOT_FOUND); } private InputStream getHelpStream(Class c, String suffix) { Locale locale = Stapler.getCurrentRequest().getLocale(); String base = c.getName().replace('.', '/') + "/help"+suffix; ClassLoader cl = c.getClassLoader(); if(cl==null) return null; InputStream in; in = cl.getResourceAsStream(base + '_' + locale.getLanguage() + '_' + locale.getCountry() + '_' + locale.getVariant() + ".html"); if(in!=null) return in; in = cl.getResourceAsStream(base + '_' + locale.getLanguage() + '_' + locale.getCountry() + ".html"); if(in!=null) return in; in = cl.getResourceAsStream(base + '_' + locale.getLanguage() + ".html"); if(in!=null) return in; // default return cl.getResourceAsStream(base+".html"); } // // static methods // // to work around warning when creating a generic array type public static <T> T[] toArray( T... values ) { return values; } public static <T> List<T> toList( T... 
values ) { return new ArrayList<T>(Arrays.asList(values)); } public static <T extends Describable<T>> Map<Descriptor<T>,T> toMap(Iterable<T> describables) { Map<Descriptor<T>,T> m = new LinkedHashMap<Descriptor<T>,T>(); for (T d : describables) { m.put(d.getDescriptor(),d); } return m; } /** * Used to build {@link Describable} instance list from &lt;f:hetero-list> tag. * * @param req * Request that represents the form submission. * @param formData * Structured form data that represents the contains data for the list of describables. * @param key * The JSON property name for 'formData' that represents the data for the list of describables. * @param descriptors * List of descriptors to create instances from. * @return * Can be empty but never null. */ public static <T extends Describable<T>> List<T> newInstancesFromHeteroList(StaplerRequest req, JSONObject formData, String key, Collection<? extends Descriptor<T>> descriptors) throws FormException { List<T> items = new ArrayList<T>(); if(!formData.has(key)) return items; JSONArray a = JSONArray.fromObject(formData.get(key)); for (Object o : a) { JSONObject jo = (JSONObject)o; String kind = jo.getString("kind"); items.add(find(descriptors,kind).newInstance(req,jo)); } return items; } /** * Finds a descriptor from a collection by its class name. */ public static <T extends Descriptor> T find(Collection<? 
extends T> list, String className) { for (T d : list) { if(d.getClass().getName().equals(className)) return d; } return null; } public static Descriptor find(String className) { return find(Hudson.getInstance().getExtensionList(Descriptor.class),className); } public static final class FormException extends Exception implements HttpResponse { private final String formField; public FormException(String message, String formField) { super(message); this.formField = formField; } public FormException(String message, Throwable cause, String formField) { super(message, cause); this.formField = formField; } public FormException(Throwable cause, String formField) { super(cause); this.formField = formField; } /** * Which form field contained an error? */ public String getFormField() { return formField; } public void generateResponse(StaplerRequest req, StaplerResponse rsp, Object node) throws IOException, ServletException { // for now, we can't really use the field name that caused the problem. new Failure(getMessage()).generateResponse(req,rsp,node); } } private static final Logger LOGGER = Logger.getLogger(Descriptor.class.getName()); /** * Used in {@link #checkMethods} to indicate that there's no check method. */ private static final Method NONE; static { try { NONE = Object.class.getMethod("toString"); } catch (NoSuchMethodException e) { throw new AssertionError(); } } }
/*******************************************************************************
 * Copyright 2014 United States Government as represented by the
 * Administrator of the National Aeronautics and Space Administration.
 * All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package gov.nasa.arc.spife.ui.timeline;

import java.util.Iterator;

import org.eclipse.gef.EditPart;
import org.eclipse.gef.Request;
import org.eclipse.gef.commands.Command;
import org.eclipse.gef.commands.CompoundCommand;
import org.eclipse.gef.commands.UnexecutableCommand;
import org.eclipse.gef.requests.ChangeBoundsRequest;
import org.eclipse.gef.tools.DragEditPartsTracker;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.KeyEvent;

/**
 * Drag tracker for timeline edit parts that layers two modifier-key-driven drop
 * modes on top of the standard GEF move/add behavior:
 * <ul>
 *   <li>ALT held during a drag: "change value via drop" mode
 *       ({@code REQ_CHANGE_VALUE_VIA_DROP});</li>
 *   <li>ALT+SHIFT held during a drag: "add value via drop" mode
 *       ({@code REQ_ADD_VALUE_VIA_DROP}).</li>
 * </ul>
 * Cloning is explicitly disabled for timeline parts (see {@link #isCloneActive()}).
 */
public class TimelineViewerEditPartsTracker extends DragEditPartsTracker implements TimelineConstants {

	/** Modifier combination that switches the drag into "change value" mode. */
	private static final int CHANGE_VALUE_KEY_MODIFIER = SWT.ALT;
	/** Modifier combination that switches the drag into "add value" mode. */
	private static final int ADD_VALUE_KEY_MODIFIER = SWT.ALT | SWT.SHIFT;

	// The two modes are mutually exclusive; every toggle below clears the other flag.
	private boolean changeValueActive = false;
	private boolean addValueActive = false;

	public TimelineViewerEditPartsTracker(EditPart sourceEditPart) {
		super(sourceEditPart);
	}

	/**
	 * Activates the matching drop mode when a modifier key is pressed mid-drag and
	 * refreshes feedback via {@link #handleDragInProgress()}. Returns true when the
	 * event was consumed here, otherwise defers to the superclass.
	 */
	@Override
	protected boolean handleKeyDown(KeyEvent e) {
		int key = e.keyCode;
		// Check the stricter ALT+SHIFT combination first; ALT alone would also
		// match CHANGE_VALUE_KEY_MODIFIER.
		if (testModifierFlags(key, ADD_VALUE_KEY_MODIFIER)) {
			setAddValueActive(true);
			setChangeValueActive(false);
			handleDragInProgress();
			return true;
		} else if (testModifierFlags(key, CHANGE_VALUE_KEY_MODIFIER)) {
			setChangeValueActive(true);
			setAddValueActive(false);
			handleDragInProgress();
			return true;
		}
		// else...
		return super.handleKeyDown(e);
	}

	/**
	 * Deactivates both drop modes when a relevant modifier key is released and
	 * refreshes feedback. Mirrors {@link #handleKeyDown(KeyEvent)}.
	 */
	@Override
	protected boolean handleKeyUp(KeyEvent e) {
		int key = e.keyCode;
		if (testModifierFlags(key, ADD_VALUE_KEY_MODIFIER)) {
			setAddValueActive(false);
			setChangeValueActive(false);
			handleDragInProgress();
			return true;
		} else if (testModifierFlags(key, CHANGE_VALUE_KEY_MODIFIER)) {
			setChangeValueActive(false);
			setAddValueActive(false);
			handleDragInProgress();
			return true;
		}
		// else...
		return super.handleKeyUp(e);
	}

	/** Guards against direct edits once the part has been detached from its viewer. */
	@Override
	protected void performDirectEdit() {
		if (getSourceEditPart().getViewer() != null) {
			super.performDirectEdit();
		}
	}

	/** Cloning is never allowed for timeline parts. */
	@Override
	protected boolean isCloneActive() {
		return false;
	}

	/**
	 * Intentionally a no-op: the superclass toggles clone mode from keyboard
	 * state, but clone is permanently disabled here (see {@link #isCloneActive()}).
	 */
	@Override
	protected void setCloneActive(boolean cloneActive) {
		// intentionally empty — clone mode is disabled for this tracker
	}

	/**
	 * When a drag starts (or becomes accessible), picks up modifier keys that were
	 * already held down so the correct mode is active from the first feedback update.
	 */
	@Override
	protected void setState(int state) {
		super.setState(state);
		if (isInState(STATE_ACCESSIBLE_DRAG | STATE_DRAG_IN_PROGRESS | STATE_ACCESSIBLE_DRAG_IN_PROGRESS)) {
			Input input = getCurrentInput();
			if (input.isAltKeyDown() && input.isShiftKeyDown()) {
				setAddValueActive(true);
				handleDragInProgress();
			} else if (input.isAltKeyDown()) {
				setChangeValueActive(true);
				handleDragInProgress();
			}
		}
	}

	public boolean isAddValueActive() {
		return addValueActive;
	}

	public void setAddValueActive(boolean addValueActive) {
		this.addValueActive = addValueActive;
	}

	public boolean isChangeValueActive() {
		return changeValueActive;
	}

	public void setChangeValueActive(boolean changeValueActive) {
		this.changeValueActive = changeValueActive;
	}

	/**
	 * Creates the bounds-change request for the current mode.
	 *
	 * <p>FIX: the add-value branch previously created a request typed
	 * {@code REQ_CHANGE_VALUE_VIA_DROP} — a copy-paste error; both
	 * {@link #getCommand()} and {@link #getCommandName()} use
	 * {@code REQ_ADD_VALUE_VIA_DROP} for this mode.
	 */
	@Override
	protected Request createTargetRequest() {
		if (isChangeValueActive())
			return new ChangeBoundsRequest(REQ_CHANGE_VALUE_VIA_DROP);
		else if (isAddValueActive())
			return new ChangeBoundsRequest(REQ_ADD_VALUE_VIA_DROP);
		else
			return new ChangeBoundsRequest(REQ_MOVE);
	}

	/**
	 * Builds the compound command for the drag. For plain move/orphan drags each
	 * part in the operation set contributes a command; in the value-drop modes
	 * only the target edit part's command is used. An unreachable target yields
	 * {@link UnexecutableCommand#INSTANCE} so the drop is visibly rejected.
	 */
	@Override
	protected Command getCommand() {
		CompoundCommand command = new CompoundCommand();
		command.setDebugLabel("Drag Object Tracker");//$NON-NLS-1$
		Iterator iter = getOperationSet().iterator();
		Request request = getTargetRequest();

		// Re-stamp the request type from the current mode flags.
		if (isCloneActive())
			request.setType(REQ_CLONE);
		else if (isAddValueActive())
			request.setType(REQ_ADD_VALUE_VIA_DROP);
		else if (isChangeValueActive())
			request.setType(REQ_CHANGE_VALUE_VIA_DROP);
		else if (isMove())
			request.setType(REQ_MOVE);
		else
			request.setType(REQ_ORPHAN);

		// Per-part commands only apply outside the value-drop modes.
		if (!isChangeValueActive() && !isAddValueActive()) {
			while (iter.hasNext()) {
				EditPart editPart = (EditPart) iter.next();
				command.add(editPart.getCommand(request));
			}
		}

		if (!isMove() || isChangeValueActive() || isAddValueActive()) {
			if (!isChangeValueActive() && !isAddValueActive())
				request.setType(REQ_ADD);
			if (getTargetEditPart() == null)
				command.add(UnexecutableCommand.INSTANCE);
			else
				command.add(getTargetEditPart().getCommand(getTargetRequest()));
		}

		return command.unwrap();
	}

	/** Returns the request-type name for the current mode, used by GEF for lookups. */
	@Override
	protected String getCommandName() {
		if (isAddValueActive())
			return REQ_ADD_VALUE_VIA_DROP;
		else if (isChangeValueActive()) {
			return REQ_CHANGE_VALUE_VIA_DROP;
		} else if (isMove()) {
			return REQ_MOVE;
		} else {
			return REQ_ADD;
		}
	}

	/**
	 * True when {@code key} contains every bit of {@code modifierFlags}; used to
	 * match SWT key codes against the mode modifier combinations.
	 */
	private boolean testModifierFlags(int key, int modifierFlags) {
		return (modifierFlags & key) == modifierFlags;
	}
}
package io.ray.test;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.ray.runtime.metric.Count;
import io.ray.runtime.metric.Gauge;
import io.ray.runtime.metric.Histogram;
import io.ray.runtime.metric.MetricConfig;
import io.ray.runtime.metric.Metrics;
import io.ray.runtime.metric.Sum;
import io.ray.runtime.metric.TagKey;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.Test;

/**
 * Tests for the Ray Java metrics API: Gauge, Count, Sum and Histogram, exercised
 * both by direct construction and through the {@link Metrics} registration
 * builders, with and without an explicit {@link MetricConfig}.
 */
@Test(groups = {"cluster"})
public class MetricTest extends BaseTest {

  // Approximate double equality with a fixed 1e-5 tolerance.
  boolean doubleEqual(double value, double other) {
    return value <= other + 1e-5 && value >= other - 1e-5;
  }

  /**
   * Builds a {@link MetricConfig} from the given knobs and initializes the global
   * {@link Metrics} registry with it; the config is returned for inspection.
   */
  private MetricConfig initRayMetrics(
      long timeIntervalMs, int threadPoolSize, long shutdownWaitTimeMs) {
    MetricConfig config =
        MetricConfig.builder()
            .timeIntervalMs(timeIntervalMs)
            .threadPoolSize(threadPoolSize)
            .shutdownWaitTimeMs(shutdownWaitTimeMs)
            .create();
    Metrics.init(config);
    return config;
  }

  // Registers a gauge named "metric_gauge" with a single tag via the registry builder.
  private Gauge registerGauge() {
    return Metrics.gauge()
        .name("metric_gauge")
        .description("gauge")
        .unit("")
        .tags(ImmutableMap.of("tag1", "value1"))
        .register();
  }

  // Registers a counter named "metric_count" with two tags via the registry builder.
  private Count registerCount() {
    return Metrics.count()
        .name("metric_count")
        .description("counter")
        .unit("1pc")
        .tags(ImmutableMap.of("tag1", "value1", "count_tag", "default"))
        .register();
  }

  // Registers a sum named "metric_sum" with two tags via the registry builder.
  private Sum registerSum() {
    return Metrics.sum()
        .name("metric_sum")
        .description("sum")
        .unit("1pc")
        .tags(ImmutableMap.of("tag1", "value1", "sum_tag", "default"))
        .register();
  }

  // Registers a histogram with ascending bucket boundaries via the registry builder.
  private Histogram registerHistogram() {
    return Metrics.histogram()
        .name("metric_histogram")
        .description("histogram")
        .unit("1pc")
        .boundaries(ImmutableList.of(10.0, 15.0, 20.0))
        .tags(ImmutableMap.of("tag1", "value1", "histogram_tag", "default"))
        .register();
  }

  // Shut the registry down after every test so Metrics.init state never leaks
  // between test methods.
  @AfterMethod
  public void maybeShutdownMetrics() {
    Metrics.shutdown();
  }

  /** A directly-constructed gauge holds the last updated value after record(). */
  public void testAddGauge() {
    Map<TagKey, String> tags = new HashMap<>();
    tags.put(new TagKey("tag1"), "value1");
    Gauge gauge = new Gauge("metric1", "", "", tags);
    gauge.update(2);
    gauge.record();
    Assert.assertTrue(doubleEqual(gauge.getValue(), 2.0));
    gauge.unregister();
  }

  /** Same as testAddGauge but using the String-keyed tag-map constructor overload. */
  public void testAddGaugeWithTagMap() {
    Map<String, String> tags = new HashMap<>();
    tags.put("tag1", "value1");
    Gauge gauge = new Gauge("metric1", "", tags);
    gauge.update(2);
    gauge.record();
    Assert.assertTrue(doubleEqual(gauge.getValue(), 2.0));
    gauge.unregister();
  }

  /** A counter accumulates increments across record(). */
  public void testAddCount() {
    Map<TagKey, String> tags = new HashMap<>();
    tags.put(new TagKey("tag1"), "value1");
    tags.put(new TagKey("count_tag"), "default");
    Count count = new Count("metric_count", "counter", "1pc", tags);
    count.inc(10.0);
    count.inc(20.0);
    count.record();
    Assert.assertTrue(doubleEqual(count.getCount(), 30.0));
  }

  /** A sum accumulates updates across record(). */
  public void testAddSum() {
    Map<TagKey, String> tags = new HashMap<>();
    tags.put(new TagKey("tag1"), "value1");
    tags.put(new TagKey("sum_tag"), "default");
    // NOTE(review): unit is "sum" here but "1pc" in registerSum() — confirm which
    // is intended; the assertions below do not depend on it.
    Sum sum = new Sum("metric_sum", "sum", "sum", tags);
    sum.update(10.0);
    sum.update(20.0);
    sum.record();
    Assert.assertTrue(doubleEqual(sum.getSum(), 30.0));
  }

  /**
   * A histogram tracks the latest value, keeps a bounded window of recent updates,
   * and clears that window on record() (the final size-0 assertion shows this).
   */
  public void testAddHistogram() {
    Map<TagKey, String> tags = new HashMap<>();
    tags.put(new TagKey("tag1"), "value1");
    tags.put(new TagKey("histogram_tag"), "default");
    List<Double> boundaries = new ArrayList<>();
    boundaries.add(10.0);
    boundaries.add(15.0);
    // NOTE(review): 12.0 breaks the ascending order used everywhere else
    // (registerHistogram uses 10, 15, 20) — possibly a typo for 20.0; verify
    // against the Histogram boundary contract. Boundaries are not asserted on.
    boundaries.add(12.0);
    Histogram histogram = new Histogram("metric_histogram", "histogram", "1pc", boundaries, tags);
    for (int i = 1; i <= 200; ++i) {
      histogram.update(i * 1.0d);
    }
    Assert.assertTrue(doubleEqual(200.0d, histogram.getValue()));
    // After 200 updates the window holds the most recent HISTOGRAM_WINDOW_SIZE
    // values, i.e. 101, 102, ... at successive indices.
    List<Double> window = histogram.getHistogramWindow();
    for (int i = 0; i < Histogram.HISTOGRAM_WINDOW_SIZE; ++i) {
      Assert.assertTrue(doubleEqual(i + 101.0d, window.get(i)));
    }
    histogram.record();
    Assert.assertTrue(doubleEqual(200.0d, histogram.getValue()));
    Assert.assertEquals(window.size(), 0);
  }

  /** A registry-registered gauge reflects each update immediately. */
  public void testRegisterGauge() throws InterruptedException {
    Gauge gauge = registerGauge();
    gauge.update(2.0);
    Assert.assertTrue(doubleEqual(gauge.getValue(), 2.0));
    gauge.update(5.0);
    Assert.assertTrue(doubleEqual(gauge.getValue(), 5.0));
  }

  /** A registry-registered counter keeps accumulating across increments. */
  public void testRegisterCount() throws InterruptedException {
    Count count = registerCount();
    count.inc(10.0);
    count.inc(20.0);
    Assert.assertTrue(doubleEqual(count.getCount(), 30.0));
    count.inc(1.0);
    count.inc(2.0);
    Assert.assertTrue(doubleEqual(count.getCount(), 33.0));
  }

  /** A registry-registered sum keeps accumulating across updates. */
  public void testRegisterSum() throws InterruptedException {
    Sum sum = registerSum();
    sum.update(10.0);
    sum.update(20.0);
    Assert.assertTrue(doubleEqual(sum.getSum(), 30.0));
    sum.update(1.0);
    sum.update(2.0);
    Assert.assertTrue(doubleEqual(sum.getSum(), 33.0));
  }

  /** A registry-registered histogram behaves like the directly constructed one. */
  public void testRegisterHistogram() throws InterruptedException {
    Histogram histogram = registerHistogram();
    for (int i = 1; i <= 200; ++i) {
      histogram.update(i * 1.0d);
    }
    Assert.assertTrue(doubleEqual(histogram.getValue(), 200.0d));
    List<Double> window = histogram.getHistogramWindow();
    for (int i = 0; i < Histogram.HISTOGRAM_WINDOW_SIZE; ++i) {
      Assert.assertTrue(doubleEqual(i + 101.0d, window.get(i)));
    }
    Assert.assertTrue(doubleEqual(histogram.getValue(), 200.0d));
  }

  /** Gauge behavior is unchanged when Metrics is initialized with an explicit config. */
  public void testRegisterGaugeWithConfig() throws InterruptedException {
    initRayMetrics(2000L, 1, 1000L);
    Gauge gauge = registerGauge();
    gauge.update(2.0);
    Assert.assertTrue(doubleEqual(gauge.getValue(), 2.0));
    gauge.update(5.0);
    Assert.assertTrue(doubleEqual(gauge.getValue(), 5.0));
  }

  /** Counter behavior is unchanged when Metrics is initialized with an explicit config. */
  public void testRegisterCountWithConfig() throws InterruptedException {
    initRayMetrics(2000L, 1, 1000L);
    Count count = registerCount();
    count.inc(10.0);
    count.inc(20.0);
    Assert.assertTrue(doubleEqual(count.getCount(), 30.0));
    count.inc(1.0);
    count.inc(2.0);
    Assert.assertTrue(doubleEqual(count.getCount(), 33.0));
  }

  /** Sum behavior is unchanged when Metrics is initialized with an explicit config. */
  public void testRegisterSumWithConfig() throws InterruptedException {
    initRayMetrics(2000L, 1, 1000L);
    Sum sum = registerSum();
    sum.update(10.0);
    sum.update(20.0);
    Assert.assertTrue(doubleEqual(sum.getSum(), 30.0));
    sum.update(1.0);
    sum.update(2.0);
    Assert.assertTrue(doubleEqual(sum.getSum(), 33.0));
  }

  /** Histogram behavior is unchanged when Metrics is initialized with an explicit config. */
  public void testRegisterHistogramWithConfig() throws InterruptedException {
    initRayMetrics(2000L, 1, 1000L);
    Histogram histogram = registerHistogram();
    for (int i = 1; i <= 200; ++i) {
      histogram.update(i * 1.0d);
    }
    Assert.assertTrue(doubleEqual(histogram.getValue(), 200.0d));
    List<Double> window = histogram.getHistogramWindow();
    for (int i = 0; i < Histogram.HISTOGRAM_WINDOW_SIZE; ++i) {
      Assert.assertTrue(doubleEqual(i + 101.0d, window.get(i)));
    }
    Assert.assertTrue(doubleEqual(histogram.getValue(), 200.0d));
  }
}
/*
 * Copyright 2012 Google Inc.
 * Copyright 2012 Matt Corallo.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.litecoin.core;

import com.google.litecoin.params.MainNetParams;
import com.google.litecoin.params.UnitTestParams;
import com.google.litecoin.script.Script;
import com.google.litecoin.store.FullPrunedBlockStore;
import com.google.litecoin.store.MemoryFullPrunedBlockStore;
import com.google.litecoin.utils.BlockFileLoader;
import com.google.litecoin.utils.BriefLogFormatter;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.lang.ref.WeakReference;
import java.util.Arrays;

import static org.junit.Assert.*;

/**
 * We don't do any wallet tests here, we leave that to {@link ChainSplitTest}
 */
public class FullPrunedBlockChainTest {
    private static final Logger log = LoggerFactory.getLogger(FullPrunedBlockChainTest.class);

    // Rebuilt per test: network params, the chain under test, and its backing store.
    private NetworkParameters params;
    private FullPrunedBlockChain chain;
    private FullPrunedBlockStore store;

    @Before
    public void setUp() throws Exception {
        BriefLogFormatter.init();
        // Unit-test params with a large difficulty-retarget interval so the
        // generated test chains never hit a retarget boundary.
        params = new UnitTestParams() {
            @Override public int getInterval() {
                return 10000;
            }
        };
    }

    /**
     * Runs every block case produced by {@link FullBlockTestGenerator} against the
     * chain, checking the connect/throw outcome, the resulting chain tip hash and
     * the resulting chain height after each block.
     */
    @Test
    public void testGeneratedChain() throws Exception {
        // Tests various test cases from FullBlockTestGenerator
        FullBlockTestGenerator generator = new FullBlockTestGenerator(params);
        RuleList blockList = generator.getBlocksToTest(false, false, null);

        store = new MemoryFullPrunedBlockStore(params, blockList.maximumReorgBlockCount);
        chain = new FullPrunedBlockChain(params, store);

        for (Rule rule : blockList.list) {
            // Only BlockAndValidity rules carry a block plus expected outcome.
            if (!(rule instanceof BlockAndValidity))
                continue;
            BlockAndValidity block = (BlockAndValidity) rule;
            log.info("Testing rule " + block.ruleName + " with block hash " + block.block.getHash());
            boolean threw = false;
            try {
                if (chain.add(block.block) != block.connects) {
                    log.error("Block didn't match connects flag on block " + block.ruleName);
                    fail();
                }
            } catch (VerificationException e) {
                threw = true;
                if (!block.throwsException) {
                    log.error("Block didn't match throws flag on block " + block.ruleName);
                    throw e;
                }
                if (block.connects) {
                    log.error("Block didn't match connects flag on block " + block.ruleName);
                    fail();
                }
            }
            // A block flagged throwsException must actually have thrown.
            if (!threw && block.throwsException) {
                log.error("Block didn't match throws flag on block " + block.ruleName);
                fail();
            }
            if (!chain.getChainHead().getHeader().getHash().equals(block.hashChainTipAfterBlock)) {
                log.error("New block head didn't match the correct value after block " + block.ruleName);
                fail();
            }
            if (chain.getChainHead().getHeight() != block.heightAfterBlock) {
                log.error("New block head didn't match the correct height after block " + block.ruleName);
                fail();
            }
        }
    }

    /**
     * With script execution disabled via {@code setRunScripts(false)}, a block
     * spending an output with an empty (invalid) input script must still connect
     * without a VerificationException.
     */
    @Test
    public void skipScripts() throws Exception {
        store = new MemoryFullPrunedBlockStore(params, 10);
        chain = new FullPrunedBlockChain(params, store);

        // Check that we aren't accidentally leaving any references
        // to the full StoredUndoableBlock's lying around (ie memory leaks)

        ECKey outKey = new ECKey();

        // Build some blocks on genesis block to create a spendable output
        Block rollingBlock = params.getGenesisBlock().createNextBlockWithCoinbase(outKey.getPubKey());
        chain.add(rollingBlock);
        TransactionOutput spendableOutput = rollingBlock.getTransactions().get(0).getOutput(0);
        // Mine past the coinbase maturity window so the output becomes spendable.
        for (int i = 1; i < params.getSpendableCoinbaseDepth(); i++) {
            rollingBlock = rollingBlock.createNextBlockWithCoinbase(outKey.getPubKey());
            chain.add(rollingBlock);
        }

        rollingBlock = rollingBlock.createNextBlock(null);
        Transaction t = new Transaction(params);
        t.addOutput(new TransactionOutput(params, t, Utils.toNanoCoins(50, 0), new byte[] {}));
        TransactionInput input = t.addInput(spendableOutput);
        // Invalid script.
        input.setScriptBytes(new byte[]{});
        rollingBlock.addTransaction(t);
        rollingBlock.solve();
        chain.setRunScripts(false);
        try {
            chain.add(rollingBlock);
        } catch (VerificationException e) {
            fail();
        }
    }

    /**
     * Verifies the store prunes undo data past UNDOABLE_BLOCKS_STORED: after
     * extending the chain beyond that depth and forcing a GC, the weak references
     * to the undo block, its tx-output changes and the spent output must clear.
     *
     * NOTE(review): the assertions after System.gc() depend on the JVM actually
     * collecting on request, which is not guaranteed — this test can in principle
     * be flaky under some collectors; confirm it is stable in CI.
     */
    @Test
    public void testFinalizedBlocks() throws Exception {
        final int UNDOABLE_BLOCKS_STORED = 10;
        store = new MemoryFullPrunedBlockStore(params, UNDOABLE_BLOCKS_STORED);
        chain = new FullPrunedBlockChain(params, store);

        // Check that we aren't accidentally leaving any references
        // to the full StoredUndoableBlock's lying around (ie memory leaks)

        ECKey outKey = new ECKey();

        // Build some blocks on genesis block to create a spendable output
        Block rollingBlock = params.getGenesisBlock().createNextBlockWithCoinbase(outKey.getPubKey());
        chain.add(rollingBlock);
        TransactionOutPoint spendableOutput = new TransactionOutPoint(params, 0, rollingBlock.getTransactions().get(0).getHash());
        byte[] spendableOutputScriptPubKey = rollingBlock.getTransactions().get(0).getOutputs().get(0).getScriptBytes();
        for (int i = 1; i < params.getSpendableCoinbaseDepth(); i++) {
            rollingBlock = rollingBlock.createNextBlockWithCoinbase(outKey.getPubKey());
            chain.add(rollingBlock);
        }

        // Weakly hold the spendable output so we can observe it being pruned.
        WeakReference<StoredTransactionOutput> out = new WeakReference<StoredTransactionOutput>
                                                       (store.getTransactionOutput(spendableOutput.getHash(), spendableOutput.getIndex()));
        rollingBlock = rollingBlock.createNextBlock(null);
        Transaction t = new Transaction(params);
        // Entirely invalid scriptPubKey
        t.addOutput(new TransactionOutput(params, t, Utils.toNanoCoins(50, 0), new byte[] {}));
        t.addSignedInput(spendableOutput, new Script(spendableOutputScriptPubKey), outKey);
        rollingBlock.addTransaction(t);
        rollingBlock.solve();

        chain.add(rollingBlock);
        WeakReference<StoredUndoableBlock> undoBlock = new WeakReference<StoredUndoableBlock>(store.getUndoBlock(rollingBlock.getHash()));

        // The undo block must exist but store only tx-output changes, not full txs.
        StoredUndoableBlock storedUndoableBlock = undoBlock.get();
        assertNotNull(storedUndoableBlock);
        assertNull(storedUndoableBlock.getTransactions());
        WeakReference<TransactionOutputChanges> changes = new WeakReference<TransactionOutputChanges>(storedUndoableBlock.getTxOutChanges());
        assertNotNull(changes.get());
        storedUndoableBlock = null; // Blank the reference so it can be GCd.

        // Create a chain longer than UNDOABLE_BLOCKS_STORED
        for (int i = 0; i < UNDOABLE_BLOCKS_STORED; i++) {
            rollingBlock = rollingBlock.createNextBlock(null);
            chain.add(rollingBlock);
        }
        // Try to get the garbage collector to run
        System.gc();
        assertNull(undoBlock.get());
        assertNull(changes.get());
        assertNull(out.get());
    }

    /**
     * Replays the first 100k main-net blocks from the bundled data file through a
     * pruned chain; any verification failure surfaces as an exception from add().
     */
    @Test
    public void testFirst100KBlocks() throws Exception {
        NetworkParameters params = MainNetParams.get();
        File blockFile = new File(getClass().getResource("first-100k-blocks.dat").getFile());
        BlockFileLoader loader = new BlockFileLoader(params, Arrays.asList(blockFile));

        store = new MemoryFullPrunedBlockStore(params, 10);
        chain = new FullPrunedBlockChain(params, store);
        for (Block block : loader)
            chain.add(block);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.work;

import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executor;

// NOTE(review): Counter appears unused in this file — candidate for removal.
import com.codahale.metrics.Counter;

import org.apache.drill.common.SelfCleaningRunnable;
import org.apache.drill.common.concurrent.ExtendedLatch;
import org.apache.drill.exec.coord.ClusterCoordinator;
import org.apache.drill.exec.metrics.DrillMetrics;
import org.apache.drill.exec.proto.BitControl.FragmentStatus;
import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
import org.apache.drill.exec.proto.UserBitShared.QueryId;
import org.apache.drill.exec.proto.helper.QueryIdHelper;
import org.apache.drill.exec.rpc.DrillRpcFuture;
import org.apache.drill.exec.rpc.RpcException;
import org.apache.drill.exec.rpc.control.Controller;
import org.apache.drill.exec.rpc.control.WorkEventBus;
import org.apache.drill.exec.rpc.data.DataConnectionCreator;
import org.apache.drill.exec.server.BootStrapContext;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.drill.exec.store.sys.PersistentStoreProvider;
import org.apache.drill.exec.work.batch.ControlMessageHandler;
import org.apache.drill.exec.work.foreman.Foreman;
import org.apache.drill.exec.work.foreman.QueryManager;
import org.apache.drill.exec.work.fragment.FragmentExecutor;
import org.apache.drill.exec.work.fragment.FragmentManager;
import org.apache.drill.exec.work.user.UserWorker;

import com.codahale.metrics.Gauge;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

/**
 * Manages the running fragments in a Drillbit. Periodically requests run-time stats updates from fragments
 * running elsewhere.
 */
public class WorkManager implements AutoCloseable {
  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(WorkManager.class);

  /*
   * We use a {@see java.util.concurrent.ConcurrentHashMap} because it promises never to throw a
   * {@see java.util.ConcurrentModificationException}; we need that because the statusThread may
   * iterate over the map while other threads add FragmentExecutors via the {@see #WorkerBee}.
   */
  private final Map<FragmentHandle, FragmentExecutor> runningFragments = new ConcurrentHashMap<>();

  // All Foremen currently tracked by this Drillbit, keyed by query id; entries are added by
  // WorkerBee.addNewForeman() and removed by WorkerBee.retireForeman().
  private final ConcurrentMap<QueryId, Foreman> queries = Maps.newConcurrentMap();

  private final BootStrapContext bContext;
  // Mutable: assigned once in start(); null until then.
  private DrillbitContext dContext;

  private final ControlMessageHandler controlMessageWorker;
  private final UserWorker userWorker;
  private final WorkerBee bee;
  private final WorkEventBus workBus;
  private final Executor executor;
  private final StatusThread statusThread;

  /**
   * How often the StatusThread collects statistics about running fragments.
   */
  private final static int STATUS_PERIOD_SECONDS = 5;

  public WorkManager(final BootStrapContext context) {
    this.bContext = context;
    bee = new WorkerBee(); // TODO should this just be an interface?
    workBus = new WorkEventBus(); // TODO should this just be an interface?
    executor = context.getExecutor();

    // TODO references to this escape here (via WorkerBee) before construction is done
    controlMessageWorker = new ControlMessageHandler(bee); // TODO getFragmentRunner(), getForemanForQueryId()
    userWorker = new UserWorker(bee); // TODO should just be an interface? addNewForeman(), getForemanForQueryId()
    statusThread = new StatusThread();
  }

  /**
   * Completes initialization: builds the DrillbitContext, starts the stats-reporting thread and
   * registers a gauge exposing the number of running fragments.
   */
  public void start(
      final DrillbitEndpoint endpoint,
      final Controller controller,
      final DataConnectionCreator data,
      final ClusterCoordinator coord,
      final PersistentStoreProvider provider,
      final PersistentStoreProvider profilesProvider) {
    dContext = new DrillbitContext(endpoint, bContext, coord, controller, data, workBus, provider, profilesProvider);
    statusThread.start();

    DrillMetrics.register("drill.fragments.running",
        new Gauge<Integer>() {
          @Override
          public Integer getValue() {
            return runningFragments.size();
          }
        });
  }

  public Executor getExecutor() {
    return executor;
  }

  public WorkEventBus getWorkBus() {
    return workBus;
  }

  public ControlMessageHandler getControlMessageHandler() {
    return controlMessageWorker;
  }

  public UserWorker getUserWorker() {
    return userWorker;
  }

  public WorkerBee getBee() {
    return bee;
  }

  /**
   * Stops the status thread and closes the DrillbitContext. Logs a warning (with per-fragment
   * detail at debug level) if fragments are still running at close time.
   */
  @Override
  public void close() throws Exception {
    statusThread.interrupt();

    final long numRunningFragments = runningFragments.size();
    if (numRunningFragments != 0) {
      logger.warn("Closing WorkManager but there are {} running fragments.", numRunningFragments);
      if (logger.isDebugEnabled()) {
        for (final FragmentHandle handle : runningFragments.keySet()) {
          logger.debug("Fragment still running: {} status: {}", QueryIdHelper.getQueryIdentifier(handle),
              runningFragments.get(handle).getStatus());
        }
      }
    }
    getContext().close();
  }

  public DrillbitContext getContext() {
    return dContext;
  }

  private ExtendedLatch exitLatch = null; // used to wait to exit when things are still running

  /**
   * Waits until it is safe to exit. Blocks until all currently running fragments have completed.
   *
   * <p>This is intended to be used by {@link org.apache.drill.exec.server.Drillbit#close()}.</p>
   */
  public void waitToExit() {
    synchronized(this) {
      if (queries.isEmpty() && runningFragments.isEmpty()) {
        return;
      }

      exitLatch = new ExtendedLatch();
    }

    // Wait for at most 5 seconds or until the latch is released.
    exitLatch.awaitUninterruptibly(5000);
  }

  /**
   * If it is safe to exit, and the exitLatch is in use, signals it so that waitToExit() will
   * unblock.
   */
  private void indicateIfSafeToExit() {
    synchronized(this) {
      if (exitLatch != null) {
        if (queries.isEmpty() && runningFragments.isEmpty()) {
          exitLatch.countDown();
        }
      }
    }
  }

  /**
   * Narrowed interface to WorkManager that is made available to tasks it is managing.
   */
  public class WorkerBee {
    public void addNewForeman(final Foreman foreman) {
      queries.put(foreman.getQueryId(), foreman);

      // We're relying on the Foreman to clean itself up with retireForeman().
      executor.execute(foreman);
    }

    /**
     * Add a self contained runnable work to executor service.
     * @param runnable
     */
    public void addNewWork(final Runnable runnable) {
      executor.execute(runnable);
    }

    /**
     * Remove the given Foreman from the running query list.
     *
     * <p>The running query list is a bit of a misnomer, because it doesn't
     * necessarily mean that {@link org.apache.drill.exec.work.foreman.Foreman#run()}
     * is executing. That only lasts for the duration of query setup, after which
     * the Foreman instance survives as a state machine that reacts to events
     * from the local root fragment as well as RPC responses from remote Drillbits.</p>
     *
     * @param foreman the Foreman to retire
     */
    public void retireForeman(final Foreman foreman) {
      Preconditions.checkNotNull(foreman);

      final QueryId queryId = foreman.getQueryId();
      // remove(key, value) only removes if this exact Foreman is still registered.
      final boolean wasRemoved = queries.remove(queryId, foreman);
      if (!wasRemoved) {
        logger.warn("Couldn't find retiring Foreman for query " + queryId);
//        throw new IllegalStateException("Couldn't find retiring Foreman for query " + queryId);
      }

      indicateIfSafeToExit();
    }

    public Foreman getForemanForQueryId(final QueryId queryId) {
      return queries.get(queryId);
    }

    public DrillbitContext getContext() {
      return dContext;
    }

    /**
     * Currently used to start a root fragment that is not blocked on data, and leaf fragments.
     * @param fragmentExecutor the executor to run
     */
    public void addFragmentRunner(final FragmentExecutor fragmentExecutor) {
      final FragmentHandle fragmentHandle = fragmentExecutor.getContext().getHandle();
      runningFragments.put(fragmentHandle, fragmentExecutor);
      // SelfCleaningRunnable deregisters the fragment once it finishes, whatever the outcome.
      executor.execute(new SelfCleaningRunnable(fragmentExecutor) {
        @Override
        protected void cleanup() {
          runningFragments.remove(fragmentHandle);
          indicateIfSafeToExit();
        }
      });
    }

    /**
     * Currently used to start a root fragment that is blocked on data, and intermediate fragments. This method is
     * called, when the first batch arrives.
     *
     * @param fragmentManager the manager for the fragment
     */
    public void startFragmentPendingRemote(final FragmentManager fragmentManager) {
      final FragmentHandle fragmentHandle = fragmentManager.getHandle();
      final FragmentExecutor fragmentExecutor = fragmentManager.getRunnable();
      if (fragmentExecutor == null) {
        // the fragment was most likely cancelled
        return;
      }
      runningFragments.put(fragmentHandle, fragmentExecutor);
      executor.execute(new SelfCleaningRunnable(fragmentExecutor) {
        @Override
        protected void cleanup() {
          runningFragments.remove(fragmentHandle);
          // also drop the manager from the event bus, unlike addFragmentRunner()
          workBus.removeFragmentManager(fragmentHandle);
          indicateIfSafeToExit();
        }
      });
    }

    public FragmentExecutor getFragmentRunner(final FragmentHandle handle) {
      return runningFragments.get(handle);
    }
  }

  /**
   * Periodically gather current statistics. {@link QueryManager} uses a FragmentStatusListener to
   * maintain changes to state, and should be current. However, we want to collect current statistics
   * about RUNNING queries, such as current memory consumption, number of rows processed, and so on.
   * The FragmentStatusListener only tracks changes to state, so the statistics kept there will be
   * stale; this thread probes for current values.
   */
  private class StatusThread extends Thread {
    public StatusThread() {
      // assume this thread is created by a non-daemon thread
      setName("WorkManager.StatusThread");
    }

    @Override
    public void run() {
      while(true) {
        final Controller controller = dContext.getController();
        final List<DrillRpcFuture<Ack>> futures = Lists.newArrayList();
        // Fire off one status report per running fragment to its Foreman's Drillbit...
        for(final FragmentExecutor fragmentExecutor : runningFragments.values()) {
          final FragmentStatus status = fragmentExecutor.getStatus();
          if (status == null) {
            continue;
          }

          final DrillbitEndpoint ep = fragmentExecutor.getContext().getForemanEndpoint();
          futures.add(controller.getTunnel(ep).sendFragmentStatus(status));
        }

        // ...then wait for the acks; RPC failures are logged but do not stop the thread.
        for(final DrillRpcFuture<Ack> future : futures) {
          try {
            future.checkedGet();
          } catch(final RpcException ex) {
            logger.info("Failure while sending intermediate fragment status to Foreman", ex);
          }
        }

        try {
          Thread.sleep(STATUS_PERIOD_SECONDS * 1000);
        } catch(final InterruptedException e) {
          // Preserve evidence that the interruption occurred so that code higher up on the call stack can learn of the
          // interruption and respond to it if it wants to.
          Thread.currentThread().interrupt();

          // exit status thread on interrupt.
          break;
        }
      }
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache;

import static org.junit.Assert.*;

import java.io.File;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.geode.StatisticsFactory;
import org.apache.geode.test.dunit.ThreadUtils;
import org.apache.geode.test.junit.categories.IntegrationTest;

/**
 * Testing methods for SimpleDiskRegion.java api's
 *
 * @since GemFire 5.1
 */
@Category(IntegrationTest.class)
public class SimpleDiskRegionJUnitTest extends DiskRegionTestingBase {

  /** Oplog entry ids collected concurrently by {@link TestNewDiskId} worker threads. */
  private final Set<Long> keyIds = Collections.synchronizedSet(new HashSet<Long>());

  private final DiskRegionProperties diskProps = new DiskRegionProperties();

  @Override
  protected final void postSetUp() throws Exception {
    diskProps.setDiskDirs(dirs);
  }

  /**
   * Test method for 'org.apache.geode.internal.cache.SimpleDiskRegion.basicClose()'.
   *
   * <p>For each region flavor (overflow+persist, overflow-only, persist-only): close the region
   * and its disk stores, then verify the lock ("lk") file has been removed.</p>
   */
  @Test
  public void testBasicClose() {
    {
      deleteFiles();
      try {
        region = DiskRegionHelperFactory.getAsyncOverFlowAndPersistRegion(cache, diskProps);
      } catch (Exception e) {
        logWriter.error("Exception occurred", e);
        fail(" Exception in createOverflowandPersist due to " + e);
      }
      region.close();
      closeDiskStores();
      checkIfContainsFileWithExt("lk");
    }
    {
      deleteFiles();
      try {
        region = DiskRegionHelperFactory.getAsyncOverFlowOnlyRegion(cache, diskProps);
      } catch (Exception e) {
        logWriter.error("Exception occurred", e);
        fail(" Exception in createOverflowOnly due to " + e);
      }
      region.close();
      closeDiskStores();
      checkIfContainsFileWithExt("lk");
    }
    {
      deleteFiles();
      try {
        region = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, diskProps);
      } catch (Exception e) {
        logWriter.error("Exception occurred", e);
        fail(" Exception in createOverflowandPersist due to " + e);
      }
      region.close();
      closeDiskStores();
      checkIfContainsFileWithExt("lk");
    }
    // Asif: Recreate the region so that it will be destroyed
    try {
      region = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, diskProps);
    } catch (Exception e) {
      logWriter.error("Exception occurred", e);
      fail(" Exception in createOverflowandPersist due to " + e);
    }
  }

  /** Fails if any file in any disk dir contains {@code substr} in its absolute path. */
  void checkIfContainsFileWithSubstring(String substr) {
    for (int i = 0; i < dirs.length; i++) {
      File[] files = dirs[i].listFiles();
      for (int j = 0; j < files.length; j++) {
        if (files[j].getAbsolutePath().contains(substr)) {
          fail("file \"" + files[j].getAbsolutePath() + "\" still exists");
        }
      }
    }
  }

  /** Fails unless at least one file in some disk dir contains {@code substr} in its path. */
  void expectContainsFileWithSubstring(String substr) {
    for (int i = 0; i < dirs.length; i++) {
      File[] files = dirs[i].listFiles();
      for (int j = 0; j < files.length; j++) {
        if (files[j].getAbsolutePath().contains(substr)) {
          return; // found one
        }
      }
    }
    fail("did not find a file with the substring " + substr);
  }

  /** Fails if any file in any disk dir still ends with {@code fileExtension}. */
  void checkIfContainsFileWithExt(String fileExtension) {
    for (int i = 0; i < dirs.length; i++) {
      File[] files = dirs[i].listFiles();
      for (int j = 0; j < files.length; j++) {
        if (files[j].getAbsolutePath().endsWith(fileExtension)) {
          fail("file \"" + files[j].getAbsolutePath() + "\" still exists");
        }
      }
    }
  }

  /**
   * Test method for 'org.apache.geode.internal.cache.SimpleDiskRegion.basicDestroy()'.
   *
   * <p>For each region flavor: destroy the region and verify that lock, crf, drf and OVERFLOW
   * files are gone.</p>
   */
  @Test
  public void testBasicDestroy() {
    {
      deleteFiles();
      try {
        region = DiskRegionHelperFactory.getAsyncOverFlowAndPersistRegion(cache, diskProps);
      } catch (Exception e) {
        logWriter.error("Exception occurred", e);
        fail(" Exception in createOverflowandPersist due to " + e);
      }
      region.destroyRegion();
      closeDiskStores();
      checkIfContainsFileWithExt("lk");
      // note that this only passes because the test never forced us to create the following files
      checkIfContainsFileWithExt("crf");
      checkIfContainsFileWithExt("drf");
      checkIfContainsFileWithSubstring("OVERFLOW");
    }
    {
      deleteFiles();
      try {
        region = DiskRegionHelperFactory.getAsyncOverFlowOnlyRegion(cache, diskProps);
      } catch (Exception e) {
        logWriter.error("Exception occurred", e);
        fail(" Exception in createOverflowOnly due to " + e);
      }
      region.destroyRegion();
      closeDiskStores();
      checkIfContainsFileWithExt("lk");
      // note that this only passes because the test never forced us to create the following files
      checkIfContainsFileWithExt("crf");
      checkIfContainsFileWithExt("drf");
      checkIfContainsFileWithSubstring("OVERFLOW");
    }
    {
      deleteFiles();
      try {
        region = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, diskProps);
      } catch (Exception e) {
        logWriter.error("Exception occurred", e);
        fail(" Exception in createOverflowandPersist due to " + e);
      }
      region.destroyRegion();
      closeDiskStores();
      checkIfContainsFileWithExt("lk");
      // note that this only passes because the test never forced us to create the following files
      checkIfContainsFileWithExt("crf");
      checkIfContainsFileWithExt("drf");
      checkIfContainsFileWithSubstring("OVERFLOW");
    }
  }

  /**
   * Test method for 'org.apache.geode.internal.cache.SimpleDiskRegion.getChild()':
   * swapping in a new child Oplog must be observable via the test hook, and restorable.
   */
  @Test
  public void testGetChild() {
    deleteFiles();
    region = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, diskProps);

    DiskRegion dr = ((LocalRegion) region).getDiskRegion();
    Oplog oplog = dr.testHook_getChild();
    long id = oplog.getOplogId();

    StatisticsFactory factory = region.getCache().getDistributedSystem();
    Oplog newOplog = new Oplog(id, dr.getOplogSet(), new DirectoryHolder(factory, dirs[0], 1000000, 0));
    dr.getDiskStore().persistentOplogs.setChild(newOplog);
    assertEquals(newOplog, dr.testHook_getChild());
    dr.setChild(oplog);
    assertEquals(oplog, dr.testHook_getChild());
    newOplog.close();
    newOplog = null;
    closeDown();
  }

  /**
   * Test method for 'org.apache.geode.internal.cache.SimpleDiskRegion.getNextDir()':
   * directories must be handed out in round-robin order (2, 3, 4, then back to 1).
   */
  @Test
  public void testGetNextDir() {
    deleteFiles();

    File file1 = new File("SimpleDiskRegionJUnitTestDir1");
    file1.mkdir();
    file1.deleteOnExit();

    File file2 = new File("SimpleDiskRegionJUnitTestDir2");
    file2.mkdir();
    file2.deleteOnExit();

    File file3 = new File("SimpleDiskRegionJUnitTestDir3");
    file3.mkdir();
    file3.deleteOnExit();

    File file4 = new File("SimpleDiskRegionJUnitTestDir4");
    file4.mkdir();
    file4.deleteOnExit();

    // FIX: snapshot the current contents; the previous code aliased the array
    // (oldDirs = dirs), so the in-place mutation below clobbered the "saved" copy
    // and the restore at the end of the test was a no-op.
    File[] oldDirs = dirs.clone();
    dirs[0] = file1;
    dirs[1] = file2;
    dirs[2] = file3;
    dirs[3] = file4;
    closeDiskStores();
    deleteFiles();
    DiskRegionProperties diskProps = new DiskRegionProperties();
    diskProps.setDiskDirs(dirs);

    region = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, diskProps);
    DiskRegion dr = ((LocalRegion) region).getDiskRegion();

    // The first dir was consumed at region creation, so rotation resumes at file2.
    assertEquals(file2, dr.getNextDir().getDir());
    assertEquals(file3, dr.getNextDir().getDir());
    assertEquals(file4, dr.getNextDir().getDir());
    assertEquals(file1, dr.getNextDir().getDir());
    closeDown();
    deleteFiles();
    dirs = oldDirs;
  }

  /**
   * Test method for 'org.apache.geode.internal.cache.SimpleDiskRegion.newDiskId()':
   * five threads each draw 10000 oplog entry ids; all 50000 must be distinct.
   */
  @Test
  public void testNewDiskId() {
    deleteFiles();
    region = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, diskProps);

    TestNewDiskId newDiskId = new TestNewDiskId();
    Thread thread1 = new Thread(newDiskId);
    Thread thread2 = new Thread(newDiskId);
    Thread thread3 = new Thread(newDiskId);
    Thread thread4 = new Thread(newDiskId);
    Thread thread5 = new Thread(newDiskId);

    thread1.setDaemon(true);
    thread2.setDaemon(true);
    thread3.setDaemon(true);
    thread4.setDaemon(true);
    thread5.setDaemon(true);

    thread1.start();
    thread2.start();
    thread3.start();
    thread4.start();
    thread5.start();

    ThreadUtils.join(thread1, 30 * 1000);
    ThreadUtils.join(thread2, 30 * 1000);
    ThreadUtils.join(thread3, 30 * 1000);
    ThreadUtils.join(thread4, 30 * 1000);
    ThreadUtils.join(thread5, 30 * 1000);

    if (keyIds.size() != 50000) {
      // FIX: message previously said "5000" although the assertion checks 50000.
      fail("Size not equal to 50000 as expected but is " + keyIds.size());
    }
    closeDown();
  }

  /** Worker that draws 10000 oplog entry ids and records them in {@link #keyIds}. */
  class TestNewDiskId implements Runnable {
    public void run() {
      long keyId = 0;
      for (int i = 0; i < 10000; i++) {
        keyId = ((LocalRegion) region).getDiskRegion().newOplogEntryId();
        keyIds.add(Long.valueOf(keyId));
      }
    }
  }
}
package com.planet_ink.coffee_mud.Abilities.Prayers;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;

import java.util.*;

/*
   Copyright 2004-2016 Bo Zimmerman

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/

/**
 * A malicious prayer that afflicts the target with a contagious "disease":
 * the victim periodically shouts doom-laden proclamations and, if heard,
 * may spread the affliction to a random bystander in the room. While
 * afflicted, the victim's intelligence is capped at 3.
 */
public class Prayer_Doomspout extends Prayer implements DiseaseAffect
{
	@Override public String ID() { return "Prayer_Doomspout"; }
	private final static String localizedName = CMLib.lang().L("Doomspout");
	@Override public String name() { return localizedName; }
	@Override public int classificationCode(){return Ability.ACODE_PRAYER|Ability.DOMAIN_EVANGELISM;}
	@Override public int abstractQuality(){ return Ability.QUALITY_MALICIOUS;}
	@Override public long flags(){return Ability.FLAG_UNHOLY;}
	private final static String localizedStaticDisplay = CMLib.lang().L("(Doomspout)");
	@Override public String displayText() { return localizedStaticDisplay; }
	@Override protected int canAffectCode(){return Ability.CAN_MOBS;}
	@Override protected int canTargetCode(){return Ability.CAN_MOBS;}
	@Override public int difficultyLevel(){return 7;}
	// countdown of ticks until the next doom proclamation; reset to 4 each time it fires
	int plagueDown=4;
	// name invoked in the proclamations; replaced by the caster's worshipped deity on invoke()
	String godName="The Demon";
	// set by executeMsg() when the afflicted mob actually speaks; gates contagion in tick()
	protected boolean ispoke=false;

	@Override public int spreadBitmap(){return DiseaseAffect.SPREAD_PROXIMITY;}

	@Override
	public String getHealthConditionDesc()
	{
		return "Profound mental compulsion disorder.";
	}

	/**
	 * Every 4th tick, forces the afflicted MOB to shout a random doom message;
	 * if the shout was actually spoken (see executeMsg), tries to infect a random
	 * audible bystander that fails its disease save, or grants it immunity otherwise.
	 */
	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		if(!(affected instanceof MOB))
			return super.tick(ticking,tickID);
		if(!super.tick(ticking,tickID))
			return false;
		if((--plagueDown)<=0)
		{
			final MOB mob=(MOB)affected;
			plagueDown=4;
			if(invoker==null)
				invoker=mob;
			if(mob.location()==null)
				return false;
			// cleared here; executeMsg() flips it to true if the say below goes through
			ispoke=false;
			switch(CMLib.dice().roll(1,12,0))
			{
			case 1:
				CMLib.commands().postSay(mob,null,L("Repent, or @x1 will consume your soul!",godName),false,false);
				break;
			case 2:
				CMLib.commands().postSay(mob,null,L("We are all damned! Hope is forgotten!"),false,false);
				break;
			case 3:
				CMLib.commands().postSay(mob,null,L("@x1 has damned us all!",godName),false,false);
				break;
			case 4:
				CMLib.commands().postSay(mob,null,L("Death is the only way out for us now!"),false,false);
				break;
			case 5:
				CMLib.commands().postSay(mob,null,L("The finger of @x1 will destroy all!",godName),false,false);
				break;
			case 6:
				CMLib.commands().postSay(mob,null,L("The waters will dry! The air will turn cold! Our bodies will fail! We are Lost!"),false,false);
				break;
			case 7:
				CMLib.commands().postSay(mob,null,L("Nothing can save you! Throw yourself on the mercy of @x1!",godName),false,false);
				break;
			case 8:
				CMLib.commands().postSay(mob,null,L("@x1 will show us no mercy!",godName),false,false);
				break;
			case 9:
				CMLib.commands().postSay(mob,null,L("@x1 has spoken! We will all be destroyed!",godName),false,false);
				break;
			case 10:
			case 11:
			case 12:
				CMLib.commands().postSay(mob,null,L("Our doom is upon us! The end is near!"),false,false);
				break;
			}
			if((CMLib.flags().canSpeak(mob))&&(ispoke))
			{
				final MOB target=mob.location().fetchRandomInhabitant();
				if((target!=null)
				&&(CMLib.flags().canBeHeardSpeakingBy(mob,target))
				&&(target!=invoker)
				&&(target!=mob)
				&&(target.fetchEffect(ID())==null))
					if(CMLib.dice().rollPercentage()>target.charStats().getSave(CharStats.STAT_SAVE_DISEASE))
					{
						final Room room=target.location();
						final CMMsg msg=CMClass.getMsg(invoker,target,this,CMMsg.MSG_CAST_VERBAL_SPELL,L("<S-NAME> look(s) seriously ill!"));
						if((room!=null)&&(room.okMessage(mob, msg)))
						{
							room.send(mob,msg);
							maliciousAffect(invoker,target,0,0,-1);
						}
					}
					else
						spreadImmunity(target);
			}
		}
		return true;
	}

	/**
	 * Caps the afflicted MOB's intelligence at 3 while the disease lasts.
	 */
	@Override
	public void affectCharStats(MOB affected, CharStats affectableStats)
	{
		super.affectCharStats(affected,affectableStats);
		if(affected==null)
			return;
		if(affectableStats.getStat(CharStats.STAT_INTELLIGENCE)>3)
			affectableStats.setStat(CharStats.STAT_INTELLIGENCE,3);
	}

	/**
	 * Records that the afflicted MOB spoke (any TYP_SPEAK message it sources),
	 * which tick() uses to decide whether the disease can spread this round.
	 */
	@Override
	public void executeMsg(final Environmental myHost, final CMMsg msg)
	{
		super.executeMsg(myHost,msg);
		if((affected!=null)
		&&(msg.source()==affected)
		&&(msg.sourceMinor()==CMMsg.TYP_SPEAK))
			ispoke=true;
	}

	@Override
	public void unInvoke()
	{
		// undo the affects of this spell
		if(!(affected instanceof MOB))
			return;
		final MOB mob=(MOB)affected;

		super.unInvoke();

		if(canBeUninvoked())
			if((mob.location()!=null)&&(!mob.amDead()))
			{
				// recovering grants immunity so the mob can't be immediately re-infected
				spreadImmunity(mob);
				mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("<S-YOUPOSS> doomspout disease clear up."));
			}
	}

	/**
	 * Declares this prayer a poor choice for a caster already in combat.
	 */
	@Override
	public int castingQuality(MOB mob, Physical target)
	{
		if(mob!=null)
		{
			if(mob.isInCombat())
				return Ability.QUALITY_INDIFFERENT;
		}
		return super.castingQuality(mob,target);
	}

	/**
	 * Casts the prayer: on a proficiency success and if the room accepts all three
	 * cast messages (verbal, disease, mind), applies the affect to the target; a
	 * successful save by the target yields immunity instead. On failure, fizzles.
	 */
	@Override
	public boolean invoke(MOB mob, List<String> commands, Physical givenTarget, boolean auto, int asLevel)
	{
		final MOB target=getTarget(mob,commands,givenTarget);
		if(target==null)
			return false;

		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;

		final boolean success=proficiencyCheck(mob,0,auto);

		if(success)
		{
			final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto)|CMMsg.MASK_MALICIOUS,auto?"":L("^S<S-NAME> inflict(s) an unholy disease upon <T-NAMESELF>.^?"));
			final CMMsg msg2=CMClass.getMsg(mob,target,this,CMMsg.MSK_CAST_MALICIOUS_VERBAL|CMMsg.TYP_DISEASE|(auto?CMMsg.MASK_ALWAYS:0),null);
			final CMMsg msg3=CMClass.getMsg(mob,target,this,CMMsg.MSK_CAST_MALICIOUS_VERBAL|CMMsg.TYP_MIND|(auto?CMMsg.MASK_ALWAYS:0),null);
			if((mob.location().okMessage(mob,msg))
			&&(mob.location().okMessage(mob,msg2))
			&&(mob.location().okMessage(mob,msg3)))
			{
				mob.location().send(mob,msg);
				mob.location().send(mob,msg2);
				mob.location().send(mob,msg3);
				// all three messages unopposed => the disease takes hold
				if((msg.value()<=0)&&(msg2.value()<=0)&&(msg3.value()<=0))
				{
					invoker=mob;
					if(mob.getWorshipCharID().length()>0)
						godName=mob.getWorshipCharID();
					maliciousAffect(mob,target,asLevel,0,-1);
					mob.location().show(target,null,CMMsg.MSG_OK_VISUAL,L("<S-NAME> look(s) seriously ill!"));
				}
				else
					spreadImmunity(target);
			}
		}
		else
			return maliciousFizzle(mob,target,L("<S-NAME> attempt(s) to inflict a disease upon <T-NAMESELF>, but flub(s) it."));

		// return whether it worked
		return success;
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.cql3.statements;

import org.apache.cassandra.cql3.*;
import org.apache.cassandra.exceptions.InvalidRequestException;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Base class for restrictions that apply to a single column. Concrete subclasses
 * model the different WHERE-clause relation shapes: EQ, IN (with literal values or
 * a bind marker), slice (inequalities), and CONTAINS / CONTAINS KEY.
 */
public abstract class SingleColumnRestriction implements Restriction
{
    public boolean isMultiColumn()
    {
        return false;
    }

    /**
     * An equality restriction (col = value). May apply to the token of the
     * partition key rather than the column value itself (onToken).
     */
    public static class EQ extends SingleColumnRestriction implements Restriction.EQ
    {
        protected final Term value;
        private final boolean onToken;

        public EQ(Term value, boolean onToken)
        {
            this.value = value;
            this.onToken = onToken;
        }

        public List<ByteBuffer> values(QueryOptions options) throws InvalidRequestException
        {
            return Collections.singletonList(value.bindAndGet(options));
        }

        public boolean isSlice()
        {
            return false;
        }

        public boolean isEQ()
        {
            return true;
        }

        public boolean isIN()
        {
            return false;
        }

        public boolean isContains()
        {
            return false;
        }

        public boolean isOnToken()
        {
            return onToken;
        }

        public boolean canEvaluateWithSlices()
        {
            return true;
        }

        @Override
        public String toString()
        {
            return String.format("EQ(%s)%s", value, onToken ? "*" : "");
        }
    }

    /**
     * An IN restriction whose alternatives are given as a literal list of terms
     * (col IN (a, b, c)).
     */
    public static class InWithValues extends SingleColumnRestriction implements Restriction.IN
    {
        protected final List<? extends Term> values;

        public InWithValues(List<? extends Term> values)
        {
            this.values = values;
        }

        public List<ByteBuffer> values(QueryOptions options) throws InvalidRequestException
        {
            List<ByteBuffer> buffers = new ArrayList<>(values.size());
            for (Term value : values)
                buffers.add(value.bindAndGet(options));
            return buffers;
        }

        public boolean canHaveOnlyOneValue()
        {
            // statically known: a one-element IN behaves like an EQ
            return values.size() == 1;
        }

        public boolean isSlice()
        {
            return false;
        }

        public boolean isEQ()
        {
            return false;
        }

        public boolean isIN()
        {
            return true;
        }

        public boolean isContains()
        {
            return false;
        }

        public boolean isOnToken()
        {
            return false;
        }

        public boolean canEvaluateWithSlices()
        {
            return true;
        }

        @Override
        public String toString()
        {
            return String.format("IN(%s)", values);
        }
    }

    /**
     * An IN restriction whose alternatives come from a single bind marker
     * (col IN ?); the list of values is only known at bind time.
     */
    public static class InWithMarker extends SingleColumnRestriction implements Restriction.IN
    {
        protected final AbstractMarker marker;

        public InWithMarker(AbstractMarker marker)
        {
            this.marker = marker;
        }

        public List<ByteBuffer> values(QueryOptions options) throws InvalidRequestException
        {
            Term.MultiItemTerminal lval = (Term.MultiItemTerminal)marker.bind(options);
            if (lval == null)
                throw new InvalidRequestException("Invalid null value for IN restriction");
            return lval.getElements();
        }

        public boolean canHaveOnlyOneValue()
        {
            // number of bound values is unknown until execution, so never assume one
            return false;
        }

        public boolean isSlice()
        {
            return false;
        }

        public boolean isEQ()
        {
            return false;
        }

        public boolean isIN()
        {
            return true;
        }

        public boolean isContains()
        {
            return false;
        }

        public boolean isOnToken()
        {
            return false;
        }

        public boolean canEvaluateWithSlices()
        {
            return true;
        }

        @Override
        public String toString()
        {
            return "IN ?";
        }
    }

    /**
     * An inequality restriction (col &lt;, &lt;=, &gt;, &gt;= value), holding up to one
     * bound per end (Bound.START / Bound.END) plus per-bound inclusiveness.
     */
    public static class Slice extends SingleColumnRestriction implements Restriction.Slice
    {
        // indexed by Bound.idx: [start, end]; a null entry means "unbounded on that end"
        protected final Term[] bounds;
        protected final boolean[] boundInclusive;
        protected final boolean onToken;

        public Slice(boolean onToken)
        {
            this.bounds = new Term[2];
            this.boundInclusive = new boolean[2];
            this.onToken = onToken;
        }

        public boolean isSlice()
        {
            return true;
        }

        public boolean isEQ()
        {
            return false;
        }

        public boolean isIN()
        {
            return false;
        }

        public boolean isContains()
        {
            return false;
        }

        public List<ByteBuffer> values(QueryOptions options) throws InvalidRequestException
        {
            // a slice has no discrete value list; callers must use bound() instead
            throw new UnsupportedOperationException();
        }

        public boolean isOnToken()
        {
            return onToken;
        }

        public boolean canEvaluateWithSlices()
        {
            return true;
        }

        /** Returns true if the start or end bound (depending on the argument) is set, false otherwise */
        public boolean hasBound(Bound b)
        {
            return bounds[b.idx] != null;
        }

        public Term bound(Bound b)
        {
            return bounds[b.idx];
        }

        public ByteBuffer bound(Bound b, QueryOptions options) throws InvalidRequestException
        {
            return bounds[b.idx].bindAndGet(options);
        }

        /** Returns true if the start or end bound (depending on the argument) is inclusive, false otherwise */
        public boolean isInclusive(Bound b)
        {
            // an unset bound is treated as inclusive
            return bounds[b.idx] == null || boundInclusive[b.idx];
        }

        public Operator getRelation(Bound eocBound, Bound inclusiveBound)
        {
            switch (eocBound)
            {
                case START:
                    return boundInclusive[inclusiveBound.idx] ? Operator.GTE : Operator.GT;
                case END:
                    return boundInclusive[inclusiveBound.idx] ? Operator.LTE : Operator.LT;
            }
            throw new AssertionError();
        }

        public Operator getIndexOperator(Bound b)
        {
            switch (b)
            {
                case START:
                    return boundInclusive[b.idx] ? Operator.GTE : Operator.GT;
                case END:
                    return boundInclusive[b.idx] ? Operator.LTE : Operator.LT;
            }
            throw new AssertionError();
        }

        /**
         * Translates a relational operator into the corresponding (bound, inclusive)
         * pair and records the term as that bound.
         */
        @Override
        public final void setBound(Operator operator, Term t) throws InvalidRequestException
        {
            Bound b;
            boolean inclusive;
            switch (operator)
            {
                case GT:
                    b = Bound.START;
                    inclusive = false;
                    break;
                case GTE:
                    b = Bound.START;
                    inclusive = true;
                    break;
                case LT:
                    b = Bound.END;
                    inclusive = false;
                    break;
                case LTE:
                    b = Bound.END;
                    inclusive = true;
                    break;
                default:
                    throw new AssertionError();
            }
            setBound(b, inclusive, t);
        }

        /** Copies every bound that is set on the given slice into this one. */
        public void setBound(Restriction.Slice slice) throws InvalidRequestException
        {
            for (Bound bound : Bound.values())
                if (slice.hasBound(bound))
                    setBound(bound, slice.isInclusive(bound), slice.bound(bound));
        }

        private void setBound(Bound bound, boolean inclusive, Term term) throws InvalidRequestException
        {
            // each end may only be set once
            assert bounds[bound.idx] == null;
            bounds[bound.idx] = term;
            boundInclusive[bound.idx] = inclusive;
        }

        @Override
        public String toString()
        {
            return String.format("SLICE(%s %s, %s %s)%s", boundInclusive[0] ? ">=" : ">",
                                 bounds[0],
                                 boundInclusive[1] ? "<=" : "<",
                                 bounds[1],
                                 onToken ? "*" : "");
        }
    }

    // This holds both CONTAINS and CONTAINS_KEY restriction because we might want to have both of them.
    public static class Contains extends SingleColumnRestriction
    {
        private List<Term> values; // for CONTAINS
        private List<Term> keys;   // for CONTAINS_KEY

        public boolean hasContains()
        {
            return values != null;
        }

        public boolean hasContainsKey()
        {
            return keys != null;
        }

        public int numberOfValues()
        {
            return values == null ? 0 : values.size();
        }

        public int numberOfKeys()
        {
            return keys == null ? 0 : keys.size();
        }

        /** Dispatches to addKey() or addValue() depending on isKey. */
        public void add(Term t, boolean isKey)
        {
            if (isKey)
                addKey(t);
            else
                addValue(t);
        }

        public void addValue(Term t)
        {
            // lazily created: null means "no CONTAINS restriction yet"
            if (values == null)
                values = new ArrayList<>();
            values.add(t);
        }

        public void addKey(Term t)
        {
            if (keys == null)
                keys = new ArrayList<>();
            keys.add(t);
        }

        public List<ByteBuffer> values(QueryOptions options) throws InvalidRequestException
        {
            if (values == null)
                return Collections.emptyList();

            List<ByteBuffer> buffers = new ArrayList<ByteBuffer>(values.size());
            for (Term value : values)
                buffers.add(value.bindAndGet(options));
            return buffers;
        }

        public List<ByteBuffer> keys(QueryOptions options) throws InvalidRequestException
        {
            if (keys == null)
                return Collections.emptyList();

            List<ByteBuffer> buffers = new ArrayList<ByteBuffer>(keys.size());
            for (Term value : keys)
                buffers.add(value.bindAndGet(options));
            return buffers;
        }

        public boolean isSlice()
        {
            return false;
        }

        public boolean isEQ()
        {
            return false;
        }

        public boolean isIN()
        {
            return false;
        }

        public boolean isContains()
        {
            return true;
        }

        public boolean isOnToken()
        {
            return false;
        }

        public boolean canEvaluateWithSlices()
        {
            return false;
        }

        @Override
        public String toString()
        {
            return String.format("CONTAINS(values=%s, keys=%s)", values, keys);
        }
    }
}
package org.testng.reporters.jq;

import org.testng.ISuite;
import org.testng.ISuiteResult;
import org.testng.ITestContext;
import org.testng.ITestResult;
import org.testng.collections.Lists;
import org.testng.reporters.XMLStringBuffer;

import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * Generates the left-hand navigator of the HTML report: one collapsible
 * section per suite containing info links (one per registered panel) and
 * failed/skipped/passed method lists.
 */
public class NavigatorPanel extends BasePanel {

  /** Panels that each contribute one navigator link per suite. */
  private final List<INavigatorPanel> m_panels;

  public NavigatorPanel(Model model, List<INavigatorPanel> panels) {
    super(model);
    m_panels = panels;
  }

  @Override
  public void generate(XMLStringBuffer main) {
    main.push(D, C, "navigator-root");
    main.push(D, C, "navigator-suite-header");
    main.addRequired(S, "All suites");
    main.push("a", C, "collapse-all-link", "href", "#", "title", "Collapse/expand all the suites");
    main.push("img", "src", "collapseall.gif", C, "collapse-all-icon");
    main.pop("img");
    main.pop("a");
    main.pop(D);
    for (ISuite suite : getSuites()) {
      // Suites with no results contribute nothing to the navigator.
      if (suite.getResults().isEmpty()) {
        continue;
      }

      String suiteName = "suite-" + suiteToTag(suite);

      XMLStringBuffer header = new XMLStringBuffer(main.getCurrentIndent());

      // Aggregate method counts across all test contexts of the suite.
      Map<String, ISuiteResult> results = suite.getResults();
      int failed = 0;
      int skipped = 0;
      int passed = 0;
      for (ISuiteResult result : results.values()) {
        ITestContext context = result.getTestContext();
        failed += context.getFailedTests().size();
        skipped += context.getSkippedTests().size();
        passed += context.getPassedTests().size();
      }

      // Suite name in big font
      header.push(D, C, "suite");
      header.push(D, C, "rounded-window");
      // Extra div so the highlighting logic will only highlight this line and not
      // the entire container
      header.push(D, C, "suite-header light-rounded-window-top");
      header.push("a", "href", "#", "panel-name", suiteName, C, "navigator-link");
      header.addOptional(S, suite.getName(),
          C, "suite-name border-" + getModel().getStatusForSuite(suite.getName()));
      header.pop("a");
      header.pop(D);

      header.push(D, C, "navigator-suite-content");

      generateInfo(header, suite);
      generateResult(header, failed, skipped, passed, suite, suiteName);

      // Note: the "ul" and "result-section" elements popped here were pushed
      // inside generateResult(); the pairing is split across the two methods.
      header.pop("ul");
      header.pop(D); // suite-section-content
      header.pop(D); // suite-header
      header.pop(D); // suite
      header.pop(D); // result-section
      header.pop(D); // navigator-suite-content

      main.addString(header.toXML());
    }
    main.pop(D);
  }

  /**
   * Emits the "Results" section: a stats line plus the three per-status
   * method lists. Pushes "result-section"/"suite-section-content"/"ul"
   * elements that the caller (generate) pops.
   */
  private void generateResult(XMLStringBuffer header, int failed, int skipped, int passed,
      ISuite suite, String suiteName) {
    //
    // Results
    //
    header.push(D, C, "result-section");

    header.push(D, C, "suite-section-title");
    header.addRequired(S, "Results");
    header.pop(D);

    // Method stats
    int total = failed + skipped + passed;
    String stats = String.format("%s, %s %s %s",
        pluralize(total, "method"),
        maybe(failed, "failed", ", "),
        maybe(skipped, "skipped", ", "),
        maybe(passed, "passed", ""));
    header.push(D, C, "suite-section-content");
    header.push("ul");
    header.push("li");
    header.addOptional(S, stats, C, "method-stats");
    header.pop("li");

    generateMethodList("Failed methods",
        new ResultsByStatus(suite, "failed", ITestResult.FAILURE), suiteName, header);
    generateMethodList("Skipped methods",
        new ResultsByStatus(suite, "skipped", ITestResult.SKIP), suiteName, header);
    generateMethodList("Passed methods",
        new ResultsByStatus(suite, "passed", ITestResult.SUCCESS), suiteName, header);
  }

  /** Emits the "Info" section with one link per registered navigator panel. */
  private void generateInfo(XMLStringBuffer header, ISuite suite) {
    //
    // Info
    //
    header.push(D, C, "suite-section-title");
    header.addRequired(S, "Info");
    header.pop(D);

    header.push(D, C, "suite-section-content");
    header.push("ul");

    // All the panels
    for (INavigatorPanel panel : m_panels) {
      addLinkTo(header, panel, suite);
    }

    header.pop("ul");
    header.pop(D); // suite-section-content
  }

  /** Emits a single list-item navigator link for the given panel and suite. */
  private void addLinkTo(XMLStringBuffer header, INavigatorPanel panel, ISuite suite) {
    String text = panel.getNavigatorLink(suite);
    header.push("li");
    header.push("a", "href", "#",
        "panel-name", panel.getPanelName(suite),
        C, "navigator-link ");
    String className = panel.getClassName();
    if (className != null) {
      header.addOptional(S, text, C, className);
    } else {
      header.addOptional(S, text);
    }
    header.pop("a");
    header.pop("li");
  }

  /** Returns "&lt;count&gt; &lt;s&gt;&lt;sep&gt;" when count is positive, "" otherwise. */
  private static String maybe(int count, String s, String sep) {
    return count > 0 ? count + " " + s + sep : "";
  }

  /** Returns the suite's test results having the given status, sorted by method name. */
  private List<ITestResult> getMethodsByStatus(ISuite suite, int status) {
    List<ITestResult> result = Lists.newArrayList();
    List<ITestResult> testResults = getModel().getTestResults(suite);
    for (ITestResult tr : testResults) {
      if (tr.getStatus() == status) {
        result.add(tr);
      }
    }
    Collections.sort(result, ResultsByClass.METHOD_NAME_COMPARATOR);
    return result;
  }

  /** Supplies the results (and display type) for one method list. */
  private interface IResultProvider {
    List<ITestResult> getResults();
    String getType();
  }

  private abstract static class BaseResultProvider implements IResultProvider {
    protected final ISuite m_suite;
    protected final String m_type;

    public BaseResultProvider(ISuite suite, String type) {
      m_suite = suite;
      m_type = type;
    }

    @Override
    public String getType() {
      return m_type;
    }
  }

  /** Result provider that filters the suite's results by ITestResult status. */
  private class ResultsByStatus extends BaseResultProvider {
    private final int m_status;

    public ResultsByStatus(ISuite suite, String type, int status) {
      super(suite, type);
      m_status = status;
    }

    @Override
    public List<ITestResult> getResults() {
      return getMethodsByStatus(m_suite, m_status);
    }
  }

  /**
   * Emits one collapsible method list (title, hide/show links, entries).
   * The list is only appended to {@code main} when it contains at least one method.
   */
  private void generateMethodList(String name, IResultProvider provider, String suiteName,
      XMLStringBuffer main) {
    XMLStringBuffer xsb = new XMLStringBuffer(main.getCurrentIndent());
    String type = provider.getType();
    String image = Model.getImage(type);

    xsb.push("li");

    // The methods themselves
    xsb.addRequired(S, name, C, "method-list-title " + type);

    // The mark up to show the (hide)/(show) links
    xsb.push(S, C, "show-or-hide-methods " + type);
    xsb.addRequired("a", " (hide)", "href", "#",
        C, "hide-methods " + type + " " + suiteName, "panel-name", suiteName);
    xsb.addRequired("a", " (show)", "href", "#",
        C, "show-methods " + type + " " + suiteName, "panel-name", suiteName);
    xsb.pop(S);

    // List of methods
    xsb.push(D, C, "method-list-content " + type + " " + suiteName);
    int count = 0;
    List<ITestResult> testResults = provider.getResults();
    if (testResults != null) {
      Collections.sort(testResults, ResultsByClass.METHOD_NAME_COMPARATOR);
      for (ITestResult tr : testResults) {
        String testName = Model.getTestResultName(tr);
        xsb.push(S);
        xsb.addEmptyElement("img", "src", image, "width", "3%");
        xsb.addRequired("a", testName, "href", "#",
            "hash-for-method", getModel().getTag(tr),
            "panel-name", suiteName,
            "title", tr.getTestClass().getName(),
            C, "method navigator-link");
        xsb.pop(S);
        xsb.addEmptyElement("br");
        count++;
      }
    }
    xsb.pop(D);
    xsb.pop("li");

    if (count > 0) {
      main.addString(xsb.toXML());
    }
  }
}
/*
 * Copyright 2015 The SageTV Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package sage.media.bluray;

/**
 * Read-only random access over a BluRay playlist, presenting the playlist's
 * sequence of .m2ts/.MTS clip files as one contiguous virtual file.
 * All write methods throw IOException. The fields {@code fp} (current virtual
 * position), {@code rb} (read buffer), {@code fc} (current file channel),
 * {@code direct} and {@code bufSize} are inherited from
 * sage.BufferedFileChannel — presumably with those meanings; confirm against
 * the superclass.
 *
 * @author Narflex
 */
public class BluRayRandomFile extends sage.BufferedFileChannel implements BluRayStreamer
{
  /** Creates a new instance of BluRayRandomFile */
  public BluRayRandomFile(java.io.File bdmvDir, boolean directBuffer, int inTargetTitle) throws java.io.IOException
  {
    super(sage.media.bluray.BluRayParser.BLURAY_CHARSET, directBuffer);
    this.bdmvDir = bdmvDir;
    targetTitle = inTargetTitle;
    bdp = new sage.media.bluray.BluRayParser(bdmvDir);
    bdp.fullyAnalyze();
    // Title numbers are 1-based; <= 0 means "use the main playlist".
    if (targetTitle <= 0)
      targetTitle = bdp.getMainPlaylistIndex() + 1;
    targetTitle = Math.max(1, Math.min(targetTitle, bdp.getNumPlaylists()));
    currPlaylist = bdp.getPlaylist(targetTitle - 1);
    fileSequence = new java.io.File[currPlaylist.playlistItems.length];
    fileOffsets = new long[fileSequence.length];
    ptsOffsets = new long[fileSequence.length];
    streamDir = new java.io.File(bdmvDir, "STREAM");
    totalSize = 0;
    // Build cumulative byte offsets and cumulative PTS for each clip in the playlist.
    long[] totalPts = new long[fileSequence.length];
    for (int i = 0; i < fileSequence.length; i++)
    {
      fileSequence[i] = new java.io.File(streamDir, currPlaylist.playlistItems[i].itemClips[0].clipName +
          (bdp.doesUseShortFilenames() ? ".MTS" : ".m2ts"));
      fileOffsets[i] = totalSize;
      // PTS offset maps a clip-local timestamp into the playlist timeline.
      ptsOffsets[i] = (i == 0 ? 0 : totalPts[i - 1]) - currPlaylist.playlistItems[i].inTime;
      totalSize += fileSequence[i].length();
      totalPts[i] = (i == 0 ? 0 : totalPts[i - 1]) +
          (currPlaylist.playlistItems[i].outTime - currPlaylist.playlistItems[i].inTime);
    }
    if (sage.Sage.DBG) System.out.println("Established BluRay file sequence with " + fileSequence.length +
        " segments and total size=" + totalSize);
    currFileIndex = 0;
    fc = new java.io.FileInputStream(fileSequence[currFileIndex]).getChannel();
    // Chapter marks, converted to playlist-relative timestamps (45kHz units).
    chapterOffsets = new long[currPlaylist.playlistMarks.length];
    for (int i = 0; i < chapterOffsets.length; i++)
    {
      int itemRef = currPlaylist.playlistMarks[i].playItemIdRef;
      chapterOffsets[i] = (itemRef == 0 ? 0 : totalPts[itemRef - 1]) +
          currPlaylist.playlistMarks[i].timestamp - currPlaylist.playlistItems[itemRef].inTime;
    }
  }

  /** Returns the total size in bytes of the virtual (concatenated) file. */
  public long length() throws java.io.IOException
  {
    return totalSize;
  }

  public int getCurrClipIndex()
  {
    return currFileIndex;
  }

  /** Returns the clip index the next read will hit (advances past a clip boundary). */
  public int getClipIndexForNextRead()
  {
    if (getBytesLeftInClip() == 0 && currFileIndex < fileOffsets.length - 1)
      return currFileIndex + 1;
    else
      return currFileIndex;
  }

  /** Bytes between the current virtual position and the end of the current clip. */
  public long getBytesLeftInClip()
  {
    return (currFileIndex < fileOffsets.length - 1) ? (fileOffsets[currFileIndex + 1] - fp) : (totalSize - fp);
  }

  public long getClipPtsOffset(int index)
  {
    return ptsOffsets[index];
  }

  public int getNumClips()
  {
    return fileOffsets.length;
  }

  /** Repositions the virtual file pointer, reusing the read buffer when possible. */
  public void seek(long newfp) throws java.io.IOException
  {
    if (newfp == fp) return;
    // Write any pending data before we seek
    flush();
    // See if we can do this seek within the read buffer we have
    if (rb != null)
    {
      if (newfp > fp && newfp < (fp + rb.remaining()))
      {
        // Target lies inside the buffered data; just advance the buffer position.
        rb.position(rb.position() + (int)(newfp - fp));
        fp = newfp;
      }
      else
      {
        rb.clear().limit(0); // no valid data in buffer
        fp = newfp;
        ensureProperFile(true, false);
      }
    }
    else
    {
      fp = newfp;
      ensureProperFile(true, false);
    }
  }

  /**
   * Ensures {@code fc} is open on the clip containing {@code fp}, switching
   * files when the position moved outside the current clip's byte range.
   * NOTE(review): on the file-switch path this calls rb.clear() without a null
   * check; it appears to assume the read buffer was already allocated — confirm
   * callers always go through ensureBuffer()/seek() first.
   */
  protected void ensureProperFile(boolean alwaysSeek, boolean forceOpen) throws java.io.IOException
  {
    // Check to see if we need to move to a different file
    if (forceOpen || fp < fileOffsets[currFileIndex] ||
        (currFileIndex < fileOffsets.length - 1 && fp >= fileOffsets[currFileIndex + 1]))
    {
      int oldIndex = currFileIndex;
      fc.close();
      // Linear scan for the clip whose byte range contains fp.
      for (currFileIndex = 0; currFileIndex < fileOffsets.length; currFileIndex++)
      {
        if (fileOffsets[currFileIndex] > fp)
          break;
      }
      currFileIndex--;
      if (sage.Sage.DBG) System.out.println("Switching BluRay source file from index " + oldIndex +
          " to " + currFileIndex);
      // Clamp the preferred angle to what this playlist item actually provides.
      int currAngle = Math.min(prefAngle, currPlaylist.playlistItems[currFileIndex].itemClips.length - 1);
      fc = new java.io.FileInputStream(new java.io.File(streamDir,
          currPlaylist.playlistItems[currFileIndex].itemClips[currAngle].clipName +
          (bdp.doesUseShortFilenames() ? ".MTS" : ".m2ts"))).getChannel();
      alwaysSeek = true;
      rb.clear().limit(0); // no valid data in buffer
    }
    if (alwaysSeek)
      fc.position(fp - fileOffsets[currFileIndex]);
  }

  /** Lazily allocates the read buffer and refills it from the current clip when empty. */
  protected void ensureBuffer() throws java.io.IOException
  {
    if (rb == null)
    {
      rb = direct ? java.nio.ByteBuffer.allocateDirect(bufSize) : java.nio.ByteBuffer.allocate(bufSize);
      rb.clear().limit(0);
    }
    if (rb.remaining() <= 0)
    {
      ensureProperFile(false, false);
      rb.clear();
      if (fc.read(rb) < 0)
        throw new java.io.EOFException();
      rb.flip();
    }
  }

  /** Fills the given buffer completely, draining the read buffer first, then the channel. */
  public void read(java.nio.ByteBuffer b) throws java.io.IOException
  {
    if (rb != null && rb.remaining() > 0)
    {
      // Serve as much as possible from the buffered data.
      int currRead = Math.min(rb.remaining(), b.remaining());
      int oldLimit = rb.limit();
      rb.limit(currRead + rb.position());
      b.put(rb);
      rb.limit(oldLimit);
      fp += currRead;
    }
    int leftToRead = b.remaining();
    while (leftToRead > 0)
    {
      ensureProperFile(false, false);
      int currRead = fc.read(b);
      if (currRead < 0)
        throw new java.io.EOFException();
      fp += currRead;
      leftToRead -= currRead;
    }
  }

  /**
   * Transfers up to {@code length} bytes to the given channel, spanning clip
   * boundaries as needed; returns the number of bytes actually transferred.
   */
  public int transferTo(java.nio.channels.WritableByteChannel out, long length) throws java.io.IOException
  {
    long startFp = fp;
    if (rb != null && rb.remaining() > 0)
    {
      // Flush buffered bytes to the target first.
      int currRead = (int)Math.min(rb.remaining(), length);
      int oldLimit = rb.limit();
      rb.limit(currRead + rb.position());
      out.write(rb);
      rb.limit(oldLimit);
      fp += currRead;
      length -= currRead;
    }
    while (length > 0)
    {
      ensureProperFile(false, false);
      long curr = fc.transferTo(fp - fileOffsets[currFileIndex], length, out);
      length -= curr;
      fp += curr;
      if (curr <= 0)
        break;
    }
    fc.position(fp - fileOffsets[currFileIndex]);
    return (int)(fp - startFp);
  }

  /** Reads exactly {@code len} bytes into {@code b} at {@code off}, or throws EOFException. */
  public void readFully(byte b[], int off, int len) throws java.io.IOException
  {
    if (rb != null && rb.remaining() > 0)
    {
      int currRead = Math.min(rb.remaining(), len);
      rb.get(b, off, currRead);
      fp += currRead;
      len -= currRead;
      off += currRead;
    }
    while (len > 0)
    {
      ensureProperFile(false, false);
      int currRead = fc.read(java.nio.ByteBuffer.wrap(b, off, len));
      if (currRead < 0)
        throw new java.io.EOFException();
      fp += currRead;
      off += currRead;
      len -= currRead;
    }
  }

  // This stream is read-only: every write variant below throws IOException.
  public void write(int b) throws java.io.IOException
  {
    throw new java.io.IOException("Unsupported operation");
  }

  public void write(byte b) throws java.io.IOException
  {
    throw new java.io.IOException("Unsupported operation");
  }

  public void writeUnencryptedByte(byte b) throws java.io.IOException
  {
    throw new java.io.IOException("Unsupported operation");
  }

  public void write(byte b[], int off, int len) throws java.io.IOException
  {
    throw new java.io.IOException("Unsupported operation");
  }

  public void write(byte b[]) throws java.io.IOException
  {
    throw new java.io.IOException("Unsupported operation");
  }

  public void writeUTF(String s) throws java.io.IOException
  {
    throw new java.io.IOException("Unsupported operation");
  }

  public void writeBoolean(boolean b) throws java.io.IOException
  {
    throw new java.io.IOException("Unsupported operation");
  }

  public void writeByte(int b) throws java.io.IOException
  {
    throw new java.io.IOException("Unsupported operation");
  }

  public final void setLength(long len) throws java.io.IOException
  {
    throw new java.io.IOException("Unsupported operation");
  }

  /** Returns the index of the first chapter starting after the given 45kHz timestamp. */
  public int getChapter(long pts45)
  {
    for (int i = 0; i < chapterOffsets.length; i++)
      if (chapterOffsets[i] > pts45)
        return i;
    return chapterOffsets.length;
  }

  /** Chapter start in milliseconds; offsets are 45kHz ticks, hence the divide by 45. */
  public long getChapterStartMsec(int chapter)
  {
    return chapterOffsets[Math.max(0, Math.min(chapter - 1, chapterOffsets.length - 1))] / 45;
  }

  public int getNumChapters()
  {
    return chapterOffsets.length;
  }

  public int getNumAngles()
  {
    return currPlaylist.playlistItems[currFileIndex].itemClips.length;
  }

  public int getNumTitles()
  {
    return bdp.getNumPlaylists();
  }

  public int getTitle()
  {
    return targetTitle;
  }

  public void setAngle(int currBDAngle)
  {
    // Narflex - This does NOT work because the files for each angle may be a different length; so our offsets would be off.
    /*		if (prefAngle != currBDAngle - 1)
    {
      prefAngle = currBDAngle - 1;
      prefAngle = Math.max(0, prefAngle);
      try
      {
        ensureProperFile(true, true);
      }
      catch (java.io.IOException e)
      {
        System.out.println("ERROR in BluRay angle change:" + e);
      }
    }*/
  }

  public String getTitleDesc(int titleNum)
  {
    return bdp.getPlaylistDesc(titleNum - 1);
  }

  public sage.media.format.ContainerFormat getFileFormat()
  {
    return bdp.getFileFormat(targetTitle - 1);
  }

  protected long totalSize; // total bytes across all clips in the playlist
  protected java.io.File bdmvDir;
  protected sage.media.bluray.BluRayParser bdp;
  protected sage.media.bluray.MPLSObject currPlaylist;
  protected java.io.File[] fileSequence; // one clip file per playlist item
  protected long[] fileOffsets; // bytes
  protected long[] ptsOffsets; // 45kHz
  protected long[] chapterOffsets; // 45kHz
  protected int currFileIndex; // index of the clip fc is currently open on
  protected java.io.File streamDir;
  protected int prefAngle; // preferred angle; see setAngle() for why it is never changed
  protected int targetTitle; // 1-based playlist/title number
}
/*
 * Copyright 2015 Martin Smock <martin.smock@bluewin.ch>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package li.strolch.model.activity;

import static java.util.stream.Collectors.toList;
import static li.strolch.model.StrolchModelConstants.BAG_RELATIONS;
import static li.strolch.utils.collections.CollectionsHelper.singletonCollector;

import java.text.MessageFormat;
import java.util.*;
import java.util.Map.Entry;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Stream;

import li.strolch.exception.StrolchElementNotFoundException;
import li.strolch.exception.StrolchException;
import li.strolch.exception.StrolchModelException;
import li.strolch.exception.StrolchPolicyException;
import li.strolch.model.*;
import li.strolch.model.Locator.LocatorBuilder;
import li.strolch.model.parameter.Parameter;
import li.strolch.model.policy.PolicyDef;
import li.strolch.model.policy.PolicyDefs;
import li.strolch.model.visitor.StrolchElementVisitor;
import li.strolch.utils.dbc.DBC;

/**
 * Parameterized object grouping a collection of {@link Activity} and {@link Action} objects defining the process to be
 * scheduled
 *
 * @author Martin Smock <martin.smock@bluewin.ch>
 */
public class Activity extends AbstractStrolchRootElement
		implements IActivityElement, StrolchRootElement, Comparable<Activity> {

	// Cached locator; invalidated on setId() and rebuilt lazily in getLocator().
	protected Locator locator;
	protected Version version;
	// Parent activity; null when this is the root element.
	protected Activity parent;
	protected TimeOrdering timeOrdering;
	// Child elements keyed by id; LinkedHashMap so iteration follows insertion order.
	protected Map<String, IActivityElement> elements;
	protected PolicyDefs policyDefs;

	/**
	 * Empty constructor - for marshalling only!
	 */
	public Activity() {
		super();
	}

	/**
	 * Default constructor
	 *
	 * @param id
	 * 		the id
	 * @param name
	 * 		the name
	 * @param type
	 * 		the type
	 */
	public Activity(String id, String name, String type, TimeOrdering timeOrdering) {
		super(id, name, type);
		this.timeOrdering = timeOrdering;
	}

	@Override
	public void setId(String id) {
		// The cached locator embeds the id, so it must be invalidated here.
		this.locator = null;
		super.setId(id);
	}

	@Override
	public String getObjectType() {
		return Tags.ACTIVITY;
	}

	public TimeOrdering getTimeOrdering() {
		return this.timeOrdering;
	}

	public void setTimeOrdering(TimeOrdering timeOrdering) {
		assertNotReadonly();
		this.timeOrdering = timeOrdering;
	}

	@Override
	public boolean hasVersion() {
		return this.version != null;
	}

	@Override
	public Version getVersion() {
		return this.version;
	}

	@Override
	public void setVersion(Version version) throws IllegalArgumentException, IllegalStateException {
		// Only root elements carry a version, and it must match this element's locator.
		if (!isRootElement())
			throw new IllegalStateException("Can't set the version on non root of " + getLocator());

		if (version != null && !getLocator().equals(version.getLocator())) {
			String msg = "Illegal version as locator is not same: Element: {0} Version: {1}";
			throw new IllegalArgumentException(MessageFormat.format(msg, getLocator(), version));
		}

		this.version = version;
	}

	private void initElements() {
		if (this.elements == null) {
			// use a LinkedHashMap since we will iterate elements in the order
			// added and lookup elements by ID
			this.elements = new LinkedHashMap<>();
		}
	}

	@Override
	public boolean isAction() {
		return false;
	}

	@Override
	public boolean isActivity() {
		return true;
	}

	/**
	 * Returns true if this {@link Activity} contains any children i.e. any of {@link Action} or {@link Activity}
	 *
	 * @return true if this {@link Activity} contains any children i.e. any of {@link Action} or {@link Activity}
	 */
	public boolean hasElements() {
		return this.elements != null && !this.elements.isEmpty();
	}

	/**
	 * Returns true if this {@link Activity} contains a child with the given id. The element instance type is ignored,
	 * i.e. {@link Action} or {@link Activity}
	 *
	 * @param id
	 * 		the id of the element to check for
	 *
	 * @return true if this {@link Activity} contains a child with the given id. The element instance type is ignored,
	 * i.e. {@link Action} or {@link Activity}
	 */
	public boolean hasElement(String id) {
		return this.elements != null && this.elements.containsKey(id);
	}

	/**
	 * add an activity element to the {@code LinkedHashMap} of {@code IActivityElements}
	 *
	 * @param activityElement
	 * 		the element to add
	 */
	public void addElement(IActivityElement activityElement) {
		assertCanAdd(activityElement);
		activityElement.setParent(this);
		this.elements.put(activityElement.getId(), activityElement);
	}

	/**
	 * add an activity element to the {@code LinkedHashMap} of {@code IActivityElements} before the given element
	 *
	 * @param element
	 * 		the element before which to add the other element
	 * @param elementToAdd
	 * 		the element to add
	 */
	public void addElementBefore(IActivityElement element, IActivityElement elementToAdd) {
		assertCanAdd(elementToAdd);

		// LinkedHashMap has no positional insert, so rebuild the map with the
		// new element placed just before the reference element.
		Iterator<Entry<String, IActivityElement>> iterator = this.elements.entrySet().iterator();
		LinkedHashMap<String, IActivityElement> elements = new LinkedHashMap<>();
		boolean added = false;
		while (iterator.hasNext()) {
			Entry<String, IActivityElement> next = iterator.next();
			if (!added && next.getValue().equals(element)) {
				elements.put(elementToAdd.getId(), elementToAdd);
				elements.put(next.getKey(), next.getValue());
				added = true;
			}
			elements.put(next.getKey(), next.getValue());
		}
		if (!added)
			throw new IllegalStateException("Element " + element.getId() + " was not found, couldn't add before!");

		elementToAdd.setParent(this);
		this.elements = elements;
	}

	/**
	 * add an activity element to the {@code LinkedHashMap} of {@code IActivityElements} after the given element
	 *
	 * @param element
	 * 		the element before which to add the other element
	 * @param elementToAdd
	 * 		the element to add
	 */
	public void addElementAfter(IActivityElement element, IActivityElement elementToAdd) {
		assertCanAdd(elementToAdd);

		// Rebuild the map, inserting the new element right after the reference element.
		Iterator<Entry<String, IActivityElement>> iterator = this.elements.entrySet().iterator();
		LinkedHashMap<String, IActivityElement> elements = new LinkedHashMap<>();
		boolean added = false;
		while (iterator.hasNext()) {
			Entry<String, IActivityElement> next = iterator.next();
			elements.put(next.getKey(), next.getValue());
			if (!added && next.getValue().equals(element)) {
				elements.put(elementToAdd.getId(), elementToAdd);
				added = true;
			}
		}
		if (!added)
			throw new IllegalStateException("Element " + element.getId() + " was not found, couldn't add after!");

		elementToAdd.setParent(this);
		this.elements = elements;
	}

	// Validates that the given element may be added: writable, not this, no parent, unique id.
	private void assertCanAdd(IActivityElement elementToAdd) {
		assertNotReadonly();
		DBC.PRE.assertNotEquals("Can't add element to itself!", this, elementToAdd);
		DBC.PRE.assertNull("Parent can't already be set!", elementToAdd.getParent());

		// TODO make sure we can't create a circular dependency

		initElements();

		String id = elementToAdd.getId();
		if (id == null)
			throw new StrolchException("Cannot add IActivityElement without id.");

		if (this.elements.containsKey(id))
			throw new StrolchException(
					"Activity " + getLocator() + " already contains an activity element with id = " + id);
	}

	/**
	 * Removes the element with the given id and returns it, if it exists
	 *
	 * @param id
	 * 		the id of the element to remove
	 *
	 * @return the removed element, or null if it does not exist
	 */
	@SuppressWarnings("unchecked")
	public <T extends IActivityElement> T remove(String id) {
		assertNotReadonly();
		IActivityElement element = this.elements.remove(id);
		if (element != null)
			element.setParent(null);
		return (T) element;
	}

	/**
	 * Returns the {@link Action} with the given ID which is a direct child of this {@link Activity}
	 *
	 * @param id
	 * 		the id of the {@link Action} to return
	 *
	 * @return the {@link Action} with the given ID
	 */
	public Action getAction(String id) {
		return getElement(id);
	}

	/**
	 * Returns the {@link Activity} with the given ID which is a direct child of this {@link Activity}
	 *
	 * @param id
	 * 		the id of the {@link Activity} to return
	 *
	 * @return the {@link Activity} with the given ID
	 */
	public Activity getActivity(String id) {
		return getElement(id);
	}

	/**
	 * get {@code IActivityElement} by id
	 *
	 * @param id
	 * 		the id of the {@code IActivityElement}
	 *
	 * @return IActivityElement
	 */
	public <T extends IActivityElement> T getElement(String id) {
		// Throws if the element does not exist (never returns null).
		if (this.elements == null)
			throw new IllegalArgumentException("Element " + id + " does not exist on " + getLocator());
		@SuppressWarnings("unchecked")
		T t = (T) this.elements.get(id);
		if (t == null)
			throw new IllegalArgumentException("Element " + id + " does not exist on " + getLocator());
		return t;
	}

	// Returns the element immediately preceding the given element in insertion order, if any.
	public Optional<IActivityElement> getPreviousElement(IActivityElement element) {
		if (!hasElements())
			return Optional.empty();

		IActivityElement previous = null;

		Iterator<Entry<String, IActivityElement>> iter = elementIterator();
		while (iter.hasNext()) {
			IActivityElement elem = iter.next().getValue();
			if (elem == element)
				return Optional.ofNullable(previous);
			else
				previous = elem;
		}

		return Optional.empty();
	}

	// Returns the nearest preceding element of the given type, searching backwards from the given element.
	public Optional<IActivityElement> getPreviousElementByType(IActivityElement element, String type) {
		if (!hasElements())
			return Optional.empty();

		// Iterate the reversed order: once we pass the reference element, the
		// next type match is the closest preceding one.
		List<IActivityElement> reversed = new ArrayList<>(this.elements.values());
		Collections.reverse(reversed);

		boolean foundElem = false;
		Iterator<IActivityElement> iter = reversed.iterator();

		IActivityElement elem;
		while (iter.hasNext()) {
			elem = iter.next();

			if (foundElem && elem.getType().equals(type))
				return Optional.of(elem);
			else if (elem == element) {
				foundElem = true;
			}
		}

		return Optional.empty();
	}

	// Returns the element immediately following the given element in insertion order, if any.
	public Optional<IActivityElement> getNextElement(IActivityElement element) {
		if (!hasElements())
			return Optional.empty();

		Iterator<Entry<String, IActivityElement>> iter = elementIterator();
		IActivityElement previous = iter.next().getValue();
		while (iter.hasNext()) {
			IActivityElement elem = iter.next().getValue();
			if (previous == element)
				return Optional.ofNullable(elem);
			else
				previous = elem;
		}

		return Optional.empty();
	}

	// Returns the nearest following element of the given type, searching forwards from the given element.
	public Optional<IActivityElement> getNextElementByType(IActivityElement element, String type) {
		if (!hasElements())
			return Optional.empty();

		Iterator<Entry<String, IActivityElement>> iter = elementIterator();

		boolean foundElem = false;

		IActivityElement elem;
		while (iter.hasNext()) {
			elem = iter.next().getValue();

			if (foundElem && elem.getType().equals(type))
				return Optional.of(elem);
			else if (elem == element) {
				foundElem = true;
			}
		}

		return Optional.empty();
	}

	// Finds the single direct child matching the predicate; msgSupplier provides the error message otherwise.
	public <T extends IActivityElement> T findElement(Predicate<IActivityElement> predicate,
			Supplier<String> msgSupplier) {
		@SuppressWarnings("unchecked")
		T t = (T) this.elements.values().stream().filter(predicate).collect(singletonCollector(msgSupplier));
		return t;
	}

	public List<IActivityElement> findElements(Predicate<IActivityElement> predicate) {
		return this.elements.values().stream().filter(predicate).collect(toList());
	}

	// Returns the direct child actions (not recursive) matching the predicate.
	public List<Action> findActions(Predicate<Action> predicate) {
		return this.elements.values().stream() //
				.filter(IActivityElement::isAction) //
				.map(e -> (Action) e) //
				.filter(predicate) //
				.collect(toList());
	}

	// Returns all actions in the entire hierarchy matching the predicate.
	public List<Action> findActionsDeep(Predicate<Action> predicate) {
		return streamActionsDeep().filter(predicate).collect(toList());
	}

	public List<IActivityElement> getElementsByType(String type) {
		List<IActivityElement> elements = new ArrayList<>();
		Iterator<Entry<String, IActivityElement>> iter = elementIterator();
		while (iter.hasNext()) {
			IActivityElement element = iter.next().getValue();
			if (element.getType().equals(type))
				elements.add(element);
		}
		return elements;
	}

	/**
	 * @return get the {@code LinkedHashMap} of {@code IActivityElements}
	 */
	public Map<String, IActivityElement> getElements() {
		if (this.elements == null)
			return Collections.emptyMap();
		return this.elements;
	}

	public Stream<IActivityElement> streamElements() {
		return this.elements.values().stream();
	}

	// Depth-first stream over all actions in this activity's hierarchy.
	public Stream<Action> streamActionsDeep() {
		return streamElements().flatMap(e -> {
			if (e.isAction())
				return Stream.of(e);
			return e.asActivity().streamActionsDeep();
		}).map(IActivityElement::asAction);
	}

	/**
	 * Returns all the actions as a flat list
	 *
	 * @return the list of actions
	 */
	public List<Action> getActionsAsFlatList() {
		List<Action> actions = new ArrayList<>();
		getActionsAsFlatList(actions);
		return actions;
	}

	// Recursively collects every Action in this hierarchy into the given list.
	private void getActionsAsFlatList(List<Action> actions) {
		for (IActivityElement element : this.elements.values()) {
			if (element instanceof Activity)
				((Activity) element).getActionsAsFlatList(actions);
			else
				actions.add((Action) element);
		}
	}

	/**
	 * Returns all the actions in the entire hierarchy with the given state
	 *
	 * @param state
	 * 		the state of the action to return
	 *
	 * @return the list of actions with the given state
	 */
	public List<Action> getActionsWithState(State state) {
		List<Action> actions = new ArrayList<>();
		getActionsWithState(actions, state);
		return actions;
	}

	private void getActionsWithState(List<Action> actions, State state) {
		for (IActivityElement element : this.elements.values()) {
			if (element instanceof Activity)
				((Activity) element).getActionsWithState(actions, state);
			else if (element.getState() == state)
				actions.add((Action) element);
		}
	}

	/**
	 * Returns all the actions in the entire hierarchy with the given type
	 *
	 * @param type
	 * 		the type of action to return
	 *
	 * @return the list of actions with the given type
	 */
	public List<Action> getActionsByType(String type) {
		List<Action> actions = new ArrayList<>();
		getActionsByType(actions, type);
		return actions;
	}

	private void getActionsByType(List<Action> actions, String type) {
		for (IActivityElement element : this.elements.values()) {
			if (element instanceof Activity)
				((Activity) element).getActionsByType(actions, type);
			else if (element.getType().equals(type))
				actions.add((Action) element);
		}
	}

	// Resolves a descendant element by walking the locator parts below this activity.
	// NOTE(review): the DBC message says "at least 5 parts" but the check is >= 4 — confirm which is intended.
	public <T extends IActivityElement> T getElementByLocator(Locator locator) {
		DBC.PRE.assertEquals("Locator is not for this activity!", getLocator(), locator.trim(3));
		DBC.PRE.assertTrue("Locator must have at least 5 parts", locator.getSize() >= 4);

		IActivityElement element = this;
		for (int i = 3; i < locator.getSize(); i++) {
			String next = locator.get(i);

			if (!(element instanceof Activity)) {
				String msg = "Invalid locator {0} with part {1} as not an Activity but deeper element specified"; //$NON-NLS-1$
				throw new StrolchModelException(MessageFormat.format(msg, locator, next));
			}

			element = ((Activity) element).getElement(next);
			if (element == null)
				throw new StrolchElementNotFoundException(locator + " does not exist!");
		}

		@SuppressWarnings("unchecked")
		T t = (T) element;
		return t;
	}

	/**
	 * @return the iterator for entries, which include the id as key and the {@link IActivityElement} as value
	 */
	public Iterator<Entry<String, IActivityElement>> elementIterator() {
		if (this.elements == null)
			return Collections.emptyIterator();
		return this.elements.entrySet().iterator();
	}

	/**
	 * @return the stream for entries, which include the id as key and the {@link IActivityElement} as value
	 */
	public Stream<Entry<String, IActivityElement>> elementStream() {
		if (this.elements == null)
			return Stream.empty();
		return this.elements.entrySet().stream();
	}

	@Override
	public Activity asActivity() {
		return this;
	}

	// Earliest start over all children; Long.MAX_VALUE when there are no children.
	@Override
	public Long getStart() {
		long start = Long.MAX_VALUE;
		if (this.elements == null)
			return start;
		Iterator<Entry<String, IActivityElement>> elementIterator = elementIterator();
		while (elementIterator.hasNext()) {
			IActivityElement element = elementIterator.next().getValue();
			start = Math.min(start, element.getStart());
		}
		return start;
	}

	// Latest end over all children; 0 when there are no children.
	@Override
	public Long getEnd() {
		long end = 0L;
		if (this.elements == null)
			return end;
		Iterator<Entry<String, IActivityElement>> elementIterator = elementIterator();
		while (elementIterator.hasNext()) {
			IActivityElement element = elementIterator.next().getValue();
			end = Math.max(end, element.getEnd());
		}
		return end;
	}

	@Override
	public State getState() {
		// An empty activity is CREATED; otherwise the state is derived from the children.
		if (this.elements == null || this.elements.isEmpty())
			return State.CREATED;
		return State.getState(this);
	}

	@Override
	public PolicyDefs getPolicyDefs() {
		if (this.policyDefs == null)
			throw new StrolchPolicyException(getLocator() + " has no Policies defined!");
		return this.policyDefs;
	}

	@Override
	public PolicyDef getPolicyDef(Class<?> clazz) {
		return getPolicyDefs().getPolicyDef(clazz);
	}

	@Override
	public PolicyDef getPolicyDef(String type) {
		return getPolicyDefs().getPolicyDef(type);
	}

	@Override
	public PolicyDef getPolicyDef(Class<?> clazz, PolicyDef defaultDef) {
		if (!hasPolicyDefs())
			return defaultDef;
		return getPolicyDefs().getPolicyDef(clazz, defaultDef);
	}

	@Override
	public PolicyDef getPolicyDef(String type, PolicyDef defaultDef) {
		if (!hasPolicyDefs())
			return defaultDef;
		return getPolicyDefs().getPolicyDef(type, defaultDef);
	}

	@Override
	public boolean hasPolicyDefs() {
		return this.policyDefs != null;
	}

	@Override
	public boolean hasPolicyDef(String type) {
		return this.policyDefs != null && this.policyDefs.hasPolicyDef(type);
	}

	@Override
	public boolean hasPolicyDef(Class<?> clazz) {
		return this.policyDefs != null && this.policyDefs.hasPolicyDef(clazz);
	}

	@Override
	public void setPolicyDefs(PolicyDefs policyDefs) {
		assertNotReadonly();
		this.policyDefs = policyDefs;
		this.policyDefs.setParent(this);
	}

	@Override
	public void addOrUpdate(PolicyDef policyDef) {
		assertNotReadonly();
		DBC.PRE.assertNotNull("policyDef", policyDef);
		if (this.policyDefs == null) {
			this.policyDefs = new PolicyDefs();
			this.policyDefs.setParent(this);
		}
		this.policyDefs.addOrUpdate(policyDef);
	}

	@Override
	public Locator getLocator() {
		// Lazily built and cached; invalidated by setId().
		if (this.locator == null) {
			LocatorBuilder lb = new LocatorBuilder();
			fillLocator(lb);
			this.locator = lb.build();
		}
		return this.locator;
	}

	@Override
	protected void fillLocator(LocatorBuilder locatorBuilder) {
		// Root activities prepend the Activity tag and type; children delegate to their parent first.
		if (this.parent != null)
			this.parent.fillLocator(locatorBuilder);
		else
			locatorBuilder.append(Tags.ACTIVITY).append(getType());
		locatorBuilder.append(getId());
	}

	@Override
	public Activity getParent() {
		return this.parent;
	}

	@Override
	public Activity getRootElement() {
		return (this.parent == null) ? this : this.parent.getRootElement();
	}

	@Override
	public boolean isRootElement() {
		return (this.parent == null);
	}

	@Override
	public Activity getClone() {
		return getClone(false);
	}

	@Override
	public Activity getClone(boolean withVersion) {
		Activity clone = new Activity();
		clone.timeOrdering = this.timeOrdering;

		super.fillClone(clone);

		if (this.elements == null)
			return clone;

		// Deep-clone all children; addElement() re-parents them to the clone.
		for (IActivityElement element : this.elements.values()) {
			clone.addElement(element.getClone());
		}

		if (this.policyDefs != null)
			clone.setPolicyDefs(this.policyDefs.getClone());

		if (withVersion)
			clone.setVersion(this.version);

		clone.locator = this.locator;

		return clone;
	}

	@Override
	public void setReadOnly() {
		// Propagates read-only to policies and all children before this element itself.
		if (this.policyDefs != null)
			this.policyDefs.setReadOnly();
		elementStream().forEach(e -> e.getValue().setReadOnly());
		super.setReadOnly();
	}

	@Override
	public String toString() {
		final StringBuilder builder = new StringBuilder();
		builder.append("Activity [id=");
		builder.append(this.id);
		builder.append(", name=");
		builder.append(this.name);
		builder.append(", type=");
		builder.append(this.type);
		builder.append(", state=");
		builder.append(getState());
		builder.append(", start=");
		builder.append(getStart());
		builder.append(", end=");
		builder.append(getEnd());
		if (isRootElement()) {
			builder.append(", version=");
			builder.append(this.version);
		}
		builder.append("]");
		return builder.toString();
	}

	@Override
	public int compareTo(Activity o) {
		return getId().compareTo(o.getId());
	}

	@Override
	public <T> T accept(StrolchElementVisitor<T> visitor) {
		return visitor.visitActivity(this);
	}

	@Override
	public <U, T extends Parameter<U>> T findRelationParam(String paramKey) {
		return findParameter(BAG_RELATIONS, paramKey);
	}

	@Override
	public <U, T extends Parameter<U>> T findRelationParam(String paramKey, boolean assertExists) {
		return findParameter(BAG_RELATIONS, paramKey, assertExists);
	}

	// Searches for the parameter on this element first, then up the parent chain.
	@Override
	public <U, T extends Parameter<U>> T findParameter(String bagKey, String paramKey) {
		T parameter = getParameter(bagKey, paramKey);
		if (parameter != null)
			return parameter;
		if (this.parent != null)
			return this.parent.findParameter(bagKey, paramKey);
		return null;
	}

	@Override
	public <U, T extends Parameter<U>> T findParameter(String bagKey, String paramKey, boolean assertExists)
			throws StrolchModelException {
		T parameter = getParameter(bagKey, paramKey);
		if (parameter != null)
			return parameter;

		parameter = this.parent == null ? null : this.parent.findParameter(bagKey, paramKey);
		if (assertExists && parameter == null) {
			String msg = "The Parameter {0} does not exist";
			throw new StrolchModelException(
					MessageFormat.format(msg, getLocator().append(Tags.BAG, bagKey, paramKey)));
		}
		return parameter;
	}

	@Override
	public PolicyDef findPolicy(Class<?> clazz, PolicyDef defaultDef) throws StrolchModelException {
		return findPolicy(clazz.getSimpleName(), defaultDef);
	}

	// Searches for the policy def on this element first, then up the parent chain;
	// falls back to defaultDef, or throws when no default is given.
	@Override
	public PolicyDef findPolicy(String className, PolicyDef defaultDef) throws StrolchModelException {
		if (hasPolicyDef(className))
			return getPolicyDef(className);

		if (this.parent == null) {
			if (defaultDef != null)
				return defaultDef;

			String msg = "The PolicyDef {0} does not exist";
			throw new StrolchModelException(MessageFormat.format(msg, className));
		}

		return this.parent.findPolicy(className, defaultDef);
	}

	@Override
	public void setParent(Activity activity) {
		assertNotReadonly();
		this.parent = activity;
	}

	// Builds the canonical locator for an activity of the given type and id.
	public static Locator locatorFor(String type, String id) {
		return Locator.valueOf(Tags.ACTIVITY, type, id);
	}
}
/*****************************************************************************
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 ****************************************************************************/

package org.apache.pdfbox.preflight.font.descriptor;

import static org.apache.pdfbox.preflight.PreflightConstants.ERROR_FONTS_DESCRIPTOR_INVALID;
import static org.apache.pdfbox.preflight.PreflightConstants.ERROR_FONTS_FONT_FILEX_INVALID;
import static org.apache.pdfbox.preflight.PreflightConstants.ERROR_METADATA_FORMAT;
import static org.apache.pdfbox.preflight.PreflightConstants.ERROR_METADATA_FORMAT_STREAM;
import static org.apache.pdfbox.preflight.PreflightConstants.ERROR_METADATA_FORMAT_UNKOWN;
import static org.apache.pdfbox.preflight.PreflightConstants.ERROR_METADATA_FORMAT_XPACKET;
import static org.apache.pdfbox.preflight.PreflightConstants.ERROR_METADATA_UNKNOWN_VALUETYPE;
import static org.apache.pdfbox.preflight.PreflightConstants.ERROR_SYNTAX_STREAM_INVALID_FILTER;
import static org.apache.pdfbox.preflight.PreflightConstants.FONT_DICTIONARY_KEY_ASCENT;
import static org.apache.pdfbox.preflight.PreflightConstants.FONT_DICTIONARY_KEY_CAPHEIGHT;
import static org.apache.pdfbox.preflight.PreflightConstants.FONT_DICTIONARY_KEY_DESCENT;
import static org.apache.pdfbox.preflight.PreflightConstants.FONT_DICTIONARY_KEY_FLAGS;
import static org.apache.pdfbox.preflight.PreflightConstants.FONT_DICTIONARY_KEY_FONTBBOX;
import static org.apache.pdfbox.preflight.PreflightConstants.FONT_DICTIONARY_KEY_ITALICANGLE;
import static org.apache.pdfbox.preflight.PreflightConstants.FONT_DICTIONARY_KEY_STEMV;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.commons.io.IOUtils;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.common.PDMetadata;
import org.apache.pdfbox.pdmodel.common.PDStream;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.font.PDFontDescriptor;
import org.apache.pdfbox.pdmodel.font.PDFontLike;
import org.apache.pdfbox.preflight.PreflightContext;
import org.apache.pdfbox.preflight.ValidationResult.ValidationError;
import org.apache.pdfbox.preflight.font.container.FontContainer;
import org.apache.pdfbox.preflight.font.util.FontMetaDataValidation;
import org.apache.xmpbox.XMPMetadata;
import org.apache.xmpbox.xml.DomXmpParser;
import org.apache.xmpbox.xml.XmpParsingException;
import org.apache.xmpbox.xml.XmpParsingException.ErrorType;

/**
 * Validates the FontDescriptor of an embedded font: mandatory dictionary keys,
 * presence of exactly one FontFile entry, and the XMP metadata attached to the
 * font file stream. Subclasses supply the font-type specific FontFile
 * extraction and processing.
 *
 * @param <T> the font container receiving validation errors
 */
public abstract class FontDescriptorHelper<T extends FontContainer>
{
    protected T fContainer;

    protected PreflightContext context;

    protected PDFontLike font;

    protected PDFontDescriptor fontDescriptor;

    /** Keys that must be present in a FontDescriptor dictionary (checked for non standard-14 fonts). */
    private static final Set<String> MANDATORYFIELDS;

    static
    {
        MANDATORYFIELDS = new HashSet<String>();
        MANDATORYFIELDS.add(FONT_DICTIONARY_KEY_FLAGS);
        MANDATORYFIELDS.add(FONT_DICTIONARY_KEY_ITALICANGLE);
        MANDATORYFIELDS.add(FONT_DICTIONARY_KEY_CAPHEIGHT);
        MANDATORYFIELDS.add(FONT_DICTIONARY_KEY_FONTBBOX);
        MANDATORYFIELDS.add(FONT_DICTIONARY_KEY_ASCENT);
        MANDATORYFIELDS.add(FONT_DICTIONARY_KEY_DESCENT);
        MANDATORYFIELDS.add(FONT_DICTIONARY_KEY_STEMV);
        MANDATORYFIELDS.add(COSName.FONT_NAME.getName());
        MANDATORYFIELDS.add(COSName.TYPE.getName());
    }

    /**
     * Creates a helper bound to the given validation context, font and error container.
     *
     * @param context the preflight context
     * @param font the font whose descriptor is validated
     * @param fontContainer receives the {@link ValidationError}s produced here
     */
    public FontDescriptorHelper(PreflightContext context, PDFontLike font, T fontContainer)
    {
        super();
        this.fContainer = fontContainer;
        this.context = context;
        this.font = font;
    }

    /**
     * Runs the descriptor validation: mandatory fields (unless the font is one of the
     * standard 14), exactly-one-FontFile check, and font file metadata checks.
     * All problems are pushed to the font container rather than thrown.
     */
    public void validate()
    {
        PDFontDescriptor fd = this.font.getFontDescriptor();
        boolean isStandard14 = false;
        if (this.font instanceof PDFont)
        {
            isStandard14 = ((PDFont) font).isStandard14();
        }
        // Only a PDFontDescriptorDictionary provides a way to embedded the font program.
        if (fd != null)
        {
            fontDescriptor = fd;
            if (!isStandard14)
            {
                checkMandatoryFields(fontDescriptor.getCOSObject());
            }
            if (hasOnlyOneFontFile(fontDescriptor))
            {
                PDStream fontFile = extractFontFile(fontDescriptor);
                if (fontFile != null)
                {
                    processFontFile(fontDescriptor, fontFile);
                    checkFontFileMetaData(fontDescriptor, fontFile);
                }
            }
            else
            {
                if (fontFileNotEmbedded(fontDescriptor))
                {
                    this.fContainer.push(new ValidationError(ERROR_FONTS_FONT_FILEX_INVALID,
                            fontDescriptor.getFontName()
                                    + ": FontFile entry is missing from FontDescriptor"));
                    this.fContainer.notEmbedded();
                }
                else
                {
                    // FIX: message typo ("They is" -> "There is")
                    this.fContainer.push(new ValidationError(ERROR_FONTS_FONT_FILEX_INVALID,
                            fontDescriptor.getFontName() + ": There is more than one FontFile"));
                }
            }
        }
        else
        {
            this.fContainer.push(new ValidationError(ERROR_FONTS_DESCRIPTOR_INVALID,
                    this.font.getName() + ": FontDescriptor is null or is an AFM Descriptor"));
            this.fContainer.notEmbedded();
        }
    }

    /**
     * Checks that the descriptor dictionary contains every mandatory key and that
     * /Type, when present, is /FontDescriptor. One aggregated error is pushed for
     * all missing keys.
     *
     * @param fDescriptor the FontDescriptor dictionary
     * @return true if all mandatory fields are present and /Type is valid
     */
    protected boolean checkMandatoryFields(COSDictionary fDescriptor)
    {
        boolean result = true;
        StringBuilder missingFields = new StringBuilder();
        for (String field : MANDATORYFIELDS)
        {
            if (!fDescriptor.containsKey(field))
            {
                // Separate entries after the first one.
                if (missingFields.length() > 0)
                {
                    missingFields.append(", ");
                }
                missingFields.append(field);
            }
        }
        if (fDescriptor.containsKey(COSName.TYPE))
        {
            COSBase type = fDescriptor.getItem(COSName.TYPE);
            if (!COSName.FONT_DESC.equals(type))
            {
                this.fContainer.push(new ValidationError(ERROR_FONTS_DESCRIPTOR_INVALID,
                        this.font.getName()
                                + ": /Type in FontDescriptor must be /FontDescriptor, but is " + type));
                result = false;
            }
        }
        if (missingFields.length() > 0)
        {
            this.fContainer.push(new ValidationError(ERROR_FONTS_DESCRIPTOR_INVALID,
                    this.font.getName()
                            + ": some mandatory fields are missing from the FontDescriptor: "
                            + missingFields + "."));
            result = false;
        }
        return result;
    }

    /**
     * Extracts the font-type specific FontFile stream from the descriptor.
     *
     * @param fontDescriptor the descriptor to read
     * @return the embedded font program stream, or null if absent
     */
    public abstract PDStream extractFontFile(PDFontDescriptor fontDescriptor);

    /**
     * Return true if the FontDescriptor has only one FontFile entry.
     *
     * @param fontDescriptor
     * @return true if the FontDescriptor has only one FontFile entry.
     */
    protected boolean hasOnlyOneFontFile(PDFontDescriptor fontDescriptor)
    {
        // FIX: the previous triple-XOR (ff1 != null ^ ff2 != null ^ ff3 != null)
        // also evaluated to true when ALL THREE FontFile entries were present,
        // because true ^ true ^ true == true. Count them explicitly instead.
        int fontFileCount = 0;
        if (fontDescriptor.getFontFile() != null)
        {
            fontFileCount++;
        }
        if (fontDescriptor.getFontFile2() != null)
        {
            fontFileCount++;
        }
        if (fontDescriptor.getFontFile3() != null)
        {
            fontFileCount++;
        }
        return fontFileCount == 1;
    }

    /**
     * Return true if the descriptor embeds no font program at all
     * (none of FontFile, FontFile2, FontFile3 is present).
     *
     * @param fontDescriptor the descriptor to test
     * @return true when no FontFile entry exists
     */
    protected boolean fontFileNotEmbedded(PDFontDescriptor fontDescriptor)
    {
        PDStream ff1 = fontDescriptor.getFontFile();
        PDStream ff2 = fontDescriptor.getFontFile2();
        PDStream ff3 = fontDescriptor.getFontFile3();
        return (ff1 == null && ff2 == null && ff3 == null);
    }

    /**
     * Font-type specific processing of the embedded font program stream.
     *
     * @param fontDescriptor the descriptor owning the stream
     * @param fontFile the embedded font program
     */
    protected abstract void processFontFile(PDFontDescriptor fontDescriptor, PDStream fontFile);

    /**
     * Type0, Type1 and TrueType FontValidator call this method to check the FontFile meta data.
     *
     * @param fontDescriptor
     *            The FontDescriptor which contains the FontFile stream
     * @param fontFile
     *            The font file stream to check
     */
    protected void checkFontFileMetaData(PDFontDescriptor fontDescriptor, PDStream fontFile)
    {
        try
        {
            PDMetadata metadata = fontFile.getMetadata();
            if (metadata != null)
            {
                // Filters are forbidden in a XMP stream
                if (metadata.getFilters() != null && !metadata.getFilters().isEmpty())
                {
                    // FIX: message typo ("dictionnary" -> "dictionary")
                    this.fContainer.push(new ValidationError(ERROR_SYNTAX_STREAM_INVALID_FILTER,
                            this.font.getName()
                                    + ": Filter specified in font file metadata dictionary"));
                    return;
                }
                byte[] mdAsBytes = getMetaDataStreamAsBytes(metadata);
                try
                {
                    DomXmpParser xmpBuilder = new DomXmpParser();
                    XMPMetadata xmpMeta = xmpBuilder.parse(mdAsBytes);
                    FontMetaDataValidation fontMDval = new FontMetaDataValidation();
                    List<ValidationError> ve = new ArrayList<ValidationError>();
                    fontMDval.analyseFontName(xmpMeta, fontDescriptor, ve);
                    fontMDval.analyseRights(xmpMeta, fontDescriptor, ve);
                    this.fContainer.push(ve);
                }
                catch (XmpParsingException e)
                {
                    // Map XMP parser failures to the matching preflight error codes.
                    if (e.getErrorType() == ErrorType.NoValueType)
                    {
                        this.fContainer.push(new ValidationError(ERROR_METADATA_UNKNOWN_VALUETYPE,
                                e.getMessage(), e));
                    }
                    else if (e.getErrorType() == ErrorType.XpacketBadEnd)
                    {
                        this.fContainer.push(new ValidationError(ERROR_METADATA_FORMAT_XPACKET,
                                this.font.getName()
                                        + ": Unable to parse font metadata due to : " + e.getMessage(), e));
                    }
                    else
                    {
                        this.fContainer.push(new ValidationError(ERROR_METADATA_FORMAT,
                                e.getMessage(), e));
                    }
                }
            }
        }
        catch (IllegalStateException e)
        {
            this.fContainer.push(new ValidationError(ERROR_METADATA_FORMAT_UNKOWN,
                    this.font.getName()
                            + ": The Metadata entry doesn't reference a stream object", e));
        }
    }

    /**
     * Reads the metadata stream fully into a byte array; an I/O failure is
     * reported to the container and null is returned.
     *
     * @param metadata the XMP metadata stream
     * @return the raw stream bytes, or null if it could not be read
     */
    protected final byte[] getMetaDataStreamAsBytes(PDMetadata metadata)
    {
        byte[] result = null;
        ByteArrayOutputStream bos = null;
        InputStream metaDataContent = null;
        try
        {
            bos = new ByteArrayOutputStream();
            metaDataContent = metadata.createInputStream();
            IOUtils.copyLarge(metaDataContent, bos);
            result = bos.toByteArray();
        }
        catch (IOException e)
        {
            this.fContainer.push(new ValidationError(ERROR_METADATA_FORMAT_STREAM,
                    this.font.getName()
                            + ": Unable to read font metadata due to : " + e.getMessage(), e));
        }
        finally
        {
            IOUtils.closeQuietly(metaDataContent);
            IOUtils.closeQuietly(bos);
        }
        return result;
    }

    /**
     * Tests whether a font name follows the subset naming convention:
     * six uppercase letters followed by '+'.
     *
     * @param fontName the BaseFont / FontName value, may be null
     * @return true if the name marks a subset font
     */
    public static boolean isSubSet(String fontName)
    {
        return fontName != null && fontName.matches("^[A-Z]{6}\\+.*");
    }
}
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2022 DBeaver Corp and others
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.ui.data.managers;

import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.action.IContributionManager;
import org.eclipse.jface.action.Separator;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.jface.dialogs.IDialogSettings;
import org.eclipse.jface.text.IUndoManager;
import org.eclipse.jface.text.TextViewer;
import org.eclipse.jface.text.source.ISourceViewer;
import org.eclipse.osgi.util.NLS;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.ST;
import org.eclipse.swt.custom.StyledText;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IEditorSite;
import org.eclipse.ui.IWorkbenchCommandConstants;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.texteditor.ITextEditorActionConstants;
import org.eclipse.ui.texteditor.ITextEditorActionDefinitionIds;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.DBPMessageType;
import org.jkiss.dbeaver.model.data.DBDContent;
import org.jkiss.dbeaver.model.data.storage.StringContentStorage;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.VoidProgressMonitor;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.controls.StyledTextUtils;
import org.jkiss.dbeaver.ui.controls.resultset.internal.ResultSetMessages;
import org.jkiss.dbeaver.ui.data.IStreamValueEditor;
import org.jkiss.dbeaver.ui.data.IValueController;
import org.jkiss.dbeaver.ui.dialogs.BaseDialog;
import org.jkiss.dbeaver.ui.editors.StringEditorInput;
import org.jkiss.dbeaver.ui.editors.SubEditorSite;
import org.jkiss.dbeaver.ui.editors.TextEditorUtils;
import org.jkiss.dbeaver.ui.editors.content.ContentEditorInput;
import org.jkiss.dbeaver.ui.editors.data.internal.DataEditorsActivator;
import org.jkiss.dbeaver.ui.editors.text.BaseTextEditor;
import org.jkiss.dbeaver.utils.RuntimeUtils;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

/**
 * AbstractTextPanelEditor
 *
 * Base class for stream-value panel editors that host a {@link BaseTextEditor}
 * inside a result-set value panel. Handles editor creation, context menu and
 * settings contributions (word wrap, auto-format, encoding), and loading /
 * extracting the value content.
 *
 * @param <EDITOR> the concrete nested text editor type
 */
public abstract class AbstractTextPanelEditor<EDITOR extends BaseTextEditor>
    implements IStreamValueEditor<StyledText>, IAdaptable {

    // Keys in the panel's dialog-settings section
    private static final String PREF_TEXT_EDITOR_WORD_WRAP = "content.text.editor.word-wrap";
    private static final String PREF_TEXT_EDITOR_AUTO_FORMAT = "content.text.editor.auto-format";
    private static final String PREF_TEXT_EDITOR_ENCODING = "content.text.editor.encoding";

    private static final Log log = Log.getLog(AbstractTextPanelEditor.class);

    // Contents longer than this are loaded with a "loading" placeholder first
    public static final int LONG_CONTENT_LENGTH = 10000;

    private IValueController valueController;
    private IEditorSite subSite;
    private EDITOR editor;

    /**
     * Creates the nested text editor and returns its StyledText control.
     * On editor init failure the error is shown via the value controller and
     * a bare StyledText is returned as a fallback.
     */
    @Override
    public StyledText createControl(IValueController valueController) {
        this.valueController = valueController;
        this.subSite = new SubEditorSite(valueController.getValueSite());
        editor = createEditorParty(valueController);
        try {
            editor.init(subSite, StringEditorInput.EMPTY_INPUT);
        } catch (PartInitException e) {
            valueController.showMessage(e.getMessage(), DBPMessageType.ERROR);
            return new StyledText(valueController.getEditPlaceholder(), SWT.NONE);
        }
        editor.createPartControl(valueController.getEditPlaceholder());
        StyledText editorControl = editor.getEditorControl();
        assert editorControl != null;
        initEditorSettings(editorControl);
        // Release the editor input when the widget goes away
        editorControl.addDisposeListener(e -> editor.releaseEditorInput());
        editor.addContextMenuContributor(manager -> contributeTextEditorActions(manager, editorControl));
        return editorControl;
    }

    /**
     * Creates the concrete nested editor instance.
     *
     * @param valueController the controller of the edited value
     * @return the new (not yet initialized) editor
     */
    protected abstract EDITOR createEditorParty(IValueController valueController);

    /**
     * Rebuilds the editor context menu: clipboard actions, auto-format,
     * word wrap, find/replace and (if available) the editor preferences action.
     */
    protected void contributeTextEditorActions(@NotNull IContributionManager manager, @NotNull final StyledText control) {
        manager.removeAll();
        //StyledTextUtils.fillDefaultStyledTextContextMenu(manager, control);
        final Point selectionRange = control.getSelectionRange();

        manager.add(new StyledTextUtils.StyledTextAction(IWorkbenchCommandConstants.EDIT_COPY, selectionRange.y > 0, control, ST.COPY));
        manager.add(new StyledTextUtils.StyledTextAction(IWorkbenchCommandConstants.EDIT_PASTE, control.getEditable(), control, ST.PASTE));
        manager.add(new StyledTextUtils.StyledTextAction(IWorkbenchCommandConstants.EDIT_CUT, selectionRange.y > 0, control, ST.CUT));
        manager.add(new StyledTextUtils.StyledTextAction(IWorkbenchCommandConstants.EDIT_SELECT_ALL, true, control, ST.SELECT_ALL));

        manager.add(new AutoFormatAction());
        manager.add(new WordWrapAction(control));

        manager.add(new Separator());
        manager.add(TextEditorUtils.createFindReplaceAction(editor.getSite().getShell(), editor.getViewer().getFindReplaceTarget()));

        IAction preferencesAction = editor.getAction(ITextEditorActionConstants.CONTEXT_PREFERENCES);
        if (preferencesAction != null) {
            manager.add(new Separator());
            manager.add(preferencesAction);
        }
    }

    @Override
    public void contributeActions(@NotNull IContributionManager manager, @NotNull final StyledText control) {
        // No toolbar actions by default
    }

    /**
     * Adds panel settings actions: word wrap toggle, auto-format toggle and
     * the encoding chooser. Toggle state is persisted in the panel settings.
     */
    @Override
    public void contributeSettings(@NotNull IContributionManager manager, @NotNull final StyledText editorControl) {
        manager.add(new Separator());
        {
            Action wwAction = new Action(ResultSetMessages.panel_editor_text_word_wrap_name, Action.AS_CHECK_BOX) {
                @Override
                public void run() {
                    boolean newWW = !editorControl.getWordWrap();
                    setChecked(newWW);
                    editorControl.setWordWrap(newWW);
                    getPanelSettings().put(PREF_TEXT_EDITOR_WORD_WRAP, newWW);
                }
            };
            wwAction.setChecked(editorControl.getWordWrap());
            manager.add(wwAction);
        }
        BaseTextEditor textEditor = getTextEditor();
        if (textEditor != null) {
            final Action afAction = new AutoFormatAction();
            afAction.setChecked(getPanelSettings().getBoolean(PREF_TEXT_EDITOR_AUTO_FORMAT));
            manager.add(afAction);

            manager.add(new Action(ResultSetMessages.panel_editor_text_encoding_name) {
                @Override
                public void run() {
                    // FIX: IDialogSettings.get() returns null when no encoding was ever stored,
                    // but ChangeEncodingDialog requires a @NotNull default. Fall back to UTF-8.
                    String curEncoding = getPanelSettings().get(PREF_TEXT_EDITOR_ENCODING);
                    if (curEncoding == null) {
                        curEncoding = StandardCharsets.UTF_8.name();
                    }
                    final ChangeEncodingDialog dialog = new ChangeEncodingDialog(curEncoding);
                    if (dialog.open() != IDialogConstants.OK_ID) {
                        return;
                    }
                    getPanelSettings().put(PREF_TEXT_EDITOR_ENCODING, dialog.getEncoding());
                    // Reload the value with the newly selected encoding
                    final EDITOR editor = getTextEditor();
                    if (editor != null) {
                        final TextViewer viewer = editor.getTextViewer();
                        if (viewer != null) {
                            final StyledText control = viewer.getTextWidget();
                            if (control != null && !control.isDisposed()) {
                                try {
                                    primeEditorValue(new VoidProgressMonitor(), control, null);
                                } catch (DBException e) {
                                    log.error("Can't refresh editor", e);
                                }
                            }
                        }
                    }
                }
            });
        }
    }

    protected EDITOR getTextEditor() {
        return editor;
    }

    /**
     * Applies persisted settings (currently only word wrap) to a freshly
     * created editor control.
     */
    private void initEditorSettings(StyledText control) {
        boolean wwEnabled = getPanelSettings().getBoolean(PREF_TEXT_EDITOR_WORD_WRAP);
        if (wwEnabled != control.getWordWrap()) {
            control.setWordWrap(wwEnabled);
        }
    }

    /**
     * Runs the viewer's FORMAT operation if auto-format is enabled.
     * Temporarily makes the viewer editable when needed and suppresses redraws
     * during formatting.
     */
    private void applyEditorStyle() {
        BaseTextEditor textEditor = getTextEditor();
        if (textEditor != null && getPanelSettings().getBoolean(PREF_TEXT_EDITOR_AUTO_FORMAT)) {
            TextViewer textViewer = textEditor.getTextViewer();
            if (textViewer != null) {
                StyledText textWidget = textViewer.getTextWidget();
                if (textWidget == null || textWidget.isDisposed()) {
                    return;
                }
                textWidget.setRedraw(false);

                boolean oldEditable = textViewer.isEditable();
                if (!oldEditable) {
                    textViewer.setEditable(true);
                }
                try {
                    if (textViewer.canDoOperation(ISourceViewer.FORMAT)) {
                        textViewer.doOperation(ISourceViewer.FORMAT);
                    }
                } catch (Exception e) {
                    log.debug("Error formatting text", e);
                } finally {
                    if (!oldEditable) {
                        textViewer.setEditable(false);
                    }
                    textWidget.setRedraw(true);
                }
            }
        }
    }

    /**
     * Adapts to the nested editor itself, its undo manager, or whatever the
     * nested editor can adapt to.
     */
    @Override
    public <T> T getAdapter(Class<T> adapter) {
        BaseTextEditor textEditor = getTextEditor();
        if (textEditor != null) {
            if (adapter.isAssignableFrom(textEditor.getClass())) {
                return adapter.cast(textEditor);
            }
            if (adapter == IUndoManager.class) {
                TextViewer textViewer = textEditor.getTextViewer();
                if (textViewer != null && textViewer.getUndoManager() != null) {
                    return adapter.cast(textViewer.getUndoManager());
                }
            }
            return textEditor.getAdapter(adapter);
        }
        return null;
    }

    /**
     * Loads the value content into the nested editor using the encoding
     * persisted in the panel settings.
     */
    @Override
    public void primeEditorValue(@NotNull DBRProgressMonitor monitor, @NotNull StyledText control, @Nullable DBDContent value) throws DBException {
        try {
            // Load contents in two steps (empty + real in async mode). Workaround for some strange bug in StyledText in E4.13 (#6701)
            final TextViewer textViewer = editor.getTextViewer();
            final String encoding = getPanelSettings().get(PREF_TEXT_EDITOR_ENCODING);
            final ContentEditorInput textInput = new ContentEditorInput(valueController, null, null, encoding, monitor);
            boolean longContent = textInput.getContentLength() > LONG_CONTENT_LENGTH;
            if (longContent) {
                // Step 1: show an empty input first for long contents
                UIUtils.asyncExec(() -> {
                    editor.setInput(new StringEditorInput("Empty", "", true, StandardCharsets.UTF_8.name()));
                });
            }
            UIUtils.asyncExec(() -> {
                if (textViewer != null) {
                    StyledText textWidget = textViewer.getTextWidget();
                    if (textWidget != null && longContent) {
                        // Step 2: paint a "loading" overlay, then set the real input
                        GC gc = new GC(textWidget);
                        try {
                            UIUtils.drawMessageOverControl(textWidget, gc, NLS.bind(ResultSetMessages.panel_editor_text_loading_placeholder_label, textInput.getContentLength()), 0);
                            editor.setInput(textInput);
                        } finally {
                            gc.dispose();
                        }
                    } else {
                        editor.setInput(textInput);
                    }
                    applyEditorStyle();
                }
            });
        } catch (Exception e) {
            throw new DBException("Error loading text value", e);
        } finally {
            monitor.done();
        }
    }

    /**
     * Saves the editor content back into the value: via the content editor
     * input for LOB values, or as a plain string otherwise.
     */
    @Override
    public void extractEditorValue(@NotNull DBRProgressMonitor monitor, @NotNull StyledText control, @NotNull DBDContent value) throws DBException {
        if (valueController.getValue() instanceof DBDContent) {
            monitor.beginTask("Extract text", 1);
            try {
                monitor.subTask("Extracting text from editor");
                editor.doSave(RuntimeUtils.getNestedMonitor(monitor));
                final IEditorInput editorInput = editor.getEditorInput();
                if (editorInput instanceof ContentEditorInput) {
                    final ContentEditorInput contentEditorInput = (ContentEditorInput) editorInput;
                    contentEditorInput.updateContentFromFile(monitor, value);
                }
            } catch (Exception e) {
                throw new DBException("Error extracting text from editor", e);
            } finally {
                monitor.done();
            }
        } else {
            value.updateContents(
                monitor,
                new StringContentStorage(control.getText()));
        }
    }

    private static IDialogSettings viewerSettings;

    /**
     * Lazily obtains the dialog-settings section shared by all text panel
     * editors. NOTE(review): lazy init is not synchronized — presumably only
     * ever called from the UI thread; confirm before using elsewhere.
     */
    public static IDialogSettings getPanelSettings() {
        if (viewerSettings == null) {
            viewerSettings = UIUtils.getSettingsSection(
                DataEditorsActivator.getDefault().getDialogSettings(),
                AbstractTextPanelEditor.class.getSimpleName());
        }
        return viewerSettings;
    }

    /** Check-box action toggling word wrap directly on a StyledText widget. */
    private static class WordWrapAction extends StyledTextUtils.StyledTextActionEx {

        private final StyledText text;

        WordWrapAction(StyledText text) {
            super(ITextEditorActionDefinitionIds.WORD_WRAP, Action.AS_CHECK_BOX);
            this.text = text;
        }

        @Override
        public boolean isChecked() {
            return text.getWordWrap();
        }

        @Override
        public void run() {
            text.setWordWrap(!text.getWordWrap());
        }
    }

    /** Check-box action toggling the persisted auto-format flag and re-formatting. */
    private class AutoFormatAction extends Action {

        AutoFormatAction() {
            super(ResultSetMessages.panel_editor_text_auto_format_name, Action.AS_CHECK_BOX);
        }

        @Override
        public boolean isChecked() {
            return getPanelSettings().getBoolean(PREF_TEXT_EDITOR_AUTO_FORMAT);
        }

        @Override
        public void run() {
            boolean newAF = !getPanelSettings().getBoolean(PREF_TEXT_EDITOR_AUTO_FORMAT);
            //setChecked(newAF);
            getPanelSettings().put(PREF_TEXT_EDITOR_AUTO_FORMAT, newAF);
            applyEditorStyle();
        }
    }

    /**
     * Dialog with a single encoding combo; the OK button is enabled only while
     * the typed encoding resolves to a known {@link Charset}.
     */
    private static class ChangeEncodingDialog extends BaseDialog {

        private String encoding;

        public ChangeEncodingDialog(@NotNull String defaultEncoding) {
            super(UIUtils.getActiveShell(), ResultSetMessages.panel_editor_text_encoding_title, null);
            this.encoding = defaultEncoding;
            this.setShellStyle(SWT.DIALOG_TRIM);
        }

        @Override
        protected Composite createDialogArea(Composite parent) {
            final Composite composite = super.createDialogArea(parent);
            {
                final Composite innerComposite = UIUtils.createComposite(composite, 1);
                innerComposite.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));

                final Combo encodingCombo = UIUtils.createEncodingCombo(innerComposite, encoding);
                encodingCombo.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
                encodingCombo.addModifyListener(event -> {
                    encoding = encodingCombo.getText();
                    updateCompletion();
                });
            }
            return composite;
        }

        @Override
        protected void createButtonsForButtonBar(Composite parent) {
            super.createButtonsForButtonBar(parent);
            updateCompletion();
        }

        private void updateCompletion() {
            final Button button = getButton(IDialogConstants.OK_ID);
            try {
                Charset.forName(encoding);
                button.setEnabled(true);
            } catch (Exception ignored) {
                // Unknown/illegal charset name: keep OK disabled
                button.setEnabled(false);
            }
        }

        @NotNull
        public String getEncoding() {
            return encoding;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.examples.ml.sql;

import java.io.IOException;
import java.util.List;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.Ignition;
import org.apache.ignite.cache.query.QueryCursor;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
import org.apache.ignite.examples.ml.util.SandboxMLCache;
import org.apache.ignite.ml.dataset.feature.extractor.impl.BinaryObjectVectorizer;
import org.apache.ignite.ml.math.primitives.vector.Vector;
import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
import org.apache.ignite.ml.sql.SqlDatasetBuilder;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
import org.apache.ignite.ml.tree.DecisionTreeNode;

/**
 * Example of using distributed {@link DecisionTreeClassificationTrainer} on a data stored in SQL table.
 */
public class DecisionTreeClassificationTrainerSQLTableExample {
    /**
     * Dummy cache name. The cache itself holds no data; it only provides a SQL
     * execution context for DDL/DML queries.
     */
    private static final String DUMMY_CACHE_NAME = "dummy_cache";

    /**
     * Run example: create SQL tables, load the Titanic dataset, train a decision
     * tree on the train table and run inference over the test table.
     *
     * @param args Command line arguments (unused).
     * @throws IgniteCheckedException If the Ignite node fails to start.
     * @throws IOException If the sandbox dataset cannot be loaded.
     */
    public static void main(String[] args) throws IgniteCheckedException, IOException {
        System.out.println(">>> Decision tree classification trainer example started.");

        // Start ignite grid.
        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
            System.out.println(">>> Ignite grid started.");

            // Dummy cache is required to perform SQL queries.
            CacheConfiguration<?, ?> cacheCfg = new CacheConfiguration<>(DUMMY_CACHE_NAME)
                .setSqlSchema("PUBLIC");

            IgniteCache<?, ?> cache = null;
            try {
                cache = ignite.getOrCreateCache(cacheCfg);

                System.out.println(">>> Creating table with training data...");
                cache.query(new SqlFieldsQuery("create table titanic_train (\n" +
                    "    passengerid int primary key,\n" +
                    "    pclass int,\n" +
                    "    survived int,\n" +
                    "    name varchar(255),\n" +
                    "    sex varchar(255),\n" +
                    "    age float,\n" +
                    "    sibsp int,\n" +
                    "    parch int,\n" +
                    "    ticket varchar(255),\n" +
                    "    fare float,\n" +
                    "    cabin varchar(255),\n" +
                    "    embarked varchar(255)\n" +
                    ") with \"template=partitioned\";")).getAll();

                System.out.println(">>> Creating table with test data...");
                cache.query(new SqlFieldsQuery("create table titanic_test (\n" +
                    "    passengerid int primary key,\n" +
                    "    pclass int,\n" +
                    "    survived int,\n" +
                    "    name varchar(255),\n" +
                    "    sex varchar(255),\n" +
                    "    age float,\n" +
                    "    sibsp int,\n" +
                    "    parch int,\n" +
                    "    ticket varchar(255),\n" +
                    "    fare float,\n" +
                    "    cabin varchar(255),\n" +
                    "    embarked varchar(255)\n" +
                    ") with \"template=partitioned\";")).getAll();

                loadTitanicDatasets(ignite, cache);

                System.out.println(">>> Prepare trainer...");
                DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(4, 0);

                System.out.println(">>> Perform training...");
                DecisionTreeNode mdl = trainer.fit(
                    new SqlDatasetBuilder(ignite, "SQL_PUBLIC_TITANIC_TRAIN"),
                    new BinaryObjectVectorizer<>("pclass", "age", "sibsp", "parch", "fare")
                        // Encode the categorical "sex" column as 1.0/0.0.
                        .withFeature("sex", BinaryObjectVectorizer.Mapping.create().map("male", 1.0).defaultValue(0.0))
                        .labeled("survived")
                );

                System.out.println("Tree is here: " + mdl.toString(true));

                System.out.println(">>> Perform inference...");
                try (QueryCursor<List<?>> cursor = cache.query(new SqlFieldsQuery("select " +
                    "pclass, " +
                    "sex, " +
                    "age, " +
                    "sibsp, " +
                    "parch, " +
                    "fare from titanic_test"))) {
                    for (List<?> passenger : cursor) {
                        // Column order matches the SELECT above; sex (index 1) is encoded manually.
                        Vector input = VectorUtils.of(new Double[]{
                            asDouble(passenger.get(0)),
                            "male".equals(passenger.get(1)) ? 1.0 : 0.0,
                            asDouble(passenger.get(2)),
                            asDouble(passenger.get(3)),
                            asDouble(passenger.get(4)),
                            asDouble(passenger.get(5)),
                        });

                        double prediction = mdl.predict(input);

                        System.out.printf("Passenger %s will %s.\n", passenger, prediction == 0 ? "die" : "survive");
                    }
                }

                System.out.println(">>> Example completed.");
            }
            finally {
                // FIX: cache is null if getOrCreateCache() threw; the unguarded
                // cleanup would mask the original failure with an NPE.
                if (cache != null) {
                    cache.query(new SqlFieldsQuery("DROP TABLE titanic_train"));
                    cache.query(new SqlFieldsQuery("DROP TABLE titanic_test"));
                    cache.destroy();
                }
            }
        }
        finally {
            System.out.flush();
        }
    }

    /**
     * Converts specified object into double.
     *
     * @param obj Value expected to be a {@link Number} or null.
     * @return Double value, or null when {@code obj} is null.
     * @throws IllegalArgumentException If the object is not a number.
     */
    private static Double asDouble(Object obj) {
        // NOTE: the unused type parameter <T extends Number> was removed; the
        // method never referenced it and all call sites pass plain Objects.
        if (obj == null) {
            return null;
        }

        if (obj instanceof Number) {
            Number num = (Number) obj;
            return num.doubleValue();
        }

        throw new IllegalArgumentException("Object is expected to be a number [obj=" + obj + "]");
    }

    /**
     * Loads Titanic dataset into cache.
     *
     * @param ignite Ignite instance.
     * @param cache Cache used to execute the SQL inserts.
     * @throws IOException If dataset not found.
     */
    static void loadTitanicDatasets(Ignite ignite, IgniteCache<?, ?> cache) throws IOException {
        List<String> titanicDatasetRows = new SandboxMLCache(ignite).loadDataset(MLSandboxDatasets.TITANIC);
        // First 1000 rows become the train set, the remainder the test set.
        List<String> train = titanicDatasetRows.subList(0, 1000);
        List<String> test = titanicDatasetRows.subList(1000, titanicDatasetRows.size());

        insertToCache(cache, train, "titanic_train");
        insertToCache(cache, test, "titanic_test");
    }

    /**
     * Parses semicolon-separated dataset rows and inserts them into the given table.
     *
     * @param cache Cache used to execute the SQL inserts.
     * @param rows Dataset rows to insert.
     * @param tableName Target table name.
     */
    private static void insertToCache(IgniteCache<?, ?> cache, List<String> rows, String tableName) {
        SqlFieldsQuery insertTrain = new SqlFieldsQuery("insert into " + tableName + " " +
            "(passengerid, pclass, survived, name, sex, age, sibsp, parch, ticket, fare, cabin, embarked) " +
            "values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");

        // Generated primary key; the dataset rows carry no passenger id column.
        int seq = 0;
        for (String s : rows) {
            String[] line = s.split(";");

            int pclass = parseInteger(line[0]);
            int survived = parseInteger(line[1]);
            String name = line[2];
            String sex = line[3];
            double age = parseDouble(line[4]);
            double sibsp = parseInteger(line[5]);
            double parch = parseInteger(line[6]);
            String ticket = line[7];
            double fare = parseDouble(line[8]);
            String cabin = line[9];
            String embarked = line[10];

            insertTrain.setArgs(seq++, pclass, survived, name, sex, age, sibsp, parch, ticket, fare, cabin, embarked);

            cache.query(insertTrain);
        }
    }

    /** Parses an int, mapping malformed/missing values to 0 (best-effort dataset cleanup). */
    private static Integer parseInteger(String value) {
        try {
            return Integer.valueOf(value);
        }
        catch (NumberFormatException e) {
            return 0;
        }
    }

    /** Parses a double, mapping malformed/missing values to 0.0 (best-effort dataset cleanup). */
    private static Double parseDouble(String value) {
        try {
            return Double.valueOf(value);
        }
        catch (NumberFormatException e) {
            return 0.0;
        }
    }
}
package org.apache.helix.participant; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.Map; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import org.apache.helix.HelixConstants; import org.apache.helix.HelixDataAccessor; import org.apache.helix.HelixException; import org.apache.helix.HelixManager; import org.apache.helix.InstanceType; import org.apache.helix.NotificationContext; import org.apache.helix.PropertyKey.Builder; import org.apache.helix.messaging.handling.HelixStateTransitionHandler; import org.apache.helix.messaging.handling.HelixTaskExecutor; import org.apache.helix.messaging.handling.MessageHandler; import org.apache.helix.model.CurrentState; import org.apache.helix.model.Message; import org.apache.helix.model.StateModelDefinition; import org.apache.helix.model.Message.MessageType; import org.apache.helix.participant.statemachine.StateModel; import org.apache.helix.participant.statemachine.StateModelFactory; import org.apache.helix.participant.statemachine.StateModelParser; import org.apache.log4j.Logger; public class HelixStateMachineEngine implements StateMachineEngine { private static Logger logger = Logger.getLogger(HelixStateMachineEngine.class); // 
// StateModelName->FactoryName->StateModelFactory
  private final Map<String, Map<String, StateModelFactory<? extends StateModel>>> _stateModelFactoryMap;

  StateModelParser _stateModelParser;

  private final HelixManager _manager;

  // Cache of state model definitions read from the cluster, keyed by state model name.
  private final ConcurrentHashMap<String, StateModelDefinition> _stateModelDefs;

  /**
   * Returns the factory registered for the state model under the default factory name,
   * or null when nothing is registered.
   */
  public StateModelFactory<? extends StateModel> getStateModelFactory(String stateModelName) {
    return getStateModelFactory(stateModelName, HelixConstants.DEFAULT_STATE_MODEL_FACTORY);
  }

  /**
   * Returns the factory registered for the state model under the given factory name,
   * or null when nothing is registered.
   */
  public StateModelFactory<? extends StateModel> getStateModelFactory(String stateModelName,
      String factoryName) {
    // Single get() instead of containsKey()+get(): one lookup, and no window in
    // which a concurrent removal could make the second call return null.
    Map<String, StateModelFactory<? extends StateModel>> factoryMap =
        _stateModelFactoryMap.get(stateModelName);
    return (factoryMap == null) ? null : factoryMap.get(factoryName);
  }

  public HelixStateMachineEngine(HelixManager manager) {
    _stateModelParser = new StateModelParser();
    _manager = manager;

    _stateModelFactoryMap =
        new ConcurrentHashMap<String, Map<String, StateModelFactory<? extends StateModel>>>();
    _stateModelDefs = new ConcurrentHashMap<String, StateModelDefinition>();
  }

  @Override
  public boolean registerStateModelFactory(String stateModelDef,
      StateModelFactory<? extends StateModel> factory) {
    return registerStateModelFactory(stateModelDef, factory,
        HelixConstants.DEFAULT_STATE_MODEL_FACTORY);
  }

  /**
   * Registers a factory for the given state model under the given factory name.
   * Synchronized so that two concurrent registrations for the same state model
   * cannot each install a fresh inner map and silently drop one registration
   * (the previous containsKey()+put() sequence was not atomic).
   *
   * @return false when a factory is already registered under that name, true otherwise
   */
  @Override
  public synchronized boolean registerStateModelFactory(String stateModelName,
      StateModelFactory<? extends StateModel> factory, String factoryName) {
    if (stateModelName == null || factory == null || factoryName == null) {
      throw new HelixException("stateModelDef|stateModelFactory|factoryName cannot be null");
    }

    logger.info("Register state model factory for state model " + stateModelName
        + " using factory name " + factoryName + " with " + factory);

    Map<String, StateModelFactory<? extends StateModel>> factoryMap =
        _stateModelFactoryMap.get(stateModelName);
    if (factoryMap == null) {
      factoryMap = new ConcurrentHashMap<String, StateModelFactory<? extends StateModel>>();
      _stateModelFactoryMap.put(stateModelName, factoryMap);
    }

    if (factoryMap.containsKey(factoryName)) {
      logger.warn("stateModelFactory for " + stateModelName + " using factoryName " + factoryName
          + " has already been registered.");
      return false;
    }

    factoryMap.put(factoryName, factory);

    // Nudge the instance so the newly registered factory is picked up promptly.
    sendNopMessage();
    return true;
  }

  // TODO: duplicated code in DefaultMessagingService
  private void sendNopMessage() {
    if (_manager.isConnected()) {
      try {
        Message nopMsg = new Message(MessageType.NO_OP, UUID.randomUUID().toString());
        nopMsg.setSrcName(_manager.getInstanceName());

        HelixDataAccessor accessor = _manager.getHelixDataAccessor();
        Builder keyBuilder = accessor.keyBuilder();

        if (_manager.getInstanceType() == InstanceType.CONTROLLER
            || _manager.getInstanceType() == InstanceType.CONTROLLER_PARTICIPANT) {
          nopMsg.setTgtName("Controller");
          accessor.setProperty(keyBuilder.controllerMessage(nopMsg.getId()), nopMsg);
        }

        if (_manager.getInstanceType() == InstanceType.PARTICIPANT
            || _manager.getInstanceType() == InstanceType.CONTROLLER_PARTICIPANT) {
          nopMsg.setTgtName(_manager.getInstanceName());
          accessor.setProperty(keyBuilder.message(nopMsg.getTgtName(), nopMsg.getId()), nopMsg);
        }
        logger.info("Send NO_OP message to " + nopMsg.getTgtName() + ", msgId: " + nopMsg.getId());
      } catch (Exception e) {
        // Log with context instead of the bare exception object.
        logger.error("Failed to send NO_OP message", e);
      }
    }
  }

  @Override
  public void reset() {
    for (Map<String, StateModelFactory<? extends StateModel>> ftyMap : _stateModelFactoryMap
        .values()) {
      for (StateModelFactory<? extends StateModel> stateModelFactory : ftyMap.values()) {
        Map<String, ? extends StateModel> modelMap = stateModelFactory.getStateModelMap();
        if (modelMap == null || modelMap.isEmpty()) {
          continue;
        }

        // Iterate values directly; the key was only used for the lookup before.
        for (StateModel stateModel : modelMap.values()) {
          stateModel.reset();
          String initialState = _stateModelParser.getInitialState(stateModel.getClass());
          stateModel.updateState(initialState);

          // TODO probably should update the state on ZK. Shi confirm what needs
          // to be done here.
        }
      }
    }
  }

  @Override
  public MessageHandler createHandler(Message message, NotificationContext context) {
    String type = message.getMsgType();

    if (!type.equals(MessageType.STATE_TRANSITION.toString())) {
      throw new HelixException("Unexpected msg type for message " + message.getMsgId() + " type:"
          + message.getMsgType());
    }

    String partitionKey = message.getPartitionName();
    String stateModelName = message.getStateModelDef();
    String resourceName = message.getResourceName();
    String sessionId = message.getTgtSessionId();
    int bucketSize = message.getBucketSize();

    if (stateModelName == null) {
      logger.error("message does not contain stateModelDef");
      return null;
    }

    String factoryName = message.getStateModelFactoryName();
    if (factoryName == null) {
      factoryName = HelixConstants.DEFAULT_STATE_MODEL_FACTORY;
    }

    // Use the parameterized type (the previous code used a raw StateModelFactory here).
    StateModelFactory<? extends StateModel> stateModelFactory =
        getStateModelFactory(stateModelName, factoryName);
    if (stateModelFactory == null) {
      logger.warn("Cannot find stateModelFactory for model:" + stateModelName
          + " using factoryName:" + factoryName + " for resourceGroup:" + resourceName);
      return null;
    }

    // check if the state model definition exists and cache it
    if (!_stateModelDefs.containsKey(stateModelName)) {
      HelixDataAccessor accessor = _manager.getHelixDataAccessor();
      Builder keyBuilder = accessor.keyBuilder();
      StateModelDefinition stateModelDef =
          accessor.getProperty(keyBuilder.stateModelDef(stateModelName));
      if (stateModelDef == null) {
        throw new HelixException("stateModelDef for " + stateModelName + " does NOT exist");
      }

      // putIfAbsent(): harmless if another thread cached the definition first.
      _stateModelDefs.putIfAbsent(stateModelName, stateModelDef);
    }

    // create currentStateDelta for this partition
    String initState = _stateModelDefs.get(stateModelName).getInitialState();

    StateModel stateModel = stateModelFactory.getStateModel(partitionKey);
    if (stateModel == null) {
      stateModelFactory.createAndAddStateModel(partitionKey);
      stateModel = stateModelFactory.getStateModel(partitionKey);
      stateModel.updateState(initState);
    }

    CurrentState currentStateDelta = new CurrentState(resourceName);
    currentStateDelta.setSessionId(sessionId);
    currentStateDelta.setStateModelDefRef(stateModelName);
    currentStateDelta.setStateModelFactoryName(factoryName);
    currentStateDelta.setBucketSize(bucketSize);
    currentStateDelta.setState(partitionKey,
        (stateModel.getCurrentState() == null) ? initState : stateModel.getCurrentState());

    HelixTaskExecutor executor =
        (HelixTaskExecutor) context.get(NotificationContext.TASK_EXECUTOR_KEY);
    return new HelixStateTransitionHandler(stateModel, message, context, currentStateDelta,
        executor);
  }

  @Override
  public String getMessageType() {
    return MessageType.STATE_TRANSITION.toString();
  }

  @Override
  public boolean removeStateModelFactory(String stateModelDef,
      StateModelFactory<? extends StateModel> factory) {
    throw new UnsupportedOperationException("Remove not yet supported");
  }

  @Override
  public boolean removeStateModelFactory(String stateModelDef,
      StateModelFactory<? extends StateModel> factory, String factoryName) {
    throw new UnsupportedOperationException("Remove not yet supported");
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.tests.integration.amqp; import javax.jms.Connection; import javax.jms.ConnectionFactory; import javax.jms.Message; import javax.jms.MessageConsumer; import javax.jms.MessageProducer; import javax.jms.Session; import javax.jms.TextMessage; import java.util.HashMap; import java.util.UUID; import java.util.concurrent.TimeUnit; import org.apache.activemq.artemis.api.core.QueueConfiguration; import org.apache.activemq.artemis.api.core.RoutingType; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.core.config.DivertConfiguration; import org.apache.activemq.artemis.core.config.TransformerConfiguration; import org.apache.activemq.artemis.core.server.MessageReference; import org.apache.activemq.artemis.core.server.Queue; import org.apache.activemq.artemis.core.server.transformer.Transformer; import org.apache.activemq.artemis.core.settings.impl.AddressFullMessagePolicy; import org.apache.activemq.artemis.core.settings.impl.AddressSettings; import org.apache.activemq.artemis.tests.util.CFUtil; import org.apache.activemq.artemis.tests.util.RandomUtil; import org.apache.activemq.artemis.tests.util.Wait; import 
org.apache.activemq.artemis.utils.collections.LinkedListIterator; import org.apache.activemq.transport.amqp.client.AmqpClient; import org.apache.activemq.transport.amqp.client.AmqpConnection; import org.apache.activemq.transport.amqp.client.AmqpMessage; import org.apache.activemq.transport.amqp.client.AmqpReceiver; import org.apache.activemq.transport.amqp.client.AmqpSender; import org.apache.activemq.transport.amqp.client.AmqpSession; import org.junit.Assert; import org.junit.Test; public class AmqpExpiredMessageTest extends AmqpClientTestSupport { @Test(timeout = 60000) public void testSendMessageThatIsAlreadyExpiredUsingAbsoluteTime() throws Exception { AmqpClient client = createAmqpClient(); AmqpConnection connection = addConnection(client.connect()); AmqpSession session = connection.createSession(); AmqpSender sender = session.createSender(getQueueName()); // Get the Queue View early to avoid racing the delivery. final Queue queueView = getProxyToQueue(getQueueName()); assertNotNull(queueView); AmqpMessage message = new AmqpMessage(); message.setAbsoluteExpiryTime(System.currentTimeMillis() - 5000); message.setText("Test-Message"); sender.send(message); sender.close(); // Now try and get the message AmqpReceiver receiver = session.createReceiver(getQueueName()); receiver.flow(1); AmqpMessage received = receiver.receiveNoWait(); assertNull(received); Wait.assertEquals(0, queueView::getMessageCount); Wait.assertEquals(1, queueView::getMessagesExpired); connection.close(); } @Test(timeout = 60000) public void testExpiryThroughTTL() throws Exception { AmqpClient client = createAmqpClient(); AmqpConnection connection = addConnection(client.connect()); AmqpSession session = connection.createSession(); AmqpSender sender = session.createSender(getQueueName()); // Get the Queue View early to avoid racing the delivery. 
final Queue queueView = getProxyToQueue(getQueueName()); assertNotNull(queueView); AmqpMessage message = new AmqpMessage(); message.setTimeToLive(1); message.setText("Test-Message"); message.setDurable(true); message.setApplicationProperty("key1", "Value1"); sender.send(message); sender.close(); Thread.sleep(100); // Now try and get the message AmqpReceiver receiver = session.createReceiver(getQueueName()); receiver.flow(1); AmqpMessage received = receiver.receiveNoWait(); assertNull(received); Wait.assertEquals(1, queueView::getMessagesExpired); connection.close(); // This will stop and start the server // to make sure the message is decoded again from its binary format // avoiding any parsing cached at the server. server.stop(); server.start(); final Queue dlqView = getProxyToQueue(getDeadLetterAddress()); assertNotNull(dlqView); Wait.assertEquals(1, dlqView::getMessageCount); client = createAmqpClient(); connection = addConnection(client.connect()); session = connection.createSession(); AmqpReceiver receiverDLQ = session.createReceiver(getDeadLetterAddress()); receiverDLQ.flow(1); received = receiverDLQ.receive(5, TimeUnit.SECONDS); assertNotNull("Should have read message from DLQ", received); assertEquals(0, received.getTimeToLive()); assertNotNull(received); assertEquals("Value1", received.getApplicationProperty("key1")); connection.close(); } @Test(timeout = 60000) public void testRetryExpiry() throws Exception { AmqpClient client = createAmqpClient(); AmqpConnection connection = addConnection(client.connect()); AmqpSession session = connection.createSession(); AmqpSender sender = session.createSender(getQueueName()); // Get the Queue View early to avoid racing the delivery. 
final Queue queueView = getProxyToQueue(getQueueName()); assertNotNull(queueView); AmqpMessage message = new AmqpMessage(); message.setTimeToLive(1); message.setText("Test-Message"); message.setDurable(true); message.setApplicationProperty("key1", "Value1"); sender.send(message); message = new AmqpMessage(); message.setTimeToLive(1); message.setBytes(new byte[500 * 1024]); sender.send(message); sender.close(); final Queue dlqView = getProxyToQueue(getDeadLetterAddress()); Wait.assertEquals(2, dlqView::getMessageCount); Assert.assertEquals(2, dlqView.retryMessages(null)); Wait.assertEquals(0, dlqView::getMessageCount); Wait.assertEquals(2, queueView::getMessageCount); AmqpReceiver receiver = session.createReceiver(getQueueName()); // Now try and get the message receiver.flow(2); for (int i = 0; i < 2; i++) { AmqpMessage received = receiver.receive(5, TimeUnit.SECONDS); assertNotNull(received); received.accept(); } connection.close(); Wait.assertEquals(0, queueView::getMessageCount); Wait.assertEquals(0, dlqView::getMessageCount); } /** This test is validating a broker feature where the message copy through the DLQ will receive an annotation. * It is also testing filter on that annotation. */ @Test(timeout = 60000) public void testExpiryThroughTTLValidateAnnotation() throws Exception { AmqpClient client = createAmqpClient(); AmqpConnection connection = addConnection(client.connect()); AmqpSession session = connection.createSession(); AmqpSender sender = session.createSender(getQueueName()); // Get the Queue View early to avoid racing the delivery. 
final Queue queueView = getProxyToQueue(getQueueName()); assertNotNull(queueView); AmqpMessage message = new AmqpMessage(); message.setTimeToLive(1); message.setText("Test-Message"); message.setDurable(true); message.setApplicationProperty("key1", "Value1"); sender.send(message); sender.close(); Thread.sleep(100); // Now try and get the message AmqpReceiver receiver = session.createReceiver(getQueueName()); receiver.flow(1); AmqpMessage received = receiver.receiveNoWait(); assertNull(received); Wait.assertEquals(1, queueView::getMessagesExpired); connection.close(); // This will stop and start the server // to make sure the message is decoded again from its binary format // avoiding any parsing cached at the server. server.stop(); server.start(); final Queue dlqView = getProxyToQueue(getDeadLetterAddress()); assertNotNull(dlqView); Wait.assertEquals(1, dlqView::getMessageCount); client = createAmqpClient(); connection = addConnection(client.connect()); session = connection.createSession(); AmqpReceiver receiverDLQ = session.createReceiver(getDeadLetterAddress(), "\"m.x-opt-ORIG-ADDRESS\"='" + getQueueName() + "'"); receiverDLQ.flow(1); received = receiverDLQ.receive(5, TimeUnit.SECONDS); Assert.assertNotNull(received); Assert.assertEquals(getQueueName(), received.getMessageAnnotation("x-opt-ORIG-ADDRESS")); // close without accepting on purpose, it will issue a redelivery on the second filter receiverDLQ.close(); // Redo the selection, however now using the extra-properties, since the broker will store these as extra properties on AMQP Messages receiverDLQ = session.createReceiver(getDeadLetterAddress(), "_AMQ_ORIG_ADDRESS='" + getQueueName() + "'"); receiverDLQ.flow(1); received = receiverDLQ.receive(5, TimeUnit.SECONDS); Assert.assertEquals(getQueueName(), received.getMessageAnnotation("x-opt-ORIG-ADDRESS")); Assert.assertNotNull(received); received.accept(); assertNotNull("Should have read message from DLQ", received); assertEquals(0, received.getTimeToLive()); 
assertNotNull(received); assertEquals("Value1", received.getApplicationProperty("key1")); connection.close(); } /** This test is validating a broker feature where the message copy through the DLQ will receive an annotation. * It is also testing filter on that annotation. */ @Test(timeout = 60000) public void testExpiryQpidJMS() throws Exception { ConnectionFactory factory = CFUtil.createConnectionFactory("AMQP", getBrokerAmqpConnectionURI().toString()); Connection connection = factory.createConnection(); try { Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); javax.jms.Queue queue = session.createQueue(getQueueName()); MessageProducer sender = session.createProducer(queue); // Get the Queue View early to avoid racing the delivery. final Queue queueView = getProxyToQueue(getQueueName()); assertNotNull(queueView); sender.setTimeToLive(1); TextMessage message = session.createTextMessage("Test-Message"); message.setStringProperty("key1", "Value1"); sender.send(message); sender.close(); Wait.assertEquals(1, queueView::getMessagesExpired); final Queue dlqView = getProxyToQueue(getDeadLetterAddress()); assertNotNull(dlqView); Wait.assertEquals(1, dlqView::getMessageCount); connection.start(); javax.jms.Queue queueDLQ = session.createQueue(getDeadLetterAddress()); MessageConsumer receiverDLQ = session.createConsumer(queueDLQ, "\"m.x-opt-ORIG-ADDRESS\"='" + getQueueName() + "'"); Message received = receiverDLQ.receive(5000); Assert.assertNotNull(received); receiverDLQ.close(); } finally { connection.close(); } } @Test(timeout = 60000) public void testSendMessageThatIsNotExpiredUsingAbsoluteTime() throws Exception { AmqpClient client = createAmqpClient(); AmqpConnection connection = addConnection(client.connect()); AmqpSession session = connection.createSession(); AmqpSender sender = session.createSender(getQueueName()); // Get the Queue View early to avoid racing the delivery. 
final Queue queueView = getProxyToQueue(getQueueName()); assertNotNull(queueView); AmqpMessage message = new AmqpMessage(); message.setAbsoluteExpiryTime(System.currentTimeMillis() + 5000); message.setText("Test-Message"); sender.send(message); sender.close(); Wait.assertEquals(1, queueView::getMessageCount); // Now try and get the message AmqpReceiver receiver = session.createReceiver(getQueueName()); receiver.flow(1); AmqpMessage received = receiver.receive(5, TimeUnit.SECONDS); assertNotNull(received); assertEquals(0, queueView.getMessagesExpired()); connection.close(); } @Test(timeout = 60000) public void testSendMessageThatIsExiredUsingAbsoluteTimeWithLongTTL() throws Exception { AmqpClient client = createAmqpClient(); AmqpConnection connection = addConnection(client.connect()); AmqpSession session = connection.createSession(); AmqpSender sender = session.createSender(getQueueName()); // Get the Queue View early to avoid racing the delivery. final Queue queueView = getProxyToQueue(getQueueName()); assertNotNull(queueView); AmqpMessage message = new AmqpMessage(); message.setAbsoluteExpiryTime(System.currentTimeMillis() - 5000); // AET should override any TTL set message.setTimeToLive(60000); message.setText("Test-Message"); sender.send(message); sender.close(); Wait.assertEquals(1, queueView::getMessageCount); // Now try and get the message AmqpReceiver receiver = session.createReceiver(getQueueName()); receiver.flow(1); AmqpMessage received = receiver.receiveNoWait(); assertNull(received); Wait.assertEquals(1, queueView::getMessagesExpired); connection.close(); } @Test(timeout = 60000) public void testSendMessageThatIsExpiredUsingTTLWhenAbsoluteIsZero() throws Exception { AmqpClient client = createAmqpClient(); AmqpConnection connection = addConnection(client.connect()); AmqpSession session = connection.createSession(); AmqpSender sender = session.createSender(getQueueName()); // Get the Queue View early to avoid racing the delivery. 
final Queue queueView = getProxyToQueue(getQueueName()); assertNotNull(queueView); AmqpMessage message = new AmqpMessage(); message.setAbsoluteExpiryTime(0); // AET should override any TTL set message.setTimeToLive(100); message.setText("Test-Message"); sender.send(message); sender.close(); Wait.assertEquals(1L, queueView::getMessagesExpired, 10000, 10); // Now try and get the message AmqpReceiver receiver = session.createReceiver(getQueueName()); receiver.flow(1); AmqpMessage received = receiver.receiveNoWait(); assertNull(received); Wait.assertEquals(1, queueView::getMessagesExpired); connection.close(); } @Test(timeout = 60000) public void testSendMessageThatIsNotExpiredUsingAbsoluteTimeWithElspsedTTL() throws Exception { AmqpClient client = createAmqpClient(); AmqpConnection connection = addConnection(client.connect()); AmqpSession session = connection.createSession(); AmqpSender sender = session.createSender(getQueueName()); // Get the Queue View early to avoid racing the delivery. 
final Queue queueView = getProxyToQueue(getQueueName()); assertNotNull(queueView); AmqpMessage message = new AmqpMessage(); message.setAbsoluteExpiryTime(System.currentTimeMillis() + 5000); // AET should override any TTL set message.setTimeToLive(10); message.setText("Test-Message"); sender.send(message); sender.close(); Wait.assertEquals(1, queueView::getMessageCount); // Now try and get the message AmqpReceiver receiver = session.createReceiver(getQueueName()); receiver.flow(1); AmqpMessage received = receiver.receive(5, TimeUnit.SECONDS); assertNotNull(received); Wait.assertEquals(0, queueView::getMessagesExpired); connection.close(); } @Test(timeout = 60000) public void testSendMessageThatIsNotExpiredUsingTimeToLive() throws Exception { AmqpClient client = createAmqpClient(); AmqpConnection connection = addConnection(client.connect()); AmqpSession session = connection.createSession(); AmqpSender sender = session.createSender(getQueueName()); // Get the Queue View early to avoid racing the delivery. final Queue queueView = getProxyToQueue(getQueueName()); assertNotNull(queueView); AmqpMessage message = new AmqpMessage(); message.setTimeToLive(5000); message.setText("Test-Message"); sender.send(message); sender.close(); Wait.assertEquals(1, queueView::getMessageCount); // Now try and get the message AmqpReceiver receiver = session.createReceiver(getQueueName()); receiver.flow(1); AmqpMessage received = receiver.receive(5, TimeUnit.SECONDS); assertNotNull(received); Wait.assertEquals(0, queueView::getMessagesExpired); connection.close(); } @Test(timeout = 60000) public void testSendMessageThenAllowToExpiredUsingTimeToLive() throws Exception { AmqpClient client = createAmqpClient(); AmqpConnection connection = addConnection(client.connect()); AmqpSession session = connection.createSession(); AmqpSender sender = session.createSender(getQueueName()); // Get the Queue View early to avoid racing the delivery. 
final Queue queueView = getProxyToQueue(getQueueName()); assertNotNull(queueView); AmqpMessage message = new AmqpMessage(); message.setTimeToLive(10); message.setText("Test-Message"); sender.send(message); sender.close(); Thread.sleep(50); Wait.assertEquals(0, queueView::getMessageCount); // Now try and get the message AmqpReceiver receiver = session.createReceiver(getQueueName()); receiver.flow(1); AmqpMessage received = receiver.receiveNoWait(); assertNull(received); Wait.assertEquals(1, queueView::getMessagesExpired); connection.close(); } @Test(timeout = 60000) public void testExpiredMessageLandsInDLQ() throws Throwable { internalSendExpiry(false); } @Test(timeout = 60000) public void testExpiredMessageLandsInDLQAndExistsAfterRestart() throws Throwable { internalSendExpiry(true); } public void internalSendExpiry(boolean restartServer) throws Throwable { AmqpClient client = createAmqpClient(); AmqpConnection connection = client.connect(); try { // Normal Session which won't create an TXN itself AmqpSession session = connection.createSession(); AmqpSender sender = session.createSender(getQueueName()); AmqpMessage message = new AmqpMessage(); message.setDurable(true); message.setText("Test-Message"); message.setDeliveryAnnotation("shouldDisappear", 1); message.setAbsoluteExpiryTime(System.currentTimeMillis() + 250); sender.send(message); Queue dlq = getProxyToQueue(getDeadLetterAddress()); assertTrue("Message not movied to DLQ", Wait.waitFor(() -> dlq.getMessageCount() > 0, 7000, 500)); connection.close(); if (restartServer) { server.stop(); server.start(); } connection = client.connect(); session = connection.createSession(); // Read all messages from the Queue AmqpReceiver receiver = session.createReceiver(getDeadLetterAddress()); receiver.flow(20); message = receiver.receive(5, TimeUnit.SECONDS); assertNotNull(message); assertEquals(getQueueName(), message.getMessageAnnotation("x-opt-ORIG-QUEUE")); assertNull(message.getDeliveryAnnotation("shouldDisappear")); 
assertNull(receiver.receiveNoWait()); } finally { connection.close(); } } @Test(timeout = 60000) public void testExpirationAfterDivert() throws Throwable { final String FORWARDING_ADDRESS = RandomUtil.randomString(); server.createQueue(new QueueConfiguration(FORWARDING_ADDRESS).setRoutingType(RoutingType.ANYCAST)); server.deployDivert(new DivertConfiguration() .setName(RandomUtil.randomString()) .setAddress(getQueueName()) .setForwardingAddress(FORWARDING_ADDRESS) .setTransformerConfiguration(new TransformerConfiguration(MyTransformer.class.getName())) .setExclusive(true)); AmqpClient client = createAmqpClient(); AmqpConnection connection = client.connect(); try { // Normal Session which won't create an TXN itself AmqpSession session = connection.createSession(); AmqpSender sender = session.createSender(getQueueName()); AmqpMessage message = new AmqpMessage(); message.setDurable(true); message.setText("Test-Message"); message.setDeliveryAnnotation("shouldDisappear", 1); message.setMessageAnnotation("x-opt-routing-type", (byte) 1); sender.send(message); Queue forward = getProxyToQueue(FORWARDING_ADDRESS); assertTrue("Message not diverted", Wait.waitFor(() -> forward.getMessageCount() > 0, 7000, 500)); Queue dlq = getProxyToQueue(getDeadLetterAddress()); assertTrue("Message not moved to DLQ", Wait.waitFor(() -> dlq.getMessageCount() > 0, 7000, 500)); connection.close(); connection = client.connect(); session = connection.createSession(); // Read all messages from the Queue AmqpReceiver receiver = session.createReceiver(getDeadLetterAddress()); receiver.flow(20); message = receiver.receive(5, TimeUnit.SECONDS); assertNotNull(message); assertEquals(FORWARDING_ADDRESS, message.getMessageAnnotation("x-opt-ORIG-QUEUE")); assertNull(message.getDeliveryAnnotation("shouldDisappear")); assertNull(receiver.receiveNoWait()); } finally { connection.close(); } } public static class MyTransformer implements Transformer { public MyTransformer() { } @Override public 
org.apache.activemq.artemis.api.core.Message transform(org.apache.activemq.artemis.api.core.Message message) {
         // Force the diverted copy to expire 250ms after transformation.
         return message.setExpiration(System.currentTimeMillis() + 250);
      }
   }

   @Test(timeout = 60000)
   public void testDLQdMessageCanBeRedeliveredMultipleTimes() throws Throwable {
      AmqpClient client = createAmqpClient();
      AmqpConnection connection = client.connect();

      try {
         AmqpSession session = connection.createSession();
         AmqpSender sender = session.createSender(getQueueName());

         AmqpMessage message = new AmqpMessage();
         message.setDurable(true);
         message.setTimeToLive(250);
         message.setText("Test-Message");
         message.setMessageId(UUID.randomUUID().toString());
         message.setApplicationProperty("key", "value");
         sender.send(message);

         Queue dlqView = getProxyToQueue(getDeadLetterAddress());
         assertTrue("Message not moved to DLQ", Wait.waitFor(() -> dlqView.getMessageCount() > 0, 7000, 200));

         // Read and Modify the message for redelivery repeatedly
         AmqpReceiver receiver = session.createReceiver(getDeadLetterAddress());
         receiver.flow(20);

         // The delivery count grows by one on each modified()-triggered redelivery;
         // loop replaces four identical copy-pasted receive/assert/modify stanzas.
         for (int expectedDeliveries = 0; expectedDeliveries <= 3; expectedDeliveries++) {
            message = receiver.receive(5, TimeUnit.SECONDS);
            assertNotNull(message);
            assertEquals(expectedDeliveries, message.getWrappedMessage().getDeliveryCount());

            // The final message is left unsettled, as in the original sequence.
            if (expectedDeliveries < 3) {
               message.modified(true, false);
            }
         }
      } finally {
         connection.close();
      }
   }

   @Test(timeout = 60000)
   public void testExpireThorughAddressSettings() throws Exception {
      testExpireThorughAddressSettings(false);
   }

   @Test(timeout = 60000)
   public void testExpireThorughAddressSettingsRebootServer() throws Exception {
      testExpireThorughAddressSettings(true);
   }
/**
 * Verifies that an expiry-delay configured via address settings is applied to AMQP
 * messages, survives (optionally) a broker restart, and that expired messages land
 * on the configured expiry address (here the DLQ address is reused as expiry address).
 *
 * @param reboot when {@code true} the broker is stopped and restarted after the sends,
 *               so the expiration check also covers journal reload.
 * @throws Exception if the test fails unexpectedly
 */
private void testExpireThorughAddressSettings(boolean reboot) throws Exception {
    // Address configuration: page on full, route dead/expired messages to the DLQ
    // address, and force a 1 second expiry delay on messages without their own TTL.
    AddressSettings addressSettings = new AddressSettings();
    addressSettings.setAddressFullMessagePolicy(AddressFullMessagePolicy.PAGE);
    addressSettings.setAutoCreateQueues(isAutoCreateQueues());
    addressSettings.setAutoCreateAddresses(isAutoCreateAddresses());
    addressSettings.setDeadLetterAddress(SimpleString.toSimpleString(getDeadLetterAddress()));
    addressSettings.setExpiryAddress(SimpleString.toSimpleString(getDeadLetterAddress()));
    addressSettings.setExpiryDelay(1000L);
    server.getAddressSettingsRepository().clear();
    server.getAddressSettingsRepository().addMatch("#", addressSettings);
    AmqpClient client = createAmqpClient();
    AmqpConnection connection = addConnection(client.connect());
    AmqpSession session = connection.createSession();
    AmqpSender sender = session.createSender(getQueueName());
    // Get the Queue View early to avoid racing the delivery.
    final Queue queueView = getProxyToQueue(getQueueName());
    assertNotNull(queueView);
    // One small text message and one large (500 KiB) message so both regular and
    // potentially-paged/large-message paths are exercised.
    AmqpMessage message = new AmqpMessage();
    message.setText("Test-Message");
    message.setDurable(true);
    message.setApplicationProperty("key1", "Value1");
    sender.send(message);
    message = new AmqpMessage();
    message.setBytes(new byte[500 * 1024]);
    message.setDurable(true);
    sender.send(message);
    sender.close();
    connection.close();
    if (reboot) {
        // Restart with a fast expiry scan so the expiration fires quickly after reload.
        server.stop();
        server.getConfiguration().setMessageExpiryScanPeriod(100);
        server.start();
    }
    final Queue serverQueue = server.locateQueue(getQueueName());
    // Every message still on the queue must carry a positive expiration, and the
    // AMQP-level expiration must agree with the core-message conversion.
    try (LinkedListIterator<MessageReference> referenceIterator = serverQueue.iterator()) {
        while (referenceIterator.hasNext()) {
            MessageReference ref = referenceIterator.next();
            Assert.assertEquals(ref.getMessage().getExpiration(), ref.getMessage().toCore().getExpiration());
            Assert.assertTrue(ref.getMessage().getExpiration() > 0);
            Assert.assertTrue(ref.getMessage().toCore().getExpiration() > 0);
        }
    }
    // Both messages must eventually expire onto the DLQ/expiry address.
    final Queue dlqView = getProxyToQueue(getDeadLetterAddress());
    Wait.assertEquals(2, dlqView::getMessageCount);
}

/**
 * Verifies that an absolute expiration computed from a client-supplied TTL is
 * preserved exactly across a broker restart (i.e. the broker must NOT recompute
 * the expiration from the TTL on journal reload, which would shift it forward).
 *
 * @throws Exception if the test fails unexpectedly
 */
@Test
public void testPreserveExpirationOnTTL() throws Exception {
    // Address configuration
    AddressSettings addressSettings = new AddressSettings();
    addressSettings.setAddressFullMessagePolicy(AddressFullMessagePolicy.PAGE);
    addressSettings.setAutoCreateQueues(isAutoCreateQueues());
    addressSettings.setAutoCreateAddresses(isAutoCreateAddresses());
    addressSettings.setDeadLetterAddress(SimpleString.toSimpleString(getDeadLetterAddress()));
    addressSettings.setExpiryAddress(SimpleString.toSimpleString(getDeadLetterAddress()));
    addressSettings.setExpiryDelay(1000L);
    server.getAddressSettingsRepository().clear();
    server.getAddressSettingsRepository().addMatch("#", addressSettings);
    AmqpClient client = createAmqpClient();
    AmqpConnection connection = addConnection(client.connect());
    AmqpSession session = connection.createSession();
    AmqpSender sender = session.createSender(getQueueName());
    // Get the Queue View early to avoid racing the delivery.
    final Queue queueView = getProxyToQueue(getQueueName());
    assertNotNull(queueView);
    // Two durable messages with a 1 hour TTL, tagged with "id" so the expiration
    // recorded before the restart can be matched to the same message afterwards.
    AmqpMessage message = new AmqpMessage();
    message.setText("Test-Message");
    message.setDurable(true);
    message.setTimeToLive(3600 * 1000);
    message.setApplicationProperty("id", "0");
    sender.send(message);
    message = new AmqpMessage();
    message.setBytes(new byte[500 * 1024]);
    message.setDurable(true);
    message.setTimeToLive(3600 * 1000);
    message.setApplicationProperty("id", "1");
    sender.send(message);
    Wait.assertEquals(2, queueView::getMessageCount);
    // Snapshot each message's absolute expiration, keyed by its "id" property.
    LinkedListIterator<MessageReference> linkedListIterator = queueView.iterator();
    HashMap<String, Long> dataSet = new HashMap<>();
    int count = 0;
    while (linkedListIterator.hasNext()) {
        count++;
        MessageReference ref = linkedListIterator.next();
        String idUsed = ref.getMessage().getStringProperty("id");
        dataSet.put(idUsed, ref.getMessage().getExpiration());
    }
    Assert.assertEquals(2, count);
    linkedListIterator.close();
    server.stop();
    Thread.sleep(500); // we need some time passing, as the TTL can't be recalculated here
    server.getConfiguration().setMessageExpiryScanPeriod(100);
    server.start();
    final Queue queueViewAfterRestart = getProxyToQueue(getQueueName());
    Wait.assertEquals(2, queueViewAfterRestart::getMessageCount);
    Wait.assertTrue(server::isActive);
    // After the restart each message must report exactly the expiration recorded
    // before the restart — any recalculation from TTL would produce a later value.
    linkedListIterator = queueViewAfterRestart.iterator();
    count = 0;
    while (linkedListIterator.hasNext()) {
        count++;
        MessageReference ref = linkedListIterator.next();
        String idUsed = ref.getMessage().getStringProperty("id");
        long originalExpiration = dataSet.get(idUsed);
        System.out.println("original Expiration = " + originalExpiration + " while this expiration = " + ref.getMessage().getExpiration());
        Assert.assertEquals(originalExpiration, ref.getMessage().getExpiration());
    }
    Assert.assertEquals(2, count);
    linkedListIterator.close();
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.code.appengine.imageio.metadata;

import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.MissingResourceException;
import java.util.ResourceBundle;

import org.apache.harmony.x.imageio.internal.nls.Messages;

import com.google.code.appengine.imageio.ImageTypeSpecifier;

/**
 * Skeletal implementation of {@link IIOMetadataFormat}. Subclasses describe a
 * metadata document format as a tree of named elements, each with an optional
 * set of attributes and an optional "object value", registered through the
 * protected {@code addElement}/{@code addAttribute}/{@code addObjectValue}
 * methods. Human-readable descriptions are looked up in a {@link ResourceBundle}
 * whose base name defaults to {@code <subclass name> + "Resources"}.
 *
 * <p>This class is not thread-safe for concurrent structural mutation; the
 * expected usage is to build the format tree in the subclass constructor.
 */
public abstract class IIOMetadataFormatImpl implements IIOMetadataFormat {
    @SuppressWarnings({"ConstantDeclaredInAbstractClass"})
    public static final String standardMetadataFormatName = "javax_imageio_1.0";

    // Lazily created singleton for the standard metadata format; guarded by the
    // class lock in getStandardFormatInstance() so racing first calls are safe.
    @SuppressWarnings({"StaticNonFinalField"})
    private static IIOMetadataFormatImpl standardFormat;

    /** Name of the root element; fixed at construction time. */
    private final String rootName;

    /** All known elements, keyed by element name (the root included). */
    private final HashMap<String, Element> elementHash = new HashMap<String, Element>();

    /** Base name of the ResourceBundle used for element/attribute descriptions. */
    private String resourceBaseName = getClass().getName() + "Resources";

    /**
     * Creates a format whose root element uses a non-repeating child policy.
     *
     * @param rootName    name of the root element; must not be {@code null}
     * @param childPolicy one of the {@code CHILD_POLICY_*} constants, excluding
     *                    {@code CHILD_POLICY_REPEAT}
     * @throws IllegalArgumentException if {@code rootName} is {@code null} or the
     *         policy is out of range
     */
    public IIOMetadataFormatImpl(String rootName, int childPolicy) {
        if (rootName == null) {
            throw new IllegalArgumentException(Messages.getString("imageio.63"));
        }
        if (
                childPolicy < CHILD_POLICY_EMPTY ||
                childPolicy > CHILD_POLICY_MAX ||
                childPolicy == CHILD_POLICY_REPEAT
        ) {
            throw new IllegalArgumentException(Messages.getString("imageio.64"));
        }

        this.rootName = rootName;
        Element root = new Element();
        root.name = rootName;
        root.childPolicy = childPolicy;
        elementHash.put(rootName, root);
    }

    /**
     * Creates a format whose root element uses {@code CHILD_POLICY_REPEAT} with
     * the given child-count bounds.
     *
     * @param rootName    name of the root element; must not be {@code null}
     * @param minChildren minimum number of children; must be {@code >= 0}
     * @param maxChildren maximum number of children; must be {@code >= minChildren}
     * @throws IllegalArgumentException if an argument is out of range
     */
    public IIOMetadataFormatImpl(String rootName, int minChildren, int maxChildren) {
        if (rootName == null) {
            throw new IllegalArgumentException(Messages.getString("imageio.63"));
        }
        if (minChildren < 0) {
            throw new IllegalArgumentException(Messages.getString("imageio.65"));
        }
        if (minChildren > maxChildren) {
            throw new IllegalArgumentException(Messages.getString("imageio.66"));
        }

        this.rootName = rootName;
        Element root = new Element();
        root.name = rootName;
        root.minChildren = minChildren;
        root.maxChildren = maxChildren;
        root.childPolicy = CHILD_POLICY_REPEAT;
        elementHash.put(rootName, root);
    }

    @SuppressWarnings({"AbstractMethodOverridesAbstractMethod"})
    public abstract boolean canNodeAppear(String elementName, ImageTypeSpecifier imageType);

    /**
     * Adds a list-valued attribute ({@code VALUE_LIST}) with the given length bounds.
     *
     * @throws IllegalArgumentException if {@code attrName} is {@code null}, the data
     *         type is out of range, or the list bounds are invalid
     */
    protected void addAttribute(
            String elementName, String attrName, int dataType,
            boolean required, int listMinLength, int listMaxLength
    ) {
        if (attrName == null) {
            throw new IllegalArgumentException(Messages.getString("imageio.67"));
        }
        if (dataType < DATATYPE_STRING || dataType > DATATYPE_DOUBLE) {
            throw new IllegalArgumentException(Messages.getString("imageio.68"));
        }
        if (listMinLength < 0 || listMinLength > listMaxLength) {
            throw new IllegalArgumentException(Messages.getString("imageio.69"));
        }

        Element element = findElement(elementName);
        Attlist attr = new Attlist();
        attr.name = attrName;
        attr.dataType = dataType;
        attr.required = required;
        attr.listMinLength = listMinLength;
        attr.listMaxLength = listMaxLength;
        attr.valueType = VALUE_LIST;
        element.attributes.put(attrName, attr);
    }

    /**
     * Adds an attribute that may take any value ({@code VALUE_ARBITRARY}).
     *
     * @throws IllegalArgumentException if {@code attrName} is {@code null} or the
     *         data type is out of range
     */
    protected void addAttribute(
            String elementName, String attrName, int dataType,
            boolean required, String defaultValue
    ) {
        if (attrName == null) {
            throw new IllegalArgumentException(Messages.getString("imageio.67"));
        }
        if (dataType < DATATYPE_STRING || dataType > DATATYPE_DOUBLE) {
            throw new IllegalArgumentException(Messages.getString("imageio.68"));
        }

        Element element = findElement(elementName);
        Attlist attr = new Attlist();
        attr.name = attrName;
        attr.dataType = dataType;
        attr.required = required;
        attr.defaultValue = defaultValue;
        attr.valueType = VALUE_ARBITRARY;
        element.attributes.put(attrName, attr);
    }

    /**
     * Adds an enumerated attribute ({@code VALUE_ENUMERATION}).
     *
     * @throws IllegalArgumentException if {@code attrName} is {@code null}, the data
     *         type is out of range, or {@code enumeratedValues} is null/empty or
     *         contains a null or non-String entry
     */
    protected void addAttribute(
            String elementName, String attrName, int dataType,
            boolean required, String defaultValue, List<String> enumeratedValues
    ) {
        if (attrName == null) {
            throw new IllegalArgumentException(Messages.getString("imageio.67"));
        }
        if (dataType < DATATYPE_STRING || dataType > DATATYPE_DOUBLE) {
            throw new IllegalArgumentException(Messages.getString("imageio.68"));
        }
        if (enumeratedValues == null || enumeratedValues.isEmpty()) {
            throw new IllegalArgumentException(Messages.getString("imageio.6A"));
        }
        // A raw caller could have smuggled non-String elements in; the iteration
        // forces the implicit cast so the ClassCastException can be translated.
        try {
            for (String enumeratedValue : enumeratedValues) {
                if (enumeratedValue == null) {
                    throw new IllegalArgumentException(Messages.getString("imageio.6B"));
                }
            }
        } catch (ClassCastException e) {
            throw new IllegalArgumentException(Messages.getString("imageio.6C"));
        }

        Element element = findElement(elementName);
        Attlist attr = new Attlist();
        attr.name = attrName;
        attr.dataType = dataType;
        attr.required = required;
        attr.defaultValue = defaultValue;
        attr.enumeratedValues = enumeratedValues;
        attr.valueType = VALUE_ENUMERATION;
        element.attributes.put(attrName, attr);
    }

    /**
     * Adds a range-valued attribute ({@code VALUE_RANGE}); the inclusive flags are
     * folded into the value type via the {@code VALUE_RANGE_*_INCLUSIVE_MASK} bits.
     *
     * @throws IllegalArgumentException if {@code attrName} is {@code null} or the
     *         data type is out of range
     */
    protected void addAttribute(
            String elementName, String attrName, int dataType, boolean required,
            String defaultValue, String minValue, String maxValue,
            boolean minInclusive, boolean maxInclusive
    ) {
        if (attrName == null) {
            throw new IllegalArgumentException(Messages.getString("imageio.67"));
        }
        if (dataType < DATATYPE_STRING || dataType > DATATYPE_DOUBLE) {
            throw new IllegalArgumentException(Messages.getString("imageio.68"));
        }

        Element element = findElement(elementName);
        Attlist attr = new Attlist();
        attr.name = attrName;
        attr.dataType = dataType;
        attr.required = required;
        attr.defaultValue = defaultValue;
        attr.minValue = minValue;
        attr.maxValue = maxValue;
        attr.minInclusive = minInclusive;
        attr.maxInclusive = maxInclusive;
        attr.valueType = VALUE_RANGE;
        attr.valueType |= minInclusive ? VALUE_RANGE_MIN_INCLUSIVE_MASK : 0;
        attr.valueType |= maxInclusive ? VALUE_RANGE_MAX_INCLUSIVE_MASK : 0;
        element.attributes.put(attrName, attr);
    }

    /**
     * Convenience for a boolean attribute: an enumeration over "TRUE"/"FALSE" with
     * an optional default.
     */
    protected void addBooleanAttribute(
            String elementName, String attrName,
            boolean hasDefaultValue, boolean defaultValue
    ) {
        String defaultVal = hasDefaultValue ? (defaultValue ? "TRUE" : "FALSE") : null;
        ArrayList<String> values = new ArrayList<String>(2);
        values.add("TRUE");
        values.add("FALSE");
        addAttribute(elementName, attrName, DATATYPE_BOOLEAN, true, defaultVal, values);
    }

    /**
     * Registers an already-defined element as an additional legal child of
     * {@code parentName}.
     *
     * @throws IllegalArgumentException if either element is unknown
     */
    protected void addChildElement(String elementName, String parentName) {
        Element parent = findElement(parentName);
        Element element = findElement(elementName);
        parent.children.add(element.name);
    }

    /**
     * Defines a new element with a non-repeating child policy and registers it as
     * a child of {@code parentName}.
     *
     * @throws IllegalArgumentException if the policy is out of range or the parent
     *         is unknown
     */
    protected void addElement(String elementName, String parentName, int childPolicy) {
        if (
                childPolicy < CHILD_POLICY_EMPTY ||
                childPolicy > CHILD_POLICY_MAX ||
                childPolicy == CHILD_POLICY_REPEAT
        ) {
            throw new IllegalArgumentException(Messages.getString("imageio.64"));
        }

        Element parent = findElement(parentName);
        Element element = new Element();
        element.name = elementName;
        element.childPolicy = childPolicy;
        elementHash.put(elementName, element);
        parent.children.add(elementName);
    }

    /**
     * Defines a new element with {@code CHILD_POLICY_REPEAT} and the given
     * child-count bounds, and registers it as a child of {@code parentName}.
     *
     * @throws IllegalArgumentException if the bounds are invalid or the parent is
     *         unknown
     */
    protected void addElement(
            String elementName, String parentName, int minChildren, int maxChildren
    ) {
        if (minChildren < 0) {
            throw new IllegalArgumentException(Messages.getString("imageio.65"));
        }
        if (minChildren > maxChildren) {
            throw new IllegalArgumentException(Messages.getString("imageio.66"));
        }

        Element parent = findElement(parentName);
        Element element = new Element();
        element.name = elementName;
        element.childPolicy = CHILD_POLICY_REPEAT;
        element.minChildren = minChildren;
        element.maxChildren = maxChildren;
        elementHash.put(elementName, element);
        parent.children.add(elementName);
    }

    /** Attaches a list-typed object value ({@code VALUE_LIST}) to an element. */
    protected void addObjectValue(
            String elementName, Class<?> classType, int arrayMinLength, int arrayMaxLength
    ) {
        Element element = findElement(elementName);
        ObjectValue<Object> objVal = new ObjectValue<Object>();
        // The caller supplies an untyped class literal for an array/list value;
        // the cast only widens the stored token and is never used for checking here.
        @SuppressWarnings("unchecked")
        Class<Object> type = (Class<Object>) classType;
        objVal.classType = type;
        objVal.arrayMaxLength = arrayMaxLength;
        objVal.arrayMinLength = arrayMinLength;
        objVal.valueType = VALUE_LIST;
        element.objectValue = objVal;
    }

    /** Attaches an arbitrary object value ({@code VALUE_ARBITRARY}) to an element. */
    protected <T> void addObjectValue(
            String elementName, Class<T> classType, boolean required, T defaultValue
    ) {
        // note: required is an unused parameter (kept for API compatibility)
        Element element = findElement(elementName);
        ObjectValue<T> objVal = new ObjectValue<T>();
        objVal.classType = classType;
        objVal.defaultValue = defaultValue;
        objVal.valueType = VALUE_ARBITRARY;
        element.objectValue = objVal;
    }

    /**
     * Attaches an enumerated object value ({@code VALUE_ENUMERATION}) to an element.
     *
     * @throws IllegalArgumentException if {@code enumeratedValues} is null/empty or
     *         contains a null or badly-typed entry
     */
    protected <T> void addObjectValue(
            String elementName, Class<T> classType, boolean required,
            T defaultValue, List<? extends T> enumeratedValues
    ) {
        // note: required is an unused parameter (kept for API compatibility)
        if (enumeratedValues == null || enumeratedValues.isEmpty()) {
            throw new IllegalArgumentException(Messages.getString("imageio.6A"));
        }
        try {
            for (T enumeratedValue : enumeratedValues) {
                if (enumeratedValue == null) {
                    throw new IllegalArgumentException(Messages.getString("imageio.6B"));
                }
            }
        } catch (ClassCastException e) {
            throw new IllegalArgumentException(Messages.getString("imageio.6D"));
        }

        Element element = findElement(elementName);
        ObjectValue<T> objVal = new ObjectValue<T>();
        objVal.classType = classType;
        objVal.defaultValue = defaultValue;
        objVal.enumeratedValues = enumeratedValues;
        objVal.valueType = VALUE_ENUMERATION;
        element.objectValue = objVal;
    }

    /**
     * Attaches a range-constrained object value ({@code VALUE_RANGE}) to an
     * element; inclusive bounds set the corresponding mask bits.
     */
    protected <T extends Object & Comparable<? super T>> void addObjectValue(
            String elementName, Class<T> classType, T defaultValue,
            Comparable<? super T> minValue, Comparable<? super T> maxValue,
            boolean minInclusive, boolean maxInclusive
    ) {
        Element element = findElement(elementName);
        ObjectValue<T> objVal = new ObjectValue<T>();
        objVal.classType = classType;
        objVal.defaultValue = defaultValue;
        objVal.minValue = minValue;
        objVal.maxValue = maxValue;
        objVal.minInclusive = minInclusive;
        objVal.maxInclusive = maxInclusive;
        objVal.valueType = VALUE_RANGE;
        objVal.valueType |= minInclusive ? VALUE_RANGE_MIN_INCLUSIVE_MASK : 0;
        objVal.valueType |= maxInclusive ? VALUE_RANGE_MAX_INCLUSIVE_MASK : 0;
        element.objectValue = objVal;
    }

    public int getAttributeDataType(String elementName, String attrName) {
        Attlist attr = findAttribute(elementName, attrName);
        return attr.dataType;
    }

    public String getAttributeDefaultValue(String elementName, String attrName) {
        Attlist attr = findAttribute(elementName, attrName);
        return attr.defaultValue;
    }

    public String getAttributeDescription(String elementName, String attrName, Locale locale) {
        // findAttribute validates both names; the description itself comes from the bundle.
        findAttribute(elementName, attrName);
        return getResourceString(elementName + "/" + attrName, locale);
    }

    public String[] getAttributeEnumerations(String elementName, String attrName) {
        Attlist attr = findAttribute(elementName, attrName);
        if (attr.valueType != VALUE_ENUMERATION) {
            throw new IllegalArgumentException(Messages.getString("imageio.6E"));
        }
        return attr.enumeratedValues.toArray(new String[attr.enumeratedValues.size()]);
    }

    public int getAttributeListMaxLength(String elementName, String attrName) {
        Attlist attr = findAttribute(elementName, attrName);
        if (attr.valueType != VALUE_LIST) {
            throw new IllegalArgumentException(Messages.getString("imageio.6F"));
        }
        return attr.listMaxLength;
    }

    public int getAttributeListMinLength(String elementName, String attrName) {
        Attlist attr = findAttribute(elementName, attrName);
        if (attr.valueType != VALUE_LIST) {
            throw new IllegalArgumentException(Messages.getString("imageio.6F"));
        }
        return attr.listMinLength;
    }

    public String getAttributeMaxValue(String elementName, String attrName) {
        Attlist attr = findAttribute(elementName, attrName);
        // Mask check rather than equality: the inclusive bits may also be set.
        if ((attr.valueType & VALUE_RANGE) == 0) {
            throw new IllegalArgumentException(Messages.getString("imageio.70"));
        }
        return attr.maxValue;
    }

    public String getAttributeMinValue(String elementName, String attrName) {
        Attlist attr = findAttribute(elementName, attrName);
        if ((attr.valueType & VALUE_RANGE) == 0) {
            throw new IllegalArgumentException(Messages.getString("imageio.70"));
        }
        return attr.minValue;
    }

    public String[] getAttributeNames(String elementName) {
        Element element = findElement(elementName);
        return element.attributes.keySet().toArray(new String[element.attributes.size()]);
    }

    public int getAttributeValueType(String elementName, String attrName) {
        Attlist attr = findAttribute(elementName, attrName);
        return attr.valueType;
    }

    public String[] getChildNames(String elementName) {
        Element element = findElement(elementName);
        if (element.childPolicy == CHILD_POLICY_EMPTY) {
            // Element cannot have children
            return null;
        }
        return element.children.toArray(new String[element.children.size()]);
    }

    public int getChildPolicy(String elementName) {
        Element element = findElement(elementName);
        return element.childPolicy;
    }

    public String getElementDescription(String elementName, Locale locale) {
        findElement(elementName); // Check if there is such element
        return getResourceString(elementName, locale);
    }

    public int getElementMaxChildren(String elementName) {
        Element element = findElement(elementName);
        if (element.childPolicy != CHILD_POLICY_REPEAT) {
            throw new IllegalArgumentException(Messages.getString("imageio.71"));
        }
        return element.maxChildren;
    }

    public int getElementMinChildren(String elementName) {
        Element element = findElement(elementName);
        if (element.childPolicy != CHILD_POLICY_REPEAT) {
            throw new IllegalArgumentException(Messages.getString("imageio.71"));
        }
        return element.minChildren;
    }

    public int getObjectArrayMaxLength(String elementName) {
        Element element = findElement(elementName);
        ObjectValue<?> v = element.objectValue;
        if (v == null || v.valueType != VALUE_LIST) {
            throw new IllegalArgumentException(Messages.getString("imageio.72"));
        }
        return v.arrayMaxLength;
    }

    public int getObjectArrayMinLength(String elementName) {
        Element element = findElement(elementName);
        ObjectValue<?> v = element.objectValue;
        if (v == null || v.valueType != VALUE_LIST) {
            throw new IllegalArgumentException(Messages.getString("imageio.72"));
        }
        return v.arrayMinLength;
    }

    public Class<?> getObjectClass(String elementName) {
        ObjectValue<?> v = findObjectValue(elementName);
        return v.classType;
    }

    public Object getObjectDefaultValue(String elementName) {
        ObjectValue<?> v = findObjectValue(elementName);
        return v.defaultValue;
    }

    public Object[] getObjectEnumerations(String elementName) {
        Element element = findElement(elementName);
        ObjectValue<?> v = element.objectValue;
        if (v == null || v.valueType != VALUE_ENUMERATION) {
            throw new IllegalArgumentException(Messages.getString("imageio.73"));
        }
        return v.enumeratedValues.toArray();
    }

    public Comparable<?> getObjectMaxValue(String elementName) {
        Element element = findElement(elementName);
        ObjectValue<?> v = element.objectValue;
        if (v == null || (v.valueType & VALUE_RANGE) == 0) {
            throw new IllegalArgumentException(Messages.getString("imageio.74"));
        }
        return v.maxValue;
    }

    public Comparable<?> getObjectMinValue(String elementName) {
        Element element = findElement(elementName);
        ObjectValue<?> v = element.objectValue;
        if (v == null || (v.valueType & VALUE_RANGE) == 0) {
            throw new IllegalArgumentException(Messages.getString("imageio.74"));
        }
        return v.minValue;
    }

    public int getObjectValueType(String elementName) {
        Element element = findElement(elementName);
        if (element.objectValue == null) {
            return VALUE_NONE;
        }
        return element.objectValue.valueType;
    }

    protected String getResourceBaseName() {
        return resourceBaseName;
    }

    public String getRootName() {
        return rootName;
    }

    /**
     * Returns the shared instance describing the standard metadata format.
     * Synchronized so the lazily-created singleton is published safely under
     * concurrent first calls (the original unsynchronized check-then-act was racy).
     */
    public static synchronized IIOMetadataFormat getStandardFormatInstance() {
        if (standardFormat == null) {
            standardFormat = new IIOStandardMetadataFormat();
        }
        return standardFormat;
    }

    public boolean isAttributeRequired(String elementName, String attrName) {
        return findAttribute(elementName, attrName).required;
    }

    protected void removeAttribute(String elementName, String attrName) {
        Element element = findElement(elementName);
        element.attributes.remove(attrName);
    }

    /**
     * Removes an element definition and all references to it from other elements'
     * child lists. Unknown names are ignored (no exception), matching the spec.
     */
    protected void removeElement(String elementName) {
        Element element;
        if ((element = elementHash.get(elementName)) != null) {
            elementHash.remove(elementName);
            for (Element e : elementHash.values()) {
                e.children.remove(element.name);
            }
        }
    }

    protected void removeObjectValue(String elementName) {
        Element element = findElement(elementName);
        element.objectValue = null;
    }

    protected void setResourceBaseName(String resourceBaseName) {
        if (resourceBaseName == null) {
            throw new IllegalArgumentException(Messages.getString("imageio.75"));
        }
        this.resourceBaseName = resourceBaseName;
    }

    /**
     * Mutable descriptor of one element of the format tree.
     * Static nested: it never references the enclosing format instance, so there
     * is no need to carry the hidden outer-instance pointer an inner class would.
     */
    @SuppressWarnings({"ClassWithoutConstructor"})
    private static class Element {
        String name;
        ArrayList<String> children = new ArrayList<String>();
        HashMap<String, Attlist> attributes = new HashMap<String, Attlist>();
        int minChildren;
        int maxChildren;
        int childPolicy;
        ObjectValue<?> objectValue;
    }

    /** Mutable descriptor of one attribute (name, type, constraints). */
    @SuppressWarnings({"ClassWithoutConstructor"})
    private static class Attlist {
        String name;
        int dataType;
        boolean required;
        int listMinLength;
        int listMaxLength;
        String defaultValue;
        List<String> enumeratedValues;
        String minValue;
        String maxValue;
        boolean minInclusive;
        boolean maxInclusive;
        int valueType;
    }

    /** Mutable descriptor of an element's object value, parameterized by its type. */
    @SuppressWarnings({"ClassWithoutConstructor"})
    private static class ObjectValue<T> {
        Class<T> classType;
        int arrayMinLength;
        int arrayMaxLength;
        T defaultValue;
        List<? extends T> enumeratedValues;
        Comparable<? super T> minValue;
        Comparable<? super T> maxValue;
        boolean minInclusive;
        boolean maxInclusive;
        int valueType;
    }

    /** Looks up an element by name, translating a miss into IllegalArgumentException. */
    private Element findElement(String name) {
        Element element;
        if ((element = elementHash.get(name)) == null) {
            throw new IllegalArgumentException(Messages.getString("imageio.8C", name));
        }
        return element;
    }

    /** Looks up an attribute of an element, translating a miss into IllegalArgumentException. */
    private Attlist findAttribute(String elementName, String attributeName) {
        Element element = findElement(elementName);
        Attlist attribute;
        if ((attribute = element.attributes.get(attributeName)) == null) {
            throw new IllegalArgumentException(Messages.getString("imageio.8D", attributeName));
        }
        return attribute;
    }

    /** Returns an element's object value, or throws if the element has none. */
    private ObjectValue<?> findObjectValue(String elementName) {
        Element element = findElement(elementName);
        ObjectValue<?> v = element.objectValue;
        if (v == null) {
            throw new IllegalArgumentException(Messages.getString("imageio.76"));
        }
        return v;
    }

    /**
     * Resolves a description string from the resource bundle, trying the context
     * class loader first and falling back to the default lookup. Returns
     * {@code null} when the bundle or key is missing, or the value is not a String.
     */
    private String getResourceString(String key, Locale locale) {
        if (locale == null) {
            locale = Locale.getDefault();
        }

        // Get the context class loader and try to locate the bundle with it first
        ClassLoader contextClassloader = AccessController.doPrivileged(
                new PrivilegedAction<ClassLoader>() {
                    public ClassLoader run() {
                        return Thread.currentThread().getContextClassLoader();
                    }
                });

        // Now try to get the resource bundle
        ResourceBundle rb;
        try {
            rb = ResourceBundle.getBundle(resourceBaseName, locale, contextClassloader);
        } catch (MissingResourceException e) {
            try {
                rb = ResourceBundle.getBundle(resourceBaseName, locale);
            } catch (MissingResourceException e1) {
                return null;
            }
        }

        try {
            return rb.getString(key);
        } catch (MissingResourceException e) {
            return null;
        } catch (ClassCastException e) {
            return null; // Not a string resource
        }
    }
}
/*
 * Copyright 2013 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.jimfs;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.jimfs.SystemJimfsFileSystemProvider.FILE_SYSTEM_KEY;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.ProviderNotFoundException;
import java.nio.file.spi.FileSystemProvider;
import java.util.ServiceConfigurationError;
import java.util.ServiceLoader;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;

/**
 * Static factory methods for creating new Jimfs file systems. File systems may either be created
 * with a basic configuration matching the current operating system or by providing a specific
 * {@link Configuration}. Basic {@linkplain Configuration#unix() UNIX}, {@linkplain
 * Configuration#osX() Mac OS X} and {@linkplain Configuration#windows() Windows} configurations are
 * provided.
 *
 * <p>Examples:
 *
 * <pre>
 *   // A file system with a configuration similar to the current OS
 *   FileSystem fileSystem = Jimfs.newFileSystem();
 *
 *   // A file system with paths and behavior generally matching that of Windows
 *   FileSystem windows = Jimfs.newFileSystem(Configuration.windows());  </pre>
 *
 * <p>Additionally, various behavior of the file system can be customized by creating a custom
 * {@link Configuration}. A modified version of one of the existing default configurations can be
 * created using {@link Configuration#toBuilder()} or a new configuration can be created from
 * scratch with {@link Configuration#builder(PathType)}. See {@link Configuration.Builder} for what
 * can be configured.
 *
 * <p>Examples:
 *
 * <pre>
 *   // Modify the default UNIX configuration
 *   FileSystem fileSystem = Jimfs.newFileSystem(Configuration.unix()
 *       .toBuilder()
 *       .setAttributeViews("basic", "owner", "posix", "unix")
 *       .setWorkingDirectory("/home/user")
 *       .setBlockSize(4096)
 *       .build());
 *
 *   // Create a custom configuration
 *   Configuration config = Configuration.builder(PathType.windows())
 *       .setRoots("C:\\", "D:\\", "E:\\")
 *       // ...
 *       .build();  </pre>
 *
 * @author Colin Decker
 */
public final class Jimfs {

  /** The URI scheme for the Jimfs file system ("jimfs"). */
  public static final String URI_SCHEME = "jimfs";

  private static final Logger LOGGER = Logger.getLogger(Jimfs.class.getName());

  private Jimfs() {}

  /**
   * Creates a new in-memory file system with a {@linkplain Configuration#forCurrentPlatform()
   * default configuration} appropriate to the current operating system.
   *
   * <p>More specifically, if the operating system is Windows, {@link Configuration#windows()} is
   * used; if the operating system is Mac OS X, {@link Configuration#osX()} is used; otherwise,
   * {@link Configuration#unix()} is used.
   */
  public static FileSystem newFileSystem() {
    return newFileSystem(newRandomFileSystemName());
  }

  /**
   * Creates a new in-memory file system with a {@linkplain Configuration#forCurrentPlatform()
   * default configuration} appropriate to the current operating system.
   *
   * <p>More specifically, if the operating system is Windows, {@link Configuration#windows()} is
   * used; if the operating system is Mac OS X, {@link Configuration#osX()} is used; otherwise,
   * {@link Configuration#unix()} is used.
   *
   * <p>The returned file system uses the given name as the host part of its URI and the URIs of
   * paths in the file system. For example, given the name {@code my-file-system}, the file system's
   * URI will be {@code jimfs://my-file-system} and the URI of the path {@code /foo/bar} will be
   * {@code jimfs://my-file-system/foo/bar}.
   */
  public static FileSystem newFileSystem(String name) {
    return newFileSystem(name, Configuration.forCurrentPlatform());
  }

  /** Creates a new in-memory file system with the given configuration. */
  public static FileSystem newFileSystem(Configuration configuration) {
    return newFileSystem(newRandomFileSystemName(), configuration);
  }

  /**
   * Creates a new in-memory file system with the given configuration.
   *
   * <p>The returned file system uses the given name as the host part of its URI and the URIs of
   * paths in the file system. For example, given the name {@code my-file-system}, the file system's
   * URI will be {@code jimfs://my-file-system} and the URI of the path {@code /foo/bar} will be
   * {@code jimfs://my-file-system/foo/bar}.
   */
  public static FileSystem newFileSystem(String name, Configuration configuration) {
    try {
      URI uri = new URI(URI_SCHEME, name, null, null);
      return newFileSystem(uri, configuration);
    } catch (URISyntaxException e) {
      throw new IllegalArgumentException(e);
    }
  }

  @VisibleForTesting
  static FileSystem newFileSystem(URI uri, Configuration config) {
    checkArgument(
        URI_SCHEME.equals(uri.getScheme()), "uri (%s) must have scheme %s", uri, URI_SCHEME);

    try {
      // Create the FileSystem. It uses JimfsFileSystemProvider as its provider, as that is
      // the provider that actually implements the operations needed for Files methods to work.
      JimfsFileSystem fileSystem =
          JimfsFileSystems.newFileSystem(JimfsFileSystemProvider.instance(), uri, config);

      /*
       * Now, call FileSystems.newFileSystem, passing it the FileSystem we just created. This
       * allows the system-loaded SystemJimfsFileSystemProvider instance to cache the FileSystem
       * so that methods like Paths.get(URI) work.
       * We do it in this awkward way to avoid issues when the classes in the API (this class
       * and Configuration, for example) are loaded by a different classloader than the one that
       * loads SystemJimfsFileSystemProvider using ServiceLoader. See
       * https://github.com/google/jimfs/issues/18 for gory details.
       */
      try {
        ImmutableMap<String, ?> env = ImmutableMap.of(FILE_SYSTEM_KEY, fileSystem);
        FileSystems.newFileSystem(uri, env, SystemJimfsFileSystemProvider.class.getClassLoader());
      } catch (ProviderNotFoundException | ServiceConfigurationError ignore) {
        // See the similar catch block below for why we ignore this.
        // We log there rather than here so that there's only typically one such message per VM.
      }

      return fileSystem;
    } catch (IOException e) {
      throw new AssertionError(e);
    }
  }

  /**
   * The system-loaded instance of {@code SystemJimfsFileSystemProvider}, or {@code null} if it
   * could not be found or loaded.
   */
  @NullableDecl static final FileSystemProvider systemProvider = getSystemJimfsProvider();

  /**
   * Returns the system-loaded instance of {@code SystemJimfsFileSystemProvider} or {@code null} if
   * it could not be found or loaded.
   *
   * <p>Like {@link FileSystems#newFileSystem(URI, Map, ClassLoader)}, this method first looks in
   * the list of {@linkplain FileSystemProvider#installedProviders() installed providers} and if not
   * found there, attempts to load it from the {@code ClassLoader} with {@link ServiceLoader}.
   *
   * <p>The idea is that this method should return an instance of the same class (i.e. loaded by the
   * same class loader) as the class whose static cache a {@code JimfsFileSystem} instance will be
   * placed in when {@code FileSystems.newFileSystem} is called in {@code Jimfs.newFileSystem}.
   */
  @NullableDecl
  private static FileSystemProvider getSystemJimfsProvider() {
    try {
      for (FileSystemProvider provider : FileSystemProvider.installedProviders()) {
        if (provider.getScheme().equals(URI_SCHEME)) {
          return provider;
        }
      }

      /*
       * Jimfs.newFileSystem passes SystemJimfsFileSystemProvider.class.getClassLoader() to
       * FileSystems.newFileSystem so that it will fall back to loading from that classloader if
       * the provider isn't found in the installed providers. So do the same fallback here to ensure
       * that we can remove file systems from the static cache on SystemJimfsFileSystemProvider if
       * it gets loaded that way.
       */
      ServiceLoader<FileSystemProvider> loader =
          ServiceLoader.load(
              FileSystemProvider.class, SystemJimfsFileSystemProvider.class.getClassLoader());
      for (FileSystemProvider provider : loader) {
        if (provider.getScheme().equals(URI_SCHEME)) {
          return provider;
        }
      }
    } catch (ProviderNotFoundException | ServiceConfigurationError e) {
      /*
       * This can apparently (https://github.com/google/jimfs/issues/31) occur in an environment
       * where services are not loaded from META-INF/services, such as JBoss/Wildfly. In this
       * case, FileSystems.newFileSystem will most likely fail in the same way when called from
       * Jimfs.newFileSystem above, and there will be no way to make URI-based methods like
       * Paths.get(URI) work. Rather than making the user completely unable to use Jimfs, just
       * log this exception and continue.
       *
       * Note: Catching both ProviderNotFoundException, which would occur if no provider matching
       * the "jimfs" URI scheme is found, and ServiceConfigurationError, which can occur if the
       * ServiceLoader finds the META-INF/services entry for Jimfs (or some other
       * FileSystemProvider!) but is then unable to load that class.
       */
      LOGGER.log(
          Level.INFO,
          "An exception occurred when attempting to find the system-loaded FileSystemProvider "
              + "for Jimfs. This likely means that your environment does not support loading "
              + "services via ServiceLoader or is not configured correctly. This does not prevent "
              + "using Jimfs, but it will mean that methods that look up via URI such as "
              + "Paths.get(URI) cannot work.",
          e);
    }

    return null;
  }

  private static String newRandomFileSystemName() {
    return UUID.randomUUID().toString();
  }
}
package com.capitalone.dashboard.request;

import com.capitalone.dashboard.misc.HygieiaException;
import com.capitalone.dashboard.model.Collector;
import com.capitalone.dashboard.model.CollectorItem;
import com.capitalone.dashboard.model.CollectorType;
import com.capitalone.dashboard.model.Owner;
import com.capitalone.dashboard.util.GitHubParsedUrl;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.validator.constraints.NotEmpty;

import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// NOTE(review): StringUtils and MalformedURLException appear unused in this file — confirm before removing.

/**
 * Request payload for remotely creating or updating a dashboard. Carries the dashboard
 * metadata plus one list of entries per supported widget/collector type; each entry is
 * later converted to a {@link CollectorItem} and widget options.
 */
public class DashboardRemoteRequest {
    @Valid
    private DashboardMetaData metaData;

    @Valid
    private List<FeatureEntry> featureEntries = new ArrayList<>();

    @Valid
    private List<CodeRepoEntry> codeRepoEntries = new ArrayList<>();

    @Valid
    private List<BuildEntry> buildEntries = new ArrayList<>();

    @Valid
    private List<StaticCodeEntry> staticCodeEntries = new ArrayList<>();

    @Valid
    private List<SecurityScanEntry> securityScanEntries = new ArrayList<>();

    @Valid
    private List<DeploymentEntry> deploymentEntries = new ArrayList<>();

    @Valid
    private List<LibraryScanEntry> libraryScanEntries = new ArrayList<>();

    @Valid
    private List<FunctionalTestEntry> functionalTestEntries = new ArrayList<>();

    /**
     * Dashboard Metadata: identifying and ownership information for the dashboard itself.
     */
    public static class DashboardMetaData {
        @NotNull
        private String template;

        @NotNull
        private String type;

        // Title: 6-50 chars, alphanumerics and spaces only.
        @NotNull
        @Size(min = 6, max = 50)
        @Pattern(message = "Special character(s) found", regexp = "^[a-zA-Z0-9 ]*$")
        private String title;

        @NotNull
        private String applicationName;

        @NotNull
        private String componentName;

        @NotNull
        Owner owner;

        private String businessService;

        private String businessApplication;

        public String getTemplate() {
            return template;
        }

        public void setTemplate(String template) {
            this.template = template;
        }

        public String getTitle() {
            return title;
        }

        public void setTitle(String title) {
            this.title = title;
        }

        public String getApplicationName() {
            return applicationName;
        }

        public void setApplicationName(String applicationName) {
            this.applicationName = applicationName;
        }

        public String getComponentName() {
            return componentName;
        }

        public void setComponentName(String componentName) {
            this.componentName = componentName;
        }

        public String getBusinessService() {
            return businessService;
        }

        public void setBusinessService(String businessService) {
            this.businessService = businessService;
        }

        public String getBusinessApplication() {
            return businessApplication;
        }

        public void setBusinessApplication(String businessApplication) {
            this.businessApplication = businessApplication;
        }

        public Owner getOwner() {
            return owner;
        }

        public void setOwner(Owner owner) {
            this.owner = owner;
        }

        public String getType() {
            return type;
        }

        public void setType(String type) {
            this.type = type;
        }
    }

    /**
     * An abstract class to hold the entries: Jira project, github project, build job etc.
     * Each concrete subclass maps to one collector type and one widget.
     */
    public static abstract class Entry {
        @NotNull
        String toolName;

        @NotNull
        String description;

        // When true, data for this item is pushed by an external agent rather than collected.
        boolean pushed = false;

        @NotEmpty
        Map<String, Object> options = new HashMap<>();

        public abstract CollectorType getType();

        public String getToolName() {
            return toolName;
        }

        public void setToolName(String toolName) {
            this.toolName = toolName;
        }

        public String getDescription() {
            return description;
        }

        public void setDescription(String description) {
            this.description = description;
        }

        public boolean isPushed() {
            return pushed;
        }

        public void setPushed(boolean pushed) {
            this.pushed = pushed;
        }

        /**
         * Converts this entry into a {@link CollectorItem} for the given collector.
         * Every unique field declared by the collector must be present in {@code options},
         * and every supplied option key must be one of the collector's known fields.
         *
         * @param collector the collector this item will belong to
         * @return a new, enabled collector item populated from this entry's options
         * @throws HygieiaException if a required field is missing or an unsupported field is supplied
         */
        public CollectorItem toCollectorItem(Collector collector) throws HygieiaException{
            if (options.keySet().containsAll(collector.getUniqueFields().keySet())) {
                CollectorItem collectorItem = new CollectorItem();
                collectorItem.setEnabled(true);
                collectorItem.setPushed(isPushed());
                collectorItem.setDescription(description);
                for (String key : options.keySet()) {
                    if (collector.getAllFields().keySet().contains(key)) {
                        collectorItem.getOptions().put(key, options.get(key));
                    } else {
                        throw new HygieiaException(toolName + " collector does not support field: " + key, HygieiaException.COLLECTOR_ITEM_CREATE_ERROR);
                    }
                }
                return collectorItem;
            } else {
                throw new HygieiaException("Missing required fields. " + toolName + " collector required fields are: " + String.join(", ", collector.getUniqueFields().keySet()), HygieiaException.COLLECTOR_ITEM_CREATE_ERROR);
            }
        }

        public abstract String getWidgetId();

        public abstract String getWidgetName();

        public abstract Map<String, Object> toWidgetOptions();

        public Map<String, Object> getOptions() {
            return options;
        }

        public void setOptions(Map<String, Object> options) {
            this.options = options;
        }
    }

    /**
     * Details for creating Feature widget
     */
    public static class FeatureEntry extends Entry {
        @Override
        public CollectorType getType() {
            return CollectorType.AgileTool;
        }

        @Override
        public String getWidgetId() {
            return "feature0";
        }

        @Override
        public String getWidgetName() {
            return "feature";
        }

        // Feature widget carries no extra options.
        @Override
        public Map<String, Object> toWidgetOptions() {
            return null;
        }
    }

    /**
     * Details for creating Code Repo widget
     */
    public static class CodeRepoEntry extends Entry {
        @Override
        public CollectorType getType() {
            return CollectorType.SCM;
        }

        @Override
        public String getWidgetId() {
            return "repo0";
        }

        @Override
        public String getWidgetName() {
            return "repo";
        }

        @Override
        public Map<String, Object> toWidgetOptions() {
            Map<String, Object> opts = new HashMap<>();
            opts.put("name", "repo");
            opts.put("id", "repo0");
            for (String key : options.keySet()) {
                // Normalize GitHub URLs (strip protocol/suffix variations) before storing.
                if("url".equalsIgnoreCase(key)){
                    GitHubParsedUrl gitHubParsed = new GitHubParsedUrl((String)options.get(key));
                    String repoUrl = gitHubParsed.getUrl();
                    opts.put(key, repoUrl);
                }else{
                    opts.put(key, options.get(key));
                }
            }
            // The widget expects the SCM tool as a {name, value} sub-map.
            Map<String, String> scm = new HashMap<>();
            scm.put("name", toolName);
            scm.put("value", toolName);
            opts.put("scm", scm);
            return opts;
        }
    }

    /**
     * Details for creating Build widget
     */
    public static class BuildEntry extends Entry {
        @Override
        public CollectorType getType() {
            return CollectorType.Build;
        }

        @Override
        public String getWidgetId() {
            return "build0";
        }

        @Override
        public String getWidgetName() {
            return "build";
        }

        @Override
        public Map<String, Object> toWidgetOptions() {
            Map<String, Object> opts = new HashMap<>();
            opts.put("id", getWidgetId());
            // Default thresholds used by the build widget UI.
            opts.put("buildDurationThreshold", 3);
            opts.put("consecutiveFailureThreshold",5);
            return opts;
        }
    }

    /**
     * Details for creating Static Code Analysis in Code Quality Widget
     */
    public static class StaticCodeEntry extends Entry {
        @Override
        public CollectorType getType() {
            return CollectorType.CodeQuality;
        }

        @Override
        public String getWidgetId() {
            return "codeanalysis0";
        }

        @Override
        public String getWidgetName() {
            return "codeanalysis";
        }

        @Override
        public Map<String, Object> toWidgetOptions() {
            Map<String, Object> opts = new HashMap<>();
            opts.put("id", getWidgetId());
            opts.put("testJobNames", Arrays.asList(""));
            return opts;
        }
    }

    /**
     * Entry to create Library Scan in Code Quality Widget
     *
     */
    public static class LibraryScanEntry extends Entry {
        @Override
        public CollectorType getType() {
            return CollectorType.LibraryPolicy;
        }

        @Override
        public String getWidgetId() {
            return "codeanalysis0";
        }

        @Override
        public String getWidgetName() {
            return "codeanalysis";
        }

        @Override
        public Map<String, Object> toWidgetOptions() {
            Map<String, Object> opts = new HashMap<>();
            opts.put("id", getWidgetId());
            opts.put("testJobNames", Arrays.asList(""));
            return opts;
        }
    }

    /**
     * Entry to create Security Scan in Code Quality Widget
     */
    public static class SecurityScanEntry extends Entry {
        @Override
        public CollectorType getType() {
            return CollectorType.StaticSecurityScan;
        }

        @Override
        public String getWidgetId() {
            return "codeanalysis0";
        }

        @Override
        public String getWidgetName() {
            return "codeanalysis";
        }

        @Override
        public Map<String, Object> toWidgetOptions() {
            Map<String, Object> opts = new HashMap<>();
            opts.put("id", getWidgetId());
            opts.put("testJobNames", Arrays.asList(""));
            return opts;
        }
    }

    /**
     * Entry to create Functional Test in Code Quality Widget
     */
    public static class FunctionalTestEntry extends Entry {
        @Override
        public CollectorType getType() {
            return CollectorType.Test;
        }

        @Override
        public String getWidgetId() {
            return "codeanalysis0";
        }

        @Override
        public String getWidgetName() {
            return "codeanalysis";
        }

        @Override
        public Map<String, Object> toWidgetOptions() {
            Map<String, Object> opts = new HashMap<>();
            opts.put("id", getWidgetId());
            opts.put("testJobNames",Arrays.asList(""));
            return opts;
        }
    }

    /**
     * Entry to create Deployment widget
     */
    public static class DeploymentEntry extends Entry {
        @Override
        public CollectorType getType() {
            return CollectorType.Deployment;
        }

        @Override
        public String getWidgetId() {
            return "deploy0";
        }

        @Override
        public String getWidgetName() {
            return "deploy";
        }

        @Override
        public Map<String, Object> toWidgetOptions() {
            Map<String, Object> opts = new HashMap<>();
            opts.put("id", getWidgetId());
            return opts;
        }
    }

    // Getters and setters

    public DashboardMetaData getMetaData() {
        return metaData;
    }

    public void setMetaData(DashboardMetaData metaData) {
        this.metaData = metaData;
    }

    public List<FeatureEntry> getFeatureEntries() {
        return featureEntries;
    }

    public void setFeatureEntries(List<FeatureEntry> featureEntries) {
        this.featureEntries = featureEntries;
    }

    public List<CodeRepoEntry> getCodeRepoEntries() {
        return codeRepoEntries;
    }

    public void setCodeRepoEntries(List<CodeRepoEntry> codeRepoEntries) {
        this.codeRepoEntries = codeRepoEntries;
    }

    public List<BuildEntry> getBuildEntries() {
        return buildEntries;
    }

    public void setBuildEntries(List<BuildEntry> buildEntries) {
        this.buildEntries = buildEntries;
    }

    public List<StaticCodeEntry> getStaticCodeEntries() {
        return staticCodeEntries;
    }

    public void setStaticCodeEntries(List<StaticCodeEntry> staticCodeEntries) {
        this.staticCodeEntries = staticCodeEntries;
    }

    public List<SecurityScanEntry> getSecurityScanEntries() {
        return securityScanEntries;
    }

    public void setSecurityScanEntries(List<SecurityScanEntry> securityScanEntries) {
        this.securityScanEntries = securityScanEntries;
    }

    public List<DeploymentEntry> getDeploymentEntries() {
        return deploymentEntries;
    }

    public void setDeploymentEntries(List<DeploymentEntry> deploymentEntries) {
        this.deploymentEntries = deploymentEntries;
    }

    public List<LibraryScanEntry> getLibraryScanEntries() {
        return libraryScanEntries;
    }

    public void setLibraryScanEntries(List<LibraryScanEntry> libraryScanEntries) {
        this.libraryScanEntries = libraryScanEntries;
    }

    public List<FunctionalTestEntry> getFunctionalTestEntries() {
        return functionalTestEntries;
    }

    public void setFunctionalTestEntries(List<FunctionalTestEntry> functionalTestEntries) {
        this.functionalTestEntries = functionalTestEntries;
    }

    /**
     * Returns the collector-item entries of this request flattened into a single list.
     * NOTE(review): featureEntries is NOT included here — confirm whether features are
     * deliberately handled separately or this is an omission.
     */
    public List<Entry> getAllEntries() {
        List<Entry> all = new ArrayList<>();
        all.addAll(buildEntries);
        all.addAll(codeRepoEntries);
        all.addAll(staticCodeEntries);
        all.addAll(libraryScanEntries);
        all.addAll(securityScanEntries);
        all.addAll(functionalTestEntries);
        all.addAll(deploymentEntries);
        return all;
    }
}
package com.netki;

import com.netki.dnssec.DNSSECResolver;
import com.netki.exceptions.DNSSECException;
import com.netki.exceptions.WalletNameCurrencyUnavailableException;
import com.netki.exceptions.WalletNameDoesNotExistException;
import com.netki.exceptions.WalletNameLookupException;
import com.netki.tlsa.TLSAValidator;
import org.bitcoinj.uri.BitcoinURI;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.xbill.DNS.Type;

import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Exchanger;

import static org.junit.Assert.*;
import static org.mockito.Mockito.*;

// NOTE(review): java.util.concurrent.Exchanger appears unused — confirm before removing.

/**
 * Unit tests for {@code WalletNameResolver}. The object under test is itself a Mockito mock
 * with selected methods configured to call through to the real implementation (a partial
 * mock), so collaborator calls can be both stubbed and verified.
 */
@RunWith(PowerMockRunner.class)
@PrepareForTest(WalletNameResolver.class)
public class WalletNameResolverTest {

    private DNSSECResolver mockResolver;
    private TLSAValidator mockTlsaValidator;
    private WalletNameResolver testObj;

    @Before
    public void setUp() {
        this.mockResolver = mock(DNSSECResolver.class);
        this.mockTlsaValidator = mock(TLSAValidator.class);
        this.testObj = mock(WalletNameResolver.class);

        try {
            // Route the public API of the partial mock to the real implementations.
            when(this.testObj.resolve(anyString(), anyString(), anyBoolean())).thenCallRealMethod();
            when(this.testObj.getAvailableCurrencies(anyString())).thenCallRealMethod();
            when(this.testObj.preprocessWalletName(anyString())).thenCallRealMethod();
            doCallRealMethod().when(this.testObj).setDNSSECResolver(any(DNSSECResolver.class));
            doCallRealMethod().when(this.testObj).setTlsaValidator(any(TLSAValidator.class));

            // Setup Backup: real backup-DNS-server handling on the resolver mock.
            doCallRealMethod().when(this.mockResolver).getBackupDnsServers();
            doCallRealMethod().when(this.mockResolver).setBackupDnsServers(any(List.class));
            doCallRealMethod().when(this.mockResolver).useBackupDnsServer(any(Integer.class));
            this.mockResolver.setBackupDnsServers(Arrays.asList("8.8.8.8", "8.8.4.4"));

            this.testObj.setDNSSECResolver(this.mockResolver);
            this.testObj.setTlsaValidator(this.mockTlsaValidator);
        } catch (WalletNameLookupException e) {
            e.printStackTrace();
        }
    }

    @After
    public void cleanUp() {
        reset(this.mockResolver);
        reset(this.mockTlsaValidator);
    }

    /*
     * Test getAvailableCurrencies()
     */
    @Test
    public void getAvailableCurrencies_GoRight() {
        try {
            when(this.mockResolver.resolve(eq("_wallet.wallet.domain.com."), eq(Type.TXT))).thenReturn("btc ltc");
        } catch (Exception e) {
            fail("Failure to Setup Test: " + e.getMessage());
        }

        try {
            List<String> currencies = this.testObj.getAvailableCurrencies("wallet.domain.com");
            assertNotNull(currencies);
            assertEquals(2, currencies.size());
            assertTrue(currencies.contains("btc"));
            assertTrue(currencies.contains("ltc"));
            assertFalse(currencies.contains("dgc"));
            verify(this.mockResolver, times(1)).resolve(eq("_wallet.wallet.domain.com."), eq(Type.TXT));
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }

    @Test
    public void getAvailableCurrencies_EmptyResult() {
        try {
            when(this.mockResolver.resolve(eq("_wallet.wallet.domain.com."), eq(Type.TXT))).thenReturn("");
        } catch (Exception e) {
            fail("Failure to Setup Test: " + e.getMessage());
        }

        try {
            this.testObj.getAvailableCurrencies("wallet.domain.com");
            fail("Expected Exception");
        } catch (WalletNameDoesNotExistException e) {
            try {
                verify(this.mockResolver, times(1)).resolve(eq("_wallet.wallet.domain.com."), eq(Type.TXT));
            } catch (Exception e1) {
                // NOTE(review): verification failures are silently swallowed here — a broken
                // verify would not fail the test. Consider failing on e1.
            }
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }

    @Test
    public void getAvailableCurrencies_NullResult() {
        try {
            when(this.mockResolver.resolve(eq("_wallet.wallet.domain.com."), eq(Type.TXT))).thenReturn(null);
        } catch (Exception e) {
            fail("Failure to Setup Test: " + e.getMessage());
        }

        try {
            this.testObj.getAvailableCurrencies("wallet.domain.com");
            fail("Expected Exception");
        } catch (WalletNameDoesNotExistException e) {
            try {
                verify(this.mockResolver, times(1)).resolve(eq("_wallet.wallet.domain.com."), eq(Type.TXT));
            } catch (Exception e1) {
                // NOTE(review): verification failure swallowed — see note in _EmptyResult.
            }
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }

    @Test
    public void getAvailableCurrencies_NonRetryableException() {
        // NOTE(review): despite the name, this verifies times(3), i.e. the resolver IS retried
        // (presumably once per backup DNS server) before the lookup finally fails — confirm
        // whether the test name or the expectation is what's intended.
        try {
            when(this.mockResolver.resolve(eq("_wallet.wallet.domain.com."), eq(Type.TXT))).thenThrow(new DNSSECException("message"));
        } catch (Exception e) {
            fail("Failure to Setup Test: " + e.getMessage());
        }

        try {
            this.testObj.getAvailableCurrencies("wallet.domain.com");
            fail("Expected Exception");
        } catch (WalletNameLookupException e) {
            try {
                verify(this.mockResolver, times(3)).resolve(eq("_wallet.wallet.domain.com."), eq(Type.TXT));
                assertEquals("message", e.getMessage());
            } catch(Exception e1) {
                fail("Unknown Test Failure: " + e.getMessage());
            }
        }
    }

    @Test
    public void getAvailableCurrencies_RetriedException() {
        // First call throws, second succeeds: the resolver should be invoked exactly twice.
        try {
            when(this.mockResolver.resolve(eq("_wallet.wallet.domain.com."), eq(Type.TXT))).thenThrow(new DNSSECException("message")).thenReturn("btc ltc");
        } catch (Exception e) {
            fail("Failure to Setup Test: " + e.getMessage());
        }

        try {
            List<String> currencies = this.testObj.getAvailableCurrencies("wallet.domain.com");
            assertNotNull(currencies);
            assertEquals(2, currencies.size());
            assertTrue(currencies.contains("btc"));
            assertTrue(currencies.contains("ltc"));
            assertFalse(currencies.contains("dgc"));
            verify(this.mockResolver, times(2)).resolve(eq("_wallet.wallet.domain.com."), eq(Type.TXT));
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }

    /*
     * Test resolve()
     */
    @Test
    public void resolve_GoRightAddr() {
        // TXT record contains a bare address; no URL processing should occur.
        try {
            when(this.mockResolver.resolve(eq("_btc._wallet.wallet.domain.com."), eq(Type.TXT))).thenReturn("1CpLXM15vjULK3ZPGUTDMUcGATGR9xGitv");
        } catch (Exception e) {
            fail("Failure to Setup Test: " + e.getMessage());
        }

        try {
            BitcoinURI result = this.testObj.resolve("wallet.domain.com", "btc", true);
            assertNotNull(result.getAddress());
            assertEquals("1CpLXM15vjULK3ZPGUTDMUcGATGR9xGitv", result.getAddress().toString());
            verify(this.mockResolver, times(1)).resolve(anyString(), eq(Type.TXT));
            verify(this.mockTlsaValidator, never()).validateTLSA(any(URL.class));
            verify(this.testObj, never()).processWalletNameUrl(any(URL.class), anyBoolean());
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }

    @Test
    public void resolve_GoRightURL() {
        // TXT record contains a base64-encoded URL; resolution must go through
        // processWalletNameUrl with the decoded URL.
        try {
            when(this.testObj.processWalletNameUrl(any(URL.class), anyBoolean())).thenReturn(new BitcoinURI("bitcoin:1CpLXM15vjULK3ZPGUTDMUcGATGR9xGitv"));
            when(this.mockResolver.resolve(eq("_btc._wallet.wallet.domain.com."), eq(Type.TXT))).thenReturn("aHR0cHM6Ly9hZGRyZXNzaW1vLm5ldGtpLmNvbS9yZXNvbHZlLzg3NTkzNDg3NTk0Mzc1OTQzNzk4MzQ3MzQ1");
        } catch (Exception e) {
            fail("Failure to Setup Test: " + e.getMessage());
        }

        try {
            BitcoinURI result = this.testObj.resolve("wallet.domain.com", "btc", true);
            assertNotNull(result.getAddress());
            assertEquals("1CpLXM15vjULK3ZPGUTDMUcGATGR9xGitv", result.getAddress().toString());
            verify(this.mockResolver, times(1)).resolve(anyString(), eq(Type.TXT));
            verify(this.mockTlsaValidator, never()).validateTLSA(any(URL.class));
            verify(this.testObj).processWalletNameUrl(eq(new URL("https://addressimo.netki.com/resolve/87593487594375943798347345")), anyBoolean());
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }

    @Test
    public void resolve_EmptyLabel() {
        try {
            this.testObj.resolve("", "btc", true);
            fail("This should throw an exception");
        } catch (WalletNameLookupException e) {
            try {
                assertEquals("Wallet Name Label Must Non-Empty", e.getMessage());
                verify(this.mockResolver, never()).resolve(anyString(), eq(Type.TXT));
                verify(this.mockTlsaValidator, never()).validateTLSA(any(URL.class));
                verify(this.testObj, never()).processWalletNameUrl(any(URL.class), anyBoolean());
            } catch (Exception e1) {
                fail("Failure in Test Validation: " + e1.getMessage());
            }
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }

    @Test
    public void resolve_CurrencyNotAvailable() {
        try {
            when(this.mockResolver.resolve(eq("_wallet.wallet.domain.com."), eq(Type.TXT))).thenReturn("btc ltc");
        } catch (Exception e) {
            fail("Failure to Setup Test: " + e.getMessage());
        }

        try {
            this.testObj.resolve("wallet.domain.com", "dgc", true);
            fail("This should throw an exception");
        } catch (WalletNameCurrencyUnavailableException e) {
            try {
                assertEquals("Currency Not Available in Wallet Name", e.getMessage());
                verify(this.mockResolver, times(1)).resolve(anyString(), eq(Type.TXT));
                verify(this.mockTlsaValidator, never()).validateTLSA(any(URL.class));
                verify(this.testObj, never()).processWalletNameUrl(any(URL.class), anyBoolean());
            } catch (Exception e1) {
                fail("Failure in Test Validation: " + e1.getMessage());
            }
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }

    @Test
    public void resolve_EmptyAddressResolution() {
        try {
            when(this.mockResolver.resolve(eq("_btc._wallet.wallet.domain.com."), eq(Type.TXT))).thenReturn("");
        } catch (Exception e) {
            fail("Failure to Setup Test: " + e.getMessage());
        }

        try {
            this.testObj.resolve("wallet.domain.com", "btc", true);
            fail("This should throw an exception");
        } catch (WalletNameCurrencyUnavailableException e) {
            try {
                assertEquals("Currency Not Available in Wallet Name", e.getMessage());
                verify(this.mockResolver, times(1)).resolve(anyString(), eq(Type.TXT));
                verify(this.mockTlsaValidator, never()).validateTLSA(any(URL.class));
                verify(this.testObj, never()).processWalletNameUrl(any(URL.class), anyBoolean());
            } catch (Exception e1) {
                fail("Failure in Test Validation: " + e1.getMessage());
            }
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }

    @Test
    public void resolve_ResolutionException() {
        // Resolver always throws; resolve() retries (times(3) observed) then surfaces the failure.
        try {
            when(this.mockResolver.resolve(eq("_btc._wallet.wallet.domain.com."), eq(Type.TXT))).thenThrow(new DNSSECException("message"));
        } catch (Exception e) {
            fail("Failure to Setup Test: " + e.getMessage());
        }

        try {
            this.testObj.resolve("wallet.domain.com", "btc", true);
            fail("This should throw an exception");
        } catch (WalletNameLookupException e) {
            try {
                assertEquals("message", e.getMessage());
                verify(this.mockResolver, times(3)).resolve(anyString(), eq(Type.TXT));
                verify(this.mockTlsaValidator, never()).validateTLSA(any(URL.class));
                verify(this.testObj, never()).processWalletNameUrl(any(URL.class), anyBoolean());
            } catch (Exception e1) {
                fail("Failure in Test Validation: " + e1.getMessage());
            }
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }

    @Test
    public void resolve_ResolutionExceptionRetry() {
        // First lookup throws, retry succeeds and yields the address.
        try {
            when(this.mockResolver.resolve(eq("_btc._wallet.wallet.domain.com."), eq(Type.TXT))).thenThrow(new DNSSECException("message")).thenReturn("1CpLXM15vjULK3ZPGUTDMUcGATGR9xGitv");
        } catch (Exception e) {
            fail("Failure to Setup Test: " + e.getMessage());
        }

        try {
            BitcoinURI result = this.testObj.resolve("wallet.domain.com", "btc", true);
            assertNotNull(result.getAddress());
            assertEquals("1CpLXM15vjULK3ZPGUTDMUcGATGR9xGitv", result.getAddress().toString());
            verify(this.mockResolver, times(2)).resolve(anyString(), eq(Type.TXT));
            verify(this.mockTlsaValidator, never()).validateTLSA(any(URL.class));
            verify(this.testObj, never()).processWalletNameUrl(any(URL.class), anyBoolean());
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }

    @Test
    public void resolve_URLException() {
        // NOTE(review): despite its name, this test returns a plain address and never exercises
        // a URL failure path — it duplicates resolve_GoRightAddr. Confirm intent.
        try {
            when(this.mockResolver.resolve(eq("_wallet.wallet.domain.com."), eq(Type.TXT))).thenReturn("btc ltc");
            when(this.mockResolver.resolve(eq("_btc._wallet.wallet.domain.com."), eq(Type.TXT))).thenReturn("1CpLXM15vjULK3ZPGUTDMUcGATGR9xGitv");
        } catch (Exception e) {
            fail("Failure to Setup Test: " + e.getMessage());
        }

        try {
            BitcoinURI result = this.testObj.resolve("wallet.domain.com", "btc", true);
            assertNotNull(result.getAddress());
            assertEquals("1CpLXM15vjULK3ZPGUTDMUcGATGR9xGitv", result.getAddress().toString());
            verify(this.mockResolver, times(1)).resolve(anyString(), eq(Type.TXT));
            verify(this.mockTlsaValidator, never()).validateTLSA(any(URL.class));
            verify(this.testObj, never()).processWalletNameUrl(eq(new URL("https://addressimo.netki.com/resolve/87593487594375943798347345")), anyBoolean());
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }

    @Test
    public void preprocessWalletName_NonEmail() {
        // Non-email wallet names pass through unchanged.
        try {
            String result = this.testObj.preprocessWalletName("user.domain.com");
            assertEquals("user.domain.com", result);
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }

    @Test
    public void preprocessWalletName_Email() {
        // Email-style names: the local part is hashed and prepended to the domain.
        try {
            String result = this.testObj.preprocessWalletName("user@domain.com");
            assertEquals("147ad31215fd55112ce613a7883902bb306aa35bba879cd2dbe500b9.domain.com", result);
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }

    @Test
    public void preprocessWalletName_EmailDoubleAt() {
        // With multiple '@', only the first local part is hashed; the rest stays literal.
        try {
            String result = this.testObj.preprocessWalletName("user@user@domain.com");
            assertEquals("147ad31215fd55112ce613a7883902bb306aa35bba879cd2dbe500b9.user@domain.com", result);
        } catch (Exception e) {
            fail("Unknown Test Failure: " + e.getMessage());
        }
    }
}
package org.bouncycastle.jcajce.provider.asymmetric.ecgost; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.math.BigInteger; import java.security.interfaces.ECPrivateKey; import java.security.spec.ECParameterSpec; import java.security.spec.ECPoint; import java.security.spec.ECPrivateKeySpec; import java.security.spec.EllipticCurve; import java.util.Enumeration; import org.bouncycastle.asn1.ASN1Encodable; import org.bouncycastle.asn1.ASN1Encoding; import org.bouncycastle.asn1.ASN1Integer; import org.bouncycastle.asn1.ASN1ObjectIdentifier; import org.bouncycastle.asn1.ASN1OctetString; import org.bouncycastle.asn1.ASN1Primitive; import org.bouncycastle.asn1.ASN1Sequence; import org.bouncycastle.asn1.DERBitString; import org.bouncycastle.asn1.DERNull; import org.bouncycastle.asn1.DEROctetString; import org.bouncycastle.asn1.cryptopro.CryptoProObjectIdentifiers; import org.bouncycastle.asn1.cryptopro.ECGOST3410NamedCurves; import org.bouncycastle.asn1.cryptopro.GOST3410PublicKeyAlgParameters; import org.bouncycastle.asn1.pkcs.PrivateKeyInfo; import org.bouncycastle.asn1.x509.AlgorithmIdentifier; import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo; import org.bouncycastle.asn1.x9.X962Parameters; import org.bouncycastle.asn1.x9.X9ECParameters; import org.bouncycastle.crypto.params.ECDomainParameters; import org.bouncycastle.crypto.params.ECPrivateKeyParameters; import org.bouncycastle.jcajce.provider.asymmetric.util.EC5Util; import org.bouncycastle.jcajce.provider.asymmetric.util.ECUtil; import org.bouncycastle.jcajce.provider.asymmetric.util.PKCS12BagAttributeCarrierImpl; import org.bouncycastle.jce.ECGOST3410NamedCurveTable; import org.bouncycastle.jce.interfaces.ECPointEncoder; import org.bouncycastle.jce.interfaces.PKCS12BagAttributeCarrier; import org.bouncycastle.jce.provider.BouncyCastleProvider; import org.bouncycastle.jce.spec.ECNamedCurveParameterSpec; import 
org.bouncycastle.jce.spec.ECNamedCurveSpec; import org.bouncycastle.math.ec.ECCurve; public class BCECGOST3410PrivateKey implements ECPrivateKey, org.bouncycastle.jce.interfaces.ECPrivateKey, PKCS12BagAttributeCarrier, ECPointEncoder { static final long serialVersionUID = 7245981689601667138L; private String algorithm = "ECGOST3410"; private boolean withCompression; private transient GOST3410PublicKeyAlgParameters gostParams; private transient BigInteger d; private transient ECParameterSpec ecSpec; private transient DERBitString publicKey; private transient PKCS12BagAttributeCarrierImpl attrCarrier = new PKCS12BagAttributeCarrierImpl(); protected BCECGOST3410PrivateKey() { } public BCECGOST3410PrivateKey( ECPrivateKey key) { this.d = key.getS(); this.algorithm = key.getAlgorithm(); this.ecSpec = key.getParams(); } public BCECGOST3410PrivateKey( org.bouncycastle.jce.spec.ECPrivateKeySpec spec) { this.d = spec.getD(); if (spec.getParams() != null) // can be null if implicitlyCA { ECCurve curve = spec.getParams().getCurve(); EllipticCurve ellipticCurve; ellipticCurve = EC5Util.convertCurve(curve, spec.getParams().getSeed()); this.ecSpec = EC5Util.convertSpec(ellipticCurve, spec.getParams()); } else { this.ecSpec = null; } } public BCECGOST3410PrivateKey( ECPrivateKeySpec spec) { this.d = spec.getS(); this.ecSpec = spec.getParams(); } public BCECGOST3410PrivateKey( BCECGOST3410PrivateKey key) { this.d = key.d; this.ecSpec = key.ecSpec; this.withCompression = key.withCompression; this.attrCarrier = key.attrCarrier; this.publicKey = key.publicKey; this.gostParams = key.gostParams; } public BCECGOST3410PrivateKey( String algorithm, ECPrivateKeyParameters params, BCECGOST3410PublicKey pubKey, ECParameterSpec spec) { ECDomainParameters dp = params.getParameters(); this.algorithm = algorithm; this.d = params.getD(); if (spec == null) { EllipticCurve ellipticCurve = EC5Util.convertCurve(dp.getCurve(), dp.getSeed()); this.ecSpec = new ECParameterSpec( ellipticCurve, new 
ECPoint( dp.getG().getAffineXCoord().toBigInteger(), dp.getG().getAffineYCoord().toBigInteger()), dp.getN(), dp.getH().intValue()); } else { this.ecSpec = spec; } this.gostParams = pubKey.getGostParams(); publicKey = getPublicKeyDetails(pubKey); } public BCECGOST3410PrivateKey( String algorithm, ECPrivateKeyParameters params, BCECGOST3410PublicKey pubKey, org.bouncycastle.jce.spec.ECParameterSpec spec) { ECDomainParameters dp = params.getParameters(); this.algorithm = algorithm; this.d = params.getD(); if (spec == null) { EllipticCurve ellipticCurve = EC5Util.convertCurve(dp.getCurve(), dp.getSeed()); this.ecSpec = new ECParameterSpec( ellipticCurve, new ECPoint( dp.getG().getAffineXCoord().toBigInteger(), dp.getG().getAffineYCoord().toBigInteger()), dp.getN(), dp.getH().intValue()); } else { EllipticCurve ellipticCurve = EC5Util.convertCurve(spec.getCurve(), spec.getSeed()); this.ecSpec = new ECParameterSpec( ellipticCurve, new ECPoint( spec.getG().getAffineXCoord().toBigInteger(), spec.getG().getAffineYCoord().toBigInteger()), spec.getN(), spec.getH().intValue()); } this.gostParams = pubKey.getGostParams(); publicKey = getPublicKeyDetails(pubKey); } public BCECGOST3410PrivateKey( String algorithm, ECPrivateKeyParameters params) { this.algorithm = algorithm; this.d = params.getD(); this.ecSpec = null; } BCECGOST3410PrivateKey( PrivateKeyInfo info) throws IOException { populateFromPrivKeyInfo(info); } private void populateFromPrivKeyInfo(PrivateKeyInfo info) throws IOException { ASN1Primitive p = info.getPrivateKeyAlgorithm().getParameters().toASN1Primitive(); if (p instanceof ASN1Sequence && (ASN1Sequence.getInstance(p).size() == 2 || ASN1Sequence.getInstance(p).size() == 3)) { gostParams = GOST3410PublicKeyAlgParameters.getInstance(info.getPrivateKeyAlgorithm().getParameters()); ECNamedCurveParameterSpec spec = ECGOST3410NamedCurveTable.getParameterSpec(ECGOST3410NamedCurves.getName(gostParams.getPublicKeyParamSet())); ECCurve curve = spec.getCurve(); 
EllipticCurve ellipticCurve = EC5Util.convertCurve(curve, spec.getSeed()); ecSpec = new ECNamedCurveSpec( ECGOST3410NamedCurves.getName(gostParams.getPublicKeyParamSet()), ellipticCurve, new ECPoint( spec.getG().getAffineXCoord().toBigInteger(), spec.getG().getAffineYCoord().toBigInteger()), spec.getN(), spec.getH()); ASN1Encodable privKey = info.parsePrivateKey(); byte[] encVal = ASN1OctetString.getInstance(privKey).getOctets(); byte[] dVal = new byte[encVal.length]; for (int i = 0; i != encVal.length; i++) { dVal[i] = encVal[encVal.length - 1 - i]; } this.d = new BigInteger(1, dVal); } else { // for backwards compatibility X962Parameters params = X962Parameters.getInstance(info.getPrivateKeyAlgorithm().getParameters()); if (params.isNamedCurve()) { ASN1ObjectIdentifier oid = ASN1ObjectIdentifier.getInstance(params.getParameters()); X9ECParameters ecP = ECUtil.getNamedCurveByOid(oid); if (ecP == null) // GOST Curve { ECDomainParameters gParam = ECGOST3410NamedCurves.getByOID(oid); EllipticCurve ellipticCurve = EC5Util.convertCurve(gParam.getCurve(), gParam.getSeed()); ecSpec = new ECNamedCurveSpec( ECGOST3410NamedCurves.getName(oid), ellipticCurve, new ECPoint( gParam.getG().getAffineXCoord().toBigInteger(), gParam.getG().getAffineYCoord().toBigInteger()), gParam.getN(), gParam.getH()); } else { EllipticCurve ellipticCurve = EC5Util.convertCurve(ecP.getCurve(), ecP.getSeed()); ecSpec = new ECNamedCurveSpec( ECUtil.getCurveName(oid), ellipticCurve, new ECPoint( ecP.getG().getAffineXCoord().toBigInteger(), ecP.getG().getAffineYCoord().toBigInteger()), ecP.getN(), ecP.getH()); } } else if (params.isImplicitlyCA()) { ecSpec = null; } else { X9ECParameters ecP = X9ECParameters.getInstance(params.getParameters()); EllipticCurve ellipticCurve = EC5Util.convertCurve(ecP.getCurve(), ecP.getSeed()); this.ecSpec = new ECParameterSpec( ellipticCurve, new ECPoint( ecP.getG().getAffineXCoord().toBigInteger(), ecP.getG().getAffineYCoord().toBigInteger()), ecP.getN(), 
ecP.getH().intValue()); } ASN1Encodable privKey = info.parsePrivateKey(); if (privKey instanceof ASN1Integer) { ASN1Integer derD = ASN1Integer.getInstance(privKey); this.d = derD.getValue(); } else { org.bouncycastle.asn1.sec.ECPrivateKey ec = org.bouncycastle.asn1.sec.ECPrivateKey.getInstance(privKey); this.d = ec.getKey(); this.publicKey = ec.getPublicKey(); } } } public String getAlgorithm() { return algorithm; } /** * return the encoding format we produce in getEncoded(). * * @return the string "PKCS#8" */ public String getFormat() { return "PKCS#8"; } /** * Return a PKCS8 representation of the key. The sequence returned * represents a full PrivateKeyInfo object. * * @return a PKCS8 representation of the key. */ public byte[] getEncoded() { if (gostParams != null) { byte[] encKey = new byte[32]; extractBytes(encKey, 0, this.getS()); try { PrivateKeyInfo info = new PrivateKeyInfo(new AlgorithmIdentifier(CryptoProObjectIdentifiers.gostR3410_2001, gostParams), new DEROctetString(encKey)); return info.getEncoded(ASN1Encoding.DER); } catch (IOException e) { return null; } } else { X962Parameters params; int orderBitLength; if (ecSpec instanceof ECNamedCurveSpec) { ASN1ObjectIdentifier curveOid = ECUtil.getNamedCurveOid(((ECNamedCurveSpec)ecSpec).getName()); if (curveOid == null) // guess it's the OID { curveOid = new ASN1ObjectIdentifier(((ECNamedCurveSpec)ecSpec).getName()); } params = new X962Parameters(curveOid); orderBitLength = ECUtil.getOrderBitLength(ecSpec.getOrder(), this.getS()); } else if (ecSpec == null) { params = new X962Parameters(DERNull.INSTANCE); orderBitLength = ECUtil.getOrderBitLength(null, this.getS()); } else { ECCurve curve = EC5Util.convertCurve(ecSpec.getCurve()); X9ECParameters ecP = new X9ECParameters( curve, EC5Util.convertPoint(curve, ecSpec.getGenerator(), withCompression), ecSpec.getOrder(), BigInteger.valueOf(ecSpec.getCofactor()), ecSpec.getCurve().getSeed()); params = new X962Parameters(ecP); orderBitLength = 
ECUtil.getOrderBitLength(ecSpec.getOrder(), this.getS()); } PrivateKeyInfo info; org.bouncycastle.asn1.sec.ECPrivateKey keyStructure; if (publicKey != null) { keyStructure = new org.bouncycastle.asn1.sec.ECPrivateKey(orderBitLength, this.getS(), publicKey, params); } else { keyStructure = new org.bouncycastle.asn1.sec.ECPrivateKey(orderBitLength, this.getS(), params); } try { info = new PrivateKeyInfo(new AlgorithmIdentifier(CryptoProObjectIdentifiers.gostR3410_2001, params.toASN1Primitive()), keyStructure.toASN1Primitive()); return info.getEncoded(ASN1Encoding.DER); } catch (IOException e) { return null; } } } private void extractBytes(byte[] encKey, int offSet, BigInteger bI) { byte[] val = bI.toByteArray(); if (val.length < 32) { byte[] tmp = new byte[32]; System.arraycopy(val, 0, tmp, tmp.length - val.length, val.length); val = tmp; } for (int i = 0; i != 32; i++) { encKey[offSet + i] = val[val.length - 1 - i]; } } public ECParameterSpec getParams() { return ecSpec; } public org.bouncycastle.jce.spec.ECParameterSpec getParameters() { if (ecSpec == null) { return null; } return EC5Util.convertSpec(ecSpec, withCompression); } org.bouncycastle.jce.spec.ECParameterSpec engineGetSpec() { if (ecSpec != null) { return EC5Util.convertSpec(ecSpec, withCompression); } return BouncyCastleProvider.CONFIGURATION.getEcImplicitlyCa(); } public BigInteger getS() { return d; } public BigInteger getD() { return d; } public void setBagAttribute( ASN1ObjectIdentifier oid, ASN1Encodable attribute) { attrCarrier.setBagAttribute(oid, attribute); } public ASN1Encodable getBagAttribute( ASN1ObjectIdentifier oid) { return attrCarrier.getBagAttribute(oid); } public Enumeration getBagAttributeKeys() { return attrCarrier.getBagAttributeKeys(); } public void setPointFormat(String style) { withCompression = !("UNCOMPRESSED".equalsIgnoreCase(style)); } public boolean equals(Object o) { if (!(o instanceof BCECGOST3410PrivateKey)) { return false; } BCECGOST3410PrivateKey other = 
(BCECGOST3410PrivateKey)o; return getD().equals(other.getD()) && (engineGetSpec().equals(other.engineGetSpec())); } public int hashCode() { return getD().hashCode() ^ engineGetSpec().hashCode(); } public String toString() { StringBuffer buf = new StringBuffer(); String nl = System.getProperty("line.separator"); buf.append("EC Private Key").append(nl); buf.append(" S: ").append(this.d.toString(16)).append(nl); return buf.toString(); } private DERBitString getPublicKeyDetails(BCECGOST3410PublicKey pub) { try { SubjectPublicKeyInfo info = SubjectPublicKeyInfo.getInstance(ASN1Primitive.fromByteArray(pub.getEncoded())); return info.getPublicKeyData(); } catch (IOException e) { // should never happen return null; } } private void readObject( ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); byte[] enc = (byte[])in.readObject(); populateFromPrivKeyInfo(PrivateKeyInfo.getInstance(ASN1Primitive.fromByteArray(enc))); this.attrCarrier = new PKCS12BagAttributeCarrierImpl(); } private void writeObject( ObjectOutputStream out) throws IOException { out.defaultWriteObject(); out.writeObject(this.getEncoded()); } }
package org.deeplearning4j.models.word2vec.wordstore;

import org.deeplearning4j.models.word2vec.wordstore.inmemory.InMemoryLookupCache;
import org.junit.Test;

import static org.junit.Assert.assertEquals;

/**
 * Tests for {@link VocabularyHolder}: vocabulary transfer into a VocabCache,
 * constructor behavior, SPECIAL-word truncation, and the periodic "scavenger"
 * that evicts words staying below the minimum frequency.
 *
 * Created by fartovii on 08.11.15.
 */
public class VocabularyHolderTest {

    @Test
    public void testTransferBackToVocabCache() throws Exception {
        VocabularyHolder holder = new VocabularyHolder();
        holder.addWord("test");
        holder.addWord("tests");
        holder.addWord("testz");
        // final frequencies: test=1, tests=3, testz=2
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("testz");

        InMemoryLookupCache cache = new InMemoryLookupCache(false);
        holder.updateHuffmanCodes();
        holder.transferBackToVocabCache(cache);

        // checking word frequency transfer
        assertEquals(3, cache.numWords());
        assertEquals(1, cache.wordFrequency("test"));
        assertEquals(2, cache.wordFrequency("testz"));
        assertEquals(3, cache.wordFrequency("tests"));

        // checking Huffman tree transfer: indices are ordered by descending frequency
        assertEquals("tests", cache.wordAtIndex(0));
        assertEquals("testz", cache.wordAtIndex(1));
        assertEquals("test", cache.wordAtIndex(2));
    }

    @Test
    public void testConstructor() throws Exception {
        InMemoryLookupCache cache = new InMemoryLookupCache(true);
        VocabularyHolder holder = new VocabularyHolder(cache, false);

        // no more UNK token here
        assertEquals(0, holder.numWords());
    }

    /**
     * In this test we make sure SPECIAL words are not affected by truncation
     * when an external vocab is extended.
     *
     * @throws Exception
     */
    @Test
    public void testSpecial1() throws Exception {
        VocabularyHolder holder = new VocabularyHolder.Builder()
                .minWordFrequency(1)
                .build();

        holder.addWord("test");
        holder.addWord("tests");

        holder.truncateVocabulary();

        assertEquals(2, holder.numWords());

        VocabCache cache = new InMemoryLookupCache();
        holder.transferBackToVocabCache(cache);

        // words imported from the external cache are treated as SPECIAL,
        // so they must survive truncation even under minWordFrequency(10)
        VocabularyHolder holder2 = new VocabularyHolder.Builder()
                .externalCache(cache)
                .minWordFrequency(10)
                //  .markAsSpecial(true)
                .build();

        holder2.addWord("testz");

        assertEquals(3, holder2.numWords());

        holder2.truncateVocabulary();
        // only the newly added low-frequency word "testz" is dropped
        assertEquals(2, holder2.numWords());
    }

    @Test
    public void testScavenger1() throws Exception {
        VocabularyHolder holder = new VocabularyHolder.Builder()
                .minWordFrequency(5)
                .hugeModelExpected(true)
                .scavengerActivationThreshold(1000000) // this value doesn't really matter, since we'll call the scavenger manually
                .scavengerRetentionDelay(3)
                .build();

        holder.addWord("test");
        holder.addWord("tests");
        // "tests" ends up with frequency 7, well above minWordFrequency;
        // "test" stays at 1 and becomes a scavenger candidate
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");

        holder.activateScavenger();
        assertEquals(2, holder.numWords());

        holder.activateScavenger();
        assertEquals(2, holder.numWords());

        // after third activation, word "test" should be removed
        holder.activateScavenger();
        assertEquals(1, holder.numWords());
    }

    @Test
    public void testScavenger2() throws Exception {
        VocabularyHolder holder = new VocabularyHolder.Builder()
                .minWordFrequency(5)
                .hugeModelExpected(true)
                .scavengerActivationThreshold(1000000) // this value doesn't really matter, since we'll call the scavenger manually
                .scavengerRetentionDelay(3)
                .build();

        holder.addWord("test");
        // unlike testScavenger1, "test" gets one extra increment (frequency 2),
        // but that is still below minWordFrequency, so the outcome is the same
        holder.incrementWordCounter("test");
        holder.addWord("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");

        holder.activateScavenger();
        assertEquals(2, holder.numWords());

        holder.activateScavenger();
        assertEquals(2, holder.numWords());

        // after third activation, word "test" should be removed
        holder.activateScavenger();
        assertEquals(1, holder.numWords());
    }

    @Test
    public void testScavenger3() throws Exception {
        VocabularyHolder holder = new VocabularyHolder.Builder()
                .minWordFrequency(5)
                .hugeModelExpected(true)
                .scavengerActivationThreshold(1000000) // this value doesn't really matter, since we'll call the scavenger manually
                .scavengerRetentionDelay(3)
                .build();

        holder.addWord("test");

        holder.activateScavenger();
        assertEquals(1, holder.numWords());

        // frequency grows between scavenger passes, which resets the retention point
        holder.incrementWordCounter("test");
        holder.addWord("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");

        holder.activateScavenger();
        assertEquals(2, holder.numWords());

        // after third activation, word "test" should NOT be removed, since at point 0 we have freq == 1, and 2 in the following tests
        holder.activateScavenger();
        assertEquals(2, holder.numWords());

        // here we should have all retention points shifted, and word "test" should be removed
        holder.activateScavenger();
        assertEquals(1, holder.numWords());
    }

    @Test
    public void testScavenger4() throws Exception {
        VocabularyHolder holder = new VocabularyHolder.Builder()
                .minWordFrequency(5)
                .hugeModelExpected(true)
                .scavengerActivationThreshold(1000000) // this value doesn't really matter, since we'll call the scavenger manually
                .scavengerRetentionDelay(3)
                .build();

        holder.addWord("test");

        holder.activateScavenger();
        assertEquals(1, holder.numWords());

        holder.incrementWordCounter("test");
        holder.addWord("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");
        holder.incrementWordCounter("tests");

        holder.activateScavenger();
        assertEquals(2, holder.numWords());

        // after third activation, word "test" should NOT be removed, since at point 0 we have freq == 1, and 2 in the following tests
        holder.activateScavenger();
        assertEquals(2, holder.numWords());

        holder.incrementWordCounter("test");

        // here we should have all retention points shifted, and word "test" should NOT be removed, since now it's above the scavenger threshold
        holder.activateScavenger();
        assertEquals(2, holder.numWords());
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.solr.client.solrj.request;

import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.LinkedHashMap;

import org.apache.solr.client.solrj.impl.LBHttpSolrServer;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.DocRouter;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.XML;

/**
 * A request for adding and/or deleting documents, serialized as Solr's XML
 * update format. Per-document options (overwrite, commitWithin) and
 * per-delete options (version) are stored in the small attribute maps keyed
 * by {@link #OVERWRITE}, {@link #COMMIT_WITHIN} and {@link #VER}.
 *
 * @since solr 1.3
 */
public class UpdateRequest extends AbstractUpdateRequest {

  /** Attribute-map key for the version constraint of a delete-by-id. */
  public static final String VER = "ver";
  /** Attribute-map key for a document's overwrite flag. */
  public static final String OVERWRITE = "ow";
  /** Attribute-map key for a document's commitWithin value (milliseconds). */
  public static final String COMMIT_WITHIN = "cw";

  // insertion order matters for both maps, hence LinkedHashMap; a null value
  // means "no per-entry options"
  private Map<SolrInputDocument,Map<String,Object>> documents = null;
  private Iterator<SolrInputDocument> docIterator = null;
  private Map<String,Map<String,Object>> deleteById = null;
  private List<String> deleteQuery = null;

  public UpdateRequest() {
    super(METHOD.POST, "/update");
  }

  public UpdateRequest(String url) {
    super(METHOD.POST, url);
  }

  // ---------------------------------------------------------------------------
  // ---------------------------------------------------------------------------

  /**
   * clear the pending documents and delete commands
   */
  public void clear() {
    if (documents != null) {
      documents.clear();
    }
    if (deleteById != null) {
      deleteById.clear();
    }
    if (deleteQuery != null) {
      deleteQuery.clear();
    }
  }

  // ---------------------------------------------------------------------------
  // ---------------------------------------------------------------------------

  /** Adds a document with default options. */
  public UpdateRequest add(final SolrInputDocument doc) {
    if (documents == null) {
      documents = new LinkedHashMap<SolrInputDocument,Map<String,Object>>();
    }
    documents.put(doc, null);
    return this;
  }

  /** Adds a document with an explicit overwrite flag. */
  public UpdateRequest add(final SolrInputDocument doc, Boolean overwrite) {
    return add(doc, null, overwrite);
  }

  /** Adds a document with an explicit commitWithin (milliseconds). */
  public UpdateRequest add(final SolrInputDocument doc, Integer commitWithin) {
    return add(doc, commitWithin, null);
  }

  /**
   * Adds a document with optional per-document commitWithin and overwrite
   * settings; a null setting means "use the request default".
   */
  public UpdateRequest add(final SolrInputDocument doc, Integer commitWithin,
      Boolean overwrite) {
    if (documents == null) {
      documents = new LinkedHashMap<SolrInputDocument,Map<String,Object>>();
    }
    Map<String,Object> params = new HashMap<String,Object>(2);
    if (commitWithin != null) params.put(COMMIT_WITHIN, commitWithin);
    if (overwrite != null) params.put(OVERWRITE, overwrite);
    documents.put(doc, params);
    return this;
  }

  /** Adds a collection of documents, each with default options. */
  public UpdateRequest add(final Collection<SolrInputDocument> docs) {
    if (documents == null) {
      documents = new LinkedHashMap<SolrInputDocument,Map<String,Object>>();
    }
    for (SolrInputDocument doc : docs) {
      documents.put(doc, null);
    }
    return this;
  }

  /** Queues a delete of the given id. */
  public UpdateRequest deleteById(String id) {
    if (deleteById == null) {
      deleteById = new LinkedHashMap<String,Map<String,Object>>();
    }
    deleteById.put(id, null);
    return this;
  }

  /** Queues deletes for every id in the list. */
  public UpdateRequest deleteById(List<String> ids) {
    if (deleteById == null) {
      deleteById = new LinkedHashMap<String,Map<String,Object>>();
    }
    for (String id : ids) {
      deleteById.put(id, null);
    }
    return this;
  }

  /** Queues a delete of the given id, constrained to the given version. */
  public UpdateRequest deleteById(String id, Long version) {
    if (deleteById == null) {
      deleteById = new LinkedHashMap<String,Map<String,Object>>();
    }
    Map<String,Object> params = new HashMap<String,Object>(1);
    params.put(VER, version);
    deleteById.put(id, params);
    return this;
  }

  /** Queues a delete-by-query. */
  public UpdateRequest deleteByQuery(String q) {
    if (deleteQuery == null) {
      deleteQuery = new ArrayList<String>();
    }
    deleteQuery.add(q);
    return this;
  }

  /**
   * @param router to route updates with
   * @param col DocCollection for the updates
   * @param urlMap of the cluster
   * @param params params to use
   * @param idField the id field
   * @return a Map of urls to requests, or null if any id is missing or any
   *         target slice cannot be resolved (caller falls back to non-routed)
   */
  public Map<String,LBHttpSolrServer.Req> getRoutes(DocRouter router,
      DocCollection col, Map<String,List<String>> urlMap,
      ModifiableSolrParams params, String idField) {

    if ((documents == null || documents.size() == 0)
        && (deleteById == null || deleteById.size() == 0)) {
      return null;
    }

    Map<String,LBHttpSolrServer.Req> routes = new HashMap<String,LBHttpSolrServer.Req>();
    if (documents != null) {
      Set<Entry<SolrInputDocument,Map<String,Object>>> entries = documents.entrySet();
      for (Entry<SolrInputDocument,Map<String,Object>> entry : entries) {
        SolrInputDocument doc = entry.getKey();
        Object id = doc.getFieldValue(idField);
        if (id == null) {
          return null;
        }
        Slice slice = router.getTargetSlice(id.toString(), doc, null, col);
        if (slice == null) {
          return null;
        }
        List<String> urls = urlMap.get(slice.getName());
        String leaderUrl = urls.get(0);
        // note: the cast that used to be here was redundant; the map is
        // already typed to LBHttpSolrServer.Req
        LBHttpSolrServer.Req request = routes.get(leaderUrl);
        if (request == null) {
          // first document routed to this leader: create a sub-request that
          // mirrors this request's settings
          UpdateRequest updateRequest = new UpdateRequest();
          updateRequest.setMethod(getMethod());
          updateRequest.setCommitWithin(getCommitWithin());
          updateRequest.setParams(params);
          updateRequest.setPath(getPath());
          request = new LBHttpSolrServer.Req(updateRequest, urls);
          routes.put(leaderUrl, request);
        }
        UpdateRequest urequest = (UpdateRequest) request.getRequest();
        urequest.add(doc);
      }
    }

    // Route the deleteById's
    if (deleteById != null) {
      Iterator<Map.Entry<String,Map<String,Object>>> entries = deleteById.entrySet()
          .iterator();
      while (entries.hasNext()) {
        Map.Entry<String,Map<String,Object>> entry = entries.next();

        String deleteId = entry.getKey();
        Map<String,Object> map = entry.getValue();
        Long version = null;
        if (map != null) {
          version = (Long) map.get(VER);
        }
        Slice slice = router.getTargetSlice(deleteId, null, null, col);
        if (slice == null) {
          return null;
        }
        List<String> urls = urlMap.get(slice.getName());
        String leaderUrl = urls.get(0);
        LBHttpSolrServer.Req request = routes.get(leaderUrl);
        if (request != null) {
          UpdateRequest urequest = (UpdateRequest) request.getRequest();
          urequest.deleteById(deleteId, version);
        } else {
          UpdateRequest urequest = new UpdateRequest();
          urequest.setParams(params);
          urequest.deleteById(deleteId, version);
          request = new LBHttpSolrServer.Req(urequest, urls);
          routes.put(leaderUrl, request);
        }
      }
    }

    return routes;
  }

  public void setDocIterator(Iterator<SolrInputDocument> docIterator) {
    this.docIterator = docIterator;
  }

  public void setDeleteQuery(List<String> deleteQuery) {
    this.deleteQuery = deleteQuery;
  }

  // --------------------------------------------------------------------------
  // --------------------------------------------------------------------------

  @Override
  public Collection<ContentStream> getContentStreams() throws IOException {
    return ClientUtils.toContentStreams(getXML(), ClientUtils.TEXT_XML);
  }

  /**
   * Serializes the pending commands to Solr update XML.
   *
   * @return the XML string, or null if nothing was written (e.g. COMMIT or
   *         OPTIMIZE actions are sent as request params instead)
   */
  public String getXML() throws IOException {
    StringWriter writer = new StringWriter();
    writeXML(writer);
    writer.flush();

    // If action is COMMIT or OPTIMIZE, it is sent with params
    String xml = writer.toString();
    // System.out.println( "SEND:"+xml );
    return (xml.length() > 0) ? xml : null;
  }

  /**
   * Groups the pending documents (map order preserved) into consecutive runs
   * that share the same overwrite and commitWithin settings, so writeXML can
   * emit one &lt;add&gt; element per run. Documents from {@link #docIterator},
   * if set, are appended as one additional run with default settings.
   */
  private List<Map<SolrInputDocument,Map<String,Object>>> getDocLists(Map<SolrInputDocument,Map<String,Object>> documents) {
    List<Map<SolrInputDocument,Map<String,Object>>> docLists = new ArrayList<Map<SolrInputDocument,Map<String,Object>>>();
    Map<SolrInputDocument,Map<String,Object>> docList = null;
    if (this.documents != null) {

      Boolean lastOverwrite = true;
      Integer lastCommitWithin = -1;

      Set<Entry<SolrInputDocument,Map<String,Object>>> entries = this.documents
          .entrySet();
      for (Entry<SolrInputDocument,Map<String,Object>> entry : entries) {
        Map<String,Object> map = entry.getValue();
        Boolean overwrite = null;
        Integer commitWithin = null;
        if (map != null) {
          overwrite = (Boolean) map.get(OVERWRITE);
          commitWithin = (Integer) map.get(COMMIT_WITHIN);
        }
        // Compare by value, not by reference: these are boxed types, and '!='
        // would treat two equal Integers (e.g. two docs both added with
        // commitWithin=1000, which autoboxes to distinct objects outside the
        // Integer cache) as different, needlessly splitting equal-settings
        // documents into separate <add> blocks.
        if (!valueEquals(overwrite, lastOverwrite)
            || !valueEquals(commitWithin, lastCommitWithin)
            || docLists.size() == 0) {
          docList = new LinkedHashMap<SolrInputDocument,Map<String,Object>>();
          docLists.add(docList);
        }
        docList.put(entry.getKey(), entry.getValue());
        lastCommitWithin = commitWithin;
        lastOverwrite = overwrite;
      }
    }

    if (docIterator != null) {
      docList = new LinkedHashMap<SolrInputDocument,Map<String,Object>>();
      docLists.add(docList);
      while (docIterator.hasNext()) {
        SolrInputDocument doc = docIterator.next();
        if (doc != null) {
          docList.put(doc, null);
        }
      }
    }

    return docLists;
  }

  /** Null-safe value equality for the boxed grouping keys used above. */
  private static boolean valueEquals(Object a, Object b) {
    return a == null ? b == null : a.equals(b);
  }

  /**
   * Writes the pending adds and deletes as Solr update XML.
   *
   * @since solr 1.4
   */
  public void writeXML(Writer writer) throws IOException {
    List<Map<SolrInputDocument,Map<String,Object>>> getDocLists = getDocLists(documents);

    for (Map<SolrInputDocument,Map<String,Object>> docs : getDocLists) {

      if ((docs != null && docs.size() > 0)) {
        // all docs in the run share the same settings; read them off the first
        Entry<SolrInputDocument,Map<String,Object>> firstDoc = docs.entrySet()
            .iterator().next();
        Map<String,Object> map = firstDoc.getValue();
        Integer cw = null;
        Boolean ow = null;
        if (map != null) {
          cw = (Integer) map.get(COMMIT_WITHIN);
          ow = (Boolean) map.get(OVERWRITE);
        }
        if (ow == null) ow = true;
        int commitWithin = (cw != null && cw != -1) ? cw : this.commitWithin;
        boolean overwrite = ow;
        if (commitWithin > -1 || overwrite != true) {
          writer.write("<add commitWithin=\"" + commitWithin + "\" "
              + "overwrite=\"" + overwrite + "\">");
        } else {
          writer.write("<add>");
        }

        Set<Entry<SolrInputDocument,Map<String,Object>>> entries = docs
            .entrySet();
        for (Entry<SolrInputDocument,Map<String,Object>> entry : entries) {
          ClientUtils.writeXML(entry.getKey(), writer);
        }

        writer.write("</add>");
      }
    }

    // Add the delete commands
    boolean deleteI = deleteById != null && deleteById.size() > 0;
    boolean deleteQ = deleteQuery != null && deleteQuery.size() > 0;
    if (deleteI || deleteQ) {
      if (commitWithin > 0) {
        writer.append("<delete commitWithin=\"" + commitWithin + "\">");
      } else {
        writer.append("<delete>");
      }
      if (deleteI) {
        for (Map.Entry<String,Map<String,Object>> entry : deleteById.entrySet()) {
          writer.append("<id");
          Map<String,Object> map = entry.getValue();
          if (map != null) {
            Long version = (Long) map.get(VER);
            if (version != null) {
              writer.append(" version=\"" + version + "\"");
            }
          }
          writer.append(">");
          XML.escapeCharData(entry.getKey(), writer);
          writer.append("</id>");
        }
      }
      if (deleteQ) {
        for (String q : deleteQuery) {
          writer.append("<query>");
          XML.escapeCharData(q, writer);
          writer.append("</query>");
        }
      }
      writer.append("</delete>");
    }
  }

  // --------------------------------------------------------------------------
  // --------------------------------------------------------------------------

  // --------------------------------------------------------------------------
  //
  // --------------------------------------------------------------------------

  public List<SolrInputDocument> getDocuments() {
    if (documents == null) return null;
    List<SolrInputDocument> docs = new ArrayList<SolrInputDocument>(documents.size());
    docs.addAll(documents.keySet());
    return docs;
  }

  public Map<SolrInputDocument,Map<String,Object>> getDocumentsMap() {
    return documents;
  }

  public Iterator<SolrInputDocument> getDocIterator() {
    return docIterator;
  }

  public List<String> getDeleteById() {
    if (deleteById == null) return null;
    List<String> deletes = new ArrayList<String>(deleteById.keySet());
    return deletes;
  }

  public Map<String,Map<String,Object>> getDeleteByIdMap() {
    return deleteById;
  }

  public List<String> getDeleteQuery() {
    return deleteQuery;
  }

}
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.refactoring.makeStatic; import com.intellij.codeInsight.AnnotationUtil; import com.intellij.codeInsight.TestFrameworks; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleManager; import com.intellij.psi.javadoc.PsiDocTag; import com.intellij.psi.util.InheritanceUtil; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.refactoring.changeSignature.JavaChangeInfoImpl; import com.intellij.refactoring.changeSignature.JavaChangeSignatureUsageProcessor; import com.intellij.refactoring.changeSignature.ParameterInfoImpl; import com.intellij.refactoring.changeSignature.ThrownExceptionInfo; import com.intellij.refactoring.changeSignature.inCallers.JavaCallerChooser; import com.intellij.refactoring.util.CanonicalTypes; import com.intellij.refactoring.util.RefactoringUtil; import com.intellij.refactoring.util.javadoc.MethodJavaDocHelper; import com.intellij.usageView.UsageInfo; import com.intellij.util.Consumer; import com.intellij.util.IncorrectOperationException; import com.intellij.util.VisibilityUtil; import com.intellij.util.containers.MultiMap; import com.intellij.util.ui.tree.TreeUtil; import java.util.ArrayList; import java.util.Collections; import 
java.util.List; import java.util.Set; /** * @author dsl */ public class MakeMethodStaticProcessor extends MakeMethodOrClassStaticProcessor<PsiMethod> { private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.makeMethodStatic.MakeMethodStaticProcessor"); private List<PsiMethod> myAdditionalMethods; public MakeMethodStaticProcessor(final Project project, final PsiMethod method, final Settings settings) { super(project, method, settings); } @Override protected boolean findAdditionalMembers(final Set<UsageInfo> toMakeStatic) { if (!toMakeStatic.isEmpty()) { myAdditionalMethods = new ArrayList<PsiMethod>(); if (ApplicationManager.getApplication().isUnitTestMode()) { for (UsageInfo usageInfo : toMakeStatic) { myAdditionalMethods.add((PsiMethod)usageInfo.getElement()); } } else { final JavaCallerChooser chooser = new MakeStaticJavaCallerChooser(myMember, myProject, new Consumer<Set<PsiMethod>>() { @Override public void consume(Set<PsiMethod> methods) { myAdditionalMethods.addAll(methods); } }) { @Override protected ArrayList<UsageInfo> getTopLevelItems() { return new ArrayList<UsageInfo>(toMakeStatic); } }; TreeUtil.expand(chooser.getTree(), 2); if (!chooser.showAndGet()) { return false; } } } return true; } @Override protected MultiMap<PsiElement, String> getConflictDescriptions(UsageInfo[] usages) { MultiMap<PsiElement, String> descriptions = super.getConflictDescriptions(usages); if (mySettings.isMakeClassParameter() || mySettings.isMakeFieldParameters()) { for (UsageInfo usage : usages) { PsiElement element = usage.getElement(); if (element instanceof PsiMethodReferenceExpression) { descriptions.putValue(element, "Method reference will be corrupted"); } } } return descriptions; } protected void changeSelfUsage(SelfUsageInfo usageInfo) throws IncorrectOperationException { PsiElement parent = usageInfo.getElement().getParent(); LOG.assertTrue(parent instanceof PsiMethodCallExpression); PsiMethodCallExpression methodCall = 
(PsiMethodCallExpression) parent; final PsiExpression qualifier = methodCall.getMethodExpression().getQualifierExpression(); if (qualifier != null) qualifier.delete(); PsiElementFactory factory = JavaPsiFacade.getInstance(methodCall.getProject()).getElementFactory(); PsiExpressionList args = methodCall.getArgumentList(); PsiElement addParameterAfter = null; if(mySettings.isMakeClassParameter()) { PsiElement arg = factory.createExpressionFromText(mySettings.getClassParameterName(), null); addParameterAfter = args.addAfter(arg, null); } if(mySettings.isMakeFieldParameters()) { List<Settings.FieldParameter> parameters = mySettings.getParameterOrderList(); for (Settings.FieldParameter fieldParameter : parameters) { PsiElement arg = factory.createExpressionFromText(fieldParameter.name, null); if (addParameterAfter == null) { addParameterAfter = args.addAfter(arg, null); } else { addParameterAfter = args.addAfter(arg, addParameterAfter); } } } } protected void changeSelf(PsiElementFactory factory, UsageInfo[] usages) throws IncorrectOperationException { final MethodJavaDocHelper javaDocHelper = new MethodJavaDocHelper(myMember); PsiParameterList paramList = myMember.getParameterList(); PsiElement addParameterAfter = null; PsiDocTag anchor = null; List<PsiType> addedTypes = new ArrayList<PsiType>(); final PsiClass containingClass = myMember.getContainingClass(); LOG.assertTrue(containingClass != null); if (mySettings.isDelegate()) { List<ParameterInfoImpl> params = new ArrayList<ParameterInfoImpl>(); PsiParameter[] parameters = myMember.getParameterList().getParameters(); if (mySettings.isMakeClassParameter()) { params.add(new ParameterInfoImpl(-1, mySettings.getClassParameterName(), factory.createType(containingClass, PsiSubstitutor.EMPTY), "this")); } if (mySettings.isMakeFieldParameters()) { for (Settings.FieldParameter parameter : mySettings.getParameterOrderList()) { params.add(new ParameterInfoImpl(-1, mySettings.getClassParameterName(), parameter.type, 
parameter.field.getName())); } } for (int i = 0; i < parameters.length; i++) { params.add(new ParameterInfoImpl(i)); } final PsiType returnType = myMember.getReturnType(); LOG.assertTrue(returnType != null); JavaChangeSignatureUsageProcessor.generateDelegate(new JavaChangeInfoImpl(VisibilityUtil.getVisibilityModifier(myMember.getModifierList()), myMember, myMember.getName(), CanonicalTypes.createTypeWrapper(returnType), params.toArray(new ParameterInfoImpl[params.size()]), new ThrownExceptionInfo[0], false, Collections.<PsiMethod>emptySet(), Collections.<PsiMethod>emptySet())); } if (mySettings.isMakeClassParameter()) { // Add parameter for object PsiType parameterType = factory.createType(containingClass, PsiSubstitutor.EMPTY); addedTypes.add(parameterType); final String classParameterName = mySettings.getClassParameterName(); PsiParameter parameter = factory.createParameter(classParameterName, parameterType); if(makeClassParameterFinal(usages)) { PsiUtil.setModifierProperty(parameter, PsiModifier.FINAL, true); } addParameterAfter = paramList.addAfter(parameter, null); anchor = javaDocHelper.addParameterAfter(classParameterName, anchor); } if (mySettings.isMakeFieldParameters()) { List<Settings.FieldParameter> parameters = mySettings.getParameterOrderList(); for (Settings.FieldParameter fieldParameter : parameters) { final PsiType fieldParameterType = fieldParameter.field.getType(); final PsiParameter parameter = factory.createParameter(fieldParameter.name, fieldParameterType); addedTypes.add(fieldParameterType); if (makeFieldParameterFinal(fieldParameter.field, usages)) { PsiUtil.setModifierProperty(parameter, PsiModifier.FINAL, true); } addParameterAfter = paramList.addAfter(parameter, addParameterAfter); anchor = javaDocHelper.addParameterAfter(fieldParameter.name, anchor); } } makeStatic(myMember); if (myAdditionalMethods != null) { for (PsiMethod method : myAdditionalMethods) { makeStatic(method); } } } private void makeStatic(PsiMethod member) { final 
PsiAnnotation overrideAnnotation = AnnotationUtil.findAnnotation(member, CommonClassNames.JAVA_LANG_OVERRIDE); if (overrideAnnotation != null) { overrideAnnotation.delete(); } setupTypeParameterList(member); // Add static modifier final PsiModifierList modifierList = member.getModifierList(); modifierList.setModifierProperty(PsiModifier.STATIC, true); modifierList.setModifierProperty(PsiModifier.FINAL, false); modifierList.setModifierProperty(PsiModifier.DEFAULT, false); } protected void changeInternalUsage(InternalUsageInfo usage, PsiElementFactory factory) throws IncorrectOperationException { if (!mySettings.isChangeSignature()) return; PsiElement element = usage.getElement(); if (element instanceof PsiReferenceExpression) { PsiReferenceExpression newRef = null; if (mySettings.isMakeFieldParameters()) { PsiElement resolved = ((PsiReferenceExpression) element).resolve(); if (resolved instanceof PsiField) { String name = mySettings.getNameForField((PsiField) resolved); if (name != null) { newRef = (PsiReferenceExpression) factory.createExpressionFromText(name, null); } } } if (newRef == null && mySettings.isMakeClassParameter()) { newRef = (PsiReferenceExpression) factory.createExpressionFromText( mySettings.getClassParameterName() + "." 
+ element.getText(), null); } if (newRef != null) { CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(myProject); newRef = (PsiReferenceExpression) codeStyleManager.reformat(newRef); element.replace(newRef); } } else if (element instanceof PsiThisExpression && mySettings.isMakeClassParameter()) { element.replace(factory.createExpressionFromText(mySettings.getClassParameterName(), null)); } else if (element instanceof PsiSuperExpression && mySettings.isMakeClassParameter()) { element.replace(factory.createExpressionFromText(mySettings.getClassParameterName(), null)); } else if (element instanceof PsiNewExpression && mySettings.isMakeClassParameter()) { final PsiNewExpression newExpression = ((PsiNewExpression)element); LOG.assertTrue(newExpression.getQualifier() == null); final String newText = mySettings.getClassParameterName() + "." + newExpression.getText(); final PsiExpression expr = factory.createExpressionFromText(newText, null); element.replace(expr); } } protected void changeExternalUsage(UsageInfo usage, PsiElementFactory factory) throws IncorrectOperationException { final PsiElement element = usage.getElement(); if (!(element instanceof PsiReferenceExpression)) return; PsiReferenceExpression methodRef = (PsiReferenceExpression) element; PsiElement parent = methodRef.getParent(); PsiExpression instanceRef; instanceRef = methodRef.getQualifierExpression(); PsiElement newQualifier; final PsiClass memberClass = myMember.getContainingClass(); if (instanceRef == null || instanceRef instanceof PsiSuperExpression) { PsiClass contextClass = PsiTreeUtil.getParentOfType(element, PsiClass.class); if (!InheritanceUtil.isInheritorOrSelf(contextClass, memberClass, true)) { instanceRef = factory.createExpressionFromText(memberClass.getQualifiedName() + ".this", null); } else { instanceRef = factory.createExpressionFromText("this", null); } newQualifier = null; } else { newQualifier = factory.createReferenceExpression(memberClass); } if 
(mySettings.getNewParametersNumber() > 1) { int copyingSafetyLevel = RefactoringUtil.verifySafeCopyExpression(instanceRef); if (copyingSafetyLevel == RefactoringUtil.EXPR_COPY_PROHIBITED) { String tempVar = RefactoringUtil.createTempVar(instanceRef, parent, true); instanceRef = factory.createExpressionFromText(tempVar, null); } } PsiElement anchor = null; PsiExpressionList argList = null; PsiExpression[] exprs = new PsiExpression[0]; if (parent instanceof PsiMethodCallExpression) { argList = ((PsiMethodCallExpression)parent).getArgumentList(); exprs = argList.getExpressions(); if (mySettings.isMakeClassParameter()) { if (exprs.length > 0) { anchor = argList.addBefore(instanceRef, exprs[0]); } else { anchor = argList.add(instanceRef); } } } if (mySettings.isMakeFieldParameters()) { List<Settings.FieldParameter> parameters = mySettings.getParameterOrderList(); for (Settings.FieldParameter fieldParameter : parameters) { PsiReferenceExpression fieldRef; if (newQualifier != null) { fieldRef = (PsiReferenceExpression)factory.createExpressionFromText( "a." 
+ fieldParameter.field.getName(), null); fieldRef.getQualifierExpression().replace(instanceRef); } else { fieldRef = (PsiReferenceExpression)factory.createExpressionFromText(fieldParameter.field.getName(), null); } if (anchor != null) { anchor = argList.addAfter(fieldRef, anchor); } else if (argList != null) { if (exprs.length > 0) { anchor = argList.addBefore(fieldRef, exprs[0]); } else { anchor = argList.add(fieldRef); } } } } if (newQualifier != null) { methodRef.getQualifierExpression().replace(newQualifier); } } protected void findExternalUsages(final ArrayList<UsageInfo> result) { if (mySettings.isDelegate()) return; findExternalReferences(myMember, result); } @Override protected void processExternalReference(PsiElement element, PsiMethod method, ArrayList<UsageInfo> result) { if (!mySettings.isChangeSignature()) { final PsiMethod containingMethod = MakeStaticJavaCallerChooser.isTheLastClassRef(element, method); if (containingMethod != null && !TestFrameworks.getInstance().isTestMethod(containingMethod)) { result.add(new ChainedCallUsageInfo(containingMethod)); } } } }
/* JAT: Java Astrodynamics Toolkit
 *
 * Copyright (c) 2003 National Aeronautics and Space Administration. All rights reserved.
 *
 * This file is part of JAT. JAT is free software; you can
 * redistribute it and/or modify it under the terms of the
 * NASA Open Source Agreement
 *
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * NASA Open Source Agreement for more details.
 *
 * You should have received a copy of the NASA Open Source Agreement
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 *
 *
 * File Created on Aug 28, 2003
 */
package jat.coreNOSA.cm;

import jat.coreNOSA.algorithm.integrators.LinePrinter;
import jat.coreNOSA.math.MatrixVector.data.VectorN;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.StringTokenizer;

/**
 * Maintains a list of {@link FiniteBurn} objects read from (or written to) a
 * tab-delimited ASCII file or a Java-serialized file.
 * <p>
 * The ASCII format is six tab-separated columns per line:
 * start time, stop time, acceleration, and the x/y/z components of the thrust
 * direction unit vector.
 *
 * @author
 * @version 1.0
 */
public class FiniteBurnList implements Serializable {

	private static final long serialVersionUID = -3126113111430876278L;

	/** The burns, in the order they were added or read from file. */
	private ArrayList<FiniteBurn> list = new ArrayList<FiniteBurn>();

	/** Constructor. Creates an empty burn list. */
	public FiniteBurnList() {
	}

	/**
	 * Constructor. Creates a burn list populated from a file.
	 *
	 * @param filename String containing directory and filename where the data resides
	 */
	public FiniteBurnList(String filename) {
		this.readFromFile(filename);
	}

	/**
	 * Add a burn to the collection.
	 *
	 * @param burn FiniteBurn object
	 */
	public void add(FiniteBurn burn) {
		list.add(burn);
	}

	/**
	 * Get a FiniteBurn out of the collection.
	 *
	 * @param index index of the burn
	 * @return the FiniteBurn
	 */
	public FiniteBurn get(int index) {
		return list.get(index);
	}

	/**
	 * Return the size of the list.
	 *
	 * @return the number of burns in the list
	 */
	public int size() {
		return list.size();
	}

	/**
	 * Returns the thrust direction unit vector of a burn.
	 *
	 * @param index int containing the burn index
	 * @return VectorN containing the thrust direction unit vector
	 */
	public VectorN unitVector(int index) {
		return this.get(index).unitVector;
	}

	/**
	 * Returns the start time of a burn.
	 *
	 * @param index int containing the burn index
	 * @return double containing the start time
	 */
	public double startTime(int index) {
		return this.get(index).tstart;
	}

	/**
	 * Returns the stop time of a burn.
	 *
	 * @param index int containing the burn index
	 * @return double containing the stop time
	 */
	public double stopTime(int index) {
		return this.get(index).tstop;
	}

	/**
	 * Returns the acceleration of a burn.
	 *
	 * @param index int containing the burn index
	 * @return double containing the acceleration
	 */
	public double accel(int index) {
		return this.get(index).accel;
	}

	/**
	 * Returns whether there is more data after the given index.
	 *
	 * @param index burn index
	 * @return true if there is at least one burn after {@code index}
	 */
	public boolean hasNext(int index) {
		return index < (this.size() - 1);
	}

	/**
	 * Read the burn data from a tab-delimited ASCII text file.
	 * <p>
	 * FIX: a malformed line now raises {@link IllegalArgumentException}
	 * instead of calling {@code System.exit(-99)} — a library class must not
	 * terminate the whole JVM. Streams are closed via try-with-resources so
	 * they no longer leak when parsing fails.
	 *
	 * @param file filename and directory
	 */
	public void readFromFile(String file) {
		try (BufferedReader in = new BufferedReader(new FileReader(file))) {
			String line;
			// loop through the file, one line at a time
			while ((line = in.readLine()) != null) {
				StringTokenizer tok = new StringTokenizer(line, "\t");
				// check for consistent number of columns
				if (tok.countTokens() != 6) {
					throw new IllegalArgumentException(
							"FiniteBurnList.readFromFile: Number of columns do not match in " + file);
				}
				double[] temp = new double[6];
				for (int i = 0; i < 6; i++) {
					temp[i] = Double.parseDouble(tok.nextToken());
				}
				VectorN unit = new VectorN(temp[3], temp[4], temp[5]);
				this.add(new FiniteBurn(temp[0], temp[1], temp[2], unit));
			}
		} catch (IOException e) {
			System.err.println("Error opening:" + file);
		}
	}

	/**
	 * Write the burn data out to a tab-delimited ASCII text file.
	 * <p>
	 * FIX: the original {@code while (hasNext(index))} loop stopped at
	 * {@code size() - 1} and silently dropped the last burn; all burns are
	 * now written.
	 *
	 * @param file filename and directory
	 */
	public void sendToFile(String file) {
		LinePrinter lp = new LinePrinter(file);
		for (int i = 0; i < this.size(); i++) {
			FiniteBurn burn = this.get(i);
			lp.println(burn.tstart + "\t" + burn.tstop + "\t" + burn.accel
					+ "\t" + burn.unitVector.toString());
		}
		lp.close();
	}

	/**
	 * Recover a serialized FiniteBurnList file.
	 * <p>
	 * NOTE(review): Java native deserialization is unsafe on untrusted input;
	 * only use this on files this application produced itself.
	 *
	 * @param filename string containing the directory and filename
	 * @return the recovered burn list (empty if recovery failed)
	 */
	public static FiniteBurnList recover(String filename) {
		FiniteBurnList out = new FiniteBurnList();
		try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(filename))) {
			out = (FiniteBurnList) in.readObject();
		} catch (Exception e) {
			System.err.println("recover: " + e);
		}
		return out;
	}

	/**
	 * Write the burn list out to a serialized FiniteBurn file.
	 *
	 * @param filename string containing the directory and filename
	 */
	public void serialize(String filename) {
		try (ObjectOutputStream out = new ObjectOutputStream(new FileOutputStream(filename))) {
			out.writeObject(this);
		} catch (Exception e) {
			System.err.println("serialize: " + e);
		}
	}

	/** Smoke test: round-trips a burn file through serialization and prints it. */
	public static void main(String[] args) {
		FiniteBurnList x = new FiniteBurnList();
		x.readFromFile("C:\\Jat\\jat\\input\\burns\\vbar_burns.txt");
		// Serialize the burns
		x.serialize("C:\\Jat\\jat\\input\\burns\\vbar_burns.jat");
		System.out.println("burn list serialized");
		// Recover the burn list and print all to screen
		FiniteBurnList bl = FiniteBurnList.recover("C:\\Jat\\jat\\input\\burns\\vbar_burns.jat");
		System.out.println("Printing Recovered DeltaV's");
		for (int i = 0; i < bl.size(); i++) {
			double tstart = bl.startTime(i);
			double tstop = bl.stopTime(i);
			double acc = bl.accel(i);
			VectorN unit = bl.unitVector(i);
			System.out.println("burn: " + i + " " + tstart + " " + tstop + " " + acc + " " + unit);
		}
	}
}
package com.cmu.scout.ui;

import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;

import android.app.AlertDialog;
import android.app.Dialog;
import android.content.ContentValues;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.provider.MediaStore;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.support.v4.widget.ResourceCursorAdapter;
import android.text.TextUtils;
import android.view.ContextMenu;
import android.view.ContextMenu.ContextMenuInfo;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.AdapterView.AdapterContextMenuInfo;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;

import com.actionbarsherlock.view.Menu;
import com.actionbarsherlock.view.MenuItem;
import com.cmu.scout.R;
import com.cmu.scout.camera.BaseCameraActivity;
import com.cmu.scout.provider.ScoutContract.Matches;
import com.cmu.scout.provider.ScoutContract.Teams;

/**
 * Lists all scouted teams, backed by the {@code Teams} content provider via a
 * {@link CursorLoader}. Supports adding teams, deleting teams (with their
 * match data), and taking/viewing a team photo through the camera app.
 */
public class OldTeamListActivity extends BaseCameraActivity implements
		LoaderManager.LoaderCallbacks<Cursor> {

	// private static final String TAG = "TeamListActivity";
	// private static final boolean DEBUG = true;

	private static final int TEAM_LIST_LOADER = 0x01;

	// camera intent request code
	private static final int ACTION_TAKE_PHOTO_CODE = 1;

	// instance-state keys for the in-flight photo capture
	private static final String PHOTO_PATH_STORAGE_KEY = "CurrentPhotoPath";
	private static final String TEAM_ID_STORAGE_KEY = "CurrentTeamId";

	private static final String CAMERA_ACTION = "android.hardware.camera";

	private String mCurrentPhotoPath;
	private String mCurrentPhotoName;
	private long mCurrentTeamId;

	private static final String JPEG_FILE_PREFIX = "IMG_";
	private static final String JPEG_FILE_SUFFIX = ".jpg";

	public String mSelection = null;

	private TeamListAdapter mAdapter;

	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.team_list_view);

		// enable "up" navigation
		getSupportActionBar().setDisplayHomeAsUpEnabled(true);
		setActionBarTitle(getResources().getString(R.string.team_scouting_title));

		getSupportLoaderManager().initLoader(TEAM_LIST_LOADER, null, this);
		mAdapter = new TeamListAdapter(this, R.layout.team_list_row, null, 0);

		ListView lv = (ListView) findViewById(R.id.team_list);
		lv.setOnItemClickListener(new OnItemClickListener() {
			@Override
			public void onItemClick(AdapterView<?> parent, View v, int position, long id) {
				onTeamSelected((int) id);
			}
		});
		lv.setAdapter(mAdapter);
		registerForContextMenu(lv);
	}

	@Override
	protected void onSaveInstanceState(Bundle outState) {
		// preserve the pending photo capture across configuration changes
		outState.putString(PHOTO_PATH_STORAGE_KEY, mCurrentPhotoPath);
		outState.putLong(TEAM_ID_STORAGE_KEY, mCurrentTeamId);
		super.onSaveInstanceState(outState);
	}

	@Override
	protected void onRestoreInstanceState(Bundle savedInstanceState) {
		super.onRestoreInstanceState(savedInstanceState);
		mCurrentPhotoPath = savedInstanceState.getString(PHOTO_PATH_STORAGE_KEY);
		mCurrentTeamId = savedInstanceState.getLong(TEAM_ID_STORAGE_KEY);
	}

	/** Opens the input screen for the team with the given row id. */
	public void onTeamSelected(int id) {
		final Intent data = new Intent(this, OldTeamInputActivity.class);
		data.setData(Teams.buildTeamIdUri("" + id));
		startActivity(data);
	}

	/**
	 * Setup menus
	 */
	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		getSupportMenuInflater().inflate(R.menu.team_grid_options_menu, menu);
		return super.onCreateOptionsMenu(menu);
	}

	@Override
	public boolean onOptionsItemSelected(MenuItem item) {
		switch (item.getItemId()) {
		case android.R.id.home:
			// go to home screen when app icon in action bar is clicked
			Intent intent = new Intent(this, DashboardActivity.class);
			intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
			startActivity(intent);
			return true;
		case R.id.menu_add_team:
			showDialog();
			return true;
		}
		return super.onOptionsItemSelected(item);
	}

	@Override
	public void onCreateContextMenu(ContextMenu menu, View v, ContextMenuInfo menuInfo) {
		super.onCreateContextMenu(menu, v, menuInfo);
		final boolean cameraAvailable = isCameraAvailable(this, CAMERA_ACTION)
				&& isIntentAvailable(this, "android.media.action.IMAGE_CAPTURE");
		getMenuInflater().inflate(R.menu.team_grid_context_menu, menu);
		// enable "take picture" only if the device has a camera application
		menu.findItem(R.id.menu_take_picture).setEnabled(cameraAvailable);
	}

	@Override
	public boolean onContextItemSelected(android.view.MenuItem item) {
		AdapterContextMenuInfo info = (AdapterContextMenuInfo) item.getMenuInfo();
		switch (item.getItemId()) {
		case R.id.menu_take_picture:
			mCurrentPhotoPath = null;
			mCurrentPhotoName = null;
			mCurrentTeamId = -1;
			dispatchTakePictureIntent(ACTION_TAKE_PHOTO_CODE, info.id);
			return true;
		case R.id.menu_view_picture:
			Intent intent = new Intent();
			intent.setAction(android.content.Intent.ACTION_VIEW);
			Uri teamUri = Teams.buildTeamIdUri("" + info.id);
			Cursor cur = getContentResolver().query(teamUri, null, null, null, null);
			Uri uri = null;
			if (cur != null) {
				// FIX: close the cursor even when it is empty — the original
				// only closed it inside the moveToFirst() branch (leak).
				try {
					if (cur.moveToFirst()) {
						String photo = cur.getString(cur.getColumnIndex(Teams.TEAM_PHOTO));
						uri = (!TextUtils.isEmpty(photo)) ? Uri.parse(photo) : null;
					}
				} finally {
					cur.close();
				}
			}
			if (uri != null) {
				intent.setDataAndType(uri, "image/*");
				startActivity(intent);
			} else {
				Toast.makeText(this, R.string.image_not_found, Toast.LENGTH_SHORT).show();
			}
			return true;
		case R.id.menu_delete_team:
			showConfirmDeleteDialog(info.id);
			return true;
		}
		return super.onContextItemSelected(item);
	}

	private void setActionBarTitle(String title) {
		if (title != null) {
			getSupportActionBar().setTitle(title);
		}
	}

	@SuppressWarnings("unused")
	private void setActionBarSubtitle(String subtitle) {
		if (subtitle != null) {
			getSupportActionBar().setSubtitle(subtitle);
		}
	}

	/**
	 * Add-team dialog methods
	 */
	public void showDialog() {
		AddTeamDialog.newInstance().show(getSupportFragmentManager(), AddTeamDialog.TAG);
	}

	/**
	 * Handles the "OK" button of the add-team dialog: validates the entered
	 * team number and inserts it unless it already exists.
	 */
	public void doPositiveClick(String teamName) {
		if (TextUtils.isEmpty(teamName)) {
			Toast.makeText(this, "Invalid team number.", Toast.LENGTH_SHORT).show();
			return;
		}
		final int teamNum;
		try {
			teamNum = Integer.valueOf(teamName);
		} catch (NumberFormatException e) {
			// FIX: non-numeric input previously crashed with an uncaught
			// NumberFormatException; only emptiness was checked.
			Toast.makeText(this, "Invalid team number.", Toast.LENGTH_SHORT).show();
			return;
		}

		final Cursor cur = getContentResolver().query(Teams.CONTENT_URI,
				new String[] { Teams.TEAM_NUM }, Teams.TEAM_NUM + " = ?",
				new String[] { "" + teamName }, null);
		boolean duplicate = false;
		if (cur != null) {
			try {
				duplicate = cur.moveToFirst();
			} finally {
				// FIX: the original leaked the cursor when the query
				// returned a non-null but empty result.
				cur.close();
			}
		}

		if (duplicate) {
			// user is attempting to insert duplicate team number into database
			Toast.makeText(this, R.string.duplicate_team_number, Toast.LENGTH_SHORT).show();
		} else {
			// insert new team into database
			ContentValues values = new ContentValues();
			values.put(Teams.TEAM_NUM, teamNum);
			getContentResolver().insert(Teams.CONTENT_URI, values);
		}
	}

	public void doNegativeClick() {
		/* Do nothing */
	}

	/** Simple dialog that asks the user for a new team number. */
	public static class AddTeamDialog extends DialogFragment {
		private static final String TAG = "AddTeamDialog";

		public static AddTeamDialog newInstance() {
			return new AddTeamDialog();
		}

		@Override
		public Dialog onCreateDialog(Bundle savedInstanceState) {
			LayoutInflater factory = LayoutInflater.from(getActivity());
			final View edit = factory.inflate(R.layout.add_team_edit_text, null);
			final Dialog dialog = new AlertDialog.Builder(getActivity())
					.setTitle(R.string.add_team_dialog_title)
					.setView(edit)
					.setPositiveButton(R.string.ok,
							new DialogInterface.OnClickListener() {
								public void onClick(DialogInterface dialog, int whichButton) {
									String teamName = ((EditText) edit).getText().toString();
									((OldTeamListActivity) getActivity()).doPositiveClick(teamName);
								}
							})
					.setNegativeButton(R.string.cancel,
							new DialogInterface.OnClickListener() {
								public void onClick(DialogInterface dialog, int whichButton) {
									((OldTeamListActivity) getActivity()).doNegativeClick();
								}
							}).create();
			// pop the soft keyboard as soon as the text field gets focus
			edit.setOnFocusChangeListener(new View.OnFocusChangeListener() {
				@Override
				public void onFocusChange(View v, boolean hasFocus) {
					if (hasFocus) {
						dialog.getWindow().setSoftInputMode(
								WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_VISIBLE);
					}
				}
			});
			return dialog;
		}
	}

	/**
	 * Loader callback methods
	 */
	private static final String[] PROJECTION = new String[] { Teams._ID,
			Teams.TEAM_NUM, Teams.TEAM_PHOTO };
	private static final String DEFAULT_SORT = " COLLATE LOCALIZED ASC";

	@Override
	public Loader<Cursor> onCreateLoader(int id, Bundle args) {
		return new CursorLoader(this, Teams.CONTENT_URI, PROJECTION, null,
				null, Teams.TEAM_NUM + DEFAULT_SORT);
	}

	@Override
	public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
		mAdapter.swapCursor(cursor);
	}

	@Override
	public void onLoaderReset(Loader<Cursor> loader) {
		mAdapter.swapCursor(null);
	}

	/** Binds team rows (number + photo thumbnail) into the list. */
	private static class TeamListAdapter extends ResourceCursorAdapter {

		public TeamListAdapter(Context context, int layout, Cursor cur, int flags) {
			super(context, layout, cur, flags);
		}

		@Override
		public void bindView(View view, Context ctx, Cursor cur) {
			// use ViewHolder pattern to reduce number of times we search the
			// View hierarchy with "findViewById"
			ViewHolder holder = (ViewHolder) view.getTag();
			if (holder == null) {
				holder = new ViewHolder();

				// initialize views and column indices once per row view
				holder.teamNum = (TextView) view.findViewById(R.id.team_list_row_number);
				holder.teamPhoto = (ImageView) view.findViewById(R.id.team_list_row_photo);
				holder.teamNumCol = cur.getColumnIndexOrThrow(Teams.TEAM_NUM);
				holder.teamPhotoCol = cur.getColumnIndexOrThrow(Teams.TEAM_PHOTO);

				view.setTag(holder);
			}

			holder.teamNum.setText("" + cur.getInt(holder.teamNumCol));

			String uri = cur.getString(holder.teamPhotoCol);
			if (!TextUtils.isEmpty(uri)) {
				long photoId = Long.parseLong(Uri.parse(uri).getLastPathSegment());
				Bitmap bitmap = MediaStore.Images.Thumbnails.getThumbnail(
						ctx.getContentResolver(), photoId,
						MediaStore.Images.Thumbnails.MICRO_KIND, null);
				holder.teamPhoto.setImageBitmap(bitmap);
			} else {
				holder.teamPhoto.setImageDrawable(
						ctx.getResources().getDrawable(R.drawable.ic_contact_picture));
			}
		}

		static class ViewHolder {
			TextView teamNum;
			ImageView teamPhoto;
			int teamNumCol, teamPhotoCol;
		}
	}

	@Override
	protected void onActivityResult(int requestCode, int resultCode, Intent data) {
		switch (requestCode) {
		case ACTION_TAKE_PHOTO_CODE:
			if (resultCode == RESULT_OK) {
				handleBigCameraPhoto();
			}
			break;
		}
	}

	/** Creates the target file and fires the camera capture intent. */
	private void dispatchTakePictureIntent(int actionCode, long teamId) {
		final Intent takePicture = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
		mCurrentPhotoPath = null;
		mCurrentPhotoName = null;
		mCurrentTeamId = teamId;

		switch (actionCode) {
		case ACTION_TAKE_PHOTO_CODE:
			File f = null;
			try {
				f = createImageFile();
				mCurrentPhotoPath = f.getAbsolutePath();
				takePicture.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(f));
			} catch (IOException e) {
				e.printStackTrace();
				f = null;
				mCurrentPhotoPath = null;
				mCurrentPhotoName = null;
			}
			break;
		}
		startActivityForResult(takePicture, actionCode);
	}

	/** Creates a uniquely-named JPEG file in the album directory. */
	private File createImageFile() throws IOException {
		// Create an image file name
		String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
		String imageFileName = JPEG_FILE_PREFIX + timeStamp + "_";
		File albumF = getAlbumDir();
		File imageF = File.createTempFile(imageFileName, JPEG_FILE_SUFFIX, albumF);
		mCurrentPhotoName = imageFileName;
		return imageF;
	}

	private void handleBigCameraPhoto() {
		if (mCurrentPhotoPath != null) {
			scaleBitmap();
		}
	}

	/** Asks the media scanner to index the newly captured photo. */
	private void galleryAddPic() {
		if (isIntentAvailable(this, "android.intent.action.MEDIA_SCANNER_SCAN_FILE")) {
			Intent mediaScanIntent = new Intent("android.intent.action.MEDIA_SCANNER_SCAN_FILE");
			File f = new File(mCurrentPhotoPath);
			mediaScanIntent.setData(Uri.fromFile(f));
			this.sendBroadcast(mediaScanIntent);
		}
	}

	private void scaleBitmap() {
		new ScaleBitmapTask().execute();
	}

	// TODO: THIS IS A PRETTY STUPID WAY TO IMPLEMENT THIS... FIX LATER!!!
	/** Downscales the captured photo off the UI thread and stores its URI on the team row. */
	private class ScaleBitmapTask extends AsyncTask<String, String, String> {
		@Override
		protected String doInBackground(String... urls) {
			// TODO: FIX THIS LATER!
			int targetW = 512;
			int targetH = 512;

			/* Get the size of the image */
			BitmapFactory.Options bmOptions = new BitmapFactory.Options();
			bmOptions.inJustDecodeBounds = true;
			BitmapFactory.decodeFile(mCurrentPhotoPath, bmOptions);
			int photoW = bmOptions.outWidth;
			int photoH = bmOptions.outHeight;

			/* Figure out which way needs to be reduced less */
			int scaleFactor = 1;
			if ((targetW > 0) || (targetH > 0)) {
				// FIX: clamp to >= 1 — for photos smaller than the target the
				// integer division yields 0, an invalid inSampleSize.
				scaleFactor = Math.max(1, Math.min(photoW / targetW, photoH / targetH));
			}

			/* Set bitmap options to scale the image decode target */
			bmOptions.inJustDecodeBounds = false;
			bmOptions.inSampleSize = scaleFactor;
			bmOptions.inPurgeable = true;

			/* Decode the JPEG file into a Bitmap */
			Bitmap bitmap = BitmapFactory.decodeFile(mCurrentPhotoPath, bmOptions);
			if (bitmap == null) {
				// FIX: decodeFile returns null on failure; the original would
				// have crashed the background thread with an NPE.
				return null;
			}

			String inserted = MediaStore.Images.Media.insertImage(
					getContentResolver(), bitmap, mCurrentPhotoName + "_scaled.jpg", null);
			if (inserted == null) {
				// FIX: insertImage returns null on failure; avoid NPE in Uri.parse.
				return null;
			}
			Uri photoUri = Uri.parse(inserted);

			ContentValues values = new ContentValues();
			values.put(Teams.TEAM_PHOTO, photoUri.toString());
			getContentResolver().update(Teams.buildTeamIdUri("" + mCurrentTeamId),
					values, null, null);
			return null;
		}

		@Override
		protected void onPostExecute(String result) {
			mAdapter.notifyDataSetChanged();
			galleryAddPic();
		}
	}

	/** Confirms and performs deletion of a team and its match records. */
	public void showConfirmDeleteDialog(final long teamId) {
		AlertDialog.Builder builder = new AlertDialog.Builder(this);
		builder.setTitle(R.string.confirm_delete_team)
				.setMessage(R.string.confirm_delete_team_message)
				.setIcon(R.drawable.ic_dialog_alert_holo_light)
				.setPositiveButton(R.string.yes,
						new DialogInterface.OnClickListener() {
							public void onClick(DialogInterface dialog, int id) {
								Uri teamUri = Teams.buildTeamIdUri("" + teamId);
								Uri teamMatchesUri = Matches.buildMatchTeamIdUri("" + teamId);
								getContentResolver().delete(teamUri, null, null);
								getContentResolver().delete(teamMatchesUri, null, null);
							}
						})
				.setNegativeButton(R.string.no,
						new DialogInterface.OnClickListener() {
							public void onClick(DialogInterface dialog, int id) {
								dialog.cancel();
							}
						});
		AlertDialog alert = builder.create();
		alert.show();
	}
}
/******************************************************************************* * Open Behavioral Health Information Technology Architecture (OBHITA.org) * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the <organization> nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
******************************************************************************/ package gov.samhsa.acs.pep; import gov.samhsa.acs.common.bean.XacmlResult; import gov.samhsa.acs.common.dto.XacmlRequest; import gov.samhsa.acs.common.dto.XacmlResponse; import gov.samhsa.acs.common.exception.DS4PException; import gov.samhsa.acs.common.namespace.PepNamespaceContext; import gov.samhsa.acs.contexthandler.ContextHandler; import gov.samhsa.acs.documentsegmentation.DocumentSegmentation; import gov.samhsa.acs.pep.c32getter.C32Getter; import gov.samhsa.acs.pep.xdsbregistry.XdsbRegistry; import gov.samhsa.acs.pep.xdsbrepository.XdsbRepository; import gov.samhsa.acs.xdsb.common.UniqueOidProviderImpl; import gov.samhsa.acs.xdsb.common.XdsbDocumentType; import gov.samhsa.acs.xdsb.common.XdsbMetadataGeneratorImpl; import gov.samhsa.consent2share.schema.documentsegmentation.SegmentDocumentResponse; import gov.samhsa.ds4ppilot.schema.pep.FilterC32Response; import gov.samhsa.ds4ppilot.schema.pep.RegisteryStoredQueryResponse; import gov.samhsa.ds4ppilot.schema.pep.RetrieveDocumentSetResponse; import gov.va.ehtac.ds4p.ws.EnforcePolicy; import gov.va.ehtac.ds4p.ws.EnforcePolicyResponse.Return; import ihe.iti.xds_b._2007.ProvideAndRegisterDocumentSetRequest; import ihe.iti.xds_b._2007.ProvideAndRegisterDocumentSetRequest.Document; import ihe.iti.xds_b._2007.RetrieveDocumentSetRequest; import ihe.iti.xds_b._2007.RetrieveDocumentSetRequest.DocumentRequest; import ihe.iti.xds_b._2007.RetrieveDocumentSetResponse.DocumentResponse; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.StringReader; import java.io.StringWriter; import java.util.ArrayList; import java.util.Date; import java.util.GregorianCalendar; import java.util.List; import java.util.UUID; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBElement; import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import javax.xml.bind.PropertyException; import 
javax.xml.bind.Unmarshaller; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathFactory; import oasis.names.tc.ebxml_regrep.xsd.lcm._3.SubmitObjectsRequest; import oasis.names.tc.ebxml_regrep.xsd.query._3.AdhocQueryRequest; import oasis.names.tc.ebxml_regrep.xsd.query._3.AdhocQueryResponse; import oasis.names.tc.ebxml_regrep.xsd.query._3.ResponseOptionType; import oasis.names.tc.ebxml_regrep.xsd.rim._3.AdhocQueryType; import oasis.names.tc.ebxml_regrep.xsd.rim._3.ClassificationType; import oasis.names.tc.ebxml_regrep.xsd.rim._3.ExtrinsicObjectType; import oasis.names.tc.ebxml_regrep.xsd.rim._3.IdentifiableType; import oasis.names.tc.ebxml_regrep.xsd.rim._3.SlotType1; import oasis.names.tc.ebxml_regrep.xsd.rim._3.ValueListType; import oasis.names.tc.ebxml_regrep.xsd.rs._3.RegistryErrorList; import oasis.names.tc.ebxml_regrep.xsd.rs._3.RegistryResponse; import org.hl7.v3.Device; import org.hl7.v3.Id; import org.hl7.v3.PRPAIN201301UV02; import org.hl7.v3.PRPAIN201302UV02; import org.hl7.v3.PatientIdentityFeedRequestType.ControlActProcess; import org.hl7.v3.PatientIdentityFeedRequestType.ControlActProcess.Subject; import org.hl7.v3.PatientIdentityFeedRequestType.ControlActProcess.Subject.RegistrationEvent; import org.hl7.v3.PatientIdentityFeedRequestType.ControlActProcess.Subject.RegistrationEvent.Subject1; import org.hl7.v3.PatientIdentityFeedRequestType.ControlActProcess.Subject.RegistrationEvent.Subject1.Patient; import org.hl7.v3.PatientIdentityFeedRequestType.ControlActProcess.Subject.RegistrationEvent.Subject1.Patient.PatientPerson; import org.hl7.v3.PatientIdentityFeedRequestType.ControlActProcess.Subject.RegistrationEvent.Subject1.Patient.PatientPerson.Addr; import org.hl7.v3.PatientIdentityFeedRequestType.ControlActProcess.Subject.RegistrationEvent.Subject1.Patient.PatientPerson.BirthTime; import 
org.hl7.v3.PatientIdentityFeedRequestType.ControlActProcess.Subject.RegistrationEvent.Subject1.Patient.PatientPerson.Name; import org.hl7.v3.PatientIdentityFeedRequestType.Receiver; import org.hl7.v3.PatientIdentityFeedRequestType.Sender; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xml.sax.InputSource; /** * The Class PepImpl. */ public class PepImpl implements Pep { /** The permit. */ private final String PERMIT = "Permit"; /** The context handler. */ private final ContextHandler contextHandler; /** The C32 getter. */ private final C32Getter c32Getter; /** The document segmentation service. */ private final DocumentSegmentation documentSegmentation; /** The data handler to bytes converter. */ private final DataHandlerToBytesConverter dataHandlerToBytesConverter; /** The xdsbRepository. */ private final XdsbRepository xdsbRepository; /** The xdsbRegistry. */ private final XdsbRegistry xdsbRegistry; /** The subject purpose of use. */ private String subjectPurposeOfUse; // = "TREAT"; /** The subject locality. */ private String subjectLocality; // = "2.16.840.1.113883.3.467"; /** The organization. */ private String organization; // = "SAMHSA"; /** The organization id. */ private String organizationId; // = "FEiSystems"; /** The resource name. */ private String resourceName; // = "NwHINDirectSend"; /** The resource type. */ private String resourceType; // = "C32"; /** The resource action. */ private String resourceAction; // = "Execute"; /** The home community id. */ private String homeCommunityId; /** The Constant LOGGER. */ private static final Logger LOGGER = LoggerFactory.getLogger(PepImpl.class); /** * Instantiates a new pep impl. 
* * @param contextHandler * the context handler * @param c32Getter * the C32 getter * @param documentSegmentation * the document segmentation * @param dataHandlerToBytesConverter * the data handler to bytes converter * @param xdsbRepository * the xdsb repository * @param xdsbRegistry * the xdsb registry */ public PepImpl(ContextHandler contextHandler, C32Getter c32Getter, DocumentSegmentation documentSegmentation, DataHandlerToBytesConverter dataHandlerToBytesConverter, XdsbRepository xdsbRepository, XdsbRegistry xdsbRegistry) { super(); this.contextHandler = contextHandler; this.c32Getter = c32Getter; this.documentSegmentation = documentSegmentation; this.dataHandlerToBytesConverter = dataHandlerToBytesConverter; this.xdsbRepository = xdsbRepository; this.xdsbRegistry = xdsbRegistry; } /* * (non-Javadoc) * * @see Pep#handleC32Request(java. lang.String, boolean, java.lang.String, * java.lang.String) */ @Override public FilterC32Response handleC32Request(String patientId, boolean packageAsXdm, String senderEmailAddress, String recipientEmailAddress) { StringWriter xacmlResponseXml = new StringWriter(); byte[] processedPayload; FilterC32Response c32Response = new FilterC32Response(); c32Response.setPatientId(patientId); Return result = null; try { EnforcePolicy.Xspasubject xspasubject = setXspaSubject( recipientEmailAddress, UUID.randomUUID().toString()); EnforcePolicy.Xsparesource xsparesource = setXspaResource(patientId); result = contextHandler.enforcePolicy(xspasubject, xsparesource); } catch (Exception e) { throw new DS4PException(e.toString(), e); } c32Response.setPdpDecision(result.getPdpDecision()); if (result.getPdpDecision().equals(PERMIT)) { String originalC32 = c32Getter.getC32(patientId); try { XacmlResult xacmlResult = getXacmlResponse(result); JAXBContext jaxbContext = JAXBContext .newInstance(XacmlResult.class); Marshaller marshaller = jaxbContext.createMarshaller(); marshaller.setProperty("com.sun.xml.bind.xmlDeclaration", Boolean.FALSE); 
marshaller.marshal(xacmlResult, xacmlResponseXml); SegmentDocumentResponse segmentDocumentResponse = documentSegmentation .segmentDocument(originalC32, xacmlResponseXml.toString(), packageAsXdm, true, senderEmailAddress, recipientEmailAddress, ""); processedPayload = dataHandlerToBytesConverter .toByteArray(segmentDocumentResponse .getProcessedDocument()); c32Response.setMaskedDocument(segmentDocumentResponse .getMaskedDocument()); c32Response.setFilteredStreamBody(processedPayload); } catch (PropertyException e) { throw new DS4PException(e.toString(), e); } catch (JAXBException e) { throw new DS4PException(e.toString(), e); } catch (IOException e) { throw new DS4PException(e.toString(), e); } } return c32Response; } /* * (non-Javadoc) * * @see Pep#handleC32Request(java. lang.String, boolean, java.lang.String, * java.lang.String) */ @Override public FilterC32Response handleC32Request(String recepientSubjectNPI, String intermediarySubjectNPI, String resourceId, boolean packageAsXdm, String senderEmailAddress, String recipientEmailAddress, String xdsDocumentEntryUniqueId) { StringWriter xacmlResponseXml = new StringWriter(); byte[] processedPayload; FilterC32Response c32Response = new FilterC32Response(); c32Response.setPatientId(resourceId); XacmlResponse xacmlResponse = null; XacmlRequest xacmlRequest = null; try { xacmlRequest = setXacmlRequest(recepientSubjectNPI, intermediarySubjectNPI, subjectPurposeOfUse, resourceId, UUID.randomUUID().toString()); xacmlResponse = contextHandler.enforcePolicy(xacmlRequest); } catch (Exception e) { throw new DS4PException(e.toString(), e); } c32Response.setPdpDecision(xacmlResponse.getPdpDecision()); if (xacmlResponse.getPdpDecision().toLowerCase() .equals(PERMIT.toLowerCase())) { String originalC32 = c32Getter.getC32(resourceId); try { XacmlResult xacmlResult = getXacmlResponse(xacmlRequest, xacmlResponse); JAXBContext jaxbContext = JAXBContext .newInstance(XacmlResult.class); Marshaller marshaller = 
jaxbContext.createMarshaller(); marshaller.setProperty("com.sun.xml.bind.xmlDeclaration", Boolean.FALSE); marshaller.marshal(xacmlResult, xacmlResponseXml); SegmentDocumentResponse segmentDocumentResponse = documentSegmentation .segmentDocument(originalC32, xacmlResponseXml.toString(), packageAsXdm, true, senderEmailAddress, recipientEmailAddress, xdsDocumentEntryUniqueId); processedPayload = dataHandlerToBytesConverter .toByteArray(segmentDocumentResponse .getProcessedDocument()); c32Response.setMaskedDocument(segmentDocumentResponse .getMaskedDocument()); c32Response.setFilteredStreamBody(processedPayload); } catch (PropertyException e) { throw new DS4PException(e.toString(), e); } catch (JAXBException e) { throw new DS4PException(e.toString(), e); } catch (IOException e) { throw new DS4PException(e.toString(), e); } } return c32Response; } /* * (non-Javadoc) * * @see Pep#saveDocumentSetToXdsRepository(java.lang.String) */ @Override public boolean saveDocumentSetToXdsRepository(String documentSet) { String patientId = null; String patientLastName = null; String patientFirstName = null; String patientAddressLine = null; String patientCity = null; String patientState = null; String patientBirthDate = null; try { // TODO: Refactor these code to a new class to be testable org.w3c.dom.Document document = loadXmlFrom(documentSet); XPathFactory xpathFactory = XPathFactory.newInstance(); XPath xpath = xpathFactory.newXPath(); xpath.setNamespaceContext(new PepNamespaceContext()); // Get patient id String xpathForPatientId = "//hl7:recordTarget/hl7:patientRole/hl7:id/@extension[1]"; patientId = xpath.evaluate(xpathForPatientId, document); // Get patient last name String xpathForLastName = "//hl7:patientRole/hl7:patient/hl7:name/hl7:family/text()"; patientLastName = xpath.evaluate(xpathForLastName, document); // Get patient first name String xpathForFirstName = "//hl7:patientRole/hl7:patient/hl7:name/hl7:given[1]/text()"; patientFirstName = xpath.evaluate(xpathForFirstName, 
document); // Get patient address line String xpathForAddressLine = "//hl7:patientRole/hl7:addr/hl7:streetAddressLine[1]"; patientAddressLine = xpath.evaluate(xpathForAddressLine, document); // Get patient city String xpathForCity = "//hl7:patientRole/hl7:addr/hl7:city"; patientCity = xpath.evaluate(xpathForCity, document); // Get patient state String xpathForState = "//hl7:patientRole/hl7:addr/hl7:state"; patientState = xpath.evaluate(xpathForState, document); // Get patient birth date String xpathForBirthDate = "//hl7:patientRole/hl7:patient/hl7:birthTime/@value"; patientBirthDate = xpath.evaluate(xpathForBirthDate, document); } catch (Exception e) { throw new DS4PException( "Error occurred when getting the patient id and other patient demographic information from CDA document.", e); } // PatientPerson PatientPerson patientPerson = new PatientPerson(); Name name = new Name(); name.setFamily(patientLastName); name.setGiven(patientFirstName); patientPerson.setName(name); BirthTime birthTime = new BirthTime(); birthTime.setValue(patientBirthDate); patientPerson.setBirthTime(birthTime); Addr addr = new Addr(); addr.setStreetAddressLine(patientAddressLine); addr.setCity(patientCity); addr.setState(patientState); patientPerson.getAddr().add(addr); // Patient Patient patient = new Patient(); Id patientHl7Id = new Id(); patientHl7Id.setRoot(subjectLocality); // Domain Id (Home Community Id) patientHl7Id.setExtension(patientId); // PatientId in the domain patient.setId(patientHl7Id); patient.setPatientPerson(patientPerson); // Subject 1 Subject1 subject1 = new Subject1(); subject1.setPatient(patient); // RegistrationEvent RegistrationEvent registrationEvent = new RegistrationEvent(); registrationEvent.setSubject1(subject1); // Subject Subject subject = new Subject(); subject.setRegistrationEvent(registrationEvent); // ControlActProcess ControlActProcess controlActProcess = new ControlActProcess(); controlActProcess.setSubject(subject); // PRPAIN201301UV02 PRPAIN201301UV02 
prpain201301uv02 = new PRPAIN201301UV02(); prpain201301uv02.setControlActProcess(controlActProcess); Id PRPAIN201302UVId = new Id(); PRPAIN201302UVId.setRoot("cdc0d3fa-4467-11dc-a6be-3603d686610257"); prpain201301uv02.setId(PRPAIN201302UVId); Receiver receiver = new Receiver(); receiver.setTypeCode("RCV"); Device receiverDevice = new Device(); receiverDevice.setDeterminerCode("INSTANCE"); Id receiverDeviceId = new Id(); receiverDeviceId.setRoot("1.2.840.114350.1.13.99999.4567"); receiverDevice.setId(receiverDeviceId); receiver.setDevice(receiverDevice); prpain201301uv02.setReceiver(receiver); Sender sender = new Sender(); sender.setTypeCode("SND"); Device senderDevice = new Device(); senderDevice.setDeterminerCode("INSTANCE"); Id senderDeviceId = new Id(); senderDeviceId.setRoot("1.2.840.114350.1.13.99998.8734"); senderDevice.setId(senderDeviceId); sender.setDevice(senderDevice); prpain201301uv02.setSender(sender); // First try to add patient to XdsbRegistry String responseOfAddPatient = xdsbRegistry .addPatientRegistryRecord(prpain201301uv02); if (patientExistsInRegistyBeforeAdding(responseOfAddPatient)) { // Try to revise patient // PRPAIN201302UV PRPAIN201302UV02 prpain201302uv = new PRPAIN201302UV02(); prpain201302uv.setControlActProcess(controlActProcess); prpain201302uv.setId(PRPAIN201302UVId); prpain201302uv.setReceiver(receiver); prpain201302uv.setSender(sender); String result = xdsbRegistry .revisePatientRegistryRecord(prpain201302uv); // TODO: Check the result here to see if the CA code is return. 
If // not throws exception // LOGGER.debug(result); } String metadataString = new XdsbMetadataGeneratorImpl( new UniqueOidProviderImpl(), XdsbDocumentType.CLINICAL_DOCUMENT) .generateMetadataXml(documentSet, subjectLocality); SubmitObjectsRequest submitObjectRequest = null; // Marshal this metadata string to SubmitObjectsRequest try { submitObjectRequest = unmarshallFromXml(SubmitObjectsRequest.class, metadataString); } catch (JAXBException e1) { LOGGER.debug(e1.toString(), e1); } // LOGGER.debug(metadataString); String documentId = null; // Get the document id from meta data try { org.w3c.dom.Document document = loadXmlFrom(metadataString); XPathFactory xpathFactory = XPathFactory.newInstance(); XPath xpath = xpathFactory.newXPath(); xpath.setNamespaceContext(new PepNamespaceContext()); // Get document id String xpathForDocumentId = "//rim:ExtrinsicObject/@id[1]"; documentId = xpath.evaluate(xpathForDocumentId, document); } catch (Exception e1) { LOGGER.debug(e1.toString(), e1); } Document document = new Document(); document.setId(documentId); document.setValue(documentSet.getBytes()); ProvideAndRegisterDocumentSetRequest request = new ProvideAndRegisterDocumentSetRequest(); request.getDocument().add(document); request.setSubmitObjectsRequest(submitObjectRequest); RegistryResponse registryResponse = null; try { registryResponse = xdsbRepository .provideAndRegisterDocumentSetRequest(request); /* * try { LOGGER.debug(marshall(registryResponse)); } catch * (Throwable e) { LOGGER.debug(e.toString(),e); } */ RegistryErrorList registryErrorList = registryResponse .getRegistryErrorList(); if (registryErrorList != null && registryErrorList.getRegistryError().size() > 0) return false; } catch (Exception e) { throw new DS4PException( "Document cannot be saved to the XDS repository.", e); } return true; } /* * (non-Javadoc) * * @see Pep#retrieveDocumentSetRequest(java.lang.String, java.lang.String, * java.lang.String, java.lang.String, gov.va.ehtac.ds4p.ws.EnforcePolicy) */ 
@Override public RetrieveDocumentSetResponse retrieveDocumentSetRequest( String homeCommunityId, String repositoryUniqueId, String documentUniqueId, String messageId, EnforcePolicy enforcePolicy) { RetrieveDocumentSetResponse retrieveDocumentSetResponse = new RetrieveDocumentSetResponse(); RetrieveDocumentSetRequest retrieveDocumentSetRequest = new RetrieveDocumentSetRequest(); ihe.iti.xds_b._2007.RetrieveDocumentSetResponse xdsbRetrieveDocumentSetResponse = null; StringWriter xacmlResponseXml = new StringWriter(); byte[] processedPayload; Return result = null; try { DocumentRequest documentRequest = new DocumentRequest(); documentRequest.setHomeCommunityId(homeCommunityId); documentRequest.setRepositoryUniqueId(repositoryUniqueId); documentRequest.setDocumentUniqueId(documentUniqueId); retrieveDocumentSetRequest.getDocumentRequest() .add(documentRequest); result = contextHandler.enforcePolicy( enforcePolicy.getXspasubject(), enforcePolicy.getXsparesource()); if (result.getPdpDecision().equals(PERMIT)) { XacmlResult xacmlResult = getXacmlResponse(result); JAXBContext jaxbContext = JAXBContext .newInstance(XacmlResult.class); Marshaller marshaller = jaxbContext.createMarshaller(); marshaller.setProperty("com.sun.xml.bind.xmlDeclaration", Boolean.FALSE); marshaller.marshal(xacmlResult, xacmlResponseXml); xdsbRetrieveDocumentSetResponse = xdsbRepository .retrieveDocumentSetRequest(retrieveDocumentSetRequest); // get original cda DocumentResponse documentResponse = xdsbRetrieveDocumentSetResponse .getDocumentResponse().get(0); byte[] rawDocument = documentResponse.getDocument(); String originalDocument = new String(rawDocument); // LOGGER.debug(originalC32); if (!isConsentDocument(originalDocument)) { SegmentDocumentResponse segmentDocumentResponse = documentSegmentation .segmentDocument(originalDocument, xacmlResponseXml .toString(), /* 
"<xacmlResult><pdpDecision>Permit</pdpDecision><purposeOfUse>TREAT</purposeOfUse><messageId>4617a579-1881-4e40-9f98-f85bd81d6502</messageId><homeCommunityId>2.16.840.1.113883.3.467</homeCommunityId><pdpObligation>urn:oasis:names:tc:xspa:2.0:resource:org:us-privacy-law:42CFRPart2</pdpObligation><pdpObligation>urn:oasis:names:tc:xspa:2.0:resource:org:refrain-policy:NORDSLCD</pdpObligation><pdpObligation>urn:oasis:names:tc:xspa:2.0:resource:patient:redact:ETH</pdpObligation><pdpObligation>urn:oasis:names:tc:xspa:2.0:resource:patient:redact:PSY</pdpObligation><pdpObligation>urn:oasis:names:tc:xspa:2.0:resource:patient:mask:HIV</pdpObligation></xacmlResult>" */ false, true, "leo.smith@direct.obhita-stage.org", enforcePolicy.getXspasubject() .getSubjectEmailAddress(), ""); processedPayload = dataHandlerToBytesConverter .toByteArray(segmentDocumentResponse .getProcessedDocument()); // get processed document String processedDocument = new String(processedPayload); // LOGGER.debug("processedDoc: " + processedDocument); // set processed document in payload DocumentResponse document = new DocumentResponse(); document.setDocument(processedDocument.getBytes()); xdsbRetrieveDocumentSetResponse.getDocumentResponse().set( 0, document); // set response from xdsb retrieveDocumentSetResponse .setReturn(marshall(xdsbRetrieveDocumentSetResponse)); retrieveDocumentSetResponse .setKekEncryptionKey(segmentDocumentResponse .getKekEncryptionKey()); retrieveDocumentSetResponse .setKekMaskingKey(segmentDocumentResponse .getKekMaskingKey()); retrieveDocumentSetResponse .setMetadata(segmentDocumentResponse .getPostProcessingMetadata()); } else { DocumentResponse document = new DocumentResponse(); document.setDocument(rawDocument); xdsbRetrieveDocumentSetResponse.getDocumentResponse().set( 0, document); // set response from xdsb retrieveDocumentSetResponse .setReturn(marshall(xdsbRetrieveDocumentSetResponse)); } } } catch (PropertyException e) { throw new DS4PException(e.toString(), e); } catch 
(JAXBException e) { throw new DS4PException(e.toString(), e); } catch (IOException e) { throw new DS4PException(e.toString(), e); } catch (Throwable e) { throw new DS4PException(e.toString(), e); } return retrieveDocumentSetResponse; } /* * (non-Javadoc) * * @see Pep#registeryStoredQueryRequest(java.lang.String, * gov.va.ehtac.ds4p.ws.EnforcePolicy) */ @Override public RegisteryStoredQueryResponse registeryStoredQueryRequest( String patientId, EnforcePolicy enforcePolicy) { AdhocQueryRequest registryStoredQuery = new AdhocQueryRequest(); ResponseOptionType responseOptionType = new ResponseOptionType(); responseOptionType.setReturnComposedObjects(true); responseOptionType.setReturnType("LeafClass"); registryStoredQuery.setResponseOption(responseOptionType); AdhocQueryType adhocQueryType = new AdhocQueryType(); adhocQueryType.setId("urn:uuid:14d4debf-8f97-4251-9a74-a90016b0af0d"); // FindDocuments // by patientId registryStoredQuery.setAdhocQuery(adhocQueryType); SlotType1 patientIdSlotType = new SlotType1(); patientIdSlotType.setName("$XDSDocumentEntryPatientId"); ValueListType patientIdValueListType = new ValueListType(); if (patientId.indexOf("'") != 0) { patientId = "'" + patientId; } if (patientId.lastIndexOf("'") != patientId.length() - 1) { patientId = patientId + "'"; } patientIdValueListType.getValue().add(patientId); // PatientId patientIdSlotType.setValueList(patientIdValueListType); adhocQueryType.getSlot().add(patientIdSlotType); SlotType1 statusSlotType = new SlotType1(); statusSlotType.setName("$XDSDocumentEntryStatus"); ValueListType statusValueListType = new ValueListType(); statusValueListType.getValue().add( "('urn:oasis:names:tc:ebxml-regrep:StatusType:Approved')"); statusSlotType.setValueList(statusValueListType); adhocQueryType.getSlot().add(statusSlotType); RegisteryStoredQueryResponse response = new RegisteryStoredQueryResponse(); Return enforcePolicyResult = null; try { enforcePolicyResult = contextHandler.enforcePolicy( 
enforcePolicy.getXspasubject(), enforcePolicy.getXsparesource()); // verify identify of the individual and return decision if (enforcePolicyResult.getPdpDecision().equals(PERMIT)) { AdhocQueryResponse result = xdsbRegistry .registryStoredQuery(registryStoredQuery); result = getResponseWithLatestDocumentEntriesForConsentAndNonconsent(result); String xmlResponse = marshall(result); response.setReturn(xmlResponse); } } catch (Throwable e) { throw new DS4PException(e.toString(), e); } return response; } /** * Gets the subject purpose of use. * * @return the subject purpose of use */ public String getSubjectPurposeOfUse() { return subjectPurposeOfUse; } /** * Sets the subject purpose of use. * * @param subjectPurposeOfUse * the new subject purpose of use */ public void setSubjectPurposeOfUse(String subjectPurposeOfUse) { this.subjectPurposeOfUse = subjectPurposeOfUse; } /** * Gets the subject locality. * * @return the subject locality */ public String getSubjectLocality() { return subjectLocality; } /** * Sets the subject locality. * * @param subjectLocality * the new subject locality */ public void setSubjectLocality(String subjectLocality) { this.subjectLocality = subjectLocality; } /** * Gets the organization. * * @return the organization */ public String getOrganization() { return organization; } /** * Sets the organization. * * @param organization * the new organization */ public void setOrganization(String organization) { this.organization = organization; } /** * Gets the organization id. * * @return the organization id */ public String getOrganizationId() { return organizationId; } /** * Sets the organization id. * * @param organizationId * the new organization id */ public void setOrganizationId(String organizationId) { this.organizationId = organizationId; } /** * Gets the resource name. * * @return the resource name */ public String getResourceName() { return resourceName; } /** * Sets the resource name. 
     *
     * @param resourceName
     *            the new resource name
     */
    public void setResourceName(String resourceName) {
        this.resourceName = resourceName;
    }

    /**
     * Gets the resource type.
     *
     * @return the resource type
     */
    public String getResourceType() {
        return resourceType;
    }

    /**
     * Sets the resource type.
     *
     * @param resourceType
     *            the new resource type
     */
    public void setResourceType(String resourceType) {
        this.resourceType = resourceType;
    }

    /**
     * Gets the resource action.
     *
     * @return the resource action
     */
    public String getResourceAction() {
        return resourceAction;
    }

    /**
     * Sets the resource action.
     *
     * @param resourceAction
     *            the new resource action
     */
    public void setResourceAction(String resourceAction) {
        this.resourceAction = resourceAction;
    }

    /**
     * Gets the home community id.
     *
     * @return the home community id
     */
    public String getHomeCommunityId() {
        return homeCommunityId;
    }

    /**
     * Sets the home community id.
     *
     * @param homeCommunityId
     *            the new home community id
     */
    public void setHomeCommunityId(String homeCommunityId) {
        this.homeCommunityId = homeCommunityId;
    }

    /**
     * Sets the xspa resource. Despite the "set" prefix this is a factory
     * method: it builds a new resource descriptor from the given patient id
     * and this instance's configured resource name/type/action.
     *
     * @param patientId
     *            the patient id
     * @return the enforce policy. xsparesource
     */
    public EnforcePolicy.Xsparesource setXspaResource(String patientId) {
        EnforcePolicy.Xsparesource xsparesource = new EnforcePolicy.Xsparesource();
        xsparesource.setResourceId(patientId);
        xsparesource.setResourceName(resourceName);
        xsparesource.setResourceType(resourceType);
        xsparesource.setResourceAction(resourceAction);
        return xsparesource;
    }

    /**
     * Sets the xspa subject.
     *
     * @param recipientEmailAddress
     *            the recipient email address
     * @param messageId
     *            the message id
     * @return the enforce policy.
xspasubject */ public EnforcePolicy.Xspasubject setXspaSubject( String recipientEmailAddress, String messageId) { EnforcePolicy.Xspasubject xspasubject = new EnforcePolicy.Xspasubject(); xspasubject.setSubjectPurposeOfUse(subjectPurposeOfUse); xspasubject.setSubjectLocality(subjectLocality); xspasubject.setSubjectEmailAddress(recipientEmailAddress); xspasubject.setSubjectId(recipientEmailAddress); xspasubject.setOrganization(organization); xspasubject.setOrganizationId(organizationId); xspasubject.setMessageId(messageId); return xspasubject; } /** * Sets the xacml request. * * @param recepientSubjectNPI * the recepient subject npi * @param intermediarySubjectNPI * the intermediary subject npi * @param purposeOfUse * the purpose of use * @param resourceId * the resource id * @param messageId * the message id * @return the xacml request */ public XacmlRequest setXacmlRequest(String recepientSubjectNPI, String intermediarySubjectNPI, String purposeOfUse, String resourceId, String messageId) { XacmlRequest xacmlRequest = new XacmlRequest(); xacmlRequest.setIntermediarySubjectNPI(intermediarySubjectNPI); xacmlRequest.setPurposeOfUse(purposeOfUse); xacmlRequest.setRecepientSubjectNPI(recepientSubjectNPI); xacmlRequest.setPatientId(resourceId); xacmlRequest.setPatientUniqueId(resourceId); xacmlRequest.setMessageId(messageId); xacmlRequest.setHomeCommunityId(homeCommunityId); return xacmlRequest; } /** * Gets the XACML response. * * @param result * the result * @return the XACML response */ private XacmlResult getXacmlResponse(Return result) { XacmlResult xacmlResult = new XacmlResult(); xacmlResult.setHomeCommunityId(result.getHomeCommunityId()); xacmlResult.setMessageId(result.getMessageId()); xacmlResult.setPdpDecision(result.getPdpDecision()); xacmlResult.setPdpObligations(result.getPdpObligation()); xacmlResult.setSubjectPurposeOfUse(result.getPurposeOfUse()); return xacmlResult; } /** * Gets the xacml response. 
* * @param xacmlRequest * the xacml request * @param xacmlResponse * the xacml response * @return the xacml response */ private XacmlResult getXacmlResponse(XacmlRequest xacmlRequest, XacmlResponse xacmlResponse) { XacmlResult xacmlResult = new XacmlResult(); xacmlResult.setHomeCommunityId(xacmlRequest.getHomeCommunityId()); xacmlResult.setMessageId(xacmlRequest.getMessageId()); xacmlResult.setPdpDecision(xacmlResponse.getPdpDecision()); xacmlResult.setPdpObligations(xacmlResponse.getPdpObligation()); xacmlResult.setSubjectPurposeOfUse(xacmlRequest.getPurposeOfUse()); return xacmlResult; } /** * Marshall. * * @param obj * the obj * @return the string * @throws Throwable * the throwable */ private static String marshall(Object obj) throws Throwable { final JAXBContext context = JAXBContext.newInstance(obj.getClass()); Marshaller marshaller = context.createMarshaller(); StringWriter stringWriter = new StringWriter(); marshaller.marshal(obj, stringWriter); return stringWriter.toString(); } /** * Unmarshall from xml. * * @param <T> * the generic type * @param clazz * the clazz * @param xml * the xml * @return the t * @throws JAXBException * the jAXB exception */ @SuppressWarnings("unchecked") private static <T> T unmarshallFromXml(Class<T> clazz, String xml) throws JAXBException { JAXBContext context = JAXBContext.newInstance(clazz); Unmarshaller um = context.createUnmarshaller(); ByteArrayInputStream input = new ByteArrayInputStream(xml.getBytes()); return (T) um.unmarshal(input); } /** * Load xml from. * * @param xml * the xml * @return the org.w3c.dom. 
document * @throws Exception * the exception */ private static org.w3c.dom.Document loadXmlFrom(String xml) throws Exception { InputSource is = new InputSource(new StringReader(xml)); DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); DocumentBuilder builder = null; builder = factory.newDocumentBuilder(); org.w3c.dom.Document document = builder.parse(is); return document; } /** * Gets the response with latest document entries for consent and * nonconsent. * * @param adhocQueryResponse * the adhoc query response * @return the response with latest document entries for consent and * nonconsent */ private static AdhocQueryResponse getResponseWithLatestDocumentEntriesForConsentAndNonconsent( AdhocQueryResponse adhocQueryResponse) { int documentEntryCount = adhocQueryResponse.getRegistryObjectList() .getIdentifiable().size(); if (documentEntryCount >= 2) { int theLatestConsentDocumentEntryIndex = -1; Date theLatestConsentDocumentEntryCreationTime = new Date( Long.MIN_VALUE); int theLatestNonConsentDocumentEntryIndex = -1; Date theLatestNonConsentDocumentEntryCreationTime = new Date( Long.MIN_VALUE); for (int index = 0; index < documentEntryCount; index++) { JAXBElement<?> jaxbElement = adhocQueryResponse .getRegistryObjectList().getIdentifiable().get(index); @SuppressWarnings("unchecked") JAXBElement<ExtrinsicObjectType> jaxbElementOfExtrinsicObjectType = (JAXBElement<ExtrinsicObjectType>) (jaxbElement); if (!jaxbElementOfExtrinsicObjectType.equals(null)) { ExtrinsicObjectType extrinsicObjectType = jaxbElementOfExtrinsicObjectType .getValue(); boolean isConsentDocumentEntry = false; // Get the classCode (Consent or others) for (ClassificationType classificationType : extrinsicObjectType .getClassification()) { if (classificationType .getClassificationScheme() .equalsIgnoreCase( "urn:uuid:41a5887f-8865-4c09-adf7-e362475b143a")) { if (classificationType.getNodeRepresentation() .equalsIgnoreCase("Consent")) { 
isConsentDocumentEntry = true; } } } for (SlotType1 slotType1 : extrinsicObjectType.getSlot()) { if (slotType1.getName().equals("creationTime")) { String datetimeString = slotType1.getValueList() .getValue().get(0); int lengthOfDateTimeString = datetimeString .length(); int year = lengthOfDateTimeString >= 4 ? Integer .parseInt(datetimeString.substring(0, 4)) : 0; int month = lengthOfDateTimeString >= 6 ? Integer .parseInt(datetimeString.substring(4, 6)) : 0; int day = lengthOfDateTimeString >= 8 ? Integer .parseInt(datetimeString.substring(6, 8)) : 0; int hour = lengthOfDateTimeString >= 10 ? Integer .parseInt(datetimeString.substring(8, 10)) : 0; int minute = lengthOfDateTimeString >= 12 ? Integer .parseInt(datetimeString.substring(10, 12)) : 0; int second = lengthOfDateTimeString >= 14 ? Integer .parseInt(datetimeString.substring(12, 14)) : 0; GregorianCalendar gregorianCalendar = new GregorianCalendar( year, month, day, hour, minute, second); Date creationTime = gregorianCalendar.getTime(); if (isConsentDocumentEntry && creationTime .after(theLatestConsentDocumentEntryCreationTime)) { theLatestConsentDocumentEntryCreationTime = creationTime; theLatestConsentDocumentEntryIndex = index; } else if (!isConsentDocumentEntry && creationTime .after(theLatestNonConsentDocumentEntryCreationTime)) { theLatestNonConsentDocumentEntryCreationTime = creationTime; theLatestNonConsentDocumentEntryIndex = index; } } } } } List<JAXBElement<? extends IdentifiableType>> latestDocumentEntryList = new ArrayList<JAXBElement<? 
extends IdentifiableType>>(); if (theLatestConsentDocumentEntryIndex != -1) { latestDocumentEntryList.add(adhocQueryResponse .getRegistryObjectList().getIdentifiable() .get(theLatestConsentDocumentEntryIndex)); } if (theLatestNonConsentDocumentEntryIndex != -1) { latestDocumentEntryList.add(adhocQueryResponse .getRegistryObjectList().getIdentifiable() .get(theLatestNonConsentDocumentEntryIndex)); } if (latestDocumentEntryList.size() > 0) { adhocQueryResponse.getRegistryObjectList().getIdentifiable() .clear(); } for (int i = 0; i < latestDocumentEntryList.size(); i++) { adhocQueryResponse.getRegistryObjectList().getIdentifiable() .add(latestDocumentEntryList.get(i)); } } /* * try { LOGGER.debug(marshall(adhocQueryResponse)); } catch (Throwable * e) { LOGGER.debug(e.toString(),e); } */ return adhocQueryResponse; } /** * Checks if is consent document. * * @param originalDocument * the original document * @return true, if is consent document */ private boolean isConsentDocument(String originalDocument) { boolean consentDocumentExists = false; try { org.w3c.dom.Document document = loadXmlFrom(originalDocument); XPathFactory xpathFactory = XPathFactory.newInstance(); XPath xpath = xpathFactory.newXPath(); xpath.setNamespaceContext(new PepNamespaceContext()); String xpathExpression = "count(//*[@root='2.16.840.1.113883.3.445.1']) > 0"; consentDocumentExists = (Boolean) xpath.evaluate(xpathExpression, document, XPathConstants.BOOLEAN); } catch (Exception e) { throw new DS4PException( "Error occurred when getting the templateId count for consent from CDA document.", e); } return consentDocumentExists; } /** * Patient exists in registy before adding. 
* * @param responseOfAddPatient * the response of add patient * @return true, if successful */ public static boolean patientExistsInRegistyBeforeAdding( String responseOfAddPatient) { boolean patientExistsInRegistyBeforeAdding = false; try { // TODO: Refactor these code to a new class to be testable org.w3c.dom.Document document = loadXmlFrom(responseOfAddPatient); XPathFactory xpathFactory = XPathFactory.newInstance(); XPath xpath = xpathFactory.newXPath(); xpath.setNamespaceContext(new PepNamespaceContext()); // Get acknowledgment type code String xpathForAcknowledgementTypeCode = String.format( "//%s:acknowledgement/%s:typeCode/@code", PepNamespaceContext.HL7_PREFIX, PepNamespaceContext.HL7_PREFIX); String acknowledgementTypeCode = xpath.evaluate( xpathForAcknowledgementTypeCode, document); if (acknowledgementTypeCode.equals("CE")) { patientExistsInRegistyBeforeAdding = true; } } catch (Exception e) { throw new DS4PException( "Error occurred when getting the patient id and other patient demographic information from CDA document.", e); } return patientExistsInRegistyBeforeAdding; } }
/*
 * Copyright 2015-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.glowroot.agent.plugin.jdbc;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.Set;

import com.google.common.collect.Sets;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.dbcp.DelegatingConnection;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import org.glowroot.agent.it.harness.AppUnderTest;
import org.glowroot.agent.it.harness.Container;
import org.glowroot.agent.it.harness.Containers;
import org.glowroot.agent.it.harness.TransactionMarker;
import org.glowroot.wire.api.model.TraceOuterClass.Trace;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Integration tests for the jdbc plugin's connection lifecycle (getConnection /
 * connection close) and transaction lifecycle (setAutoCommit) instrumentation.
 *
 * <p>Each test runs an {@link AppUnderTest} inside the shared {@link Container} and
 * inspects the resulting {@link Trace}: either the individual trace entries (when the
 * relevant "...LifecycleTraceEntries" plugin property is enabled) or only the root
 * timer's child timers (when entries are suppressed).
 */
public class ConnectionAndTxLifecycleIT {

    private static final String PLUGIN_ID = "jdbc";

    // Shared across all tests; created once in setUp() and closed in tearDown().
    private static Container container;

    @BeforeClass
    public static void setUp() throws Exception {
        container = Containers.create();
    }

    @AfterClass
    public static void tearDown() throws Exception {
        container.close();
    }

    @After
    public void afterEachTest() throws Exception {
        // checkAndReset() also reverts any plugin properties set by the test,
        // so each test starts from the default configuration.
        container.checkAndReset();
    }

    /**
     * With lifecycle trace entries enabled, getConnection() and close() each produce
     * a top-level (depth 0) trace entry and nothing else.
     */
    @Test
    public void testConnectionLifecycle() throws Exception {
        // given
        container.getConfigService().setPluginProperty(PLUGIN_ID,
                "captureConnectionLifecycleTraceEntries", true);
        // when
        Trace trace = container.execute(ExecuteGetConnectionAndConnectionClose.class);
        // then
        Iterator<Trace.Entry> i = trace.getEntryList().iterator();
        Trace.Entry entry = i.next();
        assertThat(entry.getDepth()).isEqualTo(0);
        assertThat(entry.getMessage()).isEqualTo("jdbc get connection");
        entry = i.next();
        assertThat(entry.getDepth()).isEqualTo(0);
        assertThat(entry.getMessage()).isEqualTo("jdbc connection close");
        assertThat(i.hasNext()).isFalse();
    }

    /**
     * With both capture properties disabled, neither timers nor entries are recorded.
     */
    @Test
    public void testConnectionLifecycleDisabled() throws Exception {
        // given
        container.getConfigService().setPluginProperty(PLUGIN_ID, "captureGetConnection",
                false);
        container.getConfigService().setPluginProperty(PLUGIN_ID, "captureConnectionClose",
                false);
        // when
        Trace trace = container.execute(ExecuteGetConnectionAndConnectionClose.class);
        // then
        Trace.Header header = trace.getHeader();
        Trace.Timer rootTimer = header.getMainThreadRootTimer();
        assertThat(rootTimer.getChildTimerList()).isEmpty();
        assertThat(header.getEntryCount()).isZero();
    }

    /**
     * With timers captured but lifecycle trace entries left at their default (off),
     * both lifecycle operations show up as child timers only, with no entries.
     */
    @Test
    public void testConnectionLifecyclePartiallyDisabled() throws Exception {
        // given
        container.getConfigService().setPluginProperty(PLUGIN_ID, "captureConnectionClose",
                true);
        // when
        Trace trace = container.execute(ExecuteGetConnectionAndConnectionClose.class);
        // then
        Trace.Header header = trace.getHeader();
        Trace.Timer rootTimer = header.getMainThreadRootTimer();
        assertThat(rootTimer.getChildTimerList()).hasSize(2);
        // ordering is by total desc, so order is not fixed
        Set<String> childTimerNames = Sets.newHashSet();
        childTimerNames.add(rootTimer.getChildTimerList().get(0).getName());
        childTimerNames.add(rootTimer.getChildTimerList().get(1).getName());
        assertThat(childTimerNames).containsOnly("jdbc get connection",
                "jdbc connection close");
        assertThat(header.getEntryCount()).isZero();
    }

    /**
     * When getConnection() throws, the failure is recorded on the "jdbc get connection"
     * entry and no close entry follows.
     */
    @Test
    public void testConnectionLifecycleGetConnectionThrows() throws Exception {
        // given
        container.getConfigService().setPluginProperty(PLUGIN_ID,
                "captureConnectionLifecycleTraceEntries", true);
        // when
        Trace trace = container.execute(ExecuteGetConnectionOnThrowingDataSource.class);
        // then
        Iterator<Trace.Entry> i = trace.getEntryList().iterator();
        Trace.Entry entry = i.next();
        assertThat(entry.getDepth()).isEqualTo(0);
        assertThat(entry.getMessage()).isEqualTo("jdbc get connection");
        assertThat(entry.getError().getMessage())
                .isEqualTo("java.sql.SQLException: A getconnection failure");
        assertThat(i.hasNext()).isFalse();
    }

    /**
     * A throwing getConnection() with all capture disabled leaves the trace empty.
     */
    @Test
    public void testConnectionLifecycleGetConnectionThrowsDisabled() throws Exception {
        // given
        container.getConfigService().setPluginProperty(PLUGIN_ID, "captureGetConnection",
                false);
        container.getConfigService().setPluginProperty(PLUGIN_ID, "captureConnectionClose",
                false);
        // when
        Trace trace = container.execute(ExecuteGetConnectionOnThrowingDataSource.class);
        // then
        Trace.Header header = trace.getHeader();
        Trace.Timer rootTimer = header.getMainThreadRootTimer();
        assertThat(rootTimer.getChildTimerList()).isEmpty();
        assertThat(header.getEntryCount()).isZero();
    }

    /**
     * No "given" here: relies on the default configuration, under which
     * getConnection is timed but no lifecycle trace entries are created.
     */
    @Test
    public void testConnectionLifecycleGetConnectionThrowsPartiallyDisabled()
            throws Exception {
        // when
        Trace trace = container.execute(ExecuteGetConnectionOnThrowingDataSource.class);
        // then
        Trace.Header header = trace.getHeader();
        Trace.Timer rootTimer = header.getMainThreadRootTimer();
        assertThat(rootTimer.getChildTimerList()).hasSize(1);
        assertThat(rootTimer.getChildTimerList().get(0).getName())
                .isEqualTo("jdbc get connection");
        assertThat(header.getEntryCount()).isZero();
    }

    /**
     * When close() throws, the failure is recorded on the "jdbc connection close" entry,
     * after a successful "jdbc get connection" entry.
     */
    @Test
    public void testConnectionLifecycleCloseConnectionThrows() throws Exception {
        // given
        container.getConfigService().setPluginProperty(PLUGIN_ID,
                "captureConnectionLifecycleTraceEntries", true);
        // when
        Trace trace = container.execute(ExecuteCloseConnectionOnThrowingDataSource.class);
        // then
        Iterator<Trace.Entry> i = trace.getEntryList().iterator();
        Trace.Entry entry = i.next();
        assertThat(entry.getDepth()).isEqualTo(0);
        assertThat(entry.getMessage()).isEqualTo("jdbc get connection");
        entry = i.next();
        assertThat(entry.getDepth()).isEqualTo(0);
        assertThat(entry.getMessage()).isEqualTo("jdbc connection close");
        assertThat(entry.getError().getMessage())
                .isEqualTo("java.sql.SQLException: A close failure");
        assertThat(i.hasNext()).isFalse();
    }

    /**
     * A throwing close() with all capture disabled leaves the trace empty.
     */
    @Test
    public void testConnectionLifecycleCloseConnectionThrowsDisabled() throws Exception {
        // given
        container.getConfigService().setPluginProperty(PLUGIN_ID, "captureGetConnection",
                false);
        container.getConfigService().setPluginProperty(PLUGIN_ID, "captureConnectionClose",
                false);
        // when
        Trace trace = container.execute(ExecuteCloseConnectionOnThrowingDataSource.class);
        // then
        Trace.Header header = trace.getHeader();
        Trace.Timer rootTimer = header.getMainThreadRootTimer();
        assertThat(rootTimer.getChildTimerList()).isEmpty();
        assertThat(header.getEntryCount()).isZero();
    }

    /**
     * A throwing close() with timers enabled but entries disabled: both lifecycle
     * operations are timed, no entries are created.
     */
    @Test
    public void testConnectionLifecycleCloseConnectionThrowsPartiallyDisabled()
            throws Exception {
        // given
        container.getConfigService().setPluginProperty(PLUGIN_ID, "captureConnectionClose",
                true);
        // when
        Trace trace = container.execute(ExecuteCloseConnectionOnThrowingDataSource.class);
        // then
        Trace.Header header = trace.getHeader();
        Trace.Timer rootTimer = header.getMainThreadRootTimer();
        assertThat(rootTimer.getChildTimerList()).hasSize(2);
        // ordering is by total desc, so order is not fixed
        Set<String> childTimerNames = Sets.newHashSet();
        childTimerNames.add(rootTimer.getChildTimerList().get(0).getName());
        childTimerNames.add(rootTimer.getChildTimerList().get(1).getName());
        assertThat(childTimerNames).containsOnly("jdbc get connection",
                "jdbc connection close");
        assertThat(header.getEntryCount()).isZero();
    }

    /**
     * setAutoCommit(false)/setAutoCommit(true) each produce a trace entry.
     */
    @Test
    public void testTransactionLifecycle() throws Exception {
        // given
        container.getConfigService().setPluginProperty(PLUGIN_ID,
                "captureTransactionLifecycleTraceEntries", true);
        // when
        Trace trace = container.execute(ExecuteSetAutoCommit.class);
        // then
        Iterator<Trace.Entry> i = trace.getEntryList().iterator();
        Trace.Entry entry = i.next();
        assertThat(entry.getDepth()).isEqualTo(0);
        assertThat(entry.getMessage()).isEqualTo("jdbc set autocommit: false");
        entry = i.next();
        assertThat(entry.getDepth()).isEqualTo(0);
        assertThat(entry.getMessage()).isEqualTo("jdbc set autocommit: true");
        // NOTE(review): the optional third entry presumably accounts for drivers that
        // issue a commit when autocommit is re-enabled -- confirm against the plugin
        if (i.hasNext()) {
            entry = i.next();
            assertThat(entry.getDepth()).isEqualTo(0);
            assertThat(entry.getMessage()).isEqualTo("jdbc commit");
        }
        assertThat(i.hasNext()).isFalse();
    }

    /**
     * Both setAutoCommit calls throw; each entry carries the recorded error.
     */
    @Test
    public void testTransactionLifecycleThrowing() throws Exception {
        // given
        container.getConfigService().setPluginProperty(PLUGIN_ID,
                "captureTransactionLifecycleTraceEntries", true);
        // when
        Trace trace = container.execute(ExecuteSetAutoCommitThrowing.class);
        // then
        Iterator<Trace.Entry> i = trace.getEntryList().iterator();
        Trace.Entry entry = i.next();
        assertThat(entry.getDepth()).isEqualTo(0);
        assertThat(entry.getMessage()).isEqualTo("jdbc set autocommit: false");
        assertThat(entry.getError().getMessage())
                .isEqualTo("java.sql.SQLException: A setautocommit failure");
        entry = i.next();
        assertThat(entry.getDepth()).isEqualTo(0);
        assertThat(entry.getMessage()).isEqualTo("jdbc set autocommit: true");
        assertThat(entry.getError().getMessage())
                .isEqualTo("java.sql.SQLException: A setautocommit failure");
        assertThat(i.hasNext()).isFalse();
    }

    /**
     * With both lifecycle properties enabled, the get-connection entry additionally
     * reports the connection's autocommit state.
     */
    @Test
    public void testConnectionLifecycleAndTransactionLifecycleTogether() throws Exception {
        // given
        container.getConfigService().setPluginProperty(PLUGIN_ID,
                "captureConnectionLifecycleTraceEntries", true);
        container.getConfigService().setPluginProperty(PLUGIN_ID,
                "captureTransactionLifecycleTraceEntries", true);
        // when
        Trace trace = container.execute(ExecuteGetConnectionAndConnectionClose.class);
        // then
        Iterator<Trace.Entry> i = trace.getEntryList().iterator();
        Trace.Entry entry = i.next();
        assertThat(entry.getDepth()).isEqualTo(0);
        assertThat(entry.getMessage()).isEqualTo("jdbc get connection (autocommit: true)");
        entry = i.next();
        assertThat(entry.getDepth()).isEqualTo(0);
        assertThat(entry.getMessage()).isEqualTo("jdbc connection close");
        assertThat(i.hasNext()).isFalse();
    }

    /**
     * App under test: obtains a connection inside the transaction marker and closes it
     * immediately, exercising the plain connection lifecycle.
     */
    public static class ExecuteGetConnectionAndConnectionClose
            implements AppUnderTest, TransactionMarker {
        private BasicDataSource dataSource;
        @Override
        public void executeApp() throws Exception {
            dataSource = new BasicDataSource();
            dataSource.setDriverClassName("org.hsqldb.jdbc.JDBCDriver");
            dataSource.setUrl("jdbc:hsqldb:mem:test");
            // BasicDataSource opens and closes a test connection on first getConnection(),
            // so just getting that out of the way before starting transaction
            dataSource.getConnection().close();
            transactionMarker();
        }
        @Override
        public void transactionMarker() throws Exception {
            dataSource.getConnection().close();
        }
    }

    /**
     * App under test: data source whose getConnection() always throws, to verify
     * error capture on the get-connection entry.
     */
    public static class ExecuteGetConnectionOnThrowingDataSource
            implements AppUnderTest, TransactionMarker {
        private BasicDataSource dataSource;
        @Override
        public void executeApp() throws Exception {
            dataSource = new BasicDataSource() {
                @Override
                public Connection getConnection() throws SQLException {
                    throw new SQLException("A getconnection failure");
                }
            };
            dataSource.setDriverClassName("org.hsqldb.jdbc.JDBCDriver");
            dataSource.setUrl("jdbc:hsqldb:mem:test");
            transactionMarker();
        }
        @Override
        public void transactionMarker() throws Exception {
            try {
                dataSource.getConnection();
            } catch (SQLException e) {
                // expected; the test only cares about what the agent recorded
            }
        }
    }

    /**
     * App under test: data source whose connections throw on close(), to verify
     * error capture on the connection-close entry.
     */
    public static class ExecuteCloseConnectionOnThrowingDataSource
            implements AppUnderTest, TransactionMarker {
        private BasicDataSource dataSource;
        @Override
        public void executeApp() throws Exception {
            dataSource = new BasicDataSource() {
                private boolean first = true;
                @Override
                public Connection getConnection() throws SQLException {
                    if (first) {
                        // BasicDataSource opens and closes a test connection on first
                        // getConnection()
                        first = false;
                        return super.getConnection();
                    }
                    return new DelegatingConnection(super.getConnection()) {
                        @Override
                        public void close() throws SQLException {
                            throw new SQLException("A close failure");
                        }
                    };
                }
            };
            dataSource.setDriverClassName("org.hsqldb.jdbc.JDBCDriver");
            dataSource.setUrl("jdbc:hsqldb:mem:test");
            // BasicDataSource opens and closes a test connection on first getConnection(),
            // so just getting that out of the way before starting transaction
            dataSource.getConnection().close();
            transactionMarker();
        }
        @Override
        public void transactionMarker() throws Exception {
            try {
                dataSource.getConnection().close();
            } catch (SQLException e) {
                // expected; the test only cares about what the agent recorded
            }
        }
    }

    /**
     * App under test: toggles autocommit off and back on inside the transaction marker.
     */
    public static class ExecuteSetAutoCommit implements AppUnderTest, TransactionMarker {
        private Connection connection;
        @Override
        public void executeApp() throws Exception {
            connection = Connections.createConnection();
            try {
                transactionMarker();
            } finally {
                Connections.closeConnection(connection);
            }
        }
        @Override
        public void transactionMarker() throws Exception {
            connection.setAutoCommit(false);
            connection.setAutoCommit(true);
        }
    }

    /**
     * App under test: connection whose setAutoCommit() always throws, to verify error
     * capture on both autocommit entries.
     */
    public static class ExecuteSetAutoCommitThrowing
            implements AppUnderTest, TransactionMarker {
        private Connection connection;
        @Override
        public void executeApp() throws Exception {
            connection = new DelegatingConnection(Connections.createConnection()) {
                @Override
                public void setAutoCommit(boolean autoCommit) throws SQLException {
                    throw new SQLException("A setautocommit failure");
                }
            };
            try {
                transactionMarker();
            } finally {
                Connections.closeConnection(connection);
            }
        }
        @Override
        public void transactionMarker() {
            try {
                connection.setAutoCommit(false);
            } catch (SQLException e) {
                // expected; the test only cares about what the agent recorded
            }
            try {
                connection.setAutoCommit(true);
            } catch (SQLException e) {
                // expected; the test only cares about what the agent recorded
            }
        }
    }
}
/*
 * This file is part of SpongeAPI, licensed under the MIT License (MIT).
 *
 * Copyright (c) SpongePowered <https://www.spongepowered.org>
 * Copyright (c) contributors
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package org.spongepowered.api.item.inventory;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;

import org.spongepowered.api.Sponge;
import org.spongepowered.api.block.BlockSnapshot;
import org.spongepowered.api.block.BlockState;
import org.spongepowered.api.block.BlockType;
import org.spongepowered.api.block.tileentity.TileEntity;
import org.spongepowered.api.data.DataHolder;
import org.spongepowered.api.data.DataSerializable;
import org.spongepowered.api.data.DataView;
import org.spongepowered.api.data.key.Key;
import org.spongepowered.api.data.manipulator.DataManipulator;
import org.spongepowered.api.data.manipulator.ImmutableDataManipulator;
import org.spongepowered.api.data.persistence.DataBuilder;
import org.spongepowered.api.data.value.BaseValue;
import org.spongepowered.api.item.ItemType;
import org.spongepowered.api.text.translation.Translatable;
import org.spongepowered.api.util.ResettableBuilder;

import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Predicate;

/**
 * Represents a stack of a specific {@link ItemType}. Supports serialization and
 * can be compared using the comparators listed in {@link ItemStackComparators}.
 *
 * <p>{@link ItemStack}s have varying properties and data, it is advisable to
 * use {@link DataHolder#get(Class)} to retrieve different information
 * regarding this item stack.</p>
 */
public interface ItemStack extends DataHolder, DataSerializable, Translatable {

    /**
     * Creates a new {@link Builder} to build an {@link ItemStack}.
     *
     * @return The new builder
     */
    static Builder builder() {
        return Sponge.getRegistry().createBuilder(Builder.class);
    }

    /**
     * Creates a new {@link ItemStack} of the provided {@link ItemType}
     * and quantity.
     *
     * @param itemType The item type
     * @param quantity The quantity
     * @return The new item stack
     */
    static ItemStack of(ItemType itemType, int quantity) {
        return builder().itemType(itemType).quantity(quantity).build();
    }

    /**
     * Gets the {@link ItemType} of this {@link ItemStack}.
     *
     * @return The item type
     */
    ItemType getItem();

    /**
     * Gets the quantity of items in this stack. This may exceed the max stack
     * size of the item, and if added to an inventory will then be divided by
     * the max stack.
     *
     * @return Quantity of items
     */
    int getQuantity();

    /**
     * Sets the quantity in this stack.
     *
     * @param quantity Quantity
     * @throws IllegalArgumentException If quantity set exceeds the
     *     {@link ItemStack#getMaxStackQuantity()}
     */
    void setQuantity(int quantity) throws IllegalArgumentException;

    /**
     * Get the maximum quantity per stack. By default, returns
     * {@link ItemType#getMaxStackQuantity()}, unless a
     * different value has been set for this specific stack.
     *
     * @return Max stack quantity
     */
    int getMaxStackQuantity();

    /**
     * Gets the {@link ItemStackSnapshot} of this {@link ItemStack}. All known
     * {@link DataManipulator}s existing on this {@link ItemStack} are added
     * as copies to the {@link ItemStackSnapshot}.
     *
     * @return The newly created item stack snapshot
     */
    ItemStackSnapshot createSnapshot();

    /**
     * Returns true if the specified {@link ItemStack} has the same stack
     * size, {@link ItemType}, and data. Note that this method is not an
     * overrider of {@link Object#equals(Object)} in order to maintain
     * compatibility with the base game. Therefore, ItemStacks may not behave
     * as expected when using them in equality based constructs such as
     * {@link Map}s or {@link Set}s.
     *
     * @param that ItemStack to compare
     * @return True if this equals the ItemStack
     */
    boolean equalTo(ItemStack that);

    @Override
    ItemStack copy();

    /**
     * A builder for creating {@link ItemStack}s.
     */
    interface Builder extends DataBuilder<ItemStack> {

        @Override
        Builder from(ItemStack value);

        /**
         * Sets the {@link ItemType} of the item stack.
         *
         * @param itemType The type of item
         * @return This builder, for chaining
         */
        Builder itemType(ItemType itemType);

        /**
         * Gets the {@link ItemType} currently set on this builder.
         *
         * <p>NOTE(review): undocumented in the original; presumably returns the
         * value last passed to {@link #itemType(ItemType)} -- confirm against the
         * implementation, including its behavior when no type has been set.</p>
         *
         * @return The current item type
         */
        ItemType getCurrentItem();

        /**
         * Sets the quantity of the item stack.
         *
         * @param quantity The quantity of the item stack
         * @return This builder, for chaining
         * @throws IllegalArgumentException If the quantity is outside the allowed bounds
         */
        Builder quantity(int quantity) throws IllegalArgumentException;

        /**
         * Adds a {@link Key} and related {@link Object} value to apply to the
         * resulting {@link ItemStack}. Note that the resulting
         * {@link ItemStack} may not actually accept the provided {@code Key}
         * for various reasons due to support or simply that the value itself
         * is not supported. Offering custom data is not supported through this,
         * use {@link #itemData(DataManipulator)} instead.
         *
         * @param key The key to identify the value to
         * @param value The value to apply
         * @param <E> The type of value
         * @return This builder, for chaining
         */
        <E> Builder keyValue(Key<? extends BaseValue<E>> key, E value);

        /**
         * Sets the {@link DataManipulator} to add to the {@link ItemStack}.
         *
         * @param itemData The item data to set
         * @return This builder, for chaining
         * @throws IllegalArgumentException If the item data is incompatible with the item
         */
        Builder itemData(DataManipulator<?, ?> itemData) throws IllegalArgumentException;

        /**
         * Sets the {@link ImmutableDataManipulator} to add to the
         * {@link ItemStack}.
         *
         * @param itemData The item data to set
         * @return This builder, for chaining
         * @throws IllegalArgumentException If the item data is incompatible
         */
        Builder itemData(ImmutableDataManipulator<?, ?> itemData) throws IllegalArgumentException;

        /**
         * Adds the given {@link Key} with the given value.
         *
         * @param key The key to assign the value with
         * @param value The value to assign with the key
         * @param <V> The type of the value
         * @return This builder, for chaining
         * @throws IllegalArgumentException If the item data is incompatible
         */
        <V> Builder add(Key<? extends BaseValue<V>> key, V value) throws IllegalArgumentException;

        /**
         * Sets all the settings in this builder from the item stack blueprint.
         *
         * @param itemStack The item stack to copy
         * @return This builder, for chaining
         */
        Builder fromItemStack(ItemStack itemStack);

        /**
         * Sets the data to recreate a {@link BlockState} in a held {@link ItemStack}
         * state.
         *
         * @param blockState The block state to use
         * @return This builder, for chaining
         */
        default Builder fromBlockState(BlockState blockState) {
            checkNotNull(blockState);
            final BlockType blockType = blockState.getType();
            // Not every block has an item form (e.g. technical blocks); fail fast here
            checkArgument(blockType.getItem().isPresent(), "Missing valid ItemType for BlockType: " + blockType.getId());
            itemType(blockType.getItem().get());
            blockState.getContainers().forEach(this::itemData);
            return this;
        }

        /**
         * Attempts to reconstruct the builder with all of the data from
         * {@link ItemStack#toContainer()} including all custom data.
         *
         * @param container The container to translate
         * @return This builder, for chaining
         */
        Builder fromContainer(DataView container);

        /**
         * Reconstructs this builder to use the {@link ItemStackSnapshot}
         * for all the values and data it may contain.
         *
         * @param snapshot The snapshot
         * @return This builder, for chaining
         */
        default Builder fromSnapshot(ItemStackSnapshot snapshot) {
            return fromItemStack(snapshot.createStack());
        }

        /**
         * Attempts to reconstruct a {@link BlockSnapshot} including all data
         * and {@link TileEntity} related data if necessary for creating an
         * {@link ItemStack} representation.
         *
         * @param blockSnapshot The snapshot to use
         * @return This builder, for chaining
         */
        Builder fromBlockSnapshot(BlockSnapshot blockSnapshot);

        /**
         * Removes the given {@link DataManipulator} type from this builder.
         *
         * <p>NOTE(review): undocumented in the original -- presumably removes any
         * previously added manipulator of this class; confirm against the
         * implementation.</p>
         *
         * @param manipulatorClass The manipulator class to remove
         * @return This builder, for chaining
         */
        Builder remove(Class<? extends DataManipulator<?, ?>> manipulatorClass);

        /**
         * Applies the given {@link Consumer} to this builder if, and only if, the
         * given {@link Predicate} returns {@code true} for this builder.
         *
         * @param predicate The condition to test this builder against
         * @param consumer The action to apply when the condition holds
         * @return This builder, for chaining
         */
        default Builder apply(Predicate<Builder> predicate, Consumer<Builder> consumer) {
            if (predicate.test(this)) {
                consumer.accept(this);
            }
            return this;
        }

        /**
         * Builds an instance of an ItemStack.
         *
         * @return A new instance of an ItemStack
         * @throws IllegalStateException If the item stack is not completed
         */
        ItemStack build() throws IllegalStateException;
    }
}
/**
 * Copyright 2015 Confluent Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 **/

package io.confluent.connect.hdfs;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.ArrayList;
import java.util.UUID;

import io.confluent.connect.hdfs.filter.CommittedFileFilter;
import io.confluent.connect.hdfs.storage.Storage;

/**
 * Static helpers for composing HDFS file/directory names used by the connector
 * (log files, temp files, committed data files) and for traversing what is
 * already stored.
 */
public class FileUtils {
  private static final Logger log = LoggerFactory.getLogger(FileUtils.class);

  // Static utility class: prevent instantiation.
  private FileUtils() {
  }

  /** Builds the WAL (log) file name for the given topic partition. */
  public static String logFileName(String url, String logsDir, TopicPartition topicPart) {
    return fileName(url, logsDir, topicPart, "log");
  }

  /** Builds {@code url/topicsDir/topic/partition} for the given topic partition. */
  public static String directoryName(String url, String topicsDir, TopicPartition topicPart) {
    String topic = topicPart.topic();
    int partition = topicPart.partition();
    return url + "/" + topicsDir + "/" + topic + "/" + partition;
  }

  /** Builds {@code url/topicsDir/topic/partition/name} for the given topic partition. */
  public static String fileName(String url, String topicsDir, TopicPartition topicPart,
                                String name) {
    String topic = topicPart.topic();
    int partition = topicPart.partition();
    return url + "/" + topicsDir + "/" + topic + "/" + partition + "/" + name;
  }

  /**
   * Builds the Hive external-table location for a topic. Note the trailing
   * slash, which the other directory helpers do not have.
   */
  public static String hiveDirectoryName(String url, String topicsDir, String topic) {
    return url + "/" + topicsDir + "/" + topic + "/";
  }

  /** Builds {@code url/topicsDir/directory/name}. */
  public static String fileName(String url, String topicsDir, String directory, String name) {
    return url + "/" + topicsDir + "/" + directory + "/" + name;
  }

  /** Builds {@code url/topicsDir/directory}. */
  public static String directoryName(String url, String topicsDir, String directory) {
    return url + "/" + topicsDir + "/" + directory;
  }

  /**
   * Builds a unique temp file name ({@code <uuid>_tmp<extension>}) under the
   * given directory; uniqueness comes from a random UUID.
   */
  public static String tempFileName(String url, String topicsDir, String directory,
                                    String extension) {
    UUID id = UUID.randomUUID();
    String name = id.toString() + "_" + "tmp" + extension;
    return fileName(url, topicsDir, directory, name);
  }

  /**
   * Builds the committed data file name,
   * {@code topic<SEP>partition<SEP>startOffset<SEP>endOffset<extension>},
   * with offsets zero-padded via {@code zeroPadFormat} so names sort lexicographically.
   */
  public static String committedFileName(String url, String topicsDir, String directory,
                                         TopicPartition topicPart, long startOffset,
                                         long endOffset, String extension,
                                         String zeroPadFormat) {
    String topic = topicPart.topic();
    int partition = topicPart.partition();
    StringBuilder sb = new StringBuilder();
    sb.append(topic);
    sb.append(HdfsSinkConnecorConstants.COMMMITTED_FILENAME_SEPARATOR);
    sb.append(partition);
    sb.append(HdfsSinkConnecorConstants.COMMMITTED_FILENAME_SEPARATOR);
    sb.append(String.format(zeroPadFormat, startOffset));
    sb.append(HdfsSinkConnecorConstants.COMMMITTED_FILENAME_SEPARATOR);
    sb.append(String.format(zeroPadFormat, endOffset));
    sb.append(extension);
    String name = sb.toString();
    return fileName(url, topicsDir, directory, name);
  }

  /** Builds {@code url/topicsDir/topic}. */
  public static String topicDirectory(String url, String topicsDir, String topic) {
    return url + "/" + topicsDir + "/" + topic;
  }

  // Recursive helper for traverse(Storage, ...): collects files (not directories)
  // accepted by the filter, depth-first.
  private static ArrayList<FileStatus> traverseImpl(Storage storage, Path path,
                                                    PathFilter filter) throws IOException {
    if (!storage.exists(path.toString())) {
      return new ArrayList<>();
    }
    ArrayList<FileStatus> result = new ArrayList<>();
    FileStatus[] statuses = storage.listStatus(path.toString());
    for (FileStatus status : statuses) {
      if (status.isDirectory()) {
        result.addAll(traverseImpl(storage, status.getPath(), filter));
      } else {
        if (filter.accept(status.getPath())) {
          result.add(status);
        }
      }
    }
    return result;
  }

  /**
   * Recursively lists all files under {@code path} that match {@code filter}.
   *
   * @throws IOException if the storage listing fails
   */
  public static FileStatus[] traverse(Storage storage, Path path, PathFilter filter)
      throws IOException {
    ArrayList<FileStatus> result = traverseImpl(storage, path, filter);
    return result.toArray(new FileStatus[result.size()]);
  }

  /**
   * Recursively finds the committed file with the highest end offset under
   * {@code path}, or {@code null} if the path does not exist or no file matches.
   */
  public static FileStatus fileStatusWithMaxOffset(Storage storage, Path path,
                                                   CommittedFileFilter filter)
      throws IOException {
    if (!storage.exists(path.toString())) {
      return null;
    }
    long maxOffset = -1L;
    FileStatus fileStatusWithMaxOffset = null;
    FileStatus[] statuses = storage.listStatus(path.toString());
    for (FileStatus status : statuses) {
      if (status.isDirectory()) {
        FileStatus fileStatus = fileStatusWithMaxOffset(storage, status.getPath(), filter);
        if (fileStatus != null) {
          long offset = extractOffset(fileStatus.getPath().getName());
          if (offset > maxOffset) {
            maxOffset = offset;
            fileStatusWithMaxOffset = fileStatus;
          }
        }
      } else {
        String filename = status.getPath().getName();
        log.trace("Checked for max offset: {}", status.getPath());
        if (filter.accept(status.getPath())) {
          long offset = extractOffset(filename);
          if (offset > maxOffset) {
            maxOffset = offset;
            fileStatusWithMaxOffset = status;
          }
        }
      }
    }
    return fileStatusWithMaxOffset;
  }

  /**
   * Extracts the end offset from a committed file name (the fourth
   * separator-delimited token; see {@link #committedFileName}).
   *
   * <p>NOTE(review): assumes token [3] is a bare numeric offset; if the file
   * extension is not consumed by {@code COMMMITTED_FILENAME_SEPARATOR_REGEX},
   * parsing would throw {@link NumberFormatException} -- verify against that
   * regex and the filenames actually produced.</p>
   */
  public static long extractOffset(String filename) {
    return Long.parseLong(
        filename.split(HdfsSinkConnecorConstants.COMMMITTED_FILENAME_SEPARATOR_REGEX)[3]);
  }

  // Recursive helper for getDirectories(): collects "leaf" directories, i.e.
  // directories whose children are all files (including empty directories).
  private static ArrayList<FileStatus> getDirectoriesImpl(Storage storage, Path path)
      throws IOException {
    FileStatus[] statuses = storage.listStatus(path.toString());
    ArrayList<FileStatus> result = new ArrayList<>();
    for (FileStatus status : statuses) {
      if (status.isDirectory()) {
        int count = 0;
        FileStatus[] fileStatuses = storage.listStatus(status.getPath().toString());
        for (FileStatus fileStatus : fileStatuses) {
          if (fileStatus.isDirectory()) {
            result.addAll(getDirectoriesImpl(storage, fileStatus.getPath()));
          } else {
            count++;
          }
        }
        // All children were files (or the directory was empty): it is a leaf.
        if (count == fileStatuses.length) {
          result.add(status);
        }
      }
    }
    return result;
  }

  /**
   * Recursively lists the leaf directories (directories containing only files)
   * under {@code path}.
   *
   * @throws IOException if the storage listing fails
   */
  public static FileStatus[] getDirectories(Storage storage, Path path) throws IOException {
    ArrayList<FileStatus> result = getDirectoriesImpl(storage, path);
    return result.toArray(new FileStatus[result.size()]);
  }

  // Recursive helper for traverse(FileSystem, ...): collects every file under
  // path with no filtering.
  private static ArrayList<FileStatus> traverseImpl(FileSystem fs, Path path)
      throws IOException {
    if (!fs.exists(path)) {
      return new ArrayList<>();
    }
    ArrayList<FileStatus> result = new ArrayList<>();
    FileStatus[] statuses = fs.listStatus(path);
    for (FileStatus status : statuses) {
      if (status.isDirectory()) {
        result.addAll(traverseImpl(fs, status.getPath()));
      } else {
        result.add(status);
      }
    }
    return result;
  }

  /**
   * Recursively lists all files under {@code path} on the given file system.
   *
   * @throws IOException if the file system listing fails
   */
  public static FileStatus[] traverse(FileSystem fs, Path path) throws IOException {
    ArrayList<FileStatus> result = traverseImpl(fs, path);
    return result.toArray(new FileStatus[result.size()]);
  }
}
package com.sixsq.slipstream.persistence;

/*
 * +=================================================================+
 * SlipStream Server (WAR)
 * =====
 * Copyright (C) 2013 SixSq Sarl (sixsq.com)
 * =====
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * -=================================================================-
 */

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;

import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.ManyToOne;
import javax.persistence.MapKey;
import javax.persistence.OneToMany;
import javax.persistence.Transient;

import org.hibernate.annotations.CollectionType;
import org.simpleframework.xml.Attribute;
import org.simpleframework.xml.Element;
import org.simpleframework.xml.ElementArray;
import org.simpleframework.xml.ElementMap;

import com.sixsq.slipstream.exceptions.ValidationException;

/**
 * A node of a deployment: binds an image module to a deployment under a given
 * name, with a multiplicity, target cloud service and parameter mappings.
 * Persisted with JPA/Hibernate and serialized to XML with Simple XML; both
 * frameworks reflect over the annotations below, so field/annotation layout
 * is load-bearing.
 */
@Entity
@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
@SuppressWarnings("serial")
public class Node extends Parameterized<Node, NodeParameter> {

    private static final String NETWORK_KEY = ImageModule.NETWORK_KEY;

    @Id
    @GeneratedValue
    private Long id;

    @Attribute
    private String name;

    // Default multiplicity comes from the runtime-parameter start index constant.
    @Attribute
    private int multiplicity = RuntimeParameter.MULTIPLICITY_NODE_START_INDEX;

    @Attribute(empty = "0")
    private int maxProvisioningFailures = 0;

    @Attribute
    private String cloudService = CloudImageIdentifier.DEFAULT_CLOUD_SERVICE;

    /** Returns the cloud service this node deploys to. */
    public String getCloudService() {
        return cloudService;
    }

    public void setCloudService(String cloudService) {
        this.cloudService = cloudService;
    }

    @Attribute(required = false)
    private String imageUri;

    /** Returns the resource URI of the image module backing this node. */
    public String getImageUri() {
        return imageUri;
    }

    public void setImageUri(String imageUri) {
        this.imageUri = imageUri;
    }

    // Not persisted; lazily resolved from imageUri in getImage().
    @Transient
    private ImageModule image;

    /**
     * Maps a parameter name to the NodeParameter that binds it to another
     * node's output (as "&lt;node-name&gt;.&lt;index&gt;:&lt;parameter-name&gt;")
     * or to a literal string value.
     */
    @ElementMap(required = false)
    @MapKey(name = "name")
    @OneToMany(mappedBy = "container", cascade = CascadeType.ALL, fetch = FetchType.EAGER, orphanRemoval = true)
    @CollectionType(type = "com.sixsq.slipstream.persistence.ConcurrentHashMapType")
    private Map<String, NodeParameter> parameterMappings = new ConcurrentHashMap<String, NodeParameter>();

    @ManyToOne
    private DeploymentModule module;

    // Required by JPA/Simple XML; not for application use.
    protected Node() {
    }

    public Node(String name, String imageUri) throws ValidationException {
        this.name = name;
        this.imageUri = imageUri;
    }

    public Node(String name, ImageModule image) throws ValidationException {
        this(name, image.getResourceUri());
        this.image = image;
    }

    @Override
    @ElementMap(name = "parameters", required = false, valueType = NodeParameter.class)
    protected void setParameters(Map<String, NodeParameter> parameters) {
        this.parameters = parameters;
    }

    @Override
    @ElementMap(name = "parameters", required = false, valueType = NodeParameter.class)
    public Map<String, NodeParameter> getParameters() {
        return parameters;
    }

    public DeploymentModule getModule() {
        return module;
    }

    public void setModule(DeploymentModule module) {
        this.module = module;
    }

    /**
     * Validates the node name against the runtime-parameter node-name pattern
     * and delegates to the image's validation.
     */
    public void validate() throws ValidationException {
        super.validate();
        Matcher matcher = RuntimeParameter.NODE_NAME_ONLY_PATTERN.matcher(name);
        if (!matcher.matches()) {
            throwValidationException("invalid node name: " + name);
        }
        // NOTE(review): uses the @Transient field directly, not getImage(); if the
        // entity was loaded from the DB and the image was never set, this NPEs.
        // Confirm whether getImage() should be used here instead.
        image.validate();
    }

    public Long getId() {
        return id;
    }

    public int getMultiplicity() {
        return multiplicity;
    }

    /**
     * Parses and sets the multiplicity.
     *
     * @throws ValidationException if the string is not an integer or is negative
     */
    public void setMultiplicity(String multiplicity) throws ValidationException {
        int parsedMultiplicity;
        try {
            parsedMultiplicity = Integer.parseInt(multiplicity);
        } catch (NumberFormatException ex) {
            throw (new ValidationException("Invalid multiplicity value"));
        }
        setMultiplicity(parsedMultiplicity);
    }

    /**
     * Sets the multiplicity.
     *
     * @throws ValidationException if the value is negative (zero is accepted)
     */
    public void setMultiplicity(int multiplicity) throws ValidationException {
        if (multiplicity < 0) {
            throw (new ValidationException("Invalid multiplicity, it must be positive"));
        }
        this.multiplicity = multiplicity;
    }

    public void setMaxProvisioningFailures(int value) {
        this.maxProvisioningFailures = value;
    }

    public int getMaxProvisioningFailures() {
        return this.maxProvisioningFailures;
    }

    // Intentionally a no-op: the attribute exists so Simple XML can deserialize
    // documents that carry a "network" attribute; the value itself is derived
    // (see getNetwork()).
    @Attribute(required = false)
    public void setNetwork(String network) {
    }

    /** Returns the network value, preferring this node's parameter over the image's. */
    @Attribute(required = false)
    public String getNetwork() throws ValidationException {
        return extractParameterWithOverride(NETWORK_KEY);
    }

    /**
     * Look for a value in the local parameter list, otherwise return the value
     * from the image parameter list.
     */
    private String extractParameterWithOverride(String key) throws ValidationException {
        ImageModule image = getImage();
        if (image != null) {
            return getParameterValue(key, image.getParameterValue(key, null));
        } else {
            // The image is missing, but this will be picked-up when running
            return null;
        }
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public void setName(String name) {
        this.name = name;
    }

    public Map<String, NodeParameter> getParameterMappings() {
        return parameterMappings;
    }

    /**
     * Assembled notes. Includes notes from inherited images.
     */
    @Transient
    @ElementArray(required = false, entry = "note")
    public String[] getNotes() {
        List<String> notes = new ArrayList<String>();
        ImageModule image = getImage();
        if (image != null) {
            notes.addAll(Arrays.asList(image.getNotes()));
        }
        return notes.toArray(new String[0]);
    }

    // No-op setter: notes are derived from the image; the annotation only lets
    // Simple XML accept (and discard) a serialized "note" array.
    @Transient
    @ElementArray(required = false, entry = "note")
    private void setNotes(String[] notes) {
    }

    public void setParameterMappings(Map<String, NodeParameter> parameterMappings) {
        this.parameterMappings = parameterMappings;
    }

    /**
     * Validates the mapping against the deployment, then stores it keyed by
     * the parameter name.
     */
    public void setParameterMapping(NodeParameter nodeParameter, DeploymentModule deployment) throws ValidationException {
        validateMapping(nodeParameter, deployment);
        getParameterMappings().put(nodeParameter.getName(), nodeParameter);
    }

    /**
     * Checks that the mapped-to parameter is an Input of this node's image and,
     * for non-literal mappings, that the source parameter is an Output.
     */
    private void validateMapping(NodeParameter nodeParameter, DeploymentModule deployment) throws ValidationException {
        ModuleParameter inputParameter = image.getParameter(nodeParameter.getName());
        if (!ParameterCategory.Input.name().equals(inputParameter.getCategory())) {
            throw new ValidationException("Input parameter " + nodeParameter.getName() + " not Input category");
        }
        // Literal string values need no output-side check.
        if (nodeParameter.isStringValue()) {
            return;
        }
        ModuleParameter outputParameter = extractModuleParameterFromNodeString(nodeParameter.getValue(), deployment);
        if (!ParameterCategory.Output.name().equals(outputParameter.getCategory())) {
            throw new ValidationException("Output parameter " + outputParameter.getName() + " not Output category");
        }
    }

    /**
     * Resolves "&lt;node-name&gt;:&lt;parameter-name&gt;" against the deployment's
     * nodes and returns the referenced image parameter.
     */
    private ModuleParameter extractModuleParameterFromNodeString(String fullyQualifiedParameterName, DeploymentModule deployment) {
        String[] parts = fullyQualifiedParameterName.split(":");
        String nodeName = parts[0];
        String paramName = parts[1];
        return deployment.getNodes().get(nodeName).getImage().getParameter(paramName);
    }

    /** Lazily loads the image module from imageUri on first access. */
    @Element(required = false)
    public ImageModule getImage() {
        if (image == null) {
            image = (ImageModule) ImageModule.load(imageUri);
        }
        return image;
    }

    @Element(required = false)
    public void setImage(ImageModule image) {
        this.image = image;
    }

    // Nodes are not directly addressable resources.
    @Override
    public String getResourceUri() {
        return null;
    }

    public void setParameterMapping(NodeParameter parameter) {
        parameter.setContainer(this);
        this.getParameterMappings().put(parameter.getName(), parameter);
    }

    @Override
    public void setContainer(NodeParameter parameter) {
        parameter.setContainer(this);
    }

    /** Deep-ish copy: new Node with copied fields; mappings come via copyTo(). */
    public Node copy() throws ValidationException {
        Node copy = new Node(getName(), getImageUri());
        copy = (Node) copyTo(copy);
        copy.setCloudService(getCloudService());
        copy.setMultiplicity(getMultiplicity());
        // NOTE(review): setNetwork() is a no-op, so this call has no effect;
        // the network value is re-derived from parameters on the copy.
        copy.setNetwork(getNetwork());
        return copy;
    }

    @Override
    public Node store() {
        return (Node) super.store();
    }
}
/*
 * This file is part of "lunisolar-magma".
 *
 * (C) Copyright 2014-2022 Lunisolar (http://lunisolar.eu/).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package eu.lunisolar.magma.func.tuple;

import eu.lunisolar.magma.basics.meta.LTuple;
import eu.lunisolar.magma.basics.Null;
import eu.lunisolar.magma.basics.fluent.Fluent;
import eu.lunisolar.magma.basics.meta.aType;
import eu.lunisolar.magma.basics.meta.aType.*;
import eu.lunisolar.magma.basics.meta.functional.*;
import eu.lunisolar.magma.func.*;
import eu.lunisolar.magma.func.consumer.*;
;
import eu.lunisolar.magma.func.consumer.primitives.bi.*;
import eu.lunisolar.magma.func.consumer.primitives.tri.*;
import eu.lunisolar.magma.func.function.*;
import eu.lunisolar.magma.func.function.to.*;
import eu.lunisolar.magma.func.function.from.*;
import eu.lunisolar.magma.func.operator.unary.*;
import eu.lunisolar.magma.func.operator.binary.*;
import eu.lunisolar.magma.func.predicate.*;

import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import java.util.*;
import java.util.stream.*;

/**
 * Exact equivalent of input parameters used in LTriSrtConsumer.
 *
 * NOTE(review): this looks like generated code (one of a family of primitive
 * tuple types); edits here may be overwritten by regeneration.
 */
@SuppressWarnings("UnusedDeclaration")
public interface LSrtTriple extends LTuple<Short>, Comparable<LSrtTriple> {

    // Fixed arity of this tuple.
    int SIZE = 3;

    short first();

    /** Alias for {@link #first()}. */
    default short value() {
        return first();
    }

    short second();

    short third();

    // Indexing is 1-based (1..SIZE), per the LTuple convention; boxes the value.
    @Override
    default Short get(int index) {
        switch (index) {
            case 1: return first();
            case 2: return second();
            case 3: return third();
            default: throw new NoSuchElementException();
        }
    }

    // 1-based, unboxed accessor.
    default short getShort(int index) {
        switch (index) {
            case 1: return first();
            case 2: return second();
            case 3: return third();
            default: throw new NoSuchElementException();
        }
    }

    /** Tuple size */
    @Override
    default int tupleSize() {
        return SIZE;
    }

    /** Static hashCode() implementation method that takes same arguments as fields of the LSrtTriple and calculates hash from it. */
    static int argHashCode(short a1, short a2, short a3) {
        final int prime = 31;
        int result = 1;
        result = prime * result + Short.hashCode(a1);
        result = prime * result + Short.hashCode(a2);
        result = prime * result + Short.hashCode(a3);
        return result;
    }

    /** Static equals() implementation that takes same arguments (doubled) as fields of the LSrtTriple and checks if all values are equal. */
    static boolean argEquals(short a1, short a2, short a3, short b1, short b2, short b3) {
        return a1 == b1 && //
               a2 == b2 && //
               a3 == b3;   //
    }

    /**
     * Static equals() implementation that takes two tuples and checks if they are equal.
     * Tuples are considered equal if are implementing LSrtTriple interface (among others) and their LSrtTriple values are equal regardless of the implementing class
     * and how many more values there are.
     */
    static boolean argEquals(LSrtTriple the, Object that) {
        return Null.equals(the, that, (one, two) -> {
            // Intentionally all implementations of LSrtTriple are allowed.
            if (!(two instanceof LSrtTriple)) {
                return false;
            }
            LSrtTriple other = (LSrtTriple) two;
            return argEquals(one.first(), one.second(), one.third(), other.first(), other.second(), other.third());
        });
    }

    /**
     * Static equals() implementation that takes two tuples and checks if they are equal.
     * Unlike {@link #argEquals(LSrtTriple, Object)}, also requires equal tuple size.
     */
    public static boolean tupleEquals(LSrtTriple the, Object that) {
        return Null.equals(the, that, (one, two) -> {
            // Intentionally all implementations of LSrtTriple are allowed.
            if (!(two instanceof LSrtTriple)) {
                return false;
            }
            LSrtTriple other = (LSrtTriple) two;
            return one.tupleSize() == other.tupleSize() && argEquals(one.first(), one.second(), one.third(), other.first(), other.second(), other.third());
        });
    }

    // Boxing iterator over the three elements; relies on 1-based get().
    @Override
    default Iterator<Short> iterator() {
        return new Iterator<Short>() {
            private int index;

            @Override
            public boolean hasNext() {
                return index < SIZE;
            }

            @Override
            public Short next() {
                index++; // pre-increment so get() sees 1..SIZE
                return get(index);
            }
        };
    }

    // Primitive iterator; shorts are widened to int by OfInt's contract.
    default PrimitiveIterator.OfInt intIterator() {
        return new PrimitiveIterator.OfInt() {
            private int index;

            @Override
            public boolean hasNext() {
                return index < SIZE;
            }

            @Override
            public int nextInt() {
                index++;
                return getShort(index);
            }
        };
    }

    // Lexicographic comparison: first, then second, then third.
    @Override
    default int compareTo(LSrtTriple that) {
        return Null.compare(this, that, (one, two) -> {
            int retval = 0;
            return (retval = Short.compare(one.first(), two.first())) != 0 ? retval : //
                   (retval = Short.compare(one.second(), two.second())) != 0 ? retval : //
                   (retval = Short.compare(one.third(), two.third())) != 0 ? retval : 0; //
        });
    }

    /** Base class supplying equals/hashCode/toString consistent with the static helpers above. */
    abstract class AbstractSrtTriple implements LSrtTriple {

        @Override
        public boolean equals(Object that) {
            return LSrtTriple.tupleEquals(this, that);
        }

        @Override
        public int hashCode() {
            return LSrtTriple.argHashCode(first(), second(), third());
        }

        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append('(');
            sb.append(first());
            sb.append(',');
            sb.append(second());
            sb.append(',');
            sb.append(third());
            sb.append(')');
            return sb.toString();
        }
    }

    /**
     * Mutable tuple. SELF-typed so fluent setters return the concrete implementation.
     */
    interface Mut<SELF extends Mut<SELF>> extends LSrtTriple {

        SELF first(short first);

        SELF second(short second);

        SELF third(short third);

        default SELF setFirst(short first) {
            this.first(first);
            return (SELF) this;
        }

        /** Sets value if predicate(newValue) OR newValue::predicate is true */
        default SELF setFirstIfArg(short first, LSrtPredicate predicate) {
            // NOTE(review): tests the CURRENT value (first()), not the argument,
            // despite the doc; same pattern repeats for second/third.
            if (predicate.test(first())) {
                return this.first(first);
            }
            return (SELF) this;
        }

        /** Sets value derived from non-null argument, only if argument is not null. */
        default <R> SELF setFirstIfArgNotNull(R arg, LToSrtFunction<R> func) {
            if (arg != null) {
                return this.first(func.applyAsSrt(arg));
            }
            return (SELF) this;
        }

        /** Sets value if predicate(current) OR current::predicate is true */
        default SELF setFirstIf(LSrtPredicate predicate, short first) {
            if (predicate.test(this.first())) {
                return this.first(first);
            }
            return (SELF) this;
        }

        /** Sets new value if predicate predicate(newValue, current) OR newValue::something(current) is true. */
        default SELF setFirstIf(short first, LBiSrtPredicate predicate) {
            // the order of arguments is intentional, to allow predicate:
            if (predicate.test(first, this.first())) {
                return this.first(first);
            }
            return (SELF) this;
        }

        /** Sets new value if predicate predicate(current, newValue) OR current::something(newValue) is true. */
        default SELF setFirstIf(LBiSrtPredicate predicate, short first) {
            if (predicate.test(this.first(), first)) {
                return this.first(first);
            }
            return (SELF) this;
        }

        default SELF setSecond(short second) {
            this.second(second);
            return (SELF) this;
        }

        /** Sets value if predicate(newValue) OR newValue::predicate is true */
        default SELF setSecondIfArg(short second, LSrtPredicate predicate) {
            if (predicate.test(second())) {
                return this.second(second);
            }
            return (SELF) this;
        }

        /** Sets value derived from non-null argument, only if argument is not null. */
        default <R> SELF setSecondIfArgNotNull(R arg, LToSrtFunction<R> func) {
            if (arg != null) {
                return this.second(func.applyAsSrt(arg));
            }
            return (SELF) this;
        }

        /** Sets value if predicate(current) OR current::predicate is true */
        default SELF setSecondIf(LSrtPredicate predicate, short second) {
            if (predicate.test(this.second())) {
                return this.second(second);
            }
            return (SELF) this;
        }

        /** Sets new value if predicate predicate(newValue, current) OR newValue::something(current) is true. */
        default SELF setSecondIf(short second, LBiSrtPredicate predicate) {
            // the order of arguments is intentional, to allow predicate:
            if (predicate.test(second, this.second())) {
                return this.second(second);
            }
            return (SELF) this;
        }

        /** Sets new value if predicate predicate(current, newValue) OR current::something(newValue) is true. */
        default SELF setSecondIf(LBiSrtPredicate predicate, short second) {
            if (predicate.test(this.second(), second)) {
                return this.second(second);
            }
            return (SELF) this;
        }

        default SELF setThird(short third) {
            this.third(third);
            return (SELF) this;
        }

        /** Sets value if predicate(newValue) OR newValue::predicate is true */
        default SELF setThirdIfArg(short third, LSrtPredicate predicate) {
            if (predicate.test(third())) {
                return this.third(third);
            }
            return (SELF) this;
        }

        /** Sets value derived from non-null argument, only if argument is not null. */
        default <R> SELF setThirdIfArgNotNull(R arg, LToSrtFunction<R> func) {
            if (arg != null) {
                return this.third(func.applyAsSrt(arg));
            }
            return (SELF) this;
        }

        /** Sets value if predicate(current) OR current::predicate is true */
        default SELF setThirdIf(LSrtPredicate predicate, short third) {
            if (predicate.test(this.third())) {
                return this.third(third);
            }
            return (SELF) this;
        }

        /** Sets new value if predicate predicate(newValue, current) OR newValue::something(current) is true. */
        default SELF setThirdIf(short third, LBiSrtPredicate predicate) {
            // the order of arguments is intentional, to allow predicate:
            if (predicate.test(third, this.third())) {
                return this.third(third);
            }
            return (SELF) this;
        }

        /** Sets new value if predicate predicate(current, newValue) OR current::something(newValue) is true. */
        default SELF setThirdIf(LBiSrtPredicate predicate, short third) {
            if (predicate.test(this.third(), third)) {
                return this.third(third);
            }
            return (SELF) this;
        }

        /** Resets all three components to zero. */
        default SELF reset() {
            this.first((short) 0);
            this.second((short) 0);
            this.third((short) 0);
            return (SELF) this;
        }
    }

    /** Creates a mutable tuple initialized to zeros. */
    public static MutSrtTriple of() {
        return of((short) 0, (short) 0, (short) 0);
    }

    public static MutSrtTriple of(short a1, short a2, short a3) {
        return new MutSrtTriple(a1, a2, a3);
    }

    public static MutSrtTriple copyOf(LSrtTriple tuple) {
        return of(tuple.first(), tuple.second(), tuple.third());
    }

    /**
     * Mutable, non-comparable tuple.
     */
    class MutSrtTriple extends AbstractSrtTriple implements Mut<MutSrtTriple> {

        private short first;
        private short second;
        private short third;

        public MutSrtTriple(short a1, short a2, short a3) {
            this.first = a1;
            this.second = a2;
            this.third = a3;
        }

        public @Override short first() {
            return first;
        }

        public @Override MutSrtTriple first(short first) {
            this.first = first;
            return this;
        }

        public @Override short second() {
            return second;
        }

        public @Override MutSrtTriple second(short second) {
            this.second = second;
            return this;
        }

        public @Override short third() {
            return third;
        }

        public @Override MutSrtTriple third(short third) {
            this.third = third;
            return this;
        }
    }

    public static ImmSrtTriple immutableOf(short a1, short a2, short a3) {
        return new ImmSrtTriple(a1, a2, a3);
    }

    public static ImmSrtTriple immutableCopyOf(LSrtTriple tuple) {
        return immutableOf(tuple.first(), tuple.second(), tuple.third());
    }

    /**
     * Immutable, non-comparable tuple.
     */
    @Immutable
    final class ImmSrtTriple extends AbstractSrtTriple {

        private final short first;
        private final short second;
        private final short third;

        public ImmSrtTriple(short a1, short a2, short a3) {
            this.first = a1;
            this.second = a2;
            this.third = a3;
        }

        public @Override short first() {
            return first;
        }

        public @Override short second() {
            return second;
        }

        public @Override short third() {
            return third;
        }
    }

    // Factory-driven iteration/streaming: each triple consumes three consecutive
    // values from the underlying source.

    public static Iterator<LSrtTriple.MutSrtTriple> mutIterator(PrimitiveIterator.OfInt items) {
        return iterator(items, LSrtTriple::of);
    }

    public static Iterator<LSrtTriple.ImmSrtTriple> immIterator(PrimitiveIterator.OfInt items) {
        return iterator(items, LSrtTriple::immutableOf);
    }

    public static <R> Iterator<R> iterator(PrimitiveIterator.OfInt items, LTriSrtFunction<R> factory) {
        return iterator(SA.shortIterator(), items, factory);
    }

    public static Stream<LSrtTriple.MutSrtTriple> mutStream(IntStream items) {
        return stream(items, LSrtTriple::of);
    }

    public static Stream<LSrtTriple.ImmSrtTriple> immStream(IntStream items) {
        return stream(items, LSrtTriple::immutableOf);
    }

    public static <R> Stream<R> stream(IntStream items, LTriSrtFunction<R> factory) {
        var pairs = iterator(items.iterator(), factory);
        return StreamSupport.stream(Spliterators.spliteratorUnknownSize(pairs, Spliterator.ORDERED), false);
    }

    public static <C, R> Stream<R> stream(SequentialRead<C, ?, aShort> sa, C source, LTriSrtFunction<R> factory) {
        var pairs = iterator(sa, source, factory);
        return StreamSupport.stream(Spliterators.spliteratorUnknownSize(pairs, Spliterator.ORDERED), false);
    }

    public static <C, R> Stream<R> stream(IndexedRead<C, aShort> ia, C source, LTriSrtFunction<R> factory) {
        var pairs = iterator(ia, source, factory);
        return StreamSupport.stream(Spliterators.spliteratorUnknownSize(pairs, Spliterator.ORDERED), false);
    }

    public static <C, R> Iterator<R> iterator(SequentialRead<C, ?, aShort> sa, C source, LTriSrtFunction<R> factory) {
        // Adapt the source, then pull three shorts per produced element.
        C iterator = (C) ((LFunction) sa.adapter()).apply(source);
        LPredicate<C> testFunc = (LPredicate<C>) sa.tester();
        LToSrtFunction<C> nextFunc = (LToSrtFunction<C>) sa.supplier();
        return new Iterator<R>() {
            @Override
            public boolean hasNext() {
                return testFunc.doApplyAsBoolean(iterator);
            }

            @Override
            public R next() {
                var a1 = nextFunc.applyAsSrt(iterator);
                var a2 = nextFunc.applyAsSrt(iterator);
                var a3 = nextFunc.applyAsSrt(iterator);
                return factory.apply(a1, a2, a3);
            }
        };
    }

    public static <C, R> Iterator<R> iterator(IndexedRead<C, aShort> ia, C source, LTriSrtFunction<R> factory) {
        int size = ia.size(source);
        LOiToSrtFunction<C> oiFunc = (LOiToSrtFunction<C>) ia.getter();
        return new Iterator<R>() {
            private int index = 0;

            @Override
            public boolean hasNext() {
                return index < size;
            }

            @Override
            public R next() {
                // Consumes three consecutive indices per triple.
                var a1 = oiFunc.applyAsSrt(source, index++);
                var a2 = oiFunc.applyAsSrt(source, index++);
                var a3 = oiFunc.applyAsSrt(source, index++);
                return factory.apply(a1, a2, a3);
            }
        };
    }

    public static void forEach(IntStream items, LTriSrtConsumer consumer) {
        forEach(items.iterator(), consumer);
    }

    public static void forEach(PrimitiveIterator.OfInt items, LTriSrtConsumer consumer) {
        // Drive the factory iterator purely for its side effect on the consumer.
        var emptyTuples = iterator(items, (a1, a2, a3) -> {
            consumer.accept(a1, a2, a3);
            return null;
        });

        while (emptyTuples.hasNext()) {
            emptyTuples.next();
        }
    }
}
package ee.telekom.workflow.core.workflowinstance; import java.lang.invoke.MethodHandles; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import ee.telekom.workflow.core.common.UnexpectedStatusException; import ee.telekom.workflow.core.common.WorkflowEngineConfiguration; import ee.telekom.workflow.core.error.ExecutionErrorService; import ee.telekom.workflow.executor.marshall.Marshaller; @Service @Transactional public class WorkflowInstanceServiceImpl implements WorkflowInstanceService{ private static final Logger log = LoggerFactory.getLogger( MethodHandles.lookup().lookupClass() ); @Autowired private WorkflowInstanceDao dao; @Autowired private ExecutionErrorService executionErrorService; @Autowired private WorkflowEngineConfiguration config; @Override public WorkflowInstance create( String workflowName, Integer workflowVersion, Map<String, Object> arguments, String label1, String label2 ){ WorkflowInstance woin = new WorkflowInstance(); woin.setWorkflowName( workflowName ); woin.setWorkflowVersion( workflowVersion ); woin.setAttributes( Marshaller.serializeAttributes( arguments ) ); woin.setLabel1( StringUtils.trimToNull( label1 ) ); woin.setLabel2( StringUtils.trimToNull( label2 ) ); woin.setClusterName( config.getClusterName() ); woin.setLocked( false ); woin.setStatus( WorkflowInstanceStatus.NEW ); dao.create( woin ); log.info( "Created workflow instance {}", woin.getRefNum() ); return woin; } @Override public WorkflowInstance find( long refNum ){ return dao.findByRefNum( refNum ); } @Override @Transactional(propagation = Propagation.NOT_SUPPORTED) 
public void markStarting( long refNum ) throws UnexpectedStatusException{ updateStatus( refNum, WorkflowInstanceStatus.STARTING, WorkflowInstanceStatus.NEW ); } @Override public void markExecuting( long refNum ) throws UnexpectedStatusException{ updateStatus( refNum, WorkflowInstanceStatus.EXECUTING, WorkflowInstanceStatus.STARTING ); } @Override public void markExecuted( long refNum ) throws UnexpectedStatusException{ updateStatus( refNum, WorkflowInstanceStatus.EXECUTED, WorkflowInstanceStatus.EXECUTING ); } @Override public void markAbort( long refNum ) throws UnexpectedStatusException{ Collection<WorkflowInstanceStatus> expectedStatuses = Arrays.asList( WorkflowInstanceStatus.NEW, WorkflowInstanceStatus.STARTING, WorkflowInstanceStatus.STARTING_ERROR, WorkflowInstanceStatus.EXECUTING, WorkflowInstanceStatus.EXECUTING_ERROR, WorkflowInstanceStatus.SUSPENDED ); updateStatus( refNum, WorkflowInstanceStatus.ABORT, expectedStatuses ); } @Override public void markAborting( long refNum ) throws UnexpectedStatusException{ updateStatus( refNum, WorkflowInstanceStatus.ABORTING, WorkflowInstanceStatus.ABORT ); } @Override public void markAborted( long refNum ) throws UnexpectedStatusException{ updateStatus( refNum, WorkflowInstanceStatus.ABORTED, WorkflowInstanceStatus.ABORTING ); } @Override public void assertIsExecuting( long refNum ) throws UnexpectedStatusException{ WorkflowInstanceStatus status = dao.findStatusByRefNum( refNum ); if ( !WorkflowInstanceStatus.EXECUTING.equals( status ) ){ throw new UnexpectedStatusException( WorkflowInstanceStatus.EXECUTING ); } } @Override public void rewindAfterError( long refNum ) throws UnexpectedStatusException{ WorkflowInstance woin = find( refNum ); if( WorkflowInstanceStatus.STARTING_ERROR.equals( woin.getStatus() ) ){ markNewAfterStartingError( refNum ); } else if( WorkflowInstanceStatus.ABORTING_ERROR.equals( woin.getStatus() ) ){ markAbortAfterAbortingError( refNum ); } else if( 
WorkflowInstanceStatus.EXECUTING_ERROR.equals( woin.getStatus() ) ){ markExecutingAfterExecutingError( refNum ); } else{ throw new UnexpectedStatusException( Arrays.asList( WorkflowInstanceStatus.STARTING_ERROR, WorkflowInstanceStatus.ABORTING_ERROR, WorkflowInstanceStatus.EXECUTING_ERROR ) ); } } private void markNewAfterStartingError( long refNum ) throws UnexpectedStatusException{ updateStatus( refNum, WorkflowInstanceStatus.NEW, WorkflowInstanceStatus.STARTING_ERROR ); } private void markExecutingAfterExecutingError( long refNum ) throws UnexpectedStatusException{ updateStatus( refNum, WorkflowInstanceStatus.EXECUTING, WorkflowInstanceStatus.EXECUTING_ERROR ); } private void markAbortAfterAbortingError( long refNum ) throws UnexpectedStatusException{ updateStatus( refNum, WorkflowInstanceStatus.ABORT, WorkflowInstanceStatus.ABORTING_ERROR ); } @Override public void suspend( long refNum ) throws UnexpectedStatusException{ updateStatus( refNum, WorkflowInstanceStatus.SUSPENDED, WorkflowInstanceStatus.EXECUTING ); } @Override public void resume( long refNum ) throws UnexpectedStatusException{ updateStatus( refNum, WorkflowInstanceStatus.EXECUTING, WorkflowInstanceStatus.SUSPENDED ); } @Override public void handleStartingError( long woinRefNum, Exception exception ) throws UnexpectedStatusException{ executionErrorService.handleError( woinRefNum, null, exception ); Collection<WorkflowInstanceStatus> expectedStatuses = Arrays.asList( WorkflowInstanceStatus.NEW, WorkflowInstanceStatus.STARTING, WorkflowInstanceStatus.EXECUTING, WorkflowInstanceStatus.EXECUTED ); updateStatus( woinRefNum, WorkflowInstanceStatus.STARTING_ERROR, expectedStatuses ); } @Override public void handleAbortingError( long woinRefNum, Exception exception ) throws UnexpectedStatusException{ executionErrorService.handleError( woinRefNum, null, exception ); Collection<WorkflowInstanceStatus> expectedStatuses = Arrays.asList( WorkflowInstanceStatus.ABORT, WorkflowInstanceStatus.ABORTING, 
WorkflowInstanceStatus.ABORTED );
        updateStatus( woinRefNum, WorkflowInstanceStatus.ABORTING_ERROR, expectedStatuses );
    }

    /**
     * Records the given execution error and moves the workflow instance into
     * EXECUTING_ERROR; the instance is expected to currently be EXECUTING.
     */
    @Override
    public void handleCompleteError( long woinRefNum, Long woitRefNum, Exception exception ) throws UnexpectedStatusException{
        executionErrorService.handleError( woinRefNum, woitRefNum, exception );
        updateStatus( woinRefNum, WorkflowInstanceStatus.EXECUTING_ERROR, WorkflowInstanceStatus.EXECUTING );
    }

    /** Locks all of the given workflow instances. */
    @Override
    public void lock( List<Long> refNums ){
        dao.updateLock( refNums, true );
    }

    /** Releases the lock on the given workflow instance and clears its node name. */
    @Override
    public void unlock( long refNum ){
        dao.updateLockAndNodeName( refNum, false, null );
    }

    /**
     * Assigns the given node name to the instance; fails unless the instance's
     * node_name is currently NULL (i.e. not yet assigned to any node).
     */
    @Override
    public void updateNodeNameFromNull( long refNum, String nodeName ){
        boolean updateFailed = !dao.updateNodeNameFromNull( refNum, nodeName );
        if( updateFailed ){
            throw new UnexpectedStatusException( "Expected workflow instance " + refNum + " node_name to be NULL" );
        }
    }

    /** Persists the instance state; the instance must currently be ABORTING. */
    @Override
    public void updateState( long refNum, String state ){
        WorkflowInstanceStatus expectedStatus = WorkflowInstanceStatus.ABORTING;
        boolean updateFailed = !dao.updateState( refNum, state, expectedStatus );
        if( updateFailed ){
            throw new UnexpectedStatusException( expectedStatus );
        }
    }

    /** Persists the instance history; the instance must currently be ABORTING. */
    @Override
    public void updateHistory( Long refNum, String history ){
        WorkflowInstanceStatus expectedStatus = WorkflowInstanceStatus.ABORTING;
        boolean updateFailed = !dao.updateHistory( refNum, history, expectedStatus );
        if( updateFailed ){
            throw new UnexpectedStatusException( expectedStatus );
        }
    }

    /** Recovers locked instances of the cluster that were never assigned a node name. */
    @Override
    public void recoverNotAssigned( String clusterName ){
        int count = dao.recoverNotAssigned( clusterName );
        log.info( "Recovered {} locked workflow instances not assigned to a node name for cluster {}", count, clusterName );
    }

    // NOTE(review): the exact semantics of dao.recover's second and third status
    // arguments are defined by the DAO (not visible here); each recover* method
    // below simply mirrors that call with the status it is named after.

    @Override
    public void recoverNew( String nodeName ){
        int count = dao.recover( nodeName, WorkflowInstanceStatus.NEW, WorkflowInstanceStatus.NEW );
        log.info( "Recovered {} new workflow instances for node {}", count, nodeName );
    }

    @Override
    public void recoverStarting( String nodeName ){
        int count = dao.recover( nodeName, WorkflowInstanceStatus.STARTING, WorkflowInstanceStatus.NEW );
        log.info( "Recovered {} starting workflow instances for node {}", count, nodeName );
    }

    @Override
    public void recoverExecuting( String nodeName ){
        int count = dao.recover( nodeName, WorkflowInstanceStatus.EXECUTING, WorkflowInstanceStatus.EXECUTING );
        log.info( "Recovered {} executing workflow instances for node {}", count, nodeName );
    }

    @Override
    public void recoverAbort( String nodeName ){
        int count = dao.recover( nodeName, WorkflowInstanceStatus.ABORT, WorkflowInstanceStatus.ABORT );
        // FIX: this message previously said "aborting"; it now matches the ABORT
        // status this method handles (recoverAborting logs "aborting").
        log.info( "Recovered {} abort workflow instances for node {}", count, nodeName );
    }

    @Override
    public void recoverAborting( String nodeName ){
        int count = dao.recover( nodeName, WorkflowInstanceStatus.ABORTING, WorkflowInstanceStatus.ABORT );
        // FIX: this message previously said "abort"; it now matches the ABORTING
        // status this method handles.
        log.info( "Recovered {} aborting workflow instances for node {}", count, nodeName );
    }

    /** Returns instances whose work items have been executing longer than the warn threshold. */
    @Override
    public List<WorkflowInstance> findStuck( String clusterName, int workItemExecutionTimeWarnSeconds ){
        return dao.findStuck( clusterName, workItemExecutionTimeWarnSeconds );
    }

    /** Convenience overload for a single expected status. */
    private void updateStatus( long refNum, WorkflowInstanceStatus newStatus, WorkflowInstanceStatus expectedStatus ) throws UnexpectedStatusException{
        updateStatus( refNum, newStatus, Collections.singleton( expectedStatus ) );
    }

    /**
     * Transitions the instance to {@code newStatus}, failing with
     * {@link UnexpectedStatusException} unless its current status is one of
     * {@code expectedStatuses}.
     */
    private void updateStatus( long refNum, WorkflowInstanceStatus newStatus, Collection<WorkflowInstanceStatus> expectedStatuses ) throws UnexpectedStatusException{
        boolean updateFailed = !dao.updateStatus( refNum, newStatus, expectedStatuses );
        if( updateFailed ){
            throw new UnexpectedStatusException( expectedStatuses );
        }
        else{
            log.info( "Updated the status of workflow instance {} to {}", refNum, newStatus );
        }
    }

}
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.template.impl;

import com.intellij.codeInsight.CodeInsightActionHandler;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.CodeInsightUtilBase;
import com.intellij.codeInsight.completion.PlainPrefixMatcher;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.lookup.*;
import com.intellij.codeInsight.lookup.impl.LookupImpl;
import com.intellij.codeInsight.template.TemplateManager;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;
import java.util.regex.Pattern;

/**
 * Action handler that shows a lookup popup listing the live templates applicable
 * at the caret, filtered by the identifier prefix before the caret. Selecting an
 * item starts the corresponding template.
 */
public class ListTemplatesHandler implements CodeInsightActionHandler {
  @Override
  public void invoke(@NotNull final Project project, @NotNull final Editor editor, @NotNull PsiFile file) {
    // Bail out if the editor cannot be written to (read-only file, etc.).
    if (!CodeInsightUtilBase.prepareEditorForWrite(editor)) return;
    if (!FileDocumentManager.getInstance().requestWriting(editor.getDocument(), project)) {
      return;
    }
    EditorUtil.fillVirtualSpaceUntilCaret(editor);
    // Make sure the PSI reflects the latest document text before querying templates.
    PsiDocumentManager.getInstance(project).commitDocument(editor.getDocument());

    int offset = editor.getCaretModel().getOffset();
    // prefix may contain dots (e.g. "iter.it"); prefixWithoutDots is identifier characters only.
    String prefix = getPrefix(editor.getDocument(), offset, false);
    String prefixWithoutDots = getPrefix(editor.getDocument(), offset, true);
    List<TemplateImpl> matchingTemplates = new ArrayList<TemplateImpl>();
    ArrayList<TemplateImpl> applicableTemplates = SurroundWithTemplateHandler.getApplicableTemplates(editor, file, false);
    // Matches templates whose description contains the typed prefix as a word.
    final Pattern prefixSearchPattern = Pattern.compile(".*\\b" + prefixWithoutDots + ".*");
    for (TemplateImpl template : applicableTemplates) {
      final String templateDescription = template.getDescription();
      // A template matches either by key prefix or by a word hit in its description.
      if (template.getKey().startsWith(prefix) ||
          !prefixWithoutDots.isEmpty() && templateDescription != null && prefixSearchPattern.matcher(templateDescription).matches()) {
        matchingTemplates.add(template);
      }
    }

    // Nothing matched the prefix: fall back to showing every applicable template.
    if (matchingTemplates.isEmpty()) {
      matchingTemplates.addAll(applicableTemplates);
      prefixWithoutDots = "";
    }

    // Still empty means there were no applicable templates at all: show a hint and stop.
    if (matchingTemplates.size() == 0) {
      String text = prefixWithoutDots.length() == 0
                    ? CodeInsightBundle.message("templates.no.defined")
                    : CodeInsightBundle.message("templates.no.defined.with.prefix", prefix);
      HintManager.getInstance().showErrorHint(editor, text);
      return;
    }

    Collections.sort(matchingTemplates, TemplateListPanel.TEMPLATE_COMPARATOR);
    showTemplatesLookup(project, editor, prefixWithoutDots, matchingTemplates);
  }

  /**
   * Shows a lookup containing the given templates, matching each item against
   * {@code prefix}.
   */
  public static void showTemplatesLookup(final Project project, final Editor editor,
                                         @NotNull String prefix, List<TemplateImpl> matchingTemplates) {
    final LookupImpl lookup = (LookupImpl)LookupManager.getInstance(project).createLookup(editor, LookupElement.EMPTY_ARRAY,
                                                                                         prefix, new TemplatesArranger());
    for (TemplateImpl template : matchingTemplates) {
      lookup.addItem(createTemplateElement(template), new PlainPrefixMatcher(prefix));
    }
    showLookup(lookup, null);
  }

  /**
   * Wraps a template in a lookup element that can also be matched by the
   * template's description text.
   */
  private static LiveTemplateLookupElement createTemplateElement(final TemplateImpl template) {
    return new LiveTemplateLookupElement(template, false) {
      @Override
      public Set<String> getAllLookupStrings() {
        String description = template.getDescription();
        if (description == null) {
          return super.getAllLookupStrings();
        }
        return ContainerUtil.newHashSet(getLookupString(), description);
      }
    };
  }

  /**
   * Builds the lookup prefix for a template plus optional argument, inserting a
   * space when the key ends in an identifier character.
   */
  private static String computePrefix(TemplateImpl template, String argument) {
    String key = template.getKey();
    if (argument == null) {
      return key;
    }
    if (key.length() > 0 && Character.isJavaIdentifierPart(key.charAt(key.length() - 1))) {
      return key + ' ' + argument;
    }
    return key + argument;
  }

  /**
   * Shows a lookup for templates that each carry an (optional) argument; the
   * argument participates in prefix matching.
   */
  public static void showTemplatesLookup(final Project project, final Editor editor, Map<TemplateImpl, String> template2Argument) {
    final LookupImpl lookup = (LookupImpl)LookupManager.getInstance(project).createLookup(editor, LookupElement.EMPTY_ARRAY, "",
                                                                                         new LookupArranger.DefaultArranger());
    for (TemplateImpl template : template2Argument.keySet()) {
      String prefix = computePrefix(template, template2Argument.get(template));
      lookup.addItem(createTemplateElement(template), new PlainPrefixMatcher(prefix));
    }

    showLookup(lookup, template2Argument);
  }

  // Installs the selection listener (which starts the chosen template) and shows the lookup.
  private static void showLookup(LookupImpl lookup, @Nullable Map<TemplateImpl, String> template2Argument) {
    Editor editor = lookup.getEditor();
    Project project = editor.getProject();
    lookup.addLookupListener(new MyLookupAdapter(project, editor, template2Argument));
    lookup.refreshUi(false, true);
    lookup.showLookup();
  }

  @Override
  public boolean startInWriteAction() {
    return true;
  }

  /**
   * Returns the text immediately before {@code offset} consisting of Java
   * identifier characters (and '.' unless {@code lettersOnly} is set).
   */
  public static String getPrefix(Document document, int offset, boolean lettersOnly) {
    CharSequence chars = document.getCharsSequence();
    int start = offset;
    // Walk backwards while the character belongs to the prefix.
    while (true) {
      if (start == 0) break;
      char c = chars.charAt(start - 1);
      if (!(Character.isJavaIdentifierPart(c) || !lettersOnly && c == '.')) break;
      start--;
    }
    return chars.subSequence(start, offset).toString();
  }

  /**
   * Starts the selected template (with its precomputed argument, if any) inside
   * a write command when a lookup item is chosen.
   */
  private static class MyLookupAdapter extends LookupAdapter {
    private final Project myProject;
    private final Editor myEditor;
    private final Map<TemplateImpl, String> myTemplate2Argument;

    public MyLookupAdapter(Project project, Editor editor, Map<TemplateImpl, String> template2Argument) {
      myProject = project;
      myEditor = editor;
      myTemplate2Argument = template2Argument;
    }

    @Override
    public void itemSelected(LookupEvent event) {
      FeatureUsageTracker.getInstance().triggerFeatureUsed("codeassists.liveTemplates");
      LookupElement item = event.getItem();
      if (item instanceof LiveTemplateLookupElement) {
        final TemplateImpl template = ((LiveTemplateLookupElement)item).getTemplate();
        final String argument = myTemplate2Argument != null ? myTemplate2Argument.get(template) : null;
        new WriteCommandAction(myProject) {
          @Override
          protected void run(Result result) throws Throwable {
            ((TemplateManagerImpl)TemplateManager.getInstance(myProject)).startTemplateWithPrefix(myEditor, template, null, argument);
          }
        }.execute();
      }
    }
  }

  /**
   * Orders lookup items so that templates whose key starts with the typed
   * pattern come first, followed by the remaining (description-matched) items.
   */
  private static class TemplatesArranger extends LookupArranger {

    @Override
    public Pair<List<LookupElement>, Integer> arrangeItems(@NotNull Lookup lookup, boolean onExplicitAction) {
      // LinkedHashSet keeps prefix matches first while de-duplicating against the full list.
      LinkedHashSet<LookupElement> result = new LinkedHashSet<LookupElement>();
      List<LookupElement> items = getMatchingItems();
      for (LookupElement item : items) {
        if (item.getLookupString().startsWith(lookup.itemPattern(item))) {
          result.add(item);
        }
      }
      result.addAll(items);
      ArrayList<LookupElement> list = new ArrayList<LookupElement>(result);
      int selected = lookup.isSelectionTouched() ? list.indexOf(lookup.getCurrentItem()) : 0;
      return new Pair<List<LookupElement>, Integer>(list, selected >= 0 ? selected : 0);
    }

    @Override
    public LookupArranger createEmptyCopy() {
      return new TemplatesArranger();
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.util.jsse;

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLServerSocket;
import javax.net.ssl.SSLSocket;

import org.apache.camel.CamelContext;

public class SSLContextParametersTest extends AbstractJsseParametersTest {

    /**
     * Verifies that every configurable field of the JSSE parameter objects
     * resolves Camel property placeholders before the SSLContext is created.
     */
    public void testPropertyPlaceholders() throws Exception {

        CamelContext camelContext = this.createPropertiesPlaceholderAwareContext();

        KeyStoreParameters ksp = new KeyStoreParameters();
        ksp.setCamelContext(camelContext);

        ksp.setType("{{keyStoreParameters.type}}");
        ksp.setProvider("{{keyStoreParameters.provider}}");
        ksp.setResource("{{keyStoreParameters.resource}}");
        // NOTE(review): "keyStoreParamerers" looks like a typo for
        // "keyStoreParameters" — verify against the test properties file before
        // changing either side.
        ksp.setPassword("{{keyStoreParamerers.password}}");

        KeyManagersParameters kmp = new KeyManagersParameters();
        kmp.setCamelContext(camelContext);
        kmp.setKeyStore(ksp);

        kmp.setKeyPassword("{{keyManagersParameters.keyPassword}}");
        kmp.setAlgorithm("{{keyManagersParameters.algorithm}}");
        kmp.setProvider("{{keyManagersParameters.provider}}");

        TrustManagersParameters tmp = new TrustManagersParameters();
        tmp.setCamelContext(camelContext);
        tmp.setKeyStore(ksp);
        tmp.setAlgorithm("{{trustManagersParameters.algorithm}}");
        tmp.setProvider("{{trustManagersParameters.provider}}");

        CipherSuitesParameters csp = new CipherSuitesParameters();
        csp.getCipherSuite().add("{{cipherSuite.0}}");

        SecureSocketProtocolsParameters sspp = new SecureSocketProtocolsParameters();
        sspp.getSecureSocketProtocol().add("{{secureSocketProtocol.0}}");

        SSLContextServerParameters scsp = new SSLContextServerParameters();
        scsp.setCamelContext(camelContext);
        scsp.setClientAuthentication("{{sslContextServerParameters.clientAuthentication}}");

        SSLContextParameters scp = new SSLContextParameters();
        scp.setCamelContext(camelContext);
        scp.setKeyManagers(kmp);
        scp.setTrustManagers(tmp);
        scp.setServerParameters(scsp);

        scp.setProvider("{{sslContextParameters.provider}}");
        scp.setSecureSocketProtocol("{{sslContextParameters.protocol}}");
        scp.setSessionTimeout("{{sslContextParameters.sessionTimeout}}");

        scp.setCipherSuites(csp);
        scp.setSecureSocketProtocols(sspp);

        SSLContext context = scp.createSSLContext();
        SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        // clientAuthentication placeholder must have resolved to REQUIRE for this to hold.
        assertTrue(serverSocket.getNeedClientAuth());
        context.getSocketFactory().createSocket();
        context.createSSLEngine();
    }

    /**
     * Verifies the three client-authentication modes (NONE/WANT/REQUIRE) on
     * server sockets and engines, against an unconfigured control context.
     */
    public void testServerParametersClientAuthentication() throws Exception {
        SSLContext controlContext = SSLContext.getInstance("TLS");
        controlContext.init(null, null, null);
        SSLEngine controlEngine = controlContext.createSSLEngine();
        SSLServerSocket controlServerSocket = (SSLServerSocket) controlContext.getServerSocketFactory().createServerSocket();

        SSLContextParameters scp = new SSLContextParameters();
        SSLContextServerParameters scsp = new SSLContextServerParameters();
        scp.setServerParameters(scsp);

        SSLContext context = scp.createSSLContext();
        SSLEngine engine = context.createSSLEngine();
        SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();

        // With no explicit setting, behavior must match the JDK defaults.
        assertEquals(controlServerSocket.getWantClientAuth(), serverSocket.getWantClientAuth());
        assertEquals(controlServerSocket.getNeedClientAuth(), serverSocket.getNeedClientAuth());
        assertEquals(controlEngine.getWantClientAuth(), engine.getWantClientAuth());
        assertEquals(controlEngine.getNeedClientAuth(), engine.getNeedClientAuth());

        // ClientAuthentication - NONE
        scsp.setClientAuthentication(ClientAuthentication.NONE.name());
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();

        assertEquals(false, serverSocket.getWantClientAuth());
        assertEquals(false, serverSocket.getNeedClientAuth());
        assertEquals(false, engine.getWantClientAuth());
        assertEquals(false, engine.getNeedClientAuth());

        // ClientAuthentication - WANT
        scsp.setClientAuthentication(ClientAuthentication.WANT.name());
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();

        assertEquals(true, serverSocket.getWantClientAuth());
        assertEquals(false, serverSocket.getNeedClientAuth());
        assertEquals(true, engine.getWantClientAuth());
        assertEquals(false, engine.getNeedClientAuth());

        // ClientAuthentication - REQUIRE
        scsp.setClientAuthentication(ClientAuthentication.REQUIRE.name());
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();

        assertEquals(false, serverSocket.getWantClientAuth());
        assertEquals(true, serverSocket.getNeedClientAuth());
        assertEquals(false, engine.getWantClientAuth());
        assertEquals(true, engine.getNeedClientAuth());
    }

    /**
     * Verifies that server-side parameter overrides affect only server sockets,
     * leaving engines and client sockets on the shared/default configuration.
     */
    public void testServerParameters() throws Exception {
        SSLContext controlContext = SSLContext.getInstance("TLS");
        controlContext.init(null, null, null);
        SSLEngine controlEngine = controlContext.createSSLEngine();
        SSLSocket controlSocket = (SSLSocket) controlContext.getSocketFactory().createSocket();
        SSLServerSocket
controlServerSocket = (SSLServerSocket) controlContext.getServerSocketFactory().createServerSocket();

        SSLContextParameters scp = new SSLContextParameters();
        SSLContextServerParameters scsp = new SSLContextServerParameters();
        scp.setServerParameters(scsp);

        SSLContext context = scp.createSSLContext();
        SSLEngine engine = context.createSSLEngine();
        SSLSocket socket = (SSLSocket) context.getSocketFactory().createSocket();
        SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();

        // Empty server parameters: everything matches the JDK defaults.
        assertTrue(Arrays.equals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites()));
        assertTrue(Arrays.equals(controlSocket.getEnabledCipherSuites(), socket.getEnabledCipherSuites()));
        assertTrue(Arrays.equals(this.getDefaultCipherSuiteIncludes(controlServerSocket.getSupportedCipherSuites()), serverSocket.getEnabledCipherSuites()));
        assertEquals(controlServerSocket.getWantClientAuth(), serverSocket.getWantClientAuth());
        assertEquals(controlServerSocket.getNeedClientAuth(), serverSocket.getNeedClientAuth());

        // No csp or filter on server params passes through shared config
        scp.setCipherSuites(new CipherSuitesParameters());
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        // The empty shared csp flows through to the server socket.
        assertEquals(0, serverSocket.getEnabledCipherSuites().length);

        // Csp on server params
        scp.setCipherSuites(null);
        CipherSuitesParameters csp = new CipherSuitesParameters();
        scsp.setCipherSuites(csp);
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        // Server-only csp: engine and client socket stay at defaults.
        assertTrue(Arrays.equals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites()));
        assertTrue(Arrays.equals(controlSocket.getEnabledCipherSuites(), socket.getEnabledCipherSuites()));
        assertEquals(0, serverSocket.getEnabledCipherSuites().length);

        // Cipher suites filter on server params
        FilterParameters filter = new FilterParameters();
        filter.getExclude().add(".*");
        scsp.setCipherSuites(null);
        scsp.setCipherSuitesFilter(filter);
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        assertTrue(Arrays.equals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites()));
        assertTrue(Arrays.equals(controlSocket.getEnabledCipherSuites(), socket.getEnabledCipherSuites()));
        assertEquals(0, serverSocket.getEnabledCipherSuites().length);

        // Csp on server overrides cipher suites filter on server
        filter.getInclude().add(".*");
        filter.getExclude().clear();
        scsp.setCipherSuites(csp);
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        assertTrue(Arrays.equals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites()));
        assertTrue(Arrays.equals(controlSocket.getEnabledCipherSuites(), socket.getEnabledCipherSuites()));
        assertEquals(0, serverSocket.getEnabledCipherSuites().length);

        // Sspp on server params
        SecureSocketProtocolsParameters sspp = new SecureSocketProtocolsParameters();
        scsp.setSecureSocketProtocols(sspp);
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        assertTrue(Arrays.equals(controlEngine.getEnabledProtocols(), engine.getEnabledProtocols()));
        assertTrue(Arrays.equals(controlSocket.getEnabledProtocols(), socket.getEnabledProtocols()));
        assertEquals(0, serverSocket.getEnabledProtocols().length);

        // Secure socket protocols filter on server params
        filter = new FilterParameters();
        filter.getExclude().add(".*");
        scsp.setSecureSocketProtocols(null);
        scsp.setSecureSocketProtocolsFilter(filter);
        // NOTE(review): unlike the other sections, the context is NOT recreated
        // here before building the engine/sockets, so the filter set above is
        // never applied — confirm whether "context = scp.createSSLContext();"
        // was intended.
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        assertTrue(Arrays.equals(controlEngine.getEnabledProtocols(), engine.getEnabledProtocols()));
        assertTrue(Arrays.equals(controlSocket.getEnabledProtocols(), socket.getEnabledProtocols()));
        assertEquals(0, serverSocket.getEnabledProtocols().length);

        // Sspp on server params overrides secure socket protocols filter on server
        filter.getInclude().add(".*");
        filter.getExclude().clear();
        scsp.setSecureSocketProtocols(sspp);
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        assertTrue(Arrays.equals(controlEngine.getEnabledProtocols(), engine.getEnabledProtocols()));
        assertTrue(Arrays.equals(controlSocket.getEnabledProtocols(), socket.getEnabledProtocols()));
        assertEquals(0, serverSocket.getEnabledProtocols().length);

        // Server session timeout only affects server session configuration
        scsp.setSessionTimeout("12345");
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        assertEquals(controlContext.getClientSessionContext().getSessionTimeout(), context.getClientSessionContext().getSessionTimeout());
        assertEquals(12345, context.getServerSessionContext().getSessionTimeout());
    }

    /**
     * Asserts that every protocol enabled on the control object is also enabled
     * on the configured object (containment, not equality — see IBM JDK note).
     */
    private void checkProtocols(String[] control, String[] configured) {
        //With the IBM JDK, an "default" unconfigured control socket is more
//restricted than with the Sun JDK. For example, with
        //SSLContext.getInstance("TLS"), on Sun, you get
        //  TLSv1, SSLv3, SSLv2Hello
        //but with IBM, you only get:
        //  TLSv1
        //We'll check to make sure the "default" protocols are amongst the list
        //that are in after configuration.
        assertTrue(Arrays.asList(configured).containsAll(Arrays.asList(control)));
    }

    /**
     * Verifies that client-side parameter overrides affect only client sockets,
     * leaving engines and server sockets on the shared/default configuration.
     */
    public void testClientParameters() throws Exception {
        SSLContext controlContext = SSLContext.getInstance("TLS");
        controlContext.init(null, null, null);
        SSLEngine controlEngine = controlContext.createSSLEngine();
        SSLSocket controlSocket = (SSLSocket) controlContext.getSocketFactory().createSocket();
        SSLServerSocket controlServerSocket = (SSLServerSocket) controlContext.getServerSocketFactory().createServerSocket();

        SSLContextParameters scp = new SSLContextParameters();
        SSLContextClientParameters sccp = new SSLContextClientParameters();
        scp.setClientParameters(sccp);
        SSLContext context = scp.createSSLContext();
        SSLEngine engine = context.createSSLEngine();
        SSLSocket socket = (SSLSocket) context.getSocketFactory().createSocket();
        SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();

        // Empty client parameters: everything matches the JDK defaults.
        assertTrue(Arrays.equals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites()));
        assertTrue(Arrays.equals(controlSocket.getEnabledCipherSuites(), socket.getEnabledCipherSuites()));
        assertTrue(Arrays.equals(this.getDefaultCipherSuiteIncludes(controlServerSocket.getSupportedCipherSuites()), serverSocket.getEnabledCipherSuites()));

        // No csp or filter on client params passes through shared config
        scp.setCipherSuites(new CipherSuitesParameters());
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        // The empty shared csp flows through to the client socket.
        assertEquals(0, socket.getEnabledCipherSuites().length);

        // Csp on client params
        scp.setCipherSuites(null);
        CipherSuitesParameters csp = new CipherSuitesParameters();
        sccp.setCipherSuites(csp);
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        // Client-only csp: engine and server socket stay at defaults.
        assertTrue(Arrays.equals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites()));
        assertEquals(0, socket.getEnabledCipherSuites().length);
        assertTrue(Arrays.equals(this.getDefaultCipherSuiteIncludes(controlServerSocket.getSupportedCipherSuites()), serverSocket.getEnabledCipherSuites()));

        // Cipher suites filter on client params
        FilterParameters filter = new FilterParameters();
        filter.getExclude().add(".*");
        sccp.setCipherSuites(null);
        sccp.setCipherSuitesFilter(filter);
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        assertTrue(Arrays.equals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites()));
        assertEquals(0, socket.getEnabledCipherSuites().length);
        assertTrue(Arrays.equals(this.getDefaultCipherSuiteIncludes(controlServerSocket.getSupportedCipherSuites()), serverSocket.getEnabledCipherSuites()));

        // Csp on client overrides cipher suites filter on client
        filter.getInclude().add(".*");
        filter.getExclude().clear();
        sccp.setCipherSuites(csp);
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        assertTrue(Arrays.equals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites()));
        assertEquals(0, socket.getEnabledCipherSuites().length);
        assertTrue(Arrays.equals(this.getDefaultCipherSuiteIncludes(controlServerSocket.getSupportedCipherSuites()), serverSocket.getEnabledCipherSuites()));

        // Sspp on client params
        SecureSocketProtocolsParameters sspp = new SecureSocketProtocolsParameters();
        sccp.setSecureSocketProtocols(sspp);
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        assertTrue(Arrays.equals(controlEngine.getEnabledProtocols(), engine.getEnabledProtocols()));
        assertEquals(0, socket.getEnabledProtocols().length);
        checkProtocols(controlServerSocket.getEnabledProtocols(), serverSocket.getEnabledProtocols());

        // Secure socket protocols filter on client params
        filter = new FilterParameters();
        filter.getExclude().add(".*");
        sccp.setSecureSocketProtocols(null);
        sccp.setSecureSocketProtocolsFilter(filter);
        // NOTE(review): unlike the other sections, the context is NOT recreated
        // here before building the engine/sockets, so the filter set above is
        // never applied — confirm whether "context = scp.createSSLContext();"
        // was intended.
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        assertTrue(Arrays.equals(controlEngine.getEnabledProtocols(), engine.getEnabledProtocols()));
        assertEquals(0, socket.getEnabledProtocols().length);
        checkProtocols(controlServerSocket.getEnabledProtocols(), serverSocket.getEnabledProtocols());

        // Sspp on client params overrides secure socket protocols filter on client
        filter.getInclude().add(".*");
        filter.getExclude().clear();
        sccp.setSecureSocketProtocols(sspp);
        context = scp.createSSLContext();
        engine = context.createSSLEngine();
        socket = (SSLSocket) context.getSocketFactory().createSocket();
        serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket();
        assertTrue(Arrays.equals(controlEngine.getEnabledProtocols(), engine.getEnabledProtocols()));
        assertEquals(0, socket.getEnabledProtocols().length);
        checkProtocols(controlServerSocket.getEnabledProtocols(), serverSocket.getEnabledProtocols());

        // Client session timeout only affects client session configuration
sccp.setSessionTimeout("12345"); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertEquals(controlContext.getServerSessionContext().getSessionTimeout(), context.getServerSessionContext().getSessionTimeout()); assertEquals(12345, context.getClientSessionContext().getSessionTimeout()); } public void testCipherSuites() throws Exception { SSLContext controlContext = SSLContext.getInstance("TLS"); controlContext.init(null, null, null); SSLEngine controlEngine = controlContext.createSSLEngine(); SSLSocket controlSocket = (SSLSocket) controlContext.getSocketFactory().createSocket(); SSLServerSocket controlServerSocket = (SSLServerSocket) controlContext.getServerSocketFactory().createServerSocket(); // default SSLContextParameters scp = new SSLContextParameters(); SSLContext context = scp.createSSLContext(); SSLEngine engine = context.createSSLEngine(); SSLSocket socket = (SSLSocket) context.getSocketFactory().createSocket(); SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertTrue(Arrays.equals(controlEngine.getEnabledCipherSuites(), engine.getEnabledCipherSuites())); assertTrue(Arrays.equals(controlSocket.getEnabledCipherSuites(), socket.getEnabledCipherSuites())); assertTrue(Arrays.equals(this.getDefaultCipherSuiteIncludes(controlServerSocket.getSupportedCipherSuites()), serverSocket.getEnabledCipherSuites())); // empty csp CipherSuitesParameters csp = new CipherSuitesParameters(); scp.setCipherSuites(csp); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertEquals(0, engine.getEnabledCipherSuites().length); assertEquals(0, socket.getEnabledCipherSuites().length); 
assertEquals(0, serverSocket.getEnabledCipherSuites().length); // explicit csp csp.getCipherSuite().add(controlEngine.getEnabledCipherSuites()[0]); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertEquals(1, engine.getEnabledCipherSuites().length); assertEquals(controlEngine.getEnabledCipherSuites()[0], engine.getEnabledCipherSuites()[0]); assertEquals(1, socket.getEnabledCipherSuites().length); assertEquals(controlEngine.getEnabledCipherSuites()[0], socket.getEnabledCipherSuites()[0]); assertEquals(1, serverSocket.getEnabledCipherSuites().length); assertEquals(controlEngine.getEnabledCipherSuites()[0], serverSocket.getEnabledCipherSuites()[0]); // explicit csp overrides filter FilterParameters filter = new FilterParameters(); filter.getInclude().add(".*"); scp.setCipherSuitesFilter(filter); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertEquals(1, engine.getEnabledCipherSuites().length); assertEquals(controlEngine.getEnabledCipherSuites()[0], engine.getEnabledCipherSuites()[0]); assertEquals(1, socket.getEnabledCipherSuites().length); assertEquals(controlEngine.getEnabledCipherSuites()[0], socket.getEnabledCipherSuites()[0]); assertEquals(1, socket.getEnabledCipherSuites().length); assertEquals(controlEngine.getEnabledCipherSuites()[0], serverSocket.getEnabledCipherSuites()[0]); } public void testCipherSuitesFilter() throws Exception { SSLContext controlContext = SSLContext.getInstance("TLS"); controlContext.init(null, null, null); SSLEngine controlEngine = controlContext.createSSLEngine(); SSLSocket controlSocket = (SSLSocket) controlContext.getSocketFactory().createSocket(); SSLServerSocket controlServerSocket 
= (SSLServerSocket) controlContext.getServerSocketFactory().createServerSocket(); // default SSLContextParameters scp = new SSLContextParameters(); SSLContext context = scp.createSSLContext(); SSLEngine engine = context.createSSLEngine(); SSLSocket socket = (SSLSocket) context.getSocketFactory().createSocket(); SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertTrue(Arrays.equals(controlEngine.getEnabledProtocols(), engine.getEnabledProtocols())); assertTrue(Arrays.equals(controlSocket.getEnabledProtocols(), socket.getEnabledProtocols())); checkProtocols(controlServerSocket.getEnabledProtocols(), serverSocket.getEnabledProtocols()); // empty filter FilterParameters filter = new FilterParameters(); scp.setSecureSocketProtocolsFilter(filter); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertEquals(0, engine.getEnabledProtocols().length); assertEquals(0, socket.getEnabledProtocols().length); assertEquals(0, serverSocket.getEnabledProtocols().length); // explicit filter filter.getInclude().add(".*"); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertTrue(Arrays.equals(controlEngine.getEnabledProtocols(), engine.getEnabledProtocols())); assertTrue(Arrays.equals(controlSocket.getEnabledProtocols(), socket.getEnabledProtocols())); checkProtocols(controlServerSocket.getEnabledProtocols(), serverSocket.getEnabledProtocols()); // explicit filter with excludes (excludes overrides) filter.getExclude().add(".*"); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = 
(SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertEquals(0, engine.getEnabledProtocols().length); assertEquals(0, socket.getEnabledProtocols().length); assertEquals(0, serverSocket.getEnabledProtocols().length); // explicit filter single include filter.getInclude().clear(); filter.getExclude().clear(); filter.getInclude().add("TLS.*"); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertTrue(engine.getEnabledProtocols().length >= 1); assertStartsWith(engine.getEnabledProtocols(), "TLS"); assertTrue(socket.getEnabledProtocols().length >= 1); assertStartsWith(socket.getEnabledProtocols(), "TLS"); assertTrue(socket.getEnabledProtocols().length >= 1); assertStartsWith(serverSocket.getEnabledProtocols(), "TLS"); } public void testSecureSocketProtocols() throws Exception { SSLContext controlContext = SSLContext.getInstance("TLS"); controlContext.init(null, null, null); SSLEngine controlEngine = controlContext.createSSLEngine(); SSLSocket controlSocket = (SSLSocket) controlContext.getSocketFactory().createSocket(); SSLServerSocket controlServerSocket = (SSLServerSocket) controlContext.getServerSocketFactory().createServerSocket(); // default SSLContextParameters scp = new SSLContextParameters(); SSLContext context = scp.createSSLContext(); SSLEngine engine = context.createSSLEngine(); SSLSocket socket = (SSLSocket) context.getSocketFactory().createSocket(); SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertTrue(Arrays.equals(controlEngine.getEnabledProtocols(), engine.getEnabledProtocols())); assertTrue(Arrays.equals(controlSocket.getEnabledProtocols(), socket.getEnabledProtocols())); checkProtocols(controlServerSocket.getEnabledProtocols(), serverSocket.getEnabledProtocols()); // empty sspp 
SecureSocketProtocolsParameters sspp = new SecureSocketProtocolsParameters(); scp.setSecureSocketProtocols(sspp); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertEquals(0, engine.getEnabledProtocols().length); assertEquals(0, socket.getEnabledProtocols().length); assertEquals(0, serverSocket.getEnabledProtocols().length); // explicit sspp sspp.getSecureSocketProtocol().add("TLSv1"); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertEquals(1, engine.getEnabledProtocols().length); assertEquals("TLSv1", engine.getEnabledProtocols()[0]); assertEquals(1, socket.getEnabledProtocols().length); assertEquals("TLSv1", socket.getEnabledProtocols()[0]); assertEquals(1, serverSocket.getEnabledProtocols().length); assertEquals("TLSv1", serverSocket.getEnabledProtocols()[0]); // explicit sspp overrides filter FilterParameters filter = new FilterParameters(); filter.getInclude().add(".*"); scp.setSecureSocketProtocolsFilter(filter); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertEquals(1, engine.getEnabledProtocols().length); assertEquals("TLSv1", engine.getEnabledProtocols()[0]); assertEquals(1, socket.getEnabledProtocols().length); assertEquals("TLSv1", socket.getEnabledProtocols()[0]); assertEquals(1, socket.getEnabledProtocols().length); assertEquals("TLSv1", serverSocket.getEnabledProtocols()[0]); } public void testSecureSocketProtocolsFilter() throws Exception { SSLContext controlContext = SSLContext.getInstance("TLS"); controlContext.init(null, 
null, null); SSLEngine controlEngine = controlContext.createSSLEngine(); SSLSocket controlSocket = (SSLSocket) controlContext.getSocketFactory().createSocket(); SSLServerSocket controlServerSocket = (SSLServerSocket) controlContext.getServerSocketFactory().createServerSocket(); // default SSLContextParameters scp = new SSLContextParameters(); SSLContext context = scp.createSSLContext(); SSLEngine engine = context.createSSLEngine(); SSLSocket socket = (SSLSocket) context.getSocketFactory().createSocket(); SSLServerSocket serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertTrue(Arrays.equals(controlEngine.getEnabledProtocols(), engine.getEnabledProtocols())); assertTrue(Arrays.equals(controlSocket.getEnabledProtocols(), socket.getEnabledProtocols())); checkProtocols(controlServerSocket.getEnabledProtocols(), serverSocket.getEnabledProtocols()); // empty filter FilterParameters filter = new FilterParameters(); scp.setSecureSocketProtocolsFilter(filter); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertEquals(0, engine.getEnabledProtocols().length); assertEquals(0, socket.getEnabledProtocols().length); assertEquals(0, serverSocket.getEnabledProtocols().length); // explicit filter filter.getInclude().add(".*"); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertTrue(Arrays.equals(controlEngine.getEnabledProtocols(), engine.getEnabledProtocols())); assertTrue(Arrays.equals(controlSocket.getEnabledProtocols(), socket.getEnabledProtocols())); checkProtocols(controlServerSocket.getEnabledProtocols(), serverSocket.getEnabledProtocols()); // explicit filter with excludes (excludes 
overrides) filter.getExclude().add(".*"); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertEquals(0, engine.getEnabledProtocols().length); assertEquals(0, socket.getEnabledProtocols().length); assertEquals(0, serverSocket.getEnabledProtocols().length); // explicit filter single include filter.getInclude().clear(); filter.getExclude().clear(); filter.getInclude().add("TLS.*"); context = scp.createSSLContext(); engine = context.createSSLEngine(); socket = (SSLSocket) context.getSocketFactory().createSocket(); serverSocket = (SSLServerSocket) context.getServerSocketFactory().createServerSocket(); assertTrue(engine.getEnabledProtocols().length >= 1); assertStartsWith(engine.getEnabledProtocols(), "TLS"); assertTrue(socket.getEnabledProtocols().length >= 1); assertStartsWith(socket.getEnabledProtocols(), "TLS"); assertTrue(socket.getEnabledProtocols().length >= 1); assertStartsWith(serverSocket.getEnabledProtocols(), "TLS"); } public void testSessionTimeout() throws Exception { SSLContextParameters scp = new SSLContextParameters(); scp.setSessionTimeout("60"); SSLContext context = scp.createSSLContext(); assertEquals(60, context.getClientSessionContext().getSessionTimeout()); assertEquals(60, context.getServerSessionContext().getSessionTimeout()); scp.setSessionTimeout("0"); context = scp.createSSLContext(); assertEquals(0, context.getClientSessionContext().getSessionTimeout()); assertEquals(0, context.getServerSessionContext().getSessionTimeout()); } public void testDefaultSecureSocketProtocol() throws Exception { SSLContextParameters scp = new SSLContextParameters(); SSLContext context = scp.createSSLContext(); assertEquals("TLS", context.getProtocol()); } public void testSecureSocketProtocol() throws Exception { SSLContextParameters scp = new SSLContextParameters(); 
scp.setSecureSocketProtocol("SSLv3"); SSLContext context = scp.createSSLContext(); assertEquals("SSLv3", context.getProtocol()); } public void testProvider() throws Exception { SSLContextParameters scp = new SSLContextParameters(); scp.createSSLContext(); SSLContext context = scp.createSSLContext(); SSLContext defaultContext = SSLContext.getDefault(); assertEquals(defaultContext.getProvider().getName(), context.getProvider().getName()); } protected String[] getDefaultCipherSuiteIncludes(String[] availableCipherSuites) { List<String> enabled = new LinkedList<String>(); for (String string : availableCipherSuites) { if (!string.contains("_anon_") && !string.contains("_NULL_")) { enabled.add(string); } } return enabled.toArray(new String[enabled.size()]); } protected void assertStartsWith(String[] values, String prefix) { for (String value : values) { assertTrue(value.startsWith(prefix)); } } }
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.intellij.images.thumbnail.impl;

import com.intellij.ide.CopyPasteDelegator;
import com.intellij.ide.CopyPasteSupport;
import com.intellij.ide.DeleteProvider;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.util.DeleteHandler;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.ui.JBPopupMenu;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.pom.Navigatable;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.*;
import com.intellij.ui.components.JBList;
import com.intellij.util.containers.ContainerUtil;
import org.intellij.images.ImagesBundle;
import org.intellij.images.fileTypes.ImageFileTypeManager;
import org.intellij.images.options.*;
import org.intellij.images.search.ImageTagManager;
import org.intellij.images.search.TagFilter;
import org.intellij.images.thumbnail.ThumbnailView;
import org.intellij.images.thumbnail.actionSystem.ThumbnailViewActionUtil;
import org.intellij.images.thumbnail.actionSystem.ThumbnailViewActions;
import org.intellij.images.thumbnail.actions.ThemeFilter;
import org.intellij.images.thumbnail.actions.ToggleTagsPanelAction;
import org.intellij.images.ui.ImageComponent;
import org.intellij.images.ui.ImageComponentDecorator;
import org.intellij.images.ui.ThumbnailComponent;
import org.intellij.images.ui.ThumbnailComponentUI;
import org.intellij.images.vfs.IfsUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionListener;
import java.awt.image.BufferedImage;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.List;
import java.util.*;

/**
 * Swing UI for the image thumbnail view: a wrapping list of thumbnail cells with a
 * toolbar on top and an optional tags panel in a splitter below.
 * <p>
 * Lazily builds its components in {@link #createUI()}, reacts to VFS changes and
 * image-editor option changes via listeners registered with {@code this} as the
 * parent {@link Disposable}, and exposes the current selection to the platform
 * action system through {@link DataProvider#getData}.
 */
final class ThumbnailViewUI extends JPanel implements DataProvider, Disposable {
  private static final Navigatable[] EMPTY_NAVIGATABLE_ARRAY = new Navigatable[]{};
  private final ThumbnailView thumbnailView;
  private final CopyPasteSupport copyPasteSupport;
  private final DeleteProvider deleteProvider;
  // Built lazily by createUI(); null until the view is first shown and after dispose().
  private ThumbnailListCellRenderer cellRenderer;
  private JList list;
  private JPanel tagsPanel;
  // Directories first, then case-insensitive by full path.
  private static final Comparator<VirtualFile> VIRTUAL_FILE_COMPARATOR = (o1, o2) -> {
    if (o1.isDirectory() && !o2.isDirectory()) {
      return -1;
    }
    if (o2.isDirectory() && !o1.isDirectory()) {
      return 1;
    }
    return o1.getPath().compareToIgnoreCase(o2.getPath());
  };
  // Model backing the tags list in the preview panel; null until the panel is created.
  private DefaultListModel<String> listModel;
  private Splitter previewSplitter;

  ThumbnailViewUI(ThumbnailViewImpl thumbnailView) {
    super(new BorderLayout());
    this.thumbnailView = thumbnailView;
    copyPasteSupport = new CopyPasteDelegator(thumbnailView.getProject(), this);
    deleteProvider = new DeleteHandler.DefaultDeleteProvider();
  }

  /**
   * Builds the toolbar, thumbnail list and preview splitter on first call; subsequent
   * calls only refresh the tags-panel visibility. Safe to call repeatedly.
   */
  private void createUI() {
    if (cellRenderer == null || list == null) {
      cellRenderer = new ThumbnailListCellRenderer();
      ImageComponent imageComponent = cellRenderer.getImageComponent();
      VirtualFileManager.getInstance().addVirtualFileListener(new VFSListener(), this);
      Options options = OptionsManager.getInstance().getOptions();
      EditorOptions editorOptions = options.getEditorOptions();
      // Set options
      TransparencyChessboardOptions chessboardOptions = editorOptions.getTransparencyChessboardOptions();
      imageComponent.setTransparencyChessboardVisible(chessboardOptions.isShowDefault());
      imageComponent.setTransparencyChessboardCellSize(chessboardOptions.getCellSize());
      imageComponent.setTransparencyChessboardWhiteColor(chessboardOptions.getWhiteColor());
      imageComponent.setTransparencyChessboardBlankColor(chessboardOptions.getBlackColor());
      imageComponent.setFileNameVisible(editorOptions.isFileNameVisible());
      imageComponent.setFileSizeVisible(editorOptions.isFileSizeVisible());
      options.addPropertyChangeListener(new OptionsChangeListener(), this);
      list = new JBList();
      list.setModel(new DefaultListModel());
      list.setLayoutOrientation(JList.HORIZONTAL_WRAP);
      // -1 lets the wrap layout compute row count from the component size.
      list.setVisibleRowCount(-1);
      list.setCellRenderer(cellRenderer);
      list.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
      list.addListSelectionListener(e -> updateTagsPreviewModel());
      ThumbnailsMouseAdapter mouseListener = new ThumbnailsMouseAdapter();
      list.addMouseListener(mouseListener);
      list.addMouseMotionListener(mouseListener);
      // Fixed cell size taken from the renderer's UI so layout never has to measure each cell.
      ThumbnailComponentUI componentUI = (ThumbnailComponentUI)ThumbnailComponentUI.createUI(cellRenderer);
      Dimension preferredSize = componentUI.getPreferredSize(cellRenderer);
      list.setFixedCellWidth(preferredSize.width);
      list.setFixedCellHeight(preferredSize.height);
      JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(list,
                                                                  ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED,
                                                                  ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
      scrollPane.setBorder(IdeBorderFactory.createBorder(SideBorder.TOP));
      ActionManager actionManager = ActionManager.getInstance();
      ActionGroup actionGroup = (ActionGroup) actionManager.getAction(ThumbnailViewActions.GROUP_TOOLBAR);
      ActionToolbar actionToolbar = actionManager.createActionToolbar(
        ThumbnailViewActions.ACTION_PLACE, actionGroup, true
      );
      actionToolbar.setTargetComponent(this);
      JComponent toolbar = actionToolbar.getComponent();
      FocusRequester focusRequester = new FocusRequester();
      toolbar.addMouseListener(focusRequester);
      scrollPane.addMouseListener(focusRequester);
      add(toolbar, BorderLayout.NORTH);
      previewSplitter = new Splitter();
      previewSplitter.setFirstComponent(scrollPane);
      // Proportion 1 doubles as the "tags panel hidden" state; see updateTagsPreview().
      previewSplitter.setProportion(1);
      previewSplitter.setSecondComponent(null);
      add(previewSplitter, BorderLayout.CENTER);
    }
    updateTagsPreview();
  }

  /**
   * Creates the tags side panel: a list of tags common to the current selection with
   * add/remove toolbar actions. Also initializes {@link #listModel}.
   */
  private JPanel createTagPreviewPanel() {
    listModel = new DefaultListModel<>();
    updateTagsPreviewModel();
    JBList<String> tagsList = new JBList<>(listModel);
    tagsList.setEmptyText(ImagesBundle.message("list.empty.text.no.tags.defined"));
    ImageTagManager imageTagManager = ImageTagManager.getInstance(thumbnailView.getProject());
    return ToolbarDecorator.createDecorator(tagsList)
      .setAddAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          // Popup listing existing tags plus a "new tag" entry; see AddTagGroup below.
          JBPopupFactory.getInstance().createActionGroupPopup(IdeBundle.message("popup.title.add.tags"),
                                                              new AddTagGroup(),
                                                              button.getDataContext(),
                                                              JBPopupFactory.ActionSelectionAid.SPEEDSEARCH,
                                                              false)
            .show(button.getPreferredPopupPoint());
        }
      })
      .setRemoveAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          // Removes the chosen tag from every file in the current thumbnail selection.
          String selectedValue = tagsList.getSelectedValue();
          if (selectedValue != null) {
            Arrays.stream(getSelection())
              .forEach(virtualFile -> imageTagManager.removeTag(selectedValue, virtualFile));
          }
          updateTagsPreviewModel();
        }
      })
      .disableUpDownActions()
      .setToolbarPosition(ActionToolbarPosition.RIGHT)
      .createPanel();
  }

  /**
   * Shows or hides the tags panel according to the persisted toggle, restoring and
   * saving the splitter proportion. A proportion of exactly 1 is used as the
   * "panel hidden" sentinel.
   */
  private void updateTagsPreview() {
    Project project = thumbnailView.getProject();
    boolean enabled = PropertiesComponent.getInstance(project).getBoolean(ToggleTagsPanelAction.TAGS_PANEL_VISIBLE, false);
    float splitterProportion = previewSplitter.getProportion();
    if (enabled) {
      if (splitterProportion == 1) {
        previewSplitter.setProportion(Float.valueOf(PropertiesComponent.getInstance(project).getValue(ToggleTagsPanelAction.TAGS_PANEL_PROPORTION, "0.5f")));
      }
      if (tagsPanel == null) {
        tagsPanel = createTagPreviewPanel();
      }
      previewSplitter.setSecondComponent(tagsPanel);
    }
    else {
      if (splitterProportion != 1) {
        // Remember the user's proportion before collapsing the panel.
        PropertiesComponent.getInstance(thumbnailView.getProject())
          .setValue(ToggleTagsPanelAction.TAGS_PANEL_PROPORTION, String.valueOf(splitterProportion));
      }
      previewSplitter.setProportion(1);
      previewSplitter.setSecondComponent(null);
    }
  }

  /**
   * Repopulates the tags list with the intersection of the tags of all currently
   * selected files. No-op until the tags panel has been created.
   */
  private void updateTagsPreviewModel() {
    if (listModel == null) return;
    listModel.clear();
    VirtualFile[] selection = getSelection();
    ImageTagManager tagManager = ImageTagManager.getInstance(thumbnailView.getProject());
    List<String> commonTags = null;
    for (VirtualFile virtualFile : selection) {
      List<String> tags = tagManager.getTags(virtualFile);
      if (commonTags == null) {
        commonTags = new ArrayList<>(tags);
      }
      else {
        commonTags.retainAll(tags);
      }
    }
    if (commonTags != null) {
      commonTags.forEach(listModel::addElement);
    }
  }

  /**
   * Rebuilds the thumbnail list from the view's root directory, applying the theme
   * filter and tag filters and sorting directories-first. Hides the view when the
   * root is missing, invalid or not a directory.
   */
  public void refresh() {
    createUI();
    if (list != null) {
      DefaultListModel model = (DefaultListModel) list.getModel();
      model.clear();
      VirtualFile root = thumbnailView.getRoot();
      if (root != null && root.isValid() && root.isDirectory()) {
        Set<VirtualFile> files = findFiles(root.getChildren());
        VirtualFile[] virtualFiles = VfsUtilCore.toVirtualFileArray(files);
        Arrays.sort(virtualFiles, VIRTUAL_FILE_COMPARATOR);
        model.ensureCapacity(model.size() + virtualFiles.length + 1);
        ThemeFilter filter = thumbnailView.getFilter();
        TagFilter[] tagFilters = thumbnailView.getTagFilters();
        for (VirtualFile virtualFile : virtualFiles) {
          if (filter == null || filter.accepts(virtualFile)) {
            if (tagFilters == null || Arrays.stream(tagFilters).anyMatch(
              tagFilter -> tagFilter.accepts(virtualFile))) {
              model.addElement(virtualFile);
            }
          }
        }
        if (model.size() > 0) {
          list.setSelectedIndex(0);
        }
      }
      else {
        thumbnailView.setVisible(false);
      }
    }
  }

  public boolean isTransparencyChessboardVisible() {
    createUI();
    return cellRenderer.getImageComponent().isTransparencyChessboardVisible();
  }

  public void setTransparencyChessboardVisible(boolean visible) {
    createUI();
    cellRenderer.getImageComponent().setTransparencyChessboardVisible(visible);
    list.repaint();
  }

  public void setFileNameVisible(boolean visible) {
    createUI();
    cellRenderer.getImageComponent().setFileNameVisible(visible);
    list.repaint();
  }

  public boolean isFileNameVisible() {
    createUI();
    return cellRenderer.getImageComponent().isFileNameVisible();
  }

  public void setFileSizeVisible(boolean visible) {
    createUI();
    cellRenderer.getImageComponent().setFileSizeVisible(visible);
    list.repaint();
  }

  public boolean isFileSizeVisible() {
    createUI();
    return cellRenderer.getImageComponent().isFileSizeVisible();
  }

  // NOTE(review): the 'selected' parameter is ignored — the file is always made the
  // selected value and never deselected. Confirm whether deselection was intended.
  public void setSelected(VirtualFile file, boolean selected) {
    createUI();
    list.setSelectedValue(file, false);
  }

  /** Scrolls the viewport so the whole selection range is visible. */
  public void scrollToSelection() {
    int minSelectionIndex = list.getMinSelectionIndex();
    int maxSelectionIndex = list.getMaxSelectionIndex();
    if (minSelectionIndex != -1 && maxSelectionIndex != -1) {
      list.scrollRectToVisible(list.getCellBounds(minSelectionIndex, maxSelectionIndex));
    }
  }

  public boolean isSelected(VirtualFile file) {
    int index = ((DefaultListModel) list.getModel()).indexOf(file);
    return index != -1 && list.isSelectedIndex(index);
  }

  /** Returns the selected files, or an empty array if the UI has not been built yet. */
  public VirtualFile @NotNull [] getSelection() {
    if (list != null) {
      Object[] selectedValues = list.getSelectedValues();
      if (selectedValues != null) {
        VirtualFile[] files = new VirtualFile[selectedValues.length];
        for (int i = 0; i < selectedValues.length; i++) {
          files[i] = (VirtualFile) selectedValues[i];
        }
        return files;
      }
    }
    return VirtualFile.EMPTY_ARRAY;
  }

  /**
   * Renders one thumbnail cell: for directories a counter of contained images
   * (capped just above 100), for files the decoded image, size and format; loads the
   * image synchronously via {@link IfsUtil#getImage}.
   */
  private final class ThumbnailListCellRenderer extends ThumbnailComponent
    implements ListCellRenderer {
    private final ImageFileTypeManager typeManager = ImageFileTypeManager.getInstance();

    @Override
    public Component getListCellRendererComponent(
      JList list, Object value, int index, boolean isSelected, boolean cellHasFocus
    ) {
      if (value instanceof VirtualFile) {
        VirtualFile file = (VirtualFile) value;
        setFileName(file.getName());
        String toolTipText = IfsUtil.getReferencePath(thumbnailView.getProject(), file);
        if (!isFileSizeVisible()) {
          String description = getImageComponent().getDescription();
          if (description != null) {
            toolTipText += " [" + description + "]";
          }
        }
        setToolTipText(toolTipText);
        setDirectory(file.isDirectory());
        if (file.isDirectory()) {
          int imagesCount = 0;
          VirtualFile[] children = file.getChildren();
          for (VirtualFile child : children) {
            if (typeManager.isImage(child)) {
              imagesCount++;
              // Stop counting past 100; the exact number no longer matters for display.
              if (imagesCount > 100) {
                break;
              }
            }
          }
          setImagesCount(imagesCount);
        }
        else {
          // File rendering
          setFileSize(file.getLength());
          try {
            ImageComponent imageComponent = getImageComponent();
            BufferedImage image = IfsUtil.getImage(file, imageComponent);
            imageComponent.getDocument().setValue(image);
            setFormat(IfsUtil.getFormat(file));
          }
          catch (Exception e) {
            // Ignore
            ImageComponent imageComponent = getImageComponent();
            imageComponent.getDocument().setValue((BufferedImage)null);
          }
        }
      }
      else {
        ImageComponent imageComponent = getImageComponent();
        imageComponent.getDocument().setValue((BufferedImage)null);
        setFileName(null);
        setFileSize(0);
        setToolTipText(null);
      }
      if (isSelected) {
        setForeground(list.getSelectionForeground());
        setBackground(list.getSelectionBackground());
      }
      else {
        setForeground(list.getForeground());
        setBackground(list.getBackground());
      }
      return this;
    }
  }

  /** Collects displayable files under each of the given roots; see {@link #findFiles(VirtualFile)}. */
  private Set<VirtualFile> findFiles(VirtualFile[] roots) {
    Set<VirtualFile> files = new HashSet<>();
    for (VirtualFile root : roots) {
      files.addAll(findFiles(root));
    }
    return files;
  }

  /**
   * Returns the files to show for one VFS node: image files directly; directories
   * either recursed into (recursive mode) or included as a single entry when they
   * contain images. Excluded and ignored files are skipped.
   */
  private Set<VirtualFile> findFiles(VirtualFile file) {
    Set<VirtualFile> files = new HashSet<>(0);
    Project project = thumbnailView.getProject();
    if (!project.isDisposed()) {
      ProjectRootManager rootManager = ProjectRootManager.getInstance(project);
      boolean projectIgnored = rootManager.getFileIndex().isExcluded(file);
      if (!projectIgnored && !FileTypeManager.getInstance().isFileIgnored(file)) {
        ImageFileTypeManager typeManager = ImageFileTypeManager.getInstance();
        if (file.isDirectory()) {
          if (thumbnailView.isRecursive()) {
            files.addAll(findFiles(file.getChildren()));
          }
          else if (isImagesInDirectory(file)) {
            files.add(file);
          }
        }
        else if (typeManager.isImage(file)) {
          files.add(file);
        }
      }
    }
    return files;
  }

  /**
   * Fast check whether a directory is worth showing: true on the first image found,
   * and optimistically true for any subdirectory (no recursion here by design).
   */
  private boolean isImagesInDirectory(VirtualFile dir) {
    ImageFileTypeManager typeManager = ImageFileTypeManager.getInstance();
    VirtualFile[] files = dir.getChildren();
    for (VirtualFile file : files) {
      if (file.isDirectory()) {
        // We can be sure for fast searching
        return true;
      }
      if (typeManager.isImage(file)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Mouse handling for the thumbnail list: clears the selection when clicking/dragging
   * outside any cell (unless Ctrl is held), opens files / descends into directories on
   * double click, and shows the context-menu action group on right click.
   */
  private final class ThumbnailsMouseAdapter extends MouseAdapter implements MouseMotionListener {
    @Override
    public void mouseDragged(MouseEvent e) {
      Point point = e.getPoint();
      int index = list.locationToIndex(point);
      if (index != -1) {
        Rectangle cellBounds = list.getCellBounds(index, index);
        if (!cellBounds.contains(point) &&
            (KeyEvent.CTRL_DOWN_MASK & e.getModifiersEx()) != KeyEvent.CTRL_DOWN_MASK) {
          list.clearSelection();
          e.consume();
        }
      }
    }

    @Override
    public void mouseMoved(MouseEvent e) {
    }

    @Override
    public void mousePressed(MouseEvent e) {
      Point point = e.getPoint();
      int index = list.locationToIndex(point);
      if (index != -1) {
        Rectangle cellBounds = list.getCellBounds(index, index);
        if (!cellBounds.contains(point) &&
            (KeyEvent.CTRL_DOWN_MASK & e.getModifiersEx()) != KeyEvent.CTRL_DOWN_MASK) {
          list.clearSelection();
          e.consume();
        }
      }
    }

    @Override
    public void mouseClicked(MouseEvent e) {
      Point point = e.getPoint();
      int index = list.locationToIndex(point);
      if (index != -1) {
        Rectangle cellBounds = list.getCellBounds(index, index);
        if (!cellBounds.contains(point) &&
            (KeyEvent.CTRL_DOWN_MASK & e.getModifiersEx()) != KeyEvent.CTRL_DOWN_MASK) {
          index = -1;
          list.clearSelection();
        }
      }
      if (index != -1) {
        if (MouseEvent.BUTTON1 == e.getButton() && e.getClickCount() == 2) {
          // Double click
          list.setSelectedIndex(index);
          VirtualFile selected = (VirtualFile) list.getSelectedValue();
          if (selected != null) {
            if (selected.isDirectory()) {
              thumbnailView.setRoot(selected);
            }
            else {
              FileEditorManager fileEditorManager = FileEditorManager.getInstance(thumbnailView.getProject());
              fileEditorManager.openFile(selected, true);
            }
            e.consume();
          }
        }
        if (MouseEvent.BUTTON3 == e.getButton() && e.getClickCount() == 1) {
          // Ensure that we have selection
          if ((KeyEvent.CTRL_DOWN_MASK & e.getModifiersEx()) != KeyEvent.CTRL_DOWN_MASK) {
            // Ctrl is not pressed
            list.setSelectedIndex(index);
          }
          else {
            // Ctrl is pressed
            list.getSelectionModel().addSelectionInterval(index, index);
          }
          // Single right click
          ActionManager actionManager = ActionManager.getInstance();
          ActionGroup actionGroup = (ActionGroup) actionManager.getAction(ThumbnailViewActions.GROUP_POPUP);
          ActionPopupMenu menu = actionManager.createActionPopupMenu(ThumbnailViewActions.ACTION_PLACE, actionGroup);
          JPopupMenu popupMenu = menu.getComponent();
          popupMenu.pack();
          JBPopupMenu.showByEvent(e, popupMenu);
          e.consume();
        }
      }
    }
  }

  /**
   * Exposes the current selection to platform actions: files, PSI elements,
   * navigatables, copy/cut/paste/delete providers, and the owning view itself.
   */
  @Override
  @Nullable
  public Object getData(@NotNull String dataId) {
    if (CommonDataKeys.PROJECT.is(dataId)) {
      return thumbnailView.getProject();
    }
    else if (CommonDataKeys.VIRTUAL_FILE.is(dataId)) {
      VirtualFile[] selectedFiles = getSelectedFiles();
      return selectedFiles.length > 0 ? selectedFiles[0] : null;
    }
    else if (CommonDataKeys.VIRTUAL_FILE_ARRAY.is(dataId)) {
      return getSelectedFiles();
    }
    else if (CommonDataKeys.PSI_FILE.is(dataId)) {
      return getData(CommonDataKeys.PSI_ELEMENT.getName());
    }
    else if (CommonDataKeys.PSI_ELEMENT.is(dataId)) {
      VirtualFile[] selectedFiles = getSelectedFiles();
      return selectedFiles.length > 0 ? PsiManager.getInstance(thumbnailView.getProject()).findFile(selectedFiles[0]) : null;
    }
    else if (LangDataKeys.PSI_ELEMENT_ARRAY.is(dataId)) {
      return getSelectedElements();
    }
    else if (CommonDataKeys.NAVIGATABLE.is(dataId)) {
      VirtualFile[] selectedFiles = getSelectedFiles();
      return new ThumbnailNavigatable(selectedFiles.length > 0 ? selectedFiles[0] : null);
    }
    else if (PlatformDataKeys.COPY_PROVIDER.is(dataId)) {
      return copyPasteSupport.getCopyProvider();
    }
    else if (PlatformDataKeys.CUT_PROVIDER.is(dataId)) {
      return copyPasteSupport.getCutProvider();
    }
    else if (PlatformDataKeys.PASTE_PROVIDER.is(dataId)) {
      return copyPasteSupport.getPasteProvider();
    }
    else if (PlatformDataKeys.DELETE_ELEMENT_PROVIDER.is(dataId)) {
      return deleteProvider;
    }
    else if (CommonDataKeys.NAVIGATABLE_ARRAY.is(dataId)) {
      // Directories are excluded: only files can be navigated to an editor.
      VirtualFile[] selectedFiles = getSelectedFiles();
      Set<Navigatable> navigatables = new HashSet<>(selectedFiles.length);
      for (VirtualFile selectedFile : selectedFiles) {
        if (!selectedFile.isDirectory()) {
          navigatables.add(new ThumbnailNavigatable(selectedFile));
        }
      }
      return navigatables.toArray(EMPTY_NAVIGATABLE_ARRAY);
    }
    else if (ThumbnailView.DATA_KEY.is(dataId)) {
      return thumbnailView;
    }
    else if (ImageComponentDecorator.DATA_KEY.is(dataId)) {
      return thumbnailView;
    }
    return null;
  }

  /** Maps the selected files to PSI files (or PSI directories as fallback). */
  private PsiElement @NotNull [] getSelectedElements() {
    VirtualFile[] selectedFiles = getSelectedFiles();
    Set<PsiElement> psiElements = new HashSet<>(selectedFiles.length);
    PsiManager psiManager = PsiManager.getInstance(thumbnailView.getProject());
    for (VirtualFile file : selectedFiles) {
      PsiFile psiFile = psiManager.findFile(file);
      PsiElement element = psiFile != null ? psiFile : psiManager.findDirectory(file);
      if (element != null) {
        psiElements.add(element);
      }
    }
    return PsiUtilCore.toPsiElementArray(psiElements);
  }

  // NOTE(review): duplicate of getSelection(); kept as-is to preserve both entry points.
  private VirtualFile @NotNull [] getSelectedFiles() {
    if (list != null) {
      Object[] selectedValues = list.getSelectedValues();
      if (selectedValues != null) {
        VirtualFile[] files = new VirtualFile[selectedValues.length];
        for (int i = 0; i < selectedValues.length; i++) {
          files[i] = (VirtualFile) selectedValues[i];
        }
        return files;
      }
    }
    return VirtualFile.EMPTY_ARRAY;
  }

  @Override
  public void dispose() {
    removeAll();
    list = null;
    cellRenderer = null;
    tagsPanel = null;
  }

  /** Navigatable that opens its (possibly null) file in an editor. */
  private final class ThumbnailNavigatable implements Navigatable {
    private final VirtualFile file;

    ThumbnailNavigatable(VirtualFile file) {
      this.file = file;
    }

    @Override
    public void navigate(boolean requestFocus) {
      if (file != null) {
        FileEditorManager manager = FileEditorManager.getInstance(thumbnailView.getProject());
        manager.openFile(file, true);
      }
    }

    @Override
    public boolean canNavigate() {
      return file != null;
    }

    @Override
    public boolean canNavigateToSource() {
      return file != null;
    }
  }

  /** Keeps the thumbnail list in sync with VFS changes (repaint, remove, full refresh). */
  private final class VFSListener implements VirtualFileListener {
    @Override
    public void contentsChanged(@NotNull VirtualFileEvent event) {
      VirtualFile file = event.getFile();
      if (list != null) {
        int index = ((DefaultListModel) list.getModel()).indexOf(file);
        if (index != -1) {
          Rectangle cellBounds = list.getCellBounds(index, index);
          list.repaint(cellBounds);
        }
      }
    }

    @Override
    public void fileDeleted(@NotNull VirtualFileEvent event) {
      VirtualFile file = event.getFile();
      VirtualFile root = thumbnailView.getRoot();
      // If an ancestor of the current root was deleted, rebuild the whole view.
      if (root != null && VfsUtilCore.isAncestor(file, root, false)) {
        refresh();
      }
      if (list != null) {
        ((DefaultListModel) list.getModel()).removeElement(file);
      }
    }

    @Override
    public void propertyChanged(@NotNull VirtualFilePropertyEvent event) {
      refresh();
    }

    @Override
    public void fileCreated(@NotNull VirtualFileEvent event) {
      refresh();
    }

    @Override
    public void fileMoved(@NotNull VirtualFileMoveEvent event) {
      refresh();
    }
  }

  /** Pushes changed image-editor options (chessboard, grid) into the shared renderer. */
  private final class OptionsChangeListener implements PropertyChangeListener {
    @Override
    public void propertyChange(PropertyChangeEvent evt) {
      Options options = (Options) evt.getSource();
      EditorOptions editorOptions = options.getEditorOptions();
      TransparencyChessboardOptions chessboardOptions = editorOptions.getTransparencyChessboardOptions();
      GridOptions gridOptions = editorOptions.getGridOptions();
      ImageComponent imageComponent = cellRenderer.getImageComponent();
      imageComponent.setTransparencyChessboardCellSize(chessboardOptions.getCellSize());
      imageComponent.setTransparencyChessboardWhiteColor(chessboardOptions.getWhiteColor());
      imageComponent.setTransparencyChessboardBlankColor(chessboardOptions.getBlackColor());
      imageComponent.setGridLineZoomFactor(gridOptions.getLineZoomFactor());
      imageComponent.setGridLineSpan(gridOptions.getLineSpan());
      imageComponent.setGridLineColor(gridOptions.getLineColor());
    }
  }

  /** Moves IDE focus to this panel when the toolbar or scroll pane is clicked. */
  private class FocusRequester extends MouseAdapter {
    @Override
    public void mouseClicked(MouseEvent e) {
      IdeFocusManager.getGlobalInstance()
        .doWhenFocusSettlesDown(() -> IdeFocusManager.getGlobalInstance().requestFocus(ThumbnailViewUI.this, true));
    }
  }

  /**
   * Popup group listing every known tag (enabled only if some selected file lacks it)
   * plus a trailing "new tag" action that prompts for a name and applies it to the
   * selection.
   */
  public class AddTagGroup extends ActionGroup {
    public AddTagGroup() {
      setPopup(true);
    }

    @Override
    public AnAction @NotNull [] getChildren(@Nullable AnActionEvent e) {
      if (e == null) return EMPTY_ARRAY;
      Project project = e.getProject();
      ImageTagManager tagManager = ImageTagManager.getInstance(project);
      List<String> tags = tagManager.getAllTags();
      int tagsNumber = tags.size();
      AnAction[] actions = new AnAction[tagsNumber + 1];
      for (int i = 0; i < tagsNumber; i++) {
        String tag = tags.get(i);
        actions[i] = new AnAction(tag) {
          @Override
          public void actionPerformed(@NotNull AnActionEvent e) {
            for (VirtualFile file : thumbnailView.getSelection()) {
              tagManager.addTag(tag, file);
            }
            updateTagsPreviewModel();
          }

          @Override
          public void update(@NotNull AnActionEvent e) {
            // Hide the tag action when every selected file already carries this tag.
            e.getPresentation().setEnabledAndVisible(!ContainerUtil.exists(thumbnailView.getSelection(),
                                                                           file -> tagManager.hasTag(tag, file)));
          }
        };
      }
      actions[tagsNumber] = new AnAction(IdeBundle.messagePointer("action.Anonymous.text.new.tag")) {
        @Override
        public void actionPerformed(@NotNull AnActionEvent e) {
          ThumbnailView view = ThumbnailViewActionUtil.getVisibleThumbnailView(e);
          if (view != null) {
            VirtualFile[] selection = view.getSelection();
            if (selection.length > 0) {
              String tag = Messages.showInputDialog("", IdeBundle.message("dialog.title.new.tag.name"), null);
              if (tag != null) {
                for (VirtualFile file : selection) {
                  tagManager.addTag(tag, file);
                }
              }
            }
          }
        }
      };
      return actions;
    }
  }
}
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.javaFX.fxml; import com.intellij.codeInsight.AnnotationUtil; import com.intellij.codeInsight.daemon.Validator; import com.intellij.codeInsight.daemon.impl.analysis.HighlightUtil; import com.intellij.codeInsight.daemon.impl.analysis.JavaGenericsUtil; import com.intellij.lang.ASTNode; import com.intellij.lang.xml.XMLLanguage; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.fileTypes.StdFileTypes; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.*; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.impl.cache.CacheManager; import com.intellij.psi.impl.source.PostprocessReformattingAspect; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.UsageSearchContext; import com.intellij.psi.search.searches.ClassInheritorsSearch; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.*; import com.intellij.psi.xml.*; import com.intellij.util.Processor; import com.intellij.xml.XmlAttributeDescriptor; import com.intellij.xml.XmlElementDescriptor; import gnu.trove.THashMap; import gnu.trove.THashSet; import org.jetbrains.annotations.Contract; import org.jetbrains.annotations.NonNls; import 
org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.javaFX.fxml.descriptors.JavaFxClassTagDescriptorBase; import org.jetbrains.plugins.javaFX.fxml.descriptors.JavaFxPropertyTagDescriptor; import org.jetbrains.plugins.javaFX.indexing.JavaFxControllerClassIndex; import java.util.*; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.stream.Collectors; public class JavaFxPsiUtil { private static final Logger LOG = Logger.getInstance(JavaFxPsiUtil.class); public static XmlProcessingInstruction createSingleImportInstruction(String qualifiedName, Project project) { final String importText = "<?import " + qualifiedName + "?>"; final PsiElement child = PsiFileFactory.getInstance(project).createFileFromText("a.fxml", XMLLanguage.INSTANCE, importText).getFirstChild(); return PsiTreeUtil.findChildOfType(child, XmlProcessingInstruction.class); } public static List<String> parseImports(XmlFile file) { return parseInstructions(file, "import"); } public static List<String> parseInjectedLanguages(XmlFile file) { return parseInstructions(file, "language"); } private static List<String> parseInstructions(XmlFile file, String instructionName) { final List<String> definedImports = new ArrayList<>(); final XmlDocument document = file.getDocument(); if (document != null) { final XmlProlog prolog = document.getProlog(); final Collection<XmlProcessingInstruction> instructions = new ArrayList<>(PsiTreeUtil.findChildrenOfType(prolog, XmlProcessingInstruction.class)); for (final XmlProcessingInstruction instruction : instructions) { final String instructionTarget = getInstructionTarget(instructionName, instruction); if (instructionTarget != null) { definedImports.add(instructionTarget); } } } return definedImports; } @Nullable public static String getInstructionTarget(String instructionName, XmlProcessingInstruction instruction) { final ASTNode node = instruction.getNode(); ASTNode xmlNameNode = 
// (continuation of getInstructionTarget) Locate the instruction's name token and its text
// payload; the payload is returned only when the instruction name matches.
node.findChildByType(XmlTokenType.XML_NAME);
ASTNode importNode = node.findChildByType(XmlTokenType.XML_TAG_CHARACTERS);
if (!(xmlNameNode == null || !instructionName.equals(xmlNameNode.getText()) || importNode == null)) {
  return importNode.getText();
}
return null;
}

/**
 * Resolves a class name appearing in an FXML file. A name containing dots is first tried as a
 * fully-qualified top-level class, then as an imported outer class with nested-class segments;
 * a plain short name is resolved against the file's {@code <?import ?>} instructions.
 */
public static PsiClass findPsiClass(String name, PsiElement context) {
  final Project project = context.getProject();
  if (!StringUtil.getShortName(name).equals(name)) {
    // Dotted name: try it directly as a fully-qualified top-level class.
    final PsiClass psiClass = JavaPsiFacade.getInstance(project).findClass(name, GlobalSearchScope.allScope(project));
    if (psiClass != null) {
      return psiClass;
    }
    // Not top-level; may be "Outer.Nested" relative to an imported outer class.
    return findNestedPsiClass(name, context, project);
  }
  return findPsiClass(name, parseImports((XmlFile)context.getContainingFile()), context, project);
}

// Resolves "Outer.Nested[.Deeper...]": the segment before the first dot is resolved through the
// file's imports, then each remaining segment is looked up as an inner class (including inherited).
private static PsiClass findNestedPsiClass(String name, PsiElement context, Project project) {
  final int dotIndex = name.indexOf('.');
  if (dotIndex > 0) {
    final String outerName = name.substring(0, dotIndex);
    final PsiClass outerClass = findPsiClass(outerName, parseImports((XmlFile)context.getContainingFile()), context, project);
    if (outerClass != null) {
      final List<String> nameChain = StringUtil.split(name, ".", true, false);
      final List<String> nestedNames = nameChain.subList(1, nameChain.size());
      PsiClass aClass = outerClass;
      for (String nestedName : nestedNames) {
        aClass = aClass.findInnerClassByName(nestedName, true);
        if (aClass == null) return null;
      }
      return aClass;
    }
  }
  return null;
}

// Resolves a short class name against the import list: an exact import whose short name matches,
// or a wildcard import ("a.b.*") with the short name appended.
private static PsiClass findPsiClass(String name, List<String> imports, PsiElement context, Project project) {
  PsiClass psiClass = null;
  if (imports != null) {
    JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(project);
    PsiFile file = context.getContainingFile();
    for (String anImport : imports) {
      if (StringUtil.getShortName(anImport).equals(name)) {
        psiClass = psiFacade.findClass(anImport, file.getResolveScope());
      }
      else if (StringUtil.endsWith(anImport, ".*")) {
        psiClass = psiFacade.findClass(StringUtil.trimEnd(anImport, "*") + name,
file.getResolveScope()); } if (psiClass != null) { return psiClass; } } } return null; } public static void insertImportWhenNeeded(XmlFile xmlFile, String shortName, String qualifiedName) { if (shortName != null && qualifiedName != null && findPsiClass(shortName, xmlFile.getRootTag()) == null) { final XmlDocument document = xmlFile.getDocument(); if (document != null) { final XmlProcessingInstruction processingInstruction = createSingleImportInstruction(qualifiedName, xmlFile.getProject()); final XmlProlog prolog = document.getProlog(); if (prolog != null) { prolog.add(processingInstruction); } else { document.addBefore(processingInstruction, document.getRootTag()); } PostprocessReformattingAspect.getInstance(xmlFile.getProject()).doPostponedFormatting(xmlFile.getViewProvider()); } } } public static PsiClass getPropertyClass(PsiElement member) { final PsiClassType classType = getPropertyClassType(member); return classType != null ? classType.resolve() : null; } public static PsiClassType getPropertyClassType(PsiElement member) { return getPropertyClassType(member, JavaFxCommonNames.JAVAFX_BEANS_PROPERTY_OBJECT_PROPERTY); } public static PsiClassType getPropertyClassType(PsiElement member, final String superTypeFQN) { if (member instanceof PsiMember) { final PsiType type = PropertyUtilBase.getPropertyType((PsiMember)member); if (type instanceof PsiClassType) { final PsiClassType.ClassResolveResult resolveResult = ((PsiClassType)type).resolveGenerics(); final PsiClass attributeClass = resolveResult.getElement(); if (attributeClass != null) { final PsiClass objectProperty = JavaPsiFacade.getInstance(attributeClass.getProject()) .findClass(superTypeFQN, attributeClass.getResolveScope()); if (objectProperty != null) { final PsiSubstitutor superClassSubstitutor = TypeConversionUtil .getClassSubstitutor(objectProperty, attributeClass, resolveResult.getSubstitutor()); if (superClassSubstitutor != null) { final PsiType propertyType = 
superClassSubstitutor.substitute(objectProperty.getTypeParameters()[0]); if (propertyType instanceof PsiClassType) { return (PsiClassType)propertyType; } } else { return (PsiClassType)type; } } } } } return null; } public static PsiMethod findStaticPropertySetter(String attributeName, XmlTag context) { final String packageName = StringUtil.getPackageName(attributeName); if (context != null && !StringUtil.isEmptyOrSpaces(packageName)) { final PsiClass classWithStaticProperty = findPsiClass(packageName, context); if (classWithStaticProperty != null) { return findStaticPropertySetter(attributeName, classWithStaticProperty); } } return null; } @Nullable public static PsiMethod findStaticPropertySetter(@NotNull String attributeName, @Nullable PsiClass classWithStaticProperty) { if (classWithStaticProperty == null) return null; final String setterName = PropertyUtilBase.suggestSetterName(StringUtil.getShortName(attributeName)); final PsiMethod[] setters = classWithStaticProperty.findMethodsByName(setterName, true); for (PsiMethod setter : setters) { if (setter.hasModifierProperty(PsiModifier.PUBLIC) && setter.hasModifierProperty(PsiModifier.STATIC) && setter.getParameterList().getParametersCount() == 2) { return setter; } } return null; } public static PsiMethod findPropertyGetter(@NotNull PsiClass psiClass, @Nullable String propertyName) { if (StringUtil.isEmpty(propertyName)) return null; PsiMethod getter = findPropertyGetter(psiClass, propertyName, null); if (getter != null) { return getter; } return findPropertyGetter(psiClass, propertyName, PsiType.BOOLEAN); } private static PsiMethod findPropertyGetter(final PsiClass psiClass, final String propertyName, final PsiType propertyType) { final String getterName = PropertyUtilBase.suggestGetterName(propertyName, propertyType); final PsiMethod[] getters = psiClass.findMethodsByName(getterName, true); for (PsiMethod getter : getters) { if (getter.hasModifierProperty(PsiModifier.PUBLIC) && 
!getter.hasModifierProperty(PsiModifier.STATIC) && PropertyUtilBase.isSimplePropertyGetter(getter)) { return getter; } } return null; } public static PsiMethod findObservablePropertyGetter(@NotNull PsiClass psiClass, @Nullable String propertyName) { if (StringUtil.isEmpty(propertyName)) return null; final PsiMethod[] getters = psiClass.findMethodsByName(propertyName + JavaFxCommonNames.PROPERTY_METHOD_SUFFIX, true); for (PsiMethod getter : getters) { if (getter.hasModifierProperty(PsiModifier.PUBLIC) && !getter.hasModifierProperty(PsiModifier.STATIC) && getter.getParameterList().getParametersCount() == 0 && InheritanceUtil.isInheritor(getter.getReturnType(), JavaFxCommonNames.JAVAFX_BEANS_VALUE_OBSERVABLE_VALUE)) { return getter; } } return null; } private static final Key<CachedValue<PsiClass>> INJECTED_CONTROLLER = Key.create("javafx.injected.controller"); private static final RecursionGuard ourGuard = RecursionManager.createGuard("javafx.controller"); public static PsiClass getControllerClass(final PsiFile containingFile) { if (containingFile instanceof XmlFile) { final XmlTag rootTag = ((XmlFile)containingFile).getRootTag(); final Project project = containingFile.getProject(); if (rootTag != null) { XmlAttribute attribute = rootTag.getAttribute(FxmlConstants.FX_CONTROLLER); if (attribute != null) { final PsiClass controllerClass = findControllerClass(containingFile, project, attribute); if (controllerClass != null) { return controllerClass; } } } if (Registry.is("javafx.fxml.controller.from.loader", false)) { final CachedValuesManager manager = CachedValuesManager.getManager(containingFile.getProject()); final PsiClass injectedControllerClass = manager.getCachedValue( containingFile, INJECTED_CONTROLLER, () -> computeInjectedControllerClass(containingFile), true); if (injectedControllerClass != null) { return injectedControllerClass; } } if (rootTag != null && FxmlConstants.FX_ROOT.equals(rootTag.getName())) { final XmlAttribute rootTypeAttr = 
rootTag.getAttribute(FxmlConstants.TYPE); if (rootTypeAttr != null) { return findControllerClass(containingFile, project, rootTypeAttr); } } } return null; } private static PsiClass findControllerClass(PsiFile containingFile, Project project, XmlAttribute attribute) { final String attributeValue = attribute.getValue(); if (!StringUtil.isEmptyOrSpaces(attributeValue)) { final GlobalSearchScope customScope = GlobalSearchScope.projectScope(project).intersectWith(containingFile.getResolveScope()); return JavaPsiFacade.getInstance(project).findClass(attributeValue, customScope); } return null; } public static boolean isEventHandlerProperty(@NotNull XmlAttribute attribute) { final PsiClass tagClass = getTagClass(attribute.getParent()); return tagClass != null && getEventHandlerPropertyType(tagClass, attribute.getName()) != null; } @Nullable public static PsiClass getTagClass(@Nullable XmlAttributeValue xmlAttributeValue) { if (xmlAttributeValue != null) { final PsiElement parent = xmlAttributeValue.getParent(); if (parent instanceof XmlAttribute) { final XmlTag xmlTag = ((XmlAttribute)parent).getParent(); return getTagClass(xmlTag); } } return null; } @Nullable public static PsiClass getTagClass(@Nullable XmlTag xmlTag) { if (xmlTag != null) { final XmlElementDescriptor descriptor = xmlTag.getDescriptor(); if (descriptor != null) { final PsiElement declaration = descriptor.getDeclaration(); if (declaration instanceof PsiClass) { return (PsiClass)declaration; } } } return null; } @Nullable public static PsiElement getAttributeDeclaration(@Nullable XmlAttributeValue xmlAttributeValue) { if (xmlAttributeValue != null) { final PsiElement parent = xmlAttributeValue.getParent(); if (parent instanceof XmlAttribute) { final XmlAttributeDescriptor descriptor = ((XmlAttribute)parent).getDescriptor(); if (descriptor != null) { return descriptor.getDeclaration(); } } } return null; } public static boolean isVisibleInFxml(@NotNull PsiMember psiMember) { return 
// (continuation of isVisibleInFxml) A member is reachable from FXML when it is public or
// carries the JavaFX @FXML annotation (JAVAFX_FXML_ANNOTATION).
psiMember.hasModifierProperty(PsiModifier.PUBLIC) ||
       AnnotationUtil.isAnnotated(psiMember, JavaFxCommonNames.JAVAFX_FXML_ANNOTATION, 0);
}

/** Convenience overload: resolves the class behind {@code psiType} and delegates; null if unresolvable. */
@Nullable
public static PsiMethod findValueOfMethod(@NotNull final PsiType psiType) {
  final PsiClass psiClass = PsiUtil.resolveClassInClassTypeOnly(psiType);
  return psiClass != null ? findValueOfMethod(psiClass) : null;
}

/**
 * Finds a static {@code valueOf(String)} or {@code valueOf(Object)} factory method on the class
 * (searching supers) whose return type is the class itself. Result is cached on the class and
 * invalidated on Java structural changes.
 */
@Nullable
public static PsiMethod findValueOfMethod(@NotNull final PsiClass psiClass) {
  return CachedValuesManager.getCachedValue(psiClass, () -> {
    final PsiMethod[] methods = psiClass.findMethodsByName(JavaFxCommonNames.VALUE_OF, true);
    for (PsiMethod method : methods) {
      if (method.hasModifierProperty(PsiModifier.STATIC)) {
        final PsiParameter[] parameters = method.getParameterList().getParameters();
        if (parameters.length == 1) {
          final PsiType type = parameters[0].getType();
          if (type.equalsToText(CommonClassNames.JAVA_LANG_STRING) || type.equalsToText(CommonClassNames.JAVA_LANG_OBJECT)) {
            if (psiClass.equals(PsiUtil.resolveClassInType(method.getReturnType()))) {
              return CachedValueProvider.Result.create(method, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
            }
          }
        }
      }
    }
    return CachedValueProvider.Result.create(null, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
  });
}

// An attribute is read-only when no static property setter exists for it and the tag's class
// descriptor reports it as a read-only attribute.
public static boolean isReadOnly(String attributeName, XmlTag tag) {
  if (findStaticPropertySetter(attributeName, tag) != null) return false;
  final XmlElementDescriptor descriptor = tag.getDescriptor();
  if (descriptor instanceof JavaFxClassTagDescriptorBase) {
    return ((JavaFxClassTagDescriptorBase)descriptor).isReadOnlyAttribute(attributeName);
  }
  return false;
}

/** True for a complete FXML expression binding of the form {@code ${...}}. */
public static boolean isExpressionBinding(@Nullable String value) {
  return value != null && value.startsWith("${") && value.endsWith("}");
}

/**
 * Detects malformed "$"-prefixed attribute values: a bare "$", an empty "${}", an unbalanced
 * "${" / "}" pair, or stray braces inside/after the expression. A plain variable reference
 * such as "$id" (no braces at all) is NOT reported as incorrect.
 */
public static boolean isIncorrectExpressionBinding(@Nullable String value) {
  if (value == null || !value.startsWith("$")) return false;
  if (value.length() == 1) return true; // just "$"
  final boolean expressionStarts = value.startsWith("${");
  final boolean expressionEnds = value.endsWith("}");
  if (expressionStarts && expressionEnds && value.length() == 3) return true; // exactly "${}"
  if (expressionStarts != expressionEnds) return true; // "${" without "}" or vice versa
  if (expressionStarts && value.indexOf('{', 2) >= 2) return true; // a second '{' inside
  if (expressionEnds && value.indexOf('}') < value.length() - 1) return true; // '}' before the end
  return false;
}

/**
 * For a type inheriting the JavaFX {@code Property} class (JAVAFX_BEANS_PROPERTY), returns the
 * substituted value-type argument of that property; otherwise null.
 */
@Nullable
public static PsiType getWritablePropertyType(@Nullable final PsiType type, @NotNull final Project project) {
  final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(type);
  final PsiClass psiClass = resolveResult.getElement();
  if (psiClass != null) {
    final PsiClass propertyClass =
      JavaPsiFacade.getInstance(project).findClass(JavaFxCommonNames.JAVAFX_BEANS_PROPERTY, GlobalSearchScope.allScope(project));
    if (propertyClass != null) {
      final PsiSubstitutor substitutor = TypeConversionUtil.getClassSubstitutor(propertyClass, psiClass, resolveResult.getSubstitutor());
      if (substitutor != null) {
        return substitutor.substitute(propertyClass.getTypeParameters()[0]);
      }
    }
  }
  return null;
}

// Expected child type for the class's default property: reads the @DefaultProperty-style
// annotation (JAVAFX_BEANS_DEFAULT_PROPERTY) from the hierarchy, finds the named property's
// getter and returns its return type with free type parameters erased. Cached per class.
@Nullable
private static PsiType getDefaultPropertyExpectedType(@Nullable PsiClass aClass) {
  if (aClass == null) return null;
  return CachedValuesManager.getCachedValue(aClass, () -> {
    final PsiAnnotation annotation =
      AnnotationUtil.findAnnotationInHierarchy(aClass, Collections.singleton(JavaFxCommonNames.JAVAFX_BEANS_DEFAULT_PROPERTY));
    if (annotation != null) {
      final PsiAnnotationMemberValue memberValue = annotation.findAttributeValue(null);
      if (memberValue != null) {
        final String propertyName = StringUtil.unquoteString(memberValue.getText());
        final PsiMethod getter = findPropertyGetter(aClass, propertyName);
        if (getter != null) {
          final PsiType propertyType = eraseFreeTypeParameters(getter.getReturnType(), getter);
          return CachedValueProvider.Result.create(propertyType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
        }
      }
    }
    return CachedValueProvider.Result.create(null,
PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } public static String getDefaultPropertyName(@Nullable PsiClass aClass) { if (aClass == null) { return null; } final PsiAnnotation annotation = AnnotationUtil.findAnnotationInHierarchy(aClass, Collections.singleton( JavaFxCommonNames.JAVAFX_BEANS_DEFAULT_PROPERTY)); if (annotation != null) { final PsiAnnotationMemberValue memberValue = annotation.findAttributeValue(null); if (memberValue != null) { return StringUtil.unquoteString(memberValue.getText()); } } return null; } public static boolean isAbleToInstantiate(@NotNull PsiClass psiClass) { return isAbleToInstantiate(psiClass, message -> { }); } public static boolean isAbleToInstantiate(@NotNull PsiClass psiClass, @NotNull Consumer<String> messageConsumer) { if (psiClass.isEnum() || hasNamedArgOrNoArgConstructor(psiClass)) return true; final PsiMethod valueOf = findValueOfMethod(psiClass); if (valueOf == null) { if (!hasBuilder(psiClass)) { messageConsumer.accept("Unable to instantiate"); return false; } } return true; } private static boolean hasNamedArgOrNoArgConstructor(@NotNull PsiClass psiClass) { if (psiClass.getConstructors().length == 0) return true; return CachedValuesManager.getCachedValue(psiClass, () -> { for (PsiMethod constructor : psiClass.getConstructors()) { final PsiParameter[] parameters = constructor.getParameterList().getParameters(); if (parameters.length == 0) { return CachedValueProvider.Result.create(true, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } boolean annotated = true; for (PsiParameter parameter : parameters) { if (!AnnotationUtil.isAnnotated(parameter, JavaFxCommonNames.JAVAFX_BEANS_NAMED_ARG, 0)) { annotated = false; break; } } if (annotated) return CachedValueProvider.Result.create(true, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } return CachedValueProvider.Result.create(false, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } public static boolean hasBuilder(@NotNull 
final PsiClass psiClass) { return CachedValuesManager.getCachedValue(psiClass, () -> { final Project project = psiClass.getProject(); final PsiClass builderClass = JavaPsiFacade.getInstance(project).findClass(JavaFxCommonNames.JAVAFX_FXML_BUILDER, GlobalSearchScope.allScope(project)); if (builderClass != null) { final PsiMethod[] buildMethods = builderClass.findMethodsByName("build", false); if (buildMethods.length == 1 && buildMethods[0].getParameterList().getParametersCount() == 0) { if (ClassInheritorsSearch.search(builderClass).forEach(aClass -> { PsiType returnType = null; final PsiMethod method = MethodSignatureUtil.findMethodBySuperMethod(aClass, buildMethods[0], false); if (method != null) { returnType = method.getReturnType(); } return !Comparing.equal(psiClass, PsiUtil.resolveClassInClassTypeOnly(returnType)); })) { return CachedValueProvider.Result.create(false, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } } } return CachedValueProvider.Result.create(true, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } public static boolean isClassAcceptable(@Nullable XmlTag targetTag, @Nullable final PsiClass fromClass) { return isClassAcceptable(targetTag, fromClass, (message, type) -> { }); } public static boolean isClassAcceptable(@Nullable XmlTag targetTag, @Nullable final PsiClass fromClass, @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) { if (targetTag == null || fromClass == null || !fromClass.isValid()) { return true; } final XmlElementDescriptor tagDescriptor = targetTag.getDescriptor(); if (tagDescriptor instanceof JavaFxPropertyTagDescriptor) { final PsiClass containingClass = ((JavaFxPropertyTagDescriptor)tagDescriptor).getPsiClass(); final PsiType targetType = getWritablePropertyType(containingClass, tagDescriptor.getDeclaration()); return canCoerce(targetType, fromClass, targetTag, messageConsumer); } else if (tagDescriptor instanceof JavaFxClassTagDescriptorBase) { final PsiElement 
tagDeclaration = tagDescriptor.getDeclaration(); if (tagDeclaration instanceof PsiClass) { PsiClass defaultPropertyOwnerClass = (PsiClass)tagDeclaration; final XmlAttribute factoryAttr = targetTag.getAttribute(FxmlConstants.FX_FACTORY); if (factoryAttr != null) { defaultPropertyOwnerClass = getFactoryProducedClass((PsiClass)tagDeclaration, factoryAttr.getValue()); } final PsiType targetType = getDefaultPropertyExpectedType(defaultPropertyOwnerClass); if (targetType != null) { return canCoerce(targetType, fromClass, targetTag, messageConsumer); } if (!isObservableCollection(defaultPropertyOwnerClass)) { return noDefaultPropertyError(messageConsumer); } } } return true; } private static boolean noDefaultPropertyError(@NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) { messageConsumer.accept("Parent tag has no default property", Validator.ValidationHost.ErrorType.ERROR); return false; } private static boolean canCoerce(@Nullable PsiType targetType, @NotNull PsiClass fromClass, @NotNull PsiElement context, @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) { if (targetType == null) return true; PsiType collectionItemType = JavaGenericsUtil.getCollectionItemType(targetType, fromClass.getResolveScope()); if (collectionItemType == null && InheritanceUtil.isInheritor(targetType, JavaFxCommonNames.JAVAFX_BEANS_PROPERTY)) { collectionItemType = getWritablePropertyType(targetType, fromClass.getProject()); } if (collectionItemType != null) { return canCoerceImpl(collectionItemType, fromClass, context, messageConsumer); } return canCoerceImpl(targetType, fromClass, context, messageConsumer); } @Nullable private static PsiType eraseFreeTypeParameters(@Nullable PsiType psiType, @NotNull PsiMember member) { final PsiClass containingClass = member.getContainingClass(); return eraseFreeTypeParameters(psiType, containingClass); } @Nullable private static PsiType eraseFreeTypeParameters(@Nullable PsiType psiType, @Nullable 
PsiClass containingClass) { if (containingClass == null) return null; return JavaPsiFacade.getElementFactory(containingClass.getProject()).createRawSubstitutor(containingClass).substitute(psiType); } private static boolean canCoerceImpl(@NotNull PsiType targetType, @NotNull PsiClass fromClass, @NotNull PsiElement context, @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) { if (targetType.equalsToText(CommonClassNames.JAVA_LANG_OBJECT) || targetType.equalsToText(CommonClassNames.JAVA_LANG_STRING) || targetType.isAssignableFrom(PsiTypesUtil.getClassType(fromClass))) { return true; } final PsiClassType boxedTargetClass = targetType instanceof PsiPrimitiveType ? ((PsiPrimitiveType)targetType).getBoxedType(context) : null; if (boxedTargetClass != null && InheritanceUtil.isInheritor(boxedTargetClass, CommonClassNames.JAVA_LANG_NUMBER) || InheritanceUtil.isInheritor(targetType, CommonClassNames.JAVA_LANG_NUMBER)) { if (Comparing.strEqual(fromClass.getQualifiedName(), CommonClassNames.JAVA_LANG_STRING) || InheritanceUtil.isInheritor(fromClass, CommonClassNames.JAVA_LANG_NUMBER)) { return true; } return unrelatedTypesWarning(targetType, fromClass, messageConsumer); } final PsiMethod valueOfMethod = findValueOfMethod(targetType); final PsiType valueOfParameterType = valueOfMethod != null && valueOfMethod.getParameterList().getParametersCount() == 1 ? 
valueOfMethod.getParameterList().getParameters()[0].getType() : null; if (valueOfParameterType != null && valueOfParameterType.equalsToText(CommonClassNames.JAVA_LANG_OBJECT)) { return true; } if (Comparing.strEqual(fromClass.getQualifiedName(), CommonClassNames.JAVA_LANG_STRING)) { if (isPrimitiveOrBoxed(targetType) || valueOfParameterType != null && valueOfParameterType.equalsToText(CommonClassNames.JAVA_LANG_STRING)) { return true; } } if (valueOfMethod != null) { return unrelatedTypesWarning(targetType, fromClass, messageConsumer); } return unableToCoerceError(targetType, fromClass, messageConsumer); } private static boolean unableToCoerceError(@NotNull PsiType targetType, @NotNull PsiClass fromClass, @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) { messageConsumer.accept("Unable to coerce " + HighlightUtil.formatClass(fromClass) + " to " + targetType.getCanonicalText(), Validator.ValidationHost.ErrorType.ERROR); return false; } private static boolean unrelatedTypesWarning(@NotNull PsiType targetType, @NotNull PsiClass fromClass, @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) { messageConsumer.accept("Conversion between unrelated types, " + HighlightUtil.formatClass(fromClass) + " to " + targetType.getCanonicalText(), Validator.ValidationHost.ErrorType.WARNING); return true; } public static boolean isOutOfHierarchy(final XmlAttributeValue element) { XmlTag tag = PsiTreeUtil.getParentOfType(element, XmlTag.class); while (tag != null) { if (FxmlConstants.FX_DEFINE.equals(tag.getName())) { return true; } tag = tag.getParentTag(); } return false; } public static PsiType getWrappedPropertyType(final PsiField field, final Project project, final Map<String, PsiType> typeMap) { return CachedValuesManager.getCachedValue(field, () -> { final PsiType fieldType = field.getType(); final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(fieldType); final PsiClass fieldClass = 
resolveResult.getElement(); if (fieldClass == null) { final PsiType propertyType = eraseFreeTypeParameters(fieldType, field); return CachedValueProvider.Result.create(propertyType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } PsiType substitute = null; for (String typeName : typeMap.keySet()) { if (InheritanceUtil.isInheritor(fieldType, typeName)) { substitute = typeMap.get(typeName); break; } } if (substitute == null) { if (!InheritanceUtil.isInheritor(fieldType, JavaFxCommonNames.JAVAFX_BEANS_VALUE_OBSERVABLE_VALUE)) { final PsiType propertyType = eraseFreeTypeParameters(fieldType, field); return CachedValueProvider.Result.create(propertyType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } final PsiClass aClass = JavaPsiFacade.getInstance(project) .findClass(JavaFxCommonNames.JAVAFX_BEANS_VALUE_OBSERVABLE_VALUE, GlobalSearchScope.allScope(project)); LOG.assertTrue(aClass != null); final PsiSubstitutor substitutor = TypeConversionUtil.getSuperClassSubstitutor(aClass, fieldClass, resolveResult.getSubstitutor()); final PsiMethod[] values = aClass.findMethodsByName(JavaFxCommonNames.GET_VALUE, false); LOG.assertTrue(values.length == 1); substitute = substitutor.substitute(values[0].getReturnType()); } final PsiType propertyType = eraseFreeTypeParameters(substitute, field); return CachedValueProvider.Result.create(propertyType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } @Nullable public static PsiType getWritablePropertyType(@Nullable PsiClass containingClass, @Nullable PsiElement declaration) { if (declaration instanceof PsiField) { return getWrappedPropertyType((PsiField)declaration, declaration.getProject(), JavaFxCommonNames.ourWritableMap); } if (declaration instanceof PsiMethod) { final PsiMethod method = (PsiMethod)declaration; if (method.getParameterList().getParametersCount() != 0) { return getSetterArgumentType(method); } final String propertyName = PropertyUtilBase.getPropertyName(method); final PsiClass 
psiClass = containingClass != null ? containingClass : method.getContainingClass(); if (propertyName != null && containingClass != null) { final PsiMethod setter = findInstancePropertySetter(psiClass, propertyName); if (setter != null) { final PsiType setterArgumentType = getSetterArgumentType(setter); if (setterArgumentType != null) return setterArgumentType; } } return getGetterReturnType(method); } return null; } @Nullable private static PsiType getSetterArgumentType(@NotNull PsiMethod method) { return CachedValuesManager.getCachedValue(method, () -> { final PsiParameter[] parameters = method.getParameterList().getParameters(); final boolean isStatic = method.hasModifierProperty(PsiModifier.STATIC); if (isStatic && parameters.length == 2 || !isStatic && parameters.length == 1) { final PsiType argumentType = eraseFreeTypeParameters(parameters[parameters.length - 1].getType(), method); return CachedValueProvider.Result.create(argumentType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } return CachedValueProvider.Result.create(null, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } private static PsiType getGetterReturnType(@NotNull PsiMethod method) { return CachedValuesManager.getCachedValue(method, () -> { final PsiType returnType = eraseFreeTypeParameters(method.getReturnType(), method); return CachedValueProvider.Result.create(returnType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } @Nullable public static PsiType getReadablePropertyType(@Nullable PsiElement declaration) { if (declaration instanceof PsiField) { return getWrappedPropertyType((PsiField)declaration, declaration.getProject(), JavaFxCommonNames.ourReadOnlyMap); } if (declaration instanceof PsiMethod) { PsiMethod psiMethod = (PsiMethod)declaration; if (psiMethod.getParameterList().getParametersCount() == 0 && !psiMethod.hasModifierProperty(PsiModifier.STATIC)) { return getGetterReturnType(psiMethod); } } return null; } @NotNull public static 
// (signature continued from the previous line) Collects the fx:id -> attribute-value map for
// the FXML file containing the given tag; empty map when the tag is null.
Map<String, XmlAttributeValue> collectFileIds(@Nullable final XmlTag currentTag) {
  if (currentTag == null) return Collections.emptyMap();
  final PsiFile containingFile = currentTag.getContainingFile();
  return collectFileIds(containingFile, false);
}

/**
 * Collects every fx:id declared in the file, mapped to its attribute value element, plus a
 * synthetic entry under the FxmlConstants.CONTROLLER key for the root's fx:controller attribute
 * (optionally skipped). The map is cached on the root tag and rebuilt when
 * PsiModificationTracker.MODIFICATION_COUNT changes.
 */
@NotNull
public static Map<String, XmlAttributeValue> collectFileIds(@Nullable PsiFile psiFile, boolean skipController) {
  if (!(psiFile instanceof XmlFile)) return Collections.emptyMap();
  final XmlTag rootTag = ((XmlFile)psiFile).getRootTag();
  if (rootTag == null) return Collections.emptyMap();
  final Map<String, XmlAttributeValue> cachedIds = CachedValuesManager
    .getCachedValue(rootTag, () -> new CachedValueProvider.Result<>(prepareFileIds(rootTag), PsiModificationTracker.MODIFICATION_COUNT));
  if (skipController && cachedIds.containsKey(FxmlConstants.CONTROLLER)) {
    // Copy before removing: the cached map is shared between callers and must not be mutated.
    final Map<String, XmlAttributeValue> filteredIds = new THashMap<>(cachedIds);
    filteredIds.remove(FxmlConstants.CONTROLLER);
    return filteredIds;
  }
  return cachedIds;
}

// Walks the whole tag tree once, recording every fx:id attribute value; the root tag's
// fx:controller attribute (when present) is registered under the FxmlConstants.CONTROLLER key.
@NotNull
private static Map<String, XmlAttributeValue> prepareFileIds(XmlTag rootTag) {
  final Map<String, XmlAttributeValue> fileIds = new THashMap<>();
  for (XmlTag tag : SyntaxTraverser.psiTraverser().withRoot(rootTag).filter(XmlTag.class)) {
    final XmlAttribute idAttribute = tag.getAttribute(FxmlConstants.FX_ID);
    if (idAttribute != null) {
      final String idValue = idAttribute.getValue();
      if (idValue != null) fileIds.put(idValue, idAttribute.getValueElement());
    }
  }
  final XmlAttribute controllerAttribute = rootTag.getAttribute(FxmlConstants.FX_CONTROLLER);
  if (controllerAttribute != null) {
    fileIds.put(FxmlConstants.CONTROLLER, controllerAttribute.getValueElement());
  }
  return fileIds;
}

// For the reserved controller id, resolve to the file's controller class; for any other id,
// resolve the class of the tag that declared it.
@Nullable
public static PsiClass getTagClassById(@Nullable XmlAttributeValue xmlAttributeValue, @Nullable String id, @NotNull PsiElement context) {
  return FxmlConstants.CONTROLLER.equals(id) ?
getControllerClass(context.getContainingFile()) : getTagClass(xmlAttributeValue); } @Nullable public static PsiClass getWritablePropertyClass(@Nullable XmlAttributeValue xmlAttributeValue) { if (xmlAttributeValue != null) { return getPropertyClass(getWritablePropertyType(xmlAttributeValue), xmlAttributeValue); } return null; } @Nullable public static PsiType getWritablePropertyType(@Nullable XmlAttributeValue xmlAttributeValue) { final PsiClass tagClass = getTagClass(xmlAttributeValue); if (tagClass != null) { final PsiElement declaration = getAttributeDeclaration(xmlAttributeValue); if (declaration != null) { return getWritablePropertyType(tagClass, declaration); } } return null; } @Nullable public static PsiClass getPropertyClass(@Nullable PsiType propertyType, @NotNull PsiElement context) { if (propertyType instanceof PsiPrimitiveType) { PsiClassType boxedType = ((PsiPrimitiveType)propertyType).getBoxedType(context); return boxedType != null ? boxedType.resolve() : null; } return PsiUtil.resolveClassInType(propertyType); } public static boolean hasConversionFromAnyType(@NotNull PsiClass targetClass) { return Comparing.strEqual(targetClass.getQualifiedName(), CommonClassNames.JAVA_LANG_STRING) || findValueOfMethod(targetClass) != null; } @Nullable public static String getBoxedPropertyType(@Nullable PsiClass containingClass, @Nullable PsiMember declaration) { PsiType psiType = getWritablePropertyType(containingClass, declaration); if (psiType instanceof PsiPrimitiveType) { return ((PsiPrimitiveType)psiType).getBoxedTypeName(); } if (PsiPrimitiveType.getUnboxedType(psiType) != null) { final PsiClass psiClass = PsiUtil.resolveClassInType(psiType); if (psiClass != null) { return psiClass.getQualifiedName(); } } return null; } @Contract("null->false") public static boolean isPrimitiveOrBoxed(@Nullable PsiType psiType) { return psiType instanceof PsiPrimitiveType || PsiPrimitiveType.getUnboxedType(psiType) != null; } @NotNull public static Map<String, PsiMember> 
getReadableProperties(@Nullable PsiClass psiClass) { if (psiClass != null) { return CachedValuesManager.getCachedValue(psiClass, () -> CachedValueProvider.Result.create(prepareReadableProperties(psiClass), PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT)); } return Collections.emptyMap(); } @NotNull private static Map<String, PsiMember> prepareReadableProperties(@NotNull PsiClass psiClass) { final Map<String, PsiMember> acceptableMembers = new THashMap<>(); for (PsiMethod method : psiClass.getAllMethods()) { if (method.hasModifierProperty(PsiModifier.STATIC) || !method.hasModifierProperty(PsiModifier.PUBLIC)) continue; if (PropertyUtilBase.isSimplePropertyGetter(method)) { final String propertyName = PropertyUtilBase.getPropertyName(method); assert propertyName != null; acceptableMembers.put(propertyName, method); } } return acceptableMembers; } @NotNull public static Map<String, PsiMember> getWritableProperties(@Nullable PsiClass psiClass) { if (psiClass != null) { return CachedValuesManager.getCachedValue(psiClass, () -> CachedValueProvider.Result.create(prepareWritableProperties(psiClass), PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT)); } return Collections.emptyMap(); } @NotNull private static Map<String, PsiMember> prepareWritableProperties(@NotNull PsiClass psiClass) { // todo search for setter in corresponding builder class, e.g. 
MyDataBuilder.setText() + MyData.getText(), reuse logic from hasBuilder() final Map<String, PsiMember> acceptableMembers = new THashMap<>(); for (String propertyName : prepareConstructorNamedArgProperties(psiClass)) { if (!acceptableMembers.containsKey(propertyName)) { final PsiField field = psiClass.findFieldByName(propertyName, true); if (field != null && !field.hasModifierProperty(PsiModifier.STATIC)) { acceptableMembers.put(propertyName, field); } } } for (PsiMethod method : psiClass.getAllMethods()) { if (method.hasModifierProperty(PsiModifier.STATIC) || !method.hasModifierProperty(PsiModifier.PUBLIC)) continue; if (PropertyUtilBase.isSimplePropertyGetter(method)) { PsiMember acceptableMember = method; final String propertyName = PropertyUtilBase.getPropertyName(method); assert propertyName != null; PsiMethod setter = findInstancePropertySetter(psiClass, propertyName); if (setter != null) { final PsiType setterArgType = setter.getParameterList().getParameters()[0].getType(); final PsiField field = psiClass.findFieldByName(propertyName, true); if (field != null && !field.hasModifierProperty(PsiModifier.STATIC)) { final PsiType fieldType = getWritablePropertyType(psiClass, field); if (fieldType == null || setterArgType.isConvertibleFrom(fieldType)) { acceptableMember = field; } } } else { final PsiType returnType = method.getReturnType(); if (returnType != null && isWritablePropertyType(psiClass, returnType)) { final PsiField field = psiClass.findFieldByName(propertyName, true); if (field != null && !field.hasModifierProperty(PsiModifier.STATIC)) { final PsiType fieldType = getWritablePropertyType(psiClass, field); if (fieldType == null || returnType.isAssignableFrom(fieldType)) { acceptableMember = field; } } } else { acceptableMember = null; } } if (acceptableMember != null) acceptableMembers.put(propertyName, acceptableMember); } } return acceptableMembers; } @Nullable private static String getPropertyNameFromNamedArgAnnotation(@NotNull PsiParameter 
parameter) { final PsiAnnotation annotation = AnnotationUtil.findAnnotation(parameter, JavaFxCommonNames.JAVAFX_BEANS_NAMED_ARG); if (annotation != null) { final PsiAnnotationMemberValue psiValue = annotation.findAttributeValue(JavaFxCommonNames.VALUE); if (psiValue instanceof PsiLiteralExpression) { final Object value = ((PsiLiteralExpression)psiValue).getValue(); if (value instanceof String) { return (String)value; } } } return null; } /** * Unlike normal properties (fields, getters/setters) named constructor parameters can be declared many times, possibly with different types */ @NotNull public static Set<String> getConstructorNamedArgProperties(@Nullable PsiClass psiClass) { if (psiClass != null) { return CachedValuesManager.getCachedValue(psiClass, () -> CachedValueProvider.Result.create( prepareConstructorNamedArgProperties(psiClass), PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT)); } return Collections.emptySet(); } @NotNull private static Set<String> prepareConstructorNamedArgProperties(@NotNull PsiClass psiClass) { final Set<String> properties = new THashSet<>(); for (PsiMethod constructor : psiClass.getConstructors()) { if (constructor.hasModifierProperty(PsiModifier.PUBLIC)) { final PsiParameter[] parameters = constructor.getParameterList().getParameters(); for (PsiParameter parameter : parameters) { final String propertyName = getPropertyNameFromNamedArgAnnotation(parameter); if (!StringUtil.isEmpty(propertyName)) { properties.add(propertyName); } } } } return properties; } @Nullable public static PsiMethod findInstancePropertySetter(@NotNull PsiClass psiClass, @Nullable String propertyName) { if (StringUtil.isEmpty(propertyName)) return null; final String suggestedSetterName = PropertyUtilBase.suggestSetterName(propertyName); final PsiMethod[] setters = psiClass.findMethodsByName(suggestedSetterName, true); for (PsiMethod setter : setters) { if (setter.hasModifierProperty(PsiModifier.PUBLIC) && !setter.hasModifierProperty(PsiModifier.STATIC) 
&& PropertyUtilBase.isSimplePropertySetter(setter)) { return setter; } } return null; } private static boolean isWritablePropertyType(@NotNull PsiClass psiClass, @NotNull PsiType fieldType) { return isObservableCollection(PsiUtil.resolveClassInType(fieldType)) && JavaGenericsUtil.getCollectionItemType(fieldType, psiClass.getResolveScope()) != null || InheritanceUtil.isInheritor(fieldType, JavaFxCommonNames.JAVAFX_COLLECTIONS_OBSERVABLE_MAP); } public static boolean isObservableCollection(@Nullable PsiClass psiClass) { return psiClass != null && (InheritanceUtil.isInheritor(psiClass, JavaFxCommonNames.JAVAFX_COLLECTIONS_OBSERVABLE_LIST) || InheritanceUtil.isInheritor(psiClass, JavaFxCommonNames.JAVAFX_COLLECTIONS_OBSERVABLE_SET) || InheritanceUtil.isInheritor(psiClass, JavaFxCommonNames.JAVAFX_COLLECTIONS_OBSERVABLE_ARRAY)); } @Nullable private static PsiSubstitutor getTagClassSubstitutor(@NotNull XmlAttribute xmlAttribute, @NotNull PsiClass controllerClass) { final XmlTag xmlTag = xmlAttribute.getParent(); final PsiClass tagClass = getTagClass(xmlTag); if (tagClass != null) { final String tagFieldName = xmlTag.getAttributeValue(FxmlConstants.FX_ID); if (!StringUtil.isEmpty(tagFieldName)) { final PsiField tagField = controllerClass.findFieldByName(tagFieldName, true); if (tagField != null && !tagField.hasModifierProperty(PsiModifier.STATIC) && isVisibleInFxml(tagField)) { final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(tagField.getType()); final PsiClass resolvedClass = resolveResult.getElement(); if (resolvedClass != null) { return TypeConversionUtil.getClassSubstitutor(tagClass, resolvedClass, resolveResult.getSubstitutor()); } } } } return null; } @Nullable public static PsiClassType getDeclaredEventType(@NotNull XmlAttribute xmlAttribute) { final PsiClass tagClass = getTagClass(xmlAttribute.getParent()); if (tagClass != null) { final PsiType eventHandlerPropertyType = getEventHandlerPropertyType(tagClass, 
xmlAttribute.getName()); if (eventHandlerPropertyType != null) { final PsiClass controllerClass = getControllerClass(xmlAttribute.getContainingFile()); if (controllerClass != null) { final PsiSubstitutor tagClassSubstitutor = getTagClassSubstitutor(xmlAttribute, controllerClass); final PsiType handlerType = tagClassSubstitutor != null ? tagClassSubstitutor.substitute(eventHandlerPropertyType) : eventHandlerPropertyType; final PsiClassType eventType = substituteEventType(handlerType, xmlAttribute.getProject()); final PsiType erasedType = eraseFreeTypeParameters(eventType, tagClass); return erasedType instanceof PsiClassType ? (PsiClassType)erasedType : null; } } } return null; } @Nullable private static PsiType getEventHandlerPropertyType(@NotNull PsiClass tagClass, @NotNull String eventName) { final PsiMethod[] handlerSetterCandidates = tagClass.findMethodsByName(PropertyUtilBase.suggestSetterName(eventName), true); for (PsiMethod handlerSetter : handlerSetterCandidates) { if (!handlerSetter.hasModifierProperty(PsiModifier.STATIC) && handlerSetter.hasModifierProperty(PsiModifier.PUBLIC)) { final PsiType propertyType = PropertyUtilBase.getPropertyType(handlerSetter); if (InheritanceUtil.isInheritor(propertyType, JavaFxCommonNames.JAVAFX_EVENT_EVENT_HANDLER)) { return propertyType; } } } final PsiField handlerField = tagClass.findFieldByName(eventName, true); final PsiClassType propertyType = getPropertyClassType(handlerField); if (InheritanceUtil.isInheritor(propertyType, JavaFxCommonNames.JAVAFX_EVENT_EVENT_HANDLER)) { return propertyType; } return null; } @Nullable private static PsiClassType substituteEventType(@Nullable PsiType eventHandlerType, @NotNull Project project) { if (!(eventHandlerType instanceof PsiClassType)) return null; final PsiClassType.ClassResolveResult resolveResult = ((PsiClassType)eventHandlerType).resolveGenerics(); final PsiClass eventHandlerClass = resolveResult.getElement(); if (eventHandlerClass == null) return null; final 
PsiSubstitutor eventHandlerClassSubstitutor = resolveResult.getSubstitutor(); final PsiClass eventHandlerInterface = JavaPsiFacade.getInstance(project).findClass(JavaFxCommonNames.JAVAFX_EVENT_EVENT_HANDLER, GlobalSearchScope.allScope(project)); if (eventHandlerInterface == null) return null; if (!InheritanceUtil.isInheritorOrSelf(eventHandlerClass, eventHandlerInterface, true)) return null; final PsiTypeParameter[] typeParameters = eventHandlerInterface.getTypeParameters(); if (typeParameters.length != 1) return null; final PsiTypeParameter eventTypeParameter = typeParameters[0]; final PsiSubstitutor substitutor = TypeConversionUtil.getSuperClassSubstitutor(eventHandlerInterface, eventHandlerClass, eventHandlerClassSubstitutor); final PsiType eventType = substitutor.substitute(eventTypeParameter); if (eventType instanceof PsiClassType) { return (PsiClassType)eventType; } if (eventType instanceof PsiWildcardType) { // TODO Handle wildcards more accurately final PsiType boundType = ((PsiWildcardType)eventType).getBound(); if (boundType instanceof PsiClassType) { return (PsiClassType)boundType; } } return null; } @Nullable private static PsiClass getFactoryProducedClass(@Nullable PsiClass psiClass, @Nullable String factoryMethodName) { if (psiClass == null || factoryMethodName == null) return null; final PsiMethod[] methods = psiClass.findMethodsByName(factoryMethodName, true); for (PsiMethod method : methods) { if (method.getParameterList().getParametersCount() == 0 && method.hasModifierProperty(PsiModifier.STATIC)) { return PsiUtil.resolveClassInClassTypeOnly(method.getReturnType()); } } return null; } @Nullable public static String validateEnumConstant(@NotNull PsiClass enumClass, @NonNls @Nullable String name) { if (!enumClass.isEnum() || name == null) return null; final Set<String> constantNames = CachedValuesManager.getCachedValue(enumClass, () -> CachedValueProvider.Result.create(Arrays.stream(enumClass.getFields()) .filter(PsiEnumConstant.class::isInstance) 
.map(PsiField::getName) .map(String::toUpperCase) .collect(Collectors.toCollection(THashSet::new)), PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT)); if (!constantNames.contains(name.toUpperCase())) { return "No enum constant '" + name + "' in " + enumClass.getQualifiedName(); } return null; } @NotNull public static String getPropertyName(@NotNull String memberName, boolean isMethod) { if (!isMethod) return memberName; final String propertyName = PropertyUtilBase.getPropertyName(memberName); return propertyName != null ? propertyName : memberName; } @Nullable public static PsiClass getTagValueClass(@NotNull XmlTag xmlTag) { return getTagValueClass(xmlTag, getTagClass(xmlTag)).getFirst(); } @NotNull public static Pair<PsiClass, Boolean> getTagValueClass(@NotNull XmlTag xmlTag, @Nullable PsiClass tagClass) { if (tagClass != null) { final XmlAttribute constAttr = xmlTag.getAttribute(FxmlConstants.FX_CONSTANT); if (constAttr != null) { final PsiField constField = tagClass.findFieldByName(constAttr.getValue(), true); if (constField != null) { final PsiType constType = constField.getType(); return Pair.create(PsiUtil.resolveClassInClassTypeOnly( constType instanceof PsiPrimitiveType ? 
((PsiPrimitiveType)constType).getBoxedType(xmlTag) : constType), true); } } else { final XmlAttribute factoryAttr = xmlTag.getAttribute(FxmlConstants.FX_FACTORY); if (factoryAttr != null) { return Pair.create(getFactoryProducedClass(tagClass, factoryAttr.getValue()), true); } } } return Pair.create(tagClass, false); } public static boolean isControllerClass(@NotNull PsiClass psiClass) { final Project project = psiClass.getProject(); final GlobalSearchScope resolveScope = psiClass.getResolveScope(); if (isControllerClassName(project, psiClass.getQualifiedName(), resolveScope)) { return true; } final Ref<Boolean> refFound = new Ref<>(false); ClassInheritorsSearch.search(psiClass, resolveScope, true, true, false).forEach((aClass) -> { if (isControllerClassName(project, aClass.getQualifiedName(), resolveScope)) { refFound.set(true); return false; } return true; }); return refFound.get(); } private static boolean isControllerClassName(@NotNull Project project, @Nullable String qualifiedName, @NotNull GlobalSearchScope resolveScope) { return qualifiedName != null && !JavaFxControllerClassIndex.findFxmlWithController(project, qualifiedName, resolveScope).isEmpty(); } @Nullable private static CachedValueProvider.Result<PsiClass> computeInjectedControllerClass(PsiFile containingFile) { return ourGuard.doPreventingRecursion(containingFile, true, () -> { final Project project = containingFile.getProject(); final Ref<PsiClass> injectedController = new Ref<>(); final PsiClass fxmlLoader = JavaPsiFacade.getInstance(project).findClass(JavaFxCommonNames.JAVAFX_FXML_FXMLLOADER, GlobalSearchScope.allScope(project)); if (fxmlLoader != null) { final PsiMethod[] injectControllerMethods = fxmlLoader.findMethodsByName("setController", false); if (injectControllerMethods.length == 1) { final GlobalSearchScope globalSearchScope = GlobalSearchScope .getScopeRestrictedByFileTypes(containingFile.getResolveScope(), StdFileTypes.JAVA); final VirtualFile[] virtualFiles = 
CacheManager.SERVICE.getInstance(project).getVirtualFilesWithWord( ClassUtil.extractClassName(JavaFxCommonNames.JAVAFX_FXML_FXMLLOADER), UsageSearchContext.IN_CODE, globalSearchScope, true); if (virtualFiles.length == 0) { return new CachedValueProvider.Result<>(null, PsiModificationTracker.MODIFICATION_COUNT); } final GlobalSearchScope filesScope = GlobalSearchScope.filesScope(project, Arrays.asList(virtualFiles)); final Processor<PsiReference> processor = loaderReference -> findControllerClassInjection(loaderReference, injectedController, injectControllerMethods[0]); ReferencesSearch.search(containingFile, filesScope).forEach(reference -> { final PsiElement element = reference.getElement(); if (element instanceof PsiLiteralExpression) { final PsiNewExpression expression = PsiTreeUtil.getParentOfType(element, PsiNewExpression.class); if (expression != null) { final PsiType type = expression.getType(); if (type != null && type.equalsToText(JavaFxCommonNames.JAVAFX_FXML_FXMLLOADER)) { final PsiElement parent = expression.getParent(); if (parent instanceof PsiLocalVariable) { ReferencesSearch.search(parent).forEach(processor); final PsiClass controller = injectedController.get(); if (controller != null) { return false; } } } } } return true; }); } } return new CachedValueProvider.Result<>(injectedController.get(), PsiModificationTracker.MODIFICATION_COUNT); }); } private static boolean findControllerClassInjection(@NotNull PsiReference loaderReference, @NotNull Ref<PsiClass> injectedController, PsiMethod injectControllerMethod) { final PsiElement element = loaderReference.getElement(); if (element instanceof PsiReferenceExpression) { final PsiMethodCallExpression methodCallExpression = PsiTreeUtil.getParentOfType(element, PsiMethodCallExpression.class); if (methodCallExpression != null && methodCallExpression.resolveMethod() == injectControllerMethod) { final PsiExpression[] expressions = methodCallExpression.getArgumentList().getExpressions(); if (expressions.length 
> 0) { final PsiClass psiClass = PsiUtil.resolveClassInType(expressions[0].getType()); if (psiClass != null) { injectedController.set(psiClass); return false; } } } } return true; } public static boolean isJavaFxPackageImported(@NotNull PsiFile file) { if (!(file instanceof PsiJavaFile)) return false; final PsiJavaFile javaFile = (PsiJavaFile)file; return CachedValuesManager.getCachedValue( javaFile, () -> { final PsiImportList importList = javaFile.getImportList(); if (importList != null) { for (PsiImportStatementBase statementBase : importList.getAllImportStatements()) { final PsiJavaCodeReferenceElement importReference = statementBase.getImportReference(); if (importReference != null) { final String qualifiedName = importReference.getQualifiedName(); if (qualifiedName != null && qualifiedName.startsWith("javafx.")) { return CachedValueProvider.Result.create(true, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } } } } return CachedValueProvider.Result.create(false, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.s4.client;

import org.apache.s4.collector.EventWrapper;
import org.apache.s4.listener.EventHandler;
import org.apache.s4.message.Request;
import org.apache.s4.message.Response;
import org.apache.s4.util.ByteArrayIOChannel;

import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.LinkedBlockingQueue;

import org.apache.log4j.Logger;

/**
 * Base class for protocol stubs that bridge external TCP clients and the S4
 * cluster. A concrete subclass defines the wire format (see
 * {@link #eventWrapperFromBytes(byte[])} and
 * {@link #bytesFromEventWrapper(EventWrapper)}); this class owns the accept
 * loop, the per-client connection registry, and the outbound dispatch queue.
 *
 * <p>Thread-safety: the {@code clients} map is guarded by synchronizing on the
 * map itself; all readers and writers must hold that lock.
 */
public abstract class ClientStub implements OutputStub, InputStub {
    protected static final Logger logger = Logger.getLogger("adapter");

    /**
     * Description of the protocol implemented by a concrete instance of this
     * stub.
     */
    public static class Info {
        public final String name;
        public final int versionMajor;
        public final int versionMinor;

        public Info(String name, int versionMajor, int versionMinor) {
            this.name = name;
            this.versionMajor = versionMajor;
            this.versionMinor = versionMinor;
        }
    }

    /**
     * Meta-information about the protocol that this stub uses to talk to
     * external clients.
     *
     * This is sent to the client as a part of the handshake.
     *
     * @return protocol name and version implemented by this stub
     */
    abstract public Info getProtocolInfo();

    /**
     * Stream names that are accepted by this stub to be forwarded to its
     * clients. A {@code null} return means no stream-name restriction is
     * declared here.
     */
    @Override
    public List<String> getAcceptedStreams() {
        return null;
    }

    // Handlers that inject client-originated events into the S4 cluster.
    private List<EventHandler> handlers = new ArrayList<EventHandler>();

    /**
     * A handler that can inject events produced by this stub into the S4
     * cluster.
     *
     * @param handler receiver for events arriving from external clients
     */
    @Override
    public void addHandler(EventHandler handler) {
        this.handlers.add(handler);
    }

    /**
     * Remove a handler.
     *
     * @return {@code true} if the handler was registered and has been removed
     */
    @Override
    public boolean removeHandler(EventHandler handler) {
        return handlers.remove(handler);
    }

    /**
     * Convert an array of bytes into an event wrapper. This method is used to
     * translate data received from a client into events that may be injected
     * into the S4 cluster.
     *
     * @param v array of bytes
     * @return EventWrapper constructed from the byte array.
     */
    abstract public EventWrapper eventWrapperFromBytes(byte[] v);

    /**
     * Convert an event wrapper into a byte array. Events received from the S4
     * cluster for dispatching to a client are translated into a byte array
     * using this method.
     *
     * @param e an {@link EventWrapper}
     * @return a byte array
     */
    abstract public byte[] bytesFromEventWrapper(EventWrapper e);

    /**
     * Construct an I/O channel over which the stub can communicate with a
     * client. The channel allows arrays of bytes to be exchanged between the
     * stub and client.
     *
     * @param socket TCP/IP socket
     * @return an IO Channel to send and recv byte arrays
     * @throws IOException if the underlying socket could not provide valid
     *         input and output streams.
     */
    public IOChannel createIOChannel(Socket socket) throws IOException {
        return new ByteArrayIOChannel(socket);
    }

    /** Send an event into the cluster via the registered handlers. */
    void injectEvent(EventWrapper e) {
        for (EventHandler handler : handlers) {
            handler.processEvent(e);
        }
    }

    // Registry of connected clients, keyed by connection UUID.
    // Guarded by synchronized (clients) — HashMap is not thread-safe and this
    // map is touched from the accept thread, the sender thread, and shutdown.
    HashMap<UUID, ClientConnection> clients = new HashMap<UUID, ClientConnection>();

    /**
     * Create a client connection and add it to list of clients.
     *
     * @param c freshly handshaken client connection
     */
    private void addClient(ClientConnection c) {
        synchronized (clients) {
            logger.info("adding client " + c.uuid);
            clients.put(c.uuid, c);
        }
    }

    // Outbound queue of cluster events awaiting dispatch to clients.
    LinkedBlockingQueue<EventWrapper> queue = new LinkedBlockingQueue<EventWrapper>();

    @Override
    public int getQueueSize() {
        return queue.size();
    }

    @Override
    public void queueWork(EventWrapper e) {
        // Unbounded queue: offer never fails here.
        queue.offer(e);
    }

    ServerSocket serverSocket = null;

    /**
     * Bind the listening socket on the given port.
     *
     * @param port TCP port to accept client connections on
     * @throws IOException if the port cannot be bound
     */
    public void setConnectionPort(int port) throws IOException {
        serverSocket = new ServerSocket(port);
    }

    private Thread acceptThread = null;
    private Thread senderThread = null;

    /** Start accepting new clients and sending events to them. */
    public void init() {
        (acceptThread = new Thread(connectionListener)).start();
        (senderThread = new Thread(sender)).start();
    }

    /**
     * Stop accepting new clients, stop the dispatcher, and close all
     * connected clients.
     */
    public void shutdown() {
        // FIX: ServerSocket.accept() does not respond to Thread.interrupt(),
        // so interrupting acceptThread alone never unblocks the accept loop.
        // Closing the socket makes the blocked accept() throw, terminating
        // the listener thread.
        ServerSocket ss = serverSocket;
        serverSocket = null;
        if (ss != null) {
            try {
                ss.close();
            } catch (IOException e) {
                logger.warn("exception closing server socket during shutdown", e);
            }
        }
        // Stop accepting new clients.
        if (acceptThread != null) {
            acceptThread.interrupt();
            acceptThread = null;
        }
        // Stop sending events to them.
        if (senderThread != null) {
            senderThread.interrupt();
            senderThread = null;
        }
        // Stop all connected clients. Snapshot under the map's lock: the
        // accept thread may still be mid-handshake when shutdown begins.
        List<ClientConnection> clientCopy;
        synchronized (clients) {
            clientCopy = new ArrayList<ClientConnection>(clients.values());
        }
        for (ClientConnection c : clientCopy) {
            c.stop();
            c.close();
        }
    }

    // Accept loop: handshakes each incoming socket and registers the
    // resulting connection. Exits when the server socket is closed (accept
    // throws) or the thread is interrupted.
    private final Runnable connectionListener = new Runnable() {
        Handshake handshake = null;

        public void run() {
            if (handshake == null)
                handshake = new Handshake(ClientStub.this);
            try {
                while (serverSocket != null && serverSocket.isBound()
                        && !Thread.currentThread().isInterrupted()) {
                    Socket socket = serverSocket.accept();
                    ClientConnection connection = handshake.execute(socket);
                    if (connection != null) {
                        addClient(connection);
                        connection.start();
                    }
                }
            } catch (IOException e) {
                // Expected on shutdown when the server socket is closed.
                logger.info("exception in client connection listener", e);
            }
        }
    };

    // Dispatch loop: drains the outbound queue and forwards each event to the
    // appropriate client(s). Responses are routed to the requesting client
    // only; all other events are broadcast.
    public final Runnable sender = new Runnable() {
        ArrayList<ClientConnection> disconnect = new ArrayList<ClientConnection>();

        public void run() {
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    EventWrapper event = queue.take();
                    // Responses need special handling.
                    if (event.getEvent() instanceof Response) {
                        dispatchResponse(event);
                        continue;
                    }
                    // TODO: include check to see if the event belongs to a
                    // particular client.
                    dispatchToAllClients(event);
                } catch (InterruptedException e) {
                    return;
                }
            }
        }

        private void dispatchToAllClients(EventWrapper event) {
            byte[] b = bytesFromEventWrapper(event);
            String stream = event.getStreamName();
            synchronized (clients) {
                for (ClientConnection c : clients.values()) {
                    if (c.good() && c.streamAccepted(stream)) {
                        try {
                            c.io.send(b);
                        } catch (IOException e) {
                            logger.error("error sending message to client "
                                    + c.uuid + ". disconnecting", e);
                            disconnect.add(c);
                        }
                    }
                }
            }
            if (disconnect.size() > 0) {
                // NOTE(review): connections are closed but not explicitly
                // removed from the clients map here — presumably
                // ClientConnection.close() deregisters itself; verify.
                for (ClientConnection d : disconnect)
                    d.close();
                disconnect.clear();
            }
        }

        private void dispatchResponse(EventWrapper event) {
            Response res = (Response) event.getEvent();
            Request.RInfo rinfo = res.getRInfo();
            if (rinfo instanceof Request.ClientRInfo) {
                UUID uuid = ((Request.ClientRInfo) rinfo).getRequesterUUID();
                // FIX: read the clients map under its lock, consistent with
                // addClient/dispatchToAllClients — HashMap is not thread-safe.
                ClientConnection c;
                synchronized (clients) {
                    c = clients.get(uuid);
                }
                if (c != null && c.good() && c.clientReadMode.takePrivate()) {
                    try {
                        byte[] b = bytesFromEventWrapper(event);
                        if (b != null)
                            c.io.send(b);
                    } catch (IOException e) {
                        logger.error("error sending response to client "
                                + c.uuid + ". disconnecting", e);
                        c.close();
                    }
                } else {
                    logger.warn("no active client found for response: " + res);
                }
            }
        }
    };
}
/* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package java.util; import com.google.j2objc.annotations.Weak; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.lang.reflect.Array; /** * An {@code Map} specialized for use with {@code Enum} types as keys. 
*/ public class EnumMap<K extends Enum<K>, V> extends AbstractMap<K, V> implements Serializable, Cloneable, Map<K, V> { // BEGIN android-changed // added implements Map<K, V> for apicheck // END android-changed private static final long serialVersionUID = 458661240069192865L; private Class<K> keyType; transient K[] keys; transient V[] values; transient boolean[] hasMapping; private transient int mappingsCount; transient int enumSize; private transient EnumMapEntrySet<K, V> entrySet = null; private static class Entry<KT extends Enum<KT>, VT> extends MapEntry<KT, VT> { private final EnumMap<KT, VT> enumMap; private final int ordinal; Entry(KT theKey, VT theValue, EnumMap<KT, VT> em) { super(theKey, theValue); enumMap = em; ordinal = theKey.ordinal(); } @Override public boolean equals(Object object) { if (!enumMap.hasMapping[ordinal]) { return false; } boolean isEqual = false; if (object instanceof Map.Entry) { Map.Entry<?, ?> entry = (Map.Entry<?, ?>) object; Object enumKey = entry.getKey(); if (key.equals(enumKey)) { Object theValue = entry.getValue(); if (enumMap.values[ordinal] == null) { isEqual = (theValue == null); } else { isEqual = enumMap.values[ordinal].equals(theValue); } } } return isEqual; } @Override public int hashCode() { return (enumMap.keys[ordinal] == null ? 0 : enumMap.keys[ordinal].hashCode()) ^ (enumMap.values[ordinal] == null ? 0 : enumMap.values[ordinal].hashCode()); } @Override public KT getKey() { checkEntryStatus(); return enumMap.keys[ordinal]; } @Override public VT getValue() { checkEntryStatus(); return enumMap.values[ordinal]; } @Override public VT setValue(VT value) { checkEntryStatus(); return enumMap.put(enumMap.keys[ordinal], value); } @Override public String toString() { StringBuilder result = new StringBuilder(enumMap.keys[ordinal].toString()); result.append("="); result.append(enumMap.values[ordinal] == null ? 
"null" : enumMap.values[ordinal].toString()); return result.toString(); } private void checkEntryStatus() { if (!enumMap.hasMapping[ordinal]) { throw new IllegalStateException(); } } } private static class EnumMapIterator<E, KT extends Enum<KT>, VT> implements Iterator<E> { int position = 0; int prePosition = -1; final EnumMap<KT, VT> enumMap; final MapEntry.Type<E, KT, VT> type; EnumMapIterator(MapEntry.Type<E, KT, VT> value, EnumMap<KT, VT> em) { enumMap = em; type = value; } public boolean hasNext() { int length = enumMap.enumSize; for (; position < length; position++) { if (enumMap.hasMapping[position]) { break; } } return position != length; } public E next() { if (!hasNext()) { throw new NoSuchElementException(); } prePosition = position++; return type.get(new MapEntry<KT, VT>(enumMap.keys[prePosition], enumMap.values[prePosition])); } public void remove() { checkStatus(); if (enumMap.hasMapping[prePosition]) { enumMap.remove(enumMap.keys[prePosition]); } prePosition = -1; } @Override public String toString() { if (prePosition == -1) { return super.toString(); } return type.get( new MapEntry<KT, VT>(enumMap.keys[prePosition], enumMap.values[prePosition])).toString(); } private void checkStatus() { if (prePosition == -1) { throw new IllegalStateException(); } } } private static class EnumMapKeySet<KT extends Enum<KT>, VT> extends AbstractSet<KT> { @Weak private final EnumMap<KT, VT> enumMap; EnumMapKeySet(EnumMap<KT, VT> em) { enumMap = em; } @Override public void clear() { enumMap.clear(); } @Override public boolean contains(Object object) { return enumMap.containsKey(object); } @Override public Iterator<KT> iterator() { return new EnumMapIterator<KT, KT, VT>( new MapEntry.Type<KT, KT, VT>() { public KT get(MapEntry<KT, VT> entry) { return entry.key; } }, enumMap); } @Override public boolean remove(Object object) { if (contains(object)) { enumMap.remove(object); return true; } return false; } @Override public int size() { return enumMap.size(); } } private 
static class EnumMapValueCollection<KT extends Enum<KT>, VT> extends AbstractCollection<VT> { @Weak private final EnumMap<KT, VT> enumMap; EnumMapValueCollection(EnumMap<KT, VT> em) { enumMap = em; } @Override public void clear() { enumMap.clear(); } @Override public boolean contains(Object object) { return enumMap.containsValue(object); } @Override public Iterator<VT> iterator() { return new EnumMapIterator<VT, KT, VT>( new MapEntry.Type<VT, KT, VT>() { public VT get(MapEntry<KT, VT> entry) { return entry.value; } }, enumMap); } @Override public boolean remove(Object object) { if (object == null) { for (int i = 0; i < enumMap.enumSize; i++) { if (enumMap.hasMapping[i] && enumMap.values[i] == null) { enumMap.remove(enumMap.keys[i]); return true; } } } else { for (int i = 0; i < enumMap.enumSize; i++) { if (enumMap.hasMapping[i] && object.equals(enumMap.values[i])) { enumMap.remove(enumMap.keys[i]); return true; } } } return false; } @Override public int size() { return enumMap.size(); } } private static class EnumMapEntryIterator<E, KT extends Enum<KT>, VT> extends EnumMapIterator<E, KT, VT> { EnumMapEntryIterator(MapEntry.Type<E, KT, VT> value, EnumMap<KT, VT> em) { super(value, em); } @Override public E next() { if (!hasNext()) { throw new NoSuchElementException(); } prePosition = position++; return type.get(new EnumMap.Entry<KT, VT>(enumMap.keys[prePosition], enumMap.values[prePosition], enumMap)); } } private static class EnumMapEntrySet<KT extends Enum<KT>, VT> extends AbstractSet<Map.Entry<KT, VT>> { @Weak private final EnumMap<KT, VT> enumMap; EnumMapEntrySet(EnumMap<KT, VT> em) { enumMap = em; } @Override public void clear() { enumMap.clear(); } @Override public boolean contains(Object object) { boolean isEqual = false; if (object instanceof Map.Entry) { Object enumKey = ((Map.Entry<?, ?>) object).getKey(); Object enumValue = ((Map.Entry<?, ?>) object).getValue(); if (enumMap.containsKey(enumKey)) { VT value = enumMap.get(enumKey); if (value == null) { 
isEqual = enumValue == null; } else { isEqual = value.equals(enumValue); } } } return isEqual; } @Override public Iterator<Map.Entry<KT, VT>> iterator() { return new EnumMapEntryIterator<Map.Entry<KT, VT>, KT, VT>( new MapEntry.Type<Map.Entry<KT, VT>, KT, VT>() { public Map.Entry<KT, VT> get(MapEntry<KT, VT> entry) { return entry; } }, enumMap); } @Override public boolean remove(Object object) { if (contains(object)) { enumMap.remove(((Map.Entry<?, ?>) object).getKey()); return true; } return false; } @Override public int size() { return enumMap.size(); } @Override public Object[] toArray() { Object[] entryArray = new Object[enumMap.size()]; return toArray(entryArray); } @Override public <T> T[] toArray(T[] array) { int size = enumMap.size(); int index = 0; T[] entryArray = array; if (size > array.length) { Class<?> clazz = array.getClass().getComponentType(); @SuppressWarnings("unchecked") T[] newArray = (T[]) Array.newInstance(clazz, size); entryArray = newArray; } Iterator<Map.Entry<KT, VT>> iter = iterator(); for (; index < size; index++) { Map.Entry<KT, VT> entry = iter.next(); @SuppressWarnings("unchecked") T newEntry = (T) new MapEntry<KT, VT>(entry.getKey(), entry.getValue()); entryArray[index] = newEntry; } if (index < array.length) { entryArray[index] = null; } return entryArray; } } /** * Constructs an empty {@code EnumMap} using the given key type. * * @param keyType * the class object giving the type of the keys used by this {@code EnumMap}. * @throws NullPointerException * if {@code keyType} is {@code null}. */ public EnumMap(Class<K> keyType) { initialization(keyType); } /** * Constructs an {@code EnumMap} using the same key type as the given {@code EnumMap} and * initially containing the same mappings. * * @param map * the {@code EnumMap} from which this {@code EnumMap} is initialized. * @throws NullPointerException * if {@code map} is {@code null}. */ public EnumMap(EnumMap<K, ? 
extends V> map) { initialization(map); } /** * Constructs an {@code EnumMap} initialized from the given map. If the given map * is an {@code EnumMap} instance, this constructor behaves in the exactly the same * way as {@link EnumMap#EnumMap(EnumMap)}}. Otherwise, the given map * should contain at least one mapping. * * @param map * the map from which this {@code EnumMap} is initialized. * @throws IllegalArgumentException * if {@code map} is not an {@code EnumMap} instance and does not contain * any mappings. * @throws NullPointerException * if {@code map} is {@code null}. */ public EnumMap(Map<K, ? extends V> map) { if (map instanceof EnumMap) { @SuppressWarnings("unchecked") EnumMap<K, ? extends V> enumMap = (EnumMap<K, ? extends V>) map; initialization(enumMap); } else { if (map.isEmpty()) { throw new IllegalArgumentException("map is empty"); } Iterator<K> iter = map.keySet().iterator(); K enumKey = iter.next(); // Confirm the key is actually an enum: Throw ClassCastException if not. Enum.class.cast(enumKey); Class<?> clazz = enumKey.getClass(); if (!clazz.isEnum()) { // Each enum value can have its own subclass. In this case we want the abstract // super-class which has the values() method. clazz = clazz.getSuperclass(); } @SuppressWarnings("unchecked") Class<K> enumClass = (Class<K>) clazz; initialization(enumClass); putAllImpl(map); } } /** * Removes all elements from this {@code EnumMap}, leaving it empty. * * @see #isEmpty() * @see #size() */ @Override public void clear() { Arrays.fill(values, null); Arrays.fill(hasMapping, false); mappingsCount = 0; } /** * Returns a shallow copy of this {@code EnumMap}. * * @return a shallow copy of this {@code EnumMap}. 
*/ @Override public EnumMap<K, V> clone() { try { @SuppressWarnings("unchecked") EnumMap<K, V> enumMap = (EnumMap<K, V>) super.clone(); enumMap.initialization(this); return enumMap; } catch (CloneNotSupportedException e) { throw new AssertionError(e); } } /** * Returns whether this {@code EnumMap} contains the specified key. * * @param key * the key to search for. * @return {@code true} if this {@code EnumMap} contains the specified key, * {@code false} otherwise. */ @Override public boolean containsKey(Object key) { if (isValidKeyType(key)) { int keyOrdinal = ((Enum) key).ordinal(); return hasMapping[keyOrdinal]; } return false; } /** * Returns whether this {@code EnumMap} contains the specified value. * * @param value * the value to search for. * @return {@code true} if this {@code EnumMap} contains the specified value, * {@code false} otherwise. */ @Override public boolean containsValue(Object value) { if (value == null) { for (int i = 0; i < enumSize; i++) { if (hasMapping[i] && values[i] == null) { return true; } } } else { for (int i = 0; i < enumSize; i++) { if (hasMapping[i] && value.equals(values[i])) { return true; } } } return false; } /** * Returns a {@code Set} containing all of the mappings in this {@code EnumMap}. Each mapping is * an instance of {@link Map.Entry}. As the {@code Set} is backed by this {@code EnumMap}, * changes in one will be reflected in the other. * <p> * The order of the entries in the set will be the order that the enum keys * were declared in. * * @return a {@code Set} of the mappings. */ @Override public Set<Map.Entry<K, V>> entrySet() { if (entrySet == null) { entrySet = new EnumMapEntrySet<K, V>(this); } return entrySet; } /** * Compares the argument to the receiver, and returns {@code true} if the * specified {@code Object} is an {@code EnumMap} and both {@code EnumMap}s contain the same mappings. * * @param object * the {@code Object} to compare with this {@code EnumMap}. 
* @return boolean {@code true} if {@code object} is the same as this {@code EnumMap}, * {@code false} otherwise. * @see #hashCode() * @see #entrySet() */ @Override public boolean equals(Object object) { if (this == object) { return true; } if (!(object instanceof EnumMap)) { return super.equals(object); } @SuppressWarnings("unchecked") EnumMap<K, V> enumMap = (EnumMap<K, V>) object; if (keyType != enumMap.keyType || size() != enumMap.size()) { return false; } return Arrays.equals(hasMapping, enumMap.hasMapping) && Arrays.equals(values, enumMap.values); } /** * Returns the value of the mapping with the specified key. * * @param key * the key. * @return the value of the mapping with the specified key, or {@code null} * if no mapping for the specified key is found. */ @Override public V get(Object key) { if (!isValidKeyType(key)) { return null; } int keyOrdinal = ((Enum) key).ordinal(); return values[keyOrdinal]; } /** * Returns a set of the keys contained in this {@code EnumMap}. The {@code Set} is backed by * this {@code EnumMap} so changes to one are reflected in the other. The {@code Set} does not * support adding. * <p> * The order of the set will be the order that the enum keys were declared * in. * * @return a {@code Set} of the keys. */ @Override public Set<K> keySet() { if (keySet == null) { keySet = new EnumMapKeySet<K, V>(this); } return keySet; } /** * Maps the specified key to the specified value. * * @param key * the key. * @param value * the value. * @return the value of any previous mapping with the specified key or * {@code null} if there was no mapping. * @throws UnsupportedOperationException * if adding to this map is not supported. * @throws ClassCastException * if the class of the key or value is inappropriate for this * map. * @throws IllegalArgumentException * if the key or value cannot be added to this map. 
* @throws NullPointerException * if the key or value is {@code null} and this {@code EnumMap} does not * support {@code null} keys or values. */ @Override public V put(K key, V value) { return putImpl(key, value); } /** * Copies every mapping in the specified {@code Map} to this {@code EnumMap}. * * @param map * the {@code Map} to copy mappings from. * @throws UnsupportedOperationException * if adding to this {@code EnumMap} is not supported. * @throws ClassCastException * if the class of a key or value is inappropriate for this * {@code EnumMap}. * @throws IllegalArgumentException * if a key or value cannot be added to this map. * @throws NullPointerException * if a key or value is {@code null} and this {@code EnumMap} does not * support {@code null} keys or values. */ @Override public void putAll(Map<? extends K, ? extends V> map) { putAllImpl(map); } /** * Removes a mapping with the specified key from this {@code EnumMap}. * * @param key * the key of the mapping to remove. * @return the value of the removed mapping or {@code null} if no mapping * for the specified key was found. * @throws UnsupportedOperationException * if removing from this {@code EnumMap} is not supported. */ @Override public V remove(Object key) { if (!isValidKeyType(key)) { return null; } int keyOrdinal = ((Enum) key).ordinal(); if (hasMapping[keyOrdinal]) { hasMapping[keyOrdinal] = false; mappingsCount--; } V oldValue = values[keyOrdinal]; values[keyOrdinal] = null; return oldValue; } /** * Returns the number of elements in this {@code EnumMap}. * * @return the number of elements in this {@code EnumMap}. */ @Override public int size() { return mappingsCount; } /** * Returns a {@code Collection} of the values contained in this {@code EnumMap}. The returned * {@code Collection} complies with the general rule specified in * {@link Map#values()}. 
The {@code Collection}'s {@code Iterator} will return the values * in the their corresponding keys' natural order (the {@code Enum} constants are * declared in this order). * <p> * The order of the values in the collection will be the order that their * corresponding enum keys were declared in. * * @return a collection of the values contained in this {@code EnumMap}. */ @Override public Collection<V> values() { if (valuesCollection == null) { valuesCollection = new EnumMapValueCollection<K, V>(this); } return valuesCollection; } @SuppressWarnings("unchecked") private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException { stream.defaultReadObject(); initialization(keyType); int elementCount = stream.readInt(); K enumKey; V value; for (int i = elementCount; i > 0; i--) { enumKey = (K) stream.readObject(); value = (V) stream.readObject(); putImpl(enumKey, value); } } private void writeObject(ObjectOutputStream stream) throws IOException { stream.defaultWriteObject(); stream.writeInt(mappingsCount); for (Map.Entry<K, V> entry : entrySet()) { stream.writeObject(entry.getKey()); stream.writeObject(entry.getValue()); } } private boolean isValidKeyType(Object key) { return key != null && keyType.isInstance(key); } private void initialization(EnumMap<K, ? extends V> enumMap) { keyType = enumMap.keyType; keys = enumMap.keys; enumSize = enumMap.enumSize; values = enumMap.values.clone(); hasMapping = enumMap.hasMapping.clone(); mappingsCount = enumMap.mappingsCount; } private void initialization(Class<K> type) { keyType = type; keys = Enum.getSharedConstants(keyType); enumSize = keys.length; // The value array is actually Object[] for speed of creation. It is treated as a V[] // because it is safe to do so and eliminates unchecked warning suppression throughout. @SuppressWarnings("unchecked") V[] valueArray = (V[]) new Object[enumSize]; values = valueArray; hasMapping = new boolean[enumSize]; } private void putAllImpl(Map<? extends K, ? 
extends V> map) { for (Map.Entry<? extends K, ? extends V> entry : map.entrySet()) { putImpl(entry.getKey(), entry.getValue()); } } private V putImpl(K key, V value) { if (key == null) { throw new NullPointerException("key == null"); } keyType.cast(key); // Called to throw ClassCastException. int keyOrdinal = key.ordinal(); if (!hasMapping[keyOrdinal]) { hasMapping[keyOrdinal] = true; mappingsCount++; } V oldValue = values[keyOrdinal]; values[keyOrdinal] = value; return oldValue; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.markup.parser.filter;

import java.text.ParseException;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.wicket.Component;
import org.apache.wicket.MarkupContainer;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.ComponentTag.IAutoComponentFactory;
import org.apache.wicket.markup.Markup;
import org.apache.wicket.markup.MarkupElement;
import org.apache.wicket.markup.MarkupResourceStream;
import org.apache.wicket.markup.MarkupStream;
import org.apache.wicket.markup.WicketTag;
import org.apache.wicket.markup.html.TransparentWebMarkupContainer;
import org.apache.wicket.markup.parser.AbstractMarkupFilter;
import org.apache.wicket.markup.resolver.IComponentResolver;
import org.apache.wicket.request.UrlUtils;
import org.apache.wicket.request.cycle.RequestCycle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The purpose of this filter is to make all "href", "src" and "background" attributes found in the
 * markup which contain a relative URL like "myDir/myPage.gif" actually resolve in the output HTML,
 * by prefixing them with an appropriate path to make the link work properly, even if the
 * current page is being displayed at a mounted URL or whatever. It is applied to all non wicket
 * component tags, except for auto-linked tags.
 *
 * It achieves this by being both an IMarkupFilter and IComponentResolver, and works similarly to
 * the &lt;wicket:message&gt; code. For each tag, we look to see if the path in "href", "src" and
 * "background" attributes is relative. If it is, we assume it's relative to the context path and we
 * should prefix it appropriately so that it resolves correctly for the current request, even if
 * that's for something that's not at the context root. This is done for ServletWebRequests by
 * prepending with "../" tokens, for example.
 *
 *
 * @author Al Maw
 */
public final class RelativePathPrefixHandler extends AbstractMarkupFilter
	implements
		IComponentResolver
{
	private static final long serialVersionUID = 1L;

	/** Logging */
	private static final Logger log = LoggerFactory.getLogger(RelativePathPrefixHandler.class);

	/**
	 * The id automatically assigned to tags without an id which we need to prepend a relative path
	 * to.
	 */
	public static final String WICKET_RELATIVE_PATH_PREFIX_CONTAINER_ID = "_relative_path_prefix_";

	/** List of attribute names considered */
	private static final String attributeNames[] = new String[] { "href", "src", "background",
			"action" };

	/**
	 * Behavior that adds a prefix to src, href and background attributes to make them
	 * context-relative
	 */
	public static final Behavior RELATIVE_PATH_BEHAVIOR = new Behavior()
	{
		private static final long serialVersionUID = 1L;

		@Override
		public void onComponentTag(Component component, ComponentTag tag)
		{
			// Modify all relevant attributes
			for (String attrName : attributeNames)
			{
				String attrValue = tag.getAttributes().getString(attrName);

				if (needsRelativePathPrefix(attrValue))
				{
					tag.getAttributes().put(attrName,
						UrlUtils.rewriteToContextRelative(attrValue, RequestCycle.get()));
				}
			}
		}
	};

	private static final IAutoComponentFactory FACTORY = new IAutoComponentFactory()
	{
		@Override
		public Component newComponent(MarkupContainer container, ComponentTag tag)
		{
			return new TransparentWebMarkupContainer(tag.getId());
		}
	};

	/**
	 * Decides whether an attribute value is a context-relative URL that must be prefixed.
	 * Absolute paths ("/..."), absolute/schemed URLs (anything containing ":") and pure
	 * fragment references ("#...") are left untouched. This single predicate is shared by
	 * {@link #RELATIVE_PATH_BEHAVIOR} and {@link #onComponentTag(ComponentTag)} so the two
	 * call sites cannot drift apart.
	 *
	 * @param attrValue
	 *            the attribute value, may be {@code null}
	 * @return {@code true} if the value should be rewritten to be context-relative
	 */
	private static boolean needsRelativePathPrefix(final String attrValue)
	{
		return (attrValue != null) && (attrValue.startsWith("/") == false) &&
			(!attrValue.contains(":")) && !(attrValue.startsWith("#"));
	}

	/**
	 * https://issues.apache.org/jira/browse/WICKET-5724
	 *
	 * Unique index to generate new tag ids.
	 */
	private final AtomicInteger componentIndex = new AtomicInteger();

	/**
	 * Constructor for the IComponentResolver role.
	 */
	public RelativePathPrefixHandler()
	{
		this(null);
	}

	/**
	 * Constructor for the IMarkupFilter role
	 *
	 * @param markup
	 *            The markup created by reading the markup file
	 */
	public RelativePathPrefixHandler(final MarkupResourceStream markup)
	{
		super(markup);
	}

	@Override
	protected final MarkupElement onComponentTag(ComponentTag tag) throws ParseException
	{
		if (tag.isClose())
		{
			return tag;
		}

		String wicketIdAttr = getWicketNamespace() + ":" + "id";

		// Don't touch any wicket:id component and any auto-components
		if ((tag instanceof WicketTag) || (tag.isAutolinkEnabled() == true) ||
			(tag.getAttributes().get(wicketIdAttr) != null))
		{
			return tag;
		}

		// Work out whether we have any attributes that require us to add a
		// behavior that prepends the relative path.
		for (String attrName : attributeNames)
		{
			String attrValue = tag.getAttributes().getString(attrName);
			if (needsRelativePathPrefix(attrValue))
			{
				if (tag.getId() == null)
				{
					tag.setId(getWicketRelativePathPrefix(null) +
						componentIndex.getAndIncrement());
					tag.setAutoComponentTag(true);
				}
				tag.addBehavior(RELATIVE_PATH_BEHAVIOR);
				tag.setModified(true);
				break;
			}
		}
		return tag;
	}

	@Override
	public Component resolve(final MarkupContainer container, final MarkupStream markupStream,
		final ComponentTag tag)
	{
		if ((tag != null) && (tag.getId().startsWith(getWicketRelativePathPrefix(markupStream))))
		{
			// we do not want to mess with the hierarchy, so the container has to be
			// transparent as it may have wicket components inside. for example a raw anchor tag
			// that contains a label.
			return new TransparentWebMarkupContainer(tag.getId());
		}
		return null;
	}

	@Override
	public void postProcess(Markup markup)
	{
		/**
		 * https://issues.apache.org/jira/browse/WICKET-5724
		 *
		 * Transparent component inside page body must allow queued children components.
		 */
		Iterator<MarkupElement> markupIterator = markup.iterator();

		while (markupIterator.hasNext())
		{
			MarkupElement next = markupIterator.next();

			if (next instanceof ComponentTag)
			{
				ComponentTag componentTag = (ComponentTag)next;

				/**
				 * if component tag is for a transparent component and contains "wicket:id", must be
				 * queueable.
				 */
				if (componentTag.containsWicketId() &&
					componentTag.getId().startsWith(getWicketRelativePathPrefix(null)))
				{
					componentTag.setAutoComponentFactory(FACTORY);
				}
			}
		}
	}

	private String getWicketRelativePathPrefix(final MarkupStream markupStream)
	{
		return getWicketNamespace(markupStream) + WICKET_RELATIVE_PATH_PREFIX_CONTAINER_ID;
	}
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.river.twitter.test;

import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
import org.elasticsearch.action.count.CountResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.base.Predicate;
import org.elasticsearch.common.joda.time.DateTime;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.indices.IndexMissingException;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.river.twitter.test.helper.HttpClient;
import org.elasticsearch.river.twitter.test.helper.HttpClientResponse;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.*;
import twitter4j.Status;
import twitter4j.Twitter;
import twitter4j.TwitterException;
import twitter4j.TwitterFactory;
import twitter4j.auth.AccessToken;

import java.io.IOException;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;

/**
 * Integration tests for Twitter river<br>
 * You must have an internet access.
 *
 * Launch it using:
 * mvn test -Dtests.thirdparty=true -Dtests.config=/path/to/elasticsearch.yml
 *
 * where your /path/to/elasticsearch.yml contains:
 *
 *  river:
 *      twitter:
 *          oauth:
 *             consumer_key: ""
 *             consumer_secret: ""
 *             access_token: ""
 *             access_token_secret: ""
 */
@ElasticsearchIntegrationTest.ClusterScope(
        scope = ElasticsearchIntegrationTest.Scope.SUITE,
        transportClientRatio = 0.0)
@ThreadLeakFilters(defaultFilters = true, filters = {Twitter4JThreadFilter.class})
public class TwitterIntegrationTest extends AbstractTwitterTest {

    // Keyword used by the "tracks" filter tests; popular enough that matching
    // tweets arrive quickly.
    private final String track = "obama";

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        Settings.Builder settings = Settings.builder()
                .put(super.nodeSettings(nodeOrdinal))
                .put("path.home", createTempDir())
                .put("plugins." + PluginsService.LOAD_PLUGIN_FROM_CLASSPATH, true);

        Environment environment = new Environment(settings.build());

        // if explicit, just load it and don't load from env
        if (Strings.hasText(System.getProperty("tests.config"))) {
            settings.loadFromUrl(environment.resolveConfig(System.getProperty("tests.config")));
        }

        return settings.build();
    }

    @Before
    public void createEmptyRiverIndex() {
        // We want to force _river index to use 1 shard 1 replica
        client().admin().indices().prepareCreate("_river").setSettings(Settings.builder()
                .put(SETTING_NUMBER_OF_SHARDS, 1)
                .put(SETTING_NUMBER_OF_REPLICAS, 0)).get();
    }

    @After
    public void deleteRiverAndWait() throws InterruptedException {
        logger.info("  --> delete all");
        client().admin().indices().prepareDelete("_all").get();
        // Wait until the deletion is visible (count over all indices drops to 0).
        assertThat(awaitBusy(new Predicate<Object>() {
            public boolean apply(Object obj) {
                CountResponse response = client().prepareCount().get();
                return response.getCount() == 0;
            }
        }, 20, TimeUnit.SECONDS), equalTo(true));

        // Let's wait one second between two runs as it appears that Twitter4J
        // does not close immediately so we might have Twitter API failure on the next test
        // 420:Returned by the Search and Trends API when you are being rate limited
        logger.info("  --> wait for Twitter4J to close");
        awaitBusy1Second(new Predicate<Object>() {
            @Override
            public boolean apply(Object o) {
                return false;
            }
        }, 2, TimeUnit.SECONDS);
        logger.info("  --> ending test");
    }

    // Derives a per-test index name from the JUnit test method name.
    private String getDbName() {
        return Strings.toUnderscoreCase(getTestName());
    }

    // Registers the given river definition, then blocks until at least numDocs
    // documents have been indexed (up to 5 minutes). Optionally removes the
    // river afterwards so it stops streaming.
    private void launchTest(XContentBuilder river, final Integer numDocs, boolean removeRiver)
            throws IOException, InterruptedException {
        logger.info("  -> Checking internet working");
        HttpClientResponse response = new HttpClient("www.elastic.co", 443).request("/");
        Assert.assertThat(response.errorCode(), is(200));

        logger.info("  -> Create river");
        try {
            createIndex(getDbName());
        } catch (IndexAlreadyExistsException e) {
            // No worries. We already created the index before
        }
        index("_river", getDbName(), "_meta", river);

        logger.info("  -> Wait for some docs");
        assertThat(awaitBusy1Second(new Predicate<Object>() {
            public boolean apply(Object obj) {
                try {
                    refresh();
                    CountResponse response = client().prepareCount(getDbName()).get();
                    logger.info("  -> got {} docs in {} index", response.getCount(), getDbName());
                    return response.getCount() >= numDocs;
                } catch (IndexMissingException e) {
                    // Index not created yet by the river: keep waiting.
                    return false;
                } catch (SearchPhaseExecutionException e) {
                    // Index not fully ready yet: keep waiting.
                    return false;
                }
            }
        }, 5, TimeUnit.MINUTES), equalTo(true));

        if (removeRiver) {
            logger.info("  -> Remove river");
            client().prepareDelete("_river", getDbName(), "_meta").get();
        }
    }

    @Test
    public void testLanguageFiltering() throws IOException, InterruptedException {
        launchTest(jsonBuilder()
                .startObject()
                    .field("type", "twitter")
                    .startObject("twitter")
                        .field("type", "filter")
                        .startObject("filter")
                            .field("tracks", "le")
                            .field("language", "fr")
                        .endObject()
                    .endObject()
                .endObject(), randomIntBetween(5, 50), true);

        // We should have only FR data
        SearchResponse response = client().prepareSearch(getDbName())
                .addField("language")
                .addField("_source")
                .get();

        logger.info("  --> Search response: {}", response.toString());

        // All language fields should be fr
        for (SearchHit hit : response.getHits().getHits()) {
            assertThat(hit.field("language"), notNullValue());
            assertThat(hit.field("language").getValue().toString(), is("fr"));
        }
    }

    @Test
    public void testIgnoreRT() throws IOException, InterruptedException {
        launchTest(jsonBuilder()
                .startObject()
                    .field("type", "twitter")
                    .startObject("twitter")
                        .field("type", "sample")
                        .field("ignore_retweet", true)
                    .endObject()
                .endObject(), randomIntBetween(5, 50), true);

        // Fetch the retweet id field: with ignore_retweet=true none of the
        // indexed docs should carry one.
        SearchResponse response = client().prepareSearch(getDbName())
                .addField("retweet.id")
                .get();

        logger.info("  --> Search response: {}", response.toString());

        // We should not have any RT
        for (SearchHit hit : response.getHits().getHits()) {
            assertThat(hit.field("retweet.id"), nullValue());
        }
    }

    @Test
    public void testRaw() throws IOException, InterruptedException {
        launchTest(jsonBuilder()
                .startObject()
                    .field("type", "twitter")
                    .startObject("twitter")
                        .field("raw", true)
                        .startObject("filter")
                            .field("tracks", track)
                        .endObject()
                    .endObject()
                .endObject(), randomIntBetween(5, 50), true);

        // We should have data we don't have without raw set to true
        SearchResponse response = client().prepareSearch(getDbName())
                .addField("user.statuses_count")
                .addField("_source")
                .get();

        logger.info("  --> Search response: {}", response.toString());

        for (SearchHit hit : response.getHits().getHits()) {
            assertThat(hit.field("user.statuses_count"), notNullValue());
        }
    }

    /**
     * Tracking twitter account: 783214
     */
    @Test
    public void testFollow() throws IOException, InterruptedException {
        launchTest(jsonBuilder()
                .startObject()
                    .field("type", "twitter")
                    .startObject("twitter")
                        .startObject("filter")
                            .field("follow", "783214")
                        .endObject()
                    .endObject()
                    .startObject("index")
                        .field("bulk_size", 1)
                    .endObject()
                .endObject(), 1, true);
    }

    /**
     * Tracking twitter lists and Zonal_Marking/Guardian100FootballBlogs,Zonal_Marking/football-journalists-3
     */
    @Test
    public void testFollowList() throws IOException, InterruptedException {
        launchTest(jsonBuilder()
                .startObject()
                    .field("type", "twitter")
                    .startObject("twitter")
                        .startObject("filter")
                            .field("user_lists", "Zonal_Marking/Guardian100FootballBlogs,Zonal_Marking/football-journalists-3")
                        .endObject()
                    .endObject()
                    .startObject("index")
                        .field("bulk_size", 1)
                    .endObject()
                .endObject(), 1, true);
    }

    @Test
    public void testTracks() throws IOException, InterruptedException {
        launchTest(jsonBuilder()
                .startObject()
                    .field("type", "twitter")
                    .startObject("twitter")
                        .startObject("filter")
                            .field("tracks", track)
                        .endObject()
                    .endObject()
                .endObject(), randomIntBetween(1, 10), true);

        // All indexed tweets should match the tracked keyword.
        SearchResponse response = client().prepareSearch(getDbName())
                .setQuery(QueryBuilders.queryStringQuery(track))
                .get();

        logger.info("  --> Search response: {}", response.toString());

        assertThat(response.getHits().getTotalHits(), greaterThan(0L));
    }

    @Test
    public void testSample() throws IOException, InterruptedException {
        launchTest(jsonBuilder()
                .startObject()
                    .field("type", "twitter")
                    .startObject("twitter")
                        .field("type", "sample")
                    .endObject()
                .endObject(), randomIntBetween(10, 200), true);
    }

    @Test
    public void testRetryAfter() throws IOException, InterruptedException {
        launchTest(jsonBuilder()
                .startObject()
                    .field("type", "twitter")
                    .startObject("twitter")
                        .field("type", "sample")
                        .field("retry_after", "10s")
                    .endObject()
                .endObject(), randomIntBetween(10, 200), true);
    }

    @Test
    public void testUserStream() throws IOException, InterruptedException, TwitterException {
        launchTest(jsonBuilder()
                .startObject()
                    .field("type", "twitter")
                    .startObject("twitter")
                        .field("type", "user")
                    .endObject()
                .endObject(), 0, false);

        // Wait for the river to start
        awaitBusy(new Predicate<Object>() {
            public boolean apply(Object obj) {
                try {
                    GetResponse response = get("_river", getDbName(), "_status");
                    return response.isExists();
                } catch (IndexMissingException e) {
                    return false;
                }
            }
        }, 10, TimeUnit.SECONDS);

        // The river could look started but it took actually some seconds
        // to get twitter stream up and running. So we wait 30 seconds more.
        awaitBusy1Second(new Predicate<Object>() {
            public boolean apply(Object obj) {
                return false;
            }
        }, 30, TimeUnit.SECONDS);

        // Generate a tweet on your timeline
        // We need to read settings from elasticsearch.yml file
        Settings settings = internalCluster().getInstance(Settings.class);
        AccessToken accessToken = new AccessToken(
                settings.get("river.twitter.oauth.access_token"),
                settings.get("river.twitter.oauth.access_token_secret"));

        Twitter twitter = new TwitterFactory().getInstance();
        twitter.setOAuthConsumer(
                settings.get("river.twitter.oauth.consumer_key"),
                settings.get("river.twitter.oauth.consumer_secret"));
        twitter.setOAuthAccessToken(accessToken);

        Status status = twitter.updateStatus("testing twitter river. Please ignore. " +
                DateTime.now().toString());
        logger.info("  -> tweet [{}] sent: [{}]", status.getId(), status.getText());

        // The user stream should pick up the tweet we just posted.
        assertThat(awaitBusy1Second(new Predicate<Object>() {
            public boolean apply(Object obj) {
                try {
                    refresh();
                    SearchResponse response = client().prepareSearch(getDbName()).get();
                    logger.info("  -> got {} docs in {} index", response.getHits().totalHits(), getDbName());
                    return response.getHits().totalHits() >= 1;
                } catch (IndexMissingException e) {
                    return false;
                }
            }
        }, 1, TimeUnit.MINUTES), is(true));

        logger.info("  -> Remove river");
        client().prepareDelete("_river", getDbName(), "_meta").get();
    }

    /**
     * Test for #51: https://github.com/elasticsearch/elasticsearch-river-twitter/issues/51
     */
    @Test
    public void testgeoAsArray() throws IOException, InterruptedException {
        launchTest(jsonBuilder()
                .startObject()
                    .field("type", "twitter")
                    .startObject("twitter")
                        .field("type", "sample")
                        .field("geo_as_array", true)
                    .endObject()
                .endObject(), randomIntBetween(1, 10), false);

        // We wait for geo located tweets (it could take a looooong time)
        if (!awaitBusy1Second(new Predicate<Object>() {
            public boolean apply(Object obj) {
                try {
                    refresh();
                    SearchResponse response = client().prepareSearch(getDbName())
                            .setPostFilter(
                                    QueryBuilders.geoDistanceQuery("location")
                                            .point(0, 0)
                                            .distance(10000, DistanceUnit.KILOMETERS)
                            )
                            .addField("_source")
                            .addField("location")
                            .get();

                    logger.info("  --> Search response: {}", response.toString());

                    for (SearchHit hit : response.getHits().getHits()) {
                        if (hit.field("location") != null) {
                            // We have a location field so it must be an array containing 2 values
                            assertThat(hit.field("location").getValues().size(), is(2));
                            return true;
                        }
                    }

                    return false;
                } catch (IndexMissingException e) {
                    return false;
                }
            }
        }, 5, TimeUnit.MINUTES)) {
            logger.warn("  -> We did not manage to get a geo localized tweet within 5 minutes. :(");
        }

        logger.info("  -> Remove river");
        client().prepareDelete("_river", getDbName(), "_meta").get();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.security;

import java.io.IOException;
import java.security.AccessController;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.Callable;

import org.apache.commons.lang.SerializationException;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.ShiroException;
import org.apache.shiro.session.Session;
import org.apache.shiro.subject.Subject;
import org.apache.shiro.subject.support.SubjectThreadState;
import org.apache.shiro.util.ThreadContext;
import org.apache.shiro.util.ThreadState;

import org.apache.geode.GemFireIOException;
import org.apache.geode.internal.cache.EntryEventImpl;
import org.apache.geode.internal.logging.LogService;
import org.apache.geode.internal.security.shiro.GeodeAuthenticationToken;
import org.apache.geode.internal.security.shiro.SecurityManagerProvider;
import org.apache.geode.internal.security.shiro.ShiroPrincipal;
import org.apache.geode.internal.util.BlobHelper;
import org.apache.geode.security.AuthenticationFailedException;
import org.apache.geode.security.AuthenticationRequiredException;
import org.apache.geode.security.GemFireSecurityException;
import org.apache.geode.security.NotAuthorizedException;
import org.apache.geode.security.PostProcessor;
import org.apache.geode.security.ResourcePermission;
import org.apache.geode.security.ResourcePermission.Operation;
import org.apache.geode.security.ResourcePermission.Resource;
import org.apache.geode.security.ResourcePermission.Target;
import org.apache.geode.security.SecurityManager;

/**
 * Security service with SecurityManager and an optional PostProcessor.
 *
 * <p>Authentication and authorization are delegated to Shiro; the shiro security manager is
 * installed once at construction time via {@link SecurityUtils#setSecurityManager}.
 */
public class IntegratedSecurityService implements SecurityService {

  // final: never reassigned; loggers should be constant per class
  private static final Logger logger = LogService.getLogger(LogService.SECURITY_LOGGER_NAME);

  /** Session attribute key under which the login credentials are stashed. */
  public static final String CREDENTIALS_SESSION_ATTRIBUTE = "credentials";

  private final PostProcessor postProcessor;
  private final SecurityManager securityManager;

  /**
   * this creates a security service using a SecurityManager
   *
   * @param provider this provides shiro security manager
   * @param postProcessor this can be null
   */
  IntegratedSecurityService(SecurityManagerProvider provider, PostProcessor postProcessor) {
    // provider must provide a shiro security manager, otherwise, this is not integrated security
    // service at all.
    assert provider.getShiroSecurityManager() != null;
    SecurityUtils.setSecurityManager(provider.getShiroSecurityManager());
    this.securityManager = provider.getSecurityManager();
    this.postProcessor = postProcessor;
  }

  @Override
  public PostProcessor getPostProcessor() {
    return this.postProcessor;
  }

  @Override
  public SecurityManager getSecurityManager() {
    return this.securityManager;
  }

  /**
   * It first looks the shiro subject in AccessControlContext since JMX will use multiple threads to
   * process operations from the same client, then it looks into Shiro's thead context.
   *
   * @return the shiro subject (never null)
   * @throws AuthenticationRequiredException if no authenticated user can be found on either the
   *         AccessControlContext or Shiro's thread context
   */
  @Override
  public Subject getSubject() {
    Subject currentUser;

    // First try get the principal out of AccessControlContext instead of Shiro's Thread context
    // since threads can be shared between JMX clients.
    javax.security.auth.Subject jmxSubject =
        javax.security.auth.Subject.getSubject(AccessController.getContext());

    if (jmxSubject != null) {
      Set<ShiroPrincipal> principals = jmxSubject.getPrincipals(ShiroPrincipal.class);
      if (!principals.isEmpty()) {
        ShiroPrincipal principal = principals.iterator().next();
        currentUser = principal.getSubject();
        // re-bind so subsequent calls on this thread find the same subject
        ThreadContext.bind(currentUser);
        return currentUser;
      }
    }

    // in other cases like rest call, client operations, we get it from the current thread
    currentUser = SecurityUtils.getSubject();

    if (currentUser == null || currentUser.getPrincipal() == null) {
      throw new AuthenticationRequiredException("Failed to find the authenticated user.");
    }

    return currentUser;
  }

  /**
   * Authenticates with the given credentials and binds the resulting subject to this thread.
   *
   * @param credentials the login credentials; must not be null
   * @return return a shiro subject
   * @throws AuthenticationRequiredException if {@code credentials} is null
   * @throws AuthenticationFailedException if Shiro rejects the credentials
   */
  @Override
  public Subject login(final Properties credentials) {
    if (credentials == null) {
      throw new AuthenticationRequiredException("credentials are null");
    }

    // this makes sure it starts with a clean user object
    ThreadContext.remove();

    Subject currentUser = SecurityUtils.getSubject();
    GeodeAuthenticationToken token = new GeodeAuthenticationToken(credentials);
    try {
      logger.debug("Logging in {}", token.getPrincipal());
      currentUser.login(token);
    } catch (ShiroException e) {
      logger.info("error logging in: {}", token.getPrincipal());
      throw new AuthenticationFailedException(
          "Authentication error. Please check your credentials.", e);
    }

    // keep the raw credentials on the session so they can be retrieved later
    Session currentSession = currentUser.getSession();
    currentSession.setAttribute(CREDENTIALS_SESSION_ATTRIBUTE, credentials);
    return currentUser;
  }

  @Override
  public void logout() {
    Subject currentUser = getSubject();
    try {
      logger.debug("Logging out {}", currentUser.getPrincipal());
      currentUser.logout();
    } catch (ShiroException e) {
      logger.info("error logging out: {}", currentUser.getPrincipal());
      throw new GemFireSecurityException(e.getMessage(), e);
    }
    // clean out Shiro's thread local content
    ThreadContext.remove();
  }

  @Override
  public Callable associateWith(final Callable callable) {
    Subject currentUser = getSubject();
    return currentUser.associateWith(callable);
  }

  /**
   * Binds the passed-in subject to the executing thread. Usage:
   *
   * <pre>
   * ThreadState state = null;
   * try {
   *   state = securityService.bindSubject(subject);
   *   // do the rest of the work as this subject
   * } finally {
   *   if (state != null)
   *     state.clear();
   * }
   * </pre>
   */
  @Override
  public ThreadState bindSubject(final Subject subject) {
    if (subject == null) {
      throw new AuthenticationRequiredException("Failed to find the authenticated user.");
    }

    ThreadState threadState = new SubjectThreadState(subject);
    threadState.bind();
    return threadState;
  }

  @Override
  public void authorize(Resource resource, Operation operation) {
    authorize(resource, operation, Target.ALL, ResourcePermission.ALL);
  }

  @Override
  public void authorize(Resource resource, Operation operation, Target target) {
    authorize(resource, operation, target, ResourcePermission.ALL);
  }

  @Override
  public void authorize(Resource resource, Operation operation, String target) {
    authorize(resource, operation, target, ResourcePermission.ALL);
  }

  @Override
  public void authorize(Resource resource, Operation operation, Target target, String key) {
    authorize(new ResourcePermission(resource, operation, target, key));
  }

  @Override
  public void authorize(Resource resource, Operation operation, String target, String key) {
    authorize(new ResourcePermission(resource, operation, target, key));
  }

  /**
   * Authorizes the current thread's subject for {@code context}.
   *
   * @throws NotAuthorizedException if the subject lacks the permission
   */
  @Override
  public void authorize(final ResourcePermission context) {
    if (context == null) {
      return;
    }

    if (context.getResource() == Resource.NULL && context.getOperation() == Operation.NULL) {
      return;
    }

    // note: getSubject() is only invoked after the guards above, so a null/NULL permission
    // never triggers an AuthenticationRequiredException
    authorize(context, getSubject());
  }

  /**
   * Authorizes the given subject for {@code context}.
   *
   * @throws NotAuthorizedException if the subject lacks the permission
   */
  @Override
  public void authorize(ResourcePermission context, Subject currentUser) {
    if (context == null) {
      return;
    }

    if (context.getResource() == Resource.NULL && context.getOperation() == Operation.NULL) {
      return;
    }

    try {
      currentUser.checkPermission(context);
    } catch (ShiroException e) {
      String msg = currentUser.getPrincipal() + " not authorized for " + context;
      logger.info("NotAuthorizedException: {}", msg);
      throw new NotAuthorizedException(msg, e);
    }
  }

  @Override
  public void close() {
    if (this.securityManager != null) {
      this.securityManager.close();
    }

    if (this.postProcessor != null) {
      this.postProcessor.close();
    }
    ThreadContext.remove();
    SecurityUtils.setSecurityManager(null);
  }

  /**
   * postProcess call already has this logic built in, you don't need to call this everytime you
   * call postProcess. But if your postProcess is pretty involved with preparations and you need to
   * bypass it entirely, call this first.
   */
  @Override
  public boolean needPostProcess() {
    return this.postProcessor != null;
  }

  @Override
  public Object postProcess(final String regionPath, final Object key, final Object value,
      final boolean valueIsSerialized) {
    return postProcess(null, regionPath, key, value, valueIsSerialized);
  }

  /**
   * Runs the configured PostProcessor over a region value, deserializing/reserializing the value
   * when it is a serialized byte[] so the callback sees the real object.
   */
  @Override
  public Object postProcess(Object principal, final String regionPath, final Object key,
      final Object value, final boolean valueIsSerialized) {
    if (!needPostProcess()) {
      return value;
    }

    if (principal == null) {
      principal = getSubject().getPrincipal();
    }

    String regionName = StringUtils.stripStart(regionPath, "/");
    Object newValue;

    // if the data is a byte array, but the data itself is supposed to be an object, we need to
    // deserialize it before we pass it to the callback.
    if (valueIsSerialized && value instanceof byte[]) {
      try {
        Object oldObj = EntryEventImpl.deserialize((byte[]) value);
        Object newObj = this.postProcessor.processRegionValue(principal, regionName, key, oldObj);
        newValue = BlobHelper.serializeToBlob(newObj);
      } catch (IOException | SerializationException e) {
        throw new GemFireIOException("Exception de/serializing entry value", e);
      }
    } else {
      newValue = this.postProcessor.processRegionValue(principal, regionName, key, value);
    }

    return newValue;
  }

  @Override
  public boolean isIntegratedSecurity() {
    return true;
  }

  @Override
  public boolean isClientSecurityRequired() {
    return true;
  }

  @Override
  public boolean isPeerSecurityRequired() {
    return true;
  }
}
package com.cwjcsu.learning.network.multicast;

import java.awt.Checkbox;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.InetAddress;
import java.net.MulticastSocket;

import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JRootPane;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.JTextField;

/**
 * Small Swing client that sends the text-area contents as a UDP multicast datagram to up to four
 * destinations (IP + port per checked row) and appends each reply to the text area.
 *
 * @author atlas
 * @date 2012-9-28
 */
public class UDPMulticastClient {
    JFrame frame;
    JPanel panel;
    JTextField field1, field2, field3, field4, field5, field6, field7, field8;
    JTextArea area;
    JScrollPane pane;
    JLabel label;
    JButton button;
    JList list;
    Checkbox check1, check2, check3, check4;

    public static void main(String[] args) {
        new UDPMulticastClient();
    }

    /** Builds the fixed-layout UI (null layout, absolute bounds) and shows the frame. */
    public UDPMulticastClient() {
        frame = new JFrame("UDP Broadcast Client");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.setUndecorated(true);
        frame.getRootPane().setWindowDecorationStyle(JRootPane.PLAIN_DIALOG);

        panel = new JPanel();
        panel.setLayout(null);

        label = new JLabel("Destination IP");
        label.setBounds(80, 5, 100, 30);
        panel.add(label);

        // row 1
        check1 = new Checkbox();
        check1.setBounds(5, 40, 20, 20);
        panel.add(check1);
        label = new JLabel("Client 1 :");
        label.setBounds(25, 35, 60, 30);
        panel.add(label);
        field1 = new JTextField(20);
        field1.setBounds(80, 35, 120, 20);
        panel.add(field1);

        label = new JLabel("Destination Port");
        label.setBounds(255, 5, 100, 30);
        panel.add(label);
        field2 = new JTextField(10);
        field2.setBounds(255, 35, 100, 20);
        panel.add(field2);

        // row 2
        check2 = new Checkbox();
        check2.setBounds(5, 70, 20, 20);
        panel.add(check2);
        label = new JLabel("Client 2 :");
        label.setBounds(25, 65, 60, 30);
        panel.add(label);
        field3 = new JTextField(20);
        field3.setBounds(80, 65, 120, 20);
        panel.add(field3);
        field4 = new JTextField(10);
        field4.setBounds(255, 65, 100, 20);
        panel.add(field4);

        // row 3
        check3 = new Checkbox();
        check3.setBounds(5, 100, 20, 20);
        panel.add(check3);
        label = new JLabel("Client 3 :");
        label.setBounds(25, 95, 60, 30);
        panel.add(label);
        field5 = new JTextField(20);
        field5.setBounds(80, 95, 120, 20);
        panel.add(field5);
        field6 = new JTextField(10);
        field6.setBounds(255, 95, 100, 20);
        panel.add(field6);

        // row 4
        check4 = new Checkbox();
        check4.setBounds(5, 130, 20, 20);
        panel.add(check4);
        label = new JLabel("Client 4 :");
        label.setBounds(25, 125, 60, 30);
        panel.add(label);
        field7 = new JTextField(20);
        field7.setBounds(80, 125, 120, 20);
        panel.add(field7);
        field8 = new JTextField(10);
        field8.setBounds(255, 125, 100, 20);
        panel.add(field8);

        label = new JLabel("Message:");
        label.setBounds(10, 160, 80, 30);
        panel.add(label);
        area = new JTextArea();
        pane = new JScrollPane(area);
        pane.setBounds(10, 190, 300, 200);
        panel.add(pane);

        button = new JButton("Send");
        button.setBounds(235, 410, 75, 30);
        button.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                new SendRequest();
            }
        });
        panel.add(button);

        frame.add(panel);
        frame.setSize(400, 500);
        frame.setVisible(true);
    }

    /**
     * Sends the current text-area content to one destination, clears the area, waits for a reply
     * and appends it. One step of the per-row protocol: read area -> send -> clear -> receive ->
     * append.
     *
     * @param destinationIp host name or (multicast) IP address to join and send to
     * @param portText destination port as typed by the user
     * @throws IOException on any socket failure
     */
    private void sendAndReceive(String destinationIp, String portText) throws IOException {
        InetAddress address = InetAddress.getByName(destinationIp);
        MulticastSocket socket = new MulticastSocket();
        try {
            socket.joinGroup(address);
            int portNumber = Integer.parseInt(portText);
            String mess = area.getText();
            byte[] message = mess.getBytes();
            DatagramPacket packet =
                    new DatagramPacket(message, message.length, address, portNumber);
            socket.send(packet);
            area.setText("");

            // For Received message.
            // Fix: use a fresh buffer instead of reusing the send buffer — reusing it truncated
            // any reply longer than the request.
            byte[] buffer = new byte[4096];
            DatagramPacket reply = new DatagramPacket(buffer, buffer.length);
            socket.receive(reply);
            // Fix: honor getLength() so stale buffer bytes are not included in the string.
            String recmessage = new String(reply.getData(), 0, reply.getLength());
            area.append("Received from server: " + recmessage);
        } finally {
            socket.close(); // close even when send/receive fails
        }
    }

    /**
     * Created per "Send" click; its constructor performs the send/receive for every checked row.
     */
    public class SendRequest {
        SendRequest() {
            try {
                // Previously four copy-pasted blocks; now one helper per checked row.
                if (check1.getState()) {
                    sendAndReceive(field1.getText(), field2.getText());
                }
                if (check2.getState()) {
                    sendAndReceive(field3.getText(), field4.getText());
                }
                if (check3.getState()) {
                    sendAndReceive(field5.getText(), field6.getText());
                }
                if (check4.getState()) {
                    sendAndReceive(field7.getText(), field8.getText());
                }
            } catch (IOException io) {
                // was silently swallowed; surface the failure to the user instead
                area.append("I/O error: " + io.getMessage());
            }
        }
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.index.engine;

import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortedNumericSelector;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.search.SortedSetSelector;
import org.apache.lucene.search.SortedSetSortField;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.unit.ByteSizeValue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * Metadata about one Lucene segment (name, doc counts, size, sort, optional RAM-usage tree),
 * serializable over the transport layer.
 */
public class Segment implements Writeable {

    private String name;
    // derived from the segment name (base-36 suffix), never serialized directly
    private long generation;
    public boolean committed;
    public boolean search;
    public long sizeInBytes = -1;
    public int docCount = -1;
    public int delDocCount = -1;
    // may be null (e.g. for a Segment built via Segment(String) that is never populated)
    public org.apache.lucene.util.Version version = null;
    public Boolean compound = null;
    public String mergeId;
    public long memoryInBytes;
    public Sort segmentSort;
    public Accountable ramTree = null;
    public Map<String, String> attributes;

    /**
     * Reads a Segment in exactly the order written by {@link #writeTo(StreamOutput)}.
     */
    public Segment(StreamInput in) throws IOException {
        name = in.readString();
        // generation is recomputed from the name rather than transmitted
        generation = Long.parseLong(name.substring(1), Character.MAX_RADIX);
        committed = in.readBoolean();
        search = in.readBoolean();
        docCount = in.readInt();
        delDocCount = in.readInt();
        sizeInBytes = in.readLong();
        version = Lucene.parseVersionLenient(in.readOptionalString(), null);
        compound = in.readOptionalBoolean();
        mergeId = in.readOptionalString();
        memoryInBytes = in.readLong();
        if (in.readBoolean()) {
            // verbose mode
            ramTree = readRamTree(in);
        }
        segmentSort = readSegmentSort(in);
        if (in.readBoolean()) {
            attributes = in.readMap(StreamInput::readString, StreamInput::readString);
        } else {
            attributes = null;
        }
    }

    public Segment(String name) {
        this.name = name;
        this.generation = Long.parseLong(name.substring(1), Character.MAX_RADIX);
    }

    public String getName() {
        return this.name;
    }

    public long getGeneration() {
        return this.generation;
    }

    public boolean isCommitted() {
        return this.committed;
    }

    public boolean isSearch() {
        return this.search;
    }

    public int getNumDocs() {
        return this.docCount;
    }

    public int getDeletedDocs() {
        return this.delDocCount;
    }

    public ByteSizeValue getSize() {
        return new ByteSizeValue(sizeInBytes);
    }

    public org.apache.lucene.util.Version getVersion() {
        return version;
    }

    @Nullable
    public Boolean isCompound() {
        return compound;
    }

    /**
     * If set, a string representing that the segment is part of a merge, with the value representing the
     * group of segments that represent this merge.
     */
    @Nullable
    public String getMergeId() {
        return this.mergeId;
    }

    /**
     * Estimation of the memory usage used by a segment.
     */
    public long getMemoryInBytes() {
        return this.memoryInBytes;
    }

    /**
     * Return the sort order of this segment, or null if the segment has no sort.
     */
    public Sort getSegmentSort() {
        return segmentSort;
    }

    /**
     * Return segment attributes.
     * @see org.apache.lucene.index.SegmentInfo#getAttributes()
     */
    public Map<String, String> getAttributes() {
        return attributes;
    }

    // equality is by segment name only; all other fields are ignored
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        Segment segment = (Segment) o;

        return Objects.equals(name, segment.name);
    }

    @Override
    public int hashCode() {
        return name != null ? name.hashCode() : 0;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(name);
        out.writeBoolean(committed);
        out.writeBoolean(search);
        out.writeInt(docCount);
        out.writeInt(delDocCount);
        out.writeLong(sizeInBytes);
        // fix: version may be null (see field declaration); writeOptionalString accepts null,
        // but calling version.toString() unguarded threw a NullPointerException
        out.writeOptionalString(version == null ? null : version.toString());
        out.writeOptionalBoolean(compound);
        out.writeOptionalString(mergeId);
        out.writeLong(memoryInBytes);

        boolean verbose = ramTree != null;
        out.writeBoolean(verbose);
        if (verbose) {
            writeRamTree(out, ramTree);
        }
        writeSegmentSort(out, segmentSort);
        boolean hasAttributes = attributes != null;
        out.writeBoolean(hasAttributes);
        if (hasAttributes) {
            out.writeMap(attributes, StreamOutput::writeString, StreamOutput::writeString);
        }
    }

    /**
     * Reads a {@link Sort} written by {@link #writeSegmentSort}. A leading vInt of 0 means
     * "no sort". Per field a type tag follows: 0 = string (SortedSet), 1..4 = numeric
     * (INT/FLOAT/DOUBLE/LONG).
     */
    private Sort readSegmentSort(StreamInput in) throws IOException {
        int size = in.readVInt();
        if (size == 0) {
            return null;
        }
        SortField[] fields = new SortField[size];
        for (int i = 0; i < size; i++) {
            String field = in.readString();
            byte type = in.readByte();
            if (type == 0) {
                Boolean missingFirst = in.readOptionalBoolean();
                boolean max = in.readBoolean();
                boolean reverse = in.readBoolean();
                fields[i] = new SortedSetSortField(field, reverse,
                    max ? SortedSetSelector.Type.MAX : SortedSetSelector.Type.MIN);
                if (missingFirst != null) {
                    fields[i].setMissingValue(
                        missingFirst ? SortedSetSortField.STRING_FIRST : SortedSetSortField.STRING_LAST);
                }
            } else {
                Object missing = in.readGenericValue();
                boolean max = in.readBoolean();
                boolean reverse = in.readBoolean();
                final SortField.Type numericType;
                switch (type) {
                    case 1:
                        numericType = SortField.Type.INT;
                        break;
                    case 2:
                        numericType = SortField.Type.FLOAT;
                        break;
                    case 3:
                        numericType = SortField.Type.DOUBLE;
                        break;
                    case 4:
                        numericType = SortField.Type.LONG;
                        break;
                    default:
                        throw new IOException("invalid index sort type:[" + type +
                            "] for numeric field:[" + field + "]");
                }
                fields[i] = new SortedNumericSortField(field, numericType, reverse,
                    max ? SortedNumericSelector.Type.MAX : SortedNumericSelector.Type.MIN);
                if (missing != null) {
                    fields[i].setMissingValue(missing);
                }
            }
        }
        return new Sort(fields);
    }

    /**
     * Writes {@code sort} in the format read by {@link #readSegmentSort}; a null sort is encoded
     * as a 0-length field list.
     */
    private void writeSegmentSort(StreamOutput out, Sort sort) throws IOException {
        if (sort == null) {
            out.writeVInt(0);
            return;
        }
        out.writeVInt(sort.getSort().length);
        for (SortField field : sort.getSort()) {
            out.writeString(field.getField());
            if (field instanceof SortedSetSortField) {
                out.writeByte((byte) 0);
                out.writeOptionalBoolean(field.getMissingValue() == null ?
                    null : field.getMissingValue() == SortField.STRING_FIRST);
                out.writeBoolean(((SortedSetSortField) field).getSelector() == SortedSetSelector.Type.MAX);
                out.writeBoolean(field.getReverse());
            } else if (field instanceof SortedNumericSortField) {
                switch (((SortedNumericSortField) field).getNumericType()) {
                    case INT:
                        out.writeByte((byte) 1);
                        break;
                    case FLOAT:
                        out.writeByte((byte) 2);
                        break;
                    case DOUBLE:
                        out.writeByte((byte) 3);
                        break;
                    case LONG:
                        out.writeByte((byte) 4);
                        break;
                    default:
                        throw new IOException("invalid index sort field:" + field);
                }
                out.writeGenericValue(field.getMissingValue());
                out.writeBoolean(((SortedNumericSortField) field).getSelector() == SortedNumericSelector.Type.MAX);
                out.writeBoolean(field.getReverse());
            } else {
                throw new IOException("invalid index sort field:" + field);
            }
        }
    }

    private Accountable readRamTree(StreamInput in) throws IOException {
        final String name = in.readString();
        final long bytes = in.readVLong();
        int numChildren = in.readVInt();
        if (numChildren == 0) {
            return Accountables.namedAccountable(name, bytes);
        }
        List<Accountable> children = new ArrayList<>(numChildren);
        while (numChildren-- > 0) {
            children.add(readRamTree(in));
        }
        return Accountables.namedAccountable(name, children, bytes);
    }

    // the ram tree is written recursively since the depth is fairly low (5 or 6)
    private void writeRamTree(StreamOutput out, Accountable tree) throws IOException {
        out.writeString(tree.toString());
        out.writeVLong(tree.ramBytesUsed());
        Collection<Accountable> children = tree.getChildResources();
        out.writeVInt(children.size());
        for (Accountable child : children) {
            writeRamTree(out, child);
        }
    }

    @Override
    public String toString() {
        return "Segment{" +
            "name='" + name + '\'' +
            ", generation=" + generation +
            ", committed=" + committed +
            ", search=" + search +
            ", sizeInBytes=" + sizeInBytes +
            ", docCount=" + docCount +
            ", delDocCount=" + delDocCount +
            ", version='" + version + '\'' +
            ", compound=" + compound +
            ", mergeId='" + mergeId + '\'' +
            ", memoryInBytes=" + memoryInBytes +
            (segmentSort != null ? ", sort=" + segmentSort : "") +
            ", attributes=" + attributes +
            '}';
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.protocolPB; import java.io.Closeable; import java.io.IOException; import java.net.InetSocketAddress; import java.util.List; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.protocol.BlockListAsLongs; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.RollingUpgradeStatus; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReceivedAndDeletedRequestProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportRequestProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportResponseProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.CacheReportRequestProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.CacheReportResponseProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.CommitBlockSynchronizationRequestProto; import 
org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeCommandProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.ErrorReportRequestProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.HeartbeatRequestProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.HeartbeatResponseProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.RegisterDatanodeRequestProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.RegisterDatanodeResponseProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.ReportBadBlocksRequestProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.StorageBlockReportProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.StorageReceivedDeletedBlocksProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto; import org.apache.hadoop.hdfs.server.protocol.BlockReportContext; import org.apache.hadoop.hdfs.server.protocol.DatanodeCommand; import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol; import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration; import org.apache.hadoop.hdfs.server.protocol.HeartbeatResponse; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo.Capability; import org.apache.hadoop.hdfs.server.protocol.ReceivedDeletedBlockInfo; import org.apache.hadoop.hdfs.server.protocol.StorageBlockReport; import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks; import org.apache.hadoop.hdfs.server.protocol.StorageReport; import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary; import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; import org.apache.hadoop.net.NetUtils; import 
org.apache.hadoop.security.UserGroupInformation;

import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;

/**
 * This class is the client side translator to translate the requests made on
 * {@link DatanodeProtocol} interfaces to the RPC server implementing
 * {@link DatanodeProtocolPB}.
 *
 * <p>Each method converts the Java-side arguments into the corresponding
 * protobuf request message, performs the RPC through {@link #rpcProxy}, and
 * converts the protobuf response back via {@code PBHelper}. All
 * {@link ServiceException}s thrown by the proxy are unwrapped into
 * {@link IOException}s through {@code ProtobufHelper.getRemoteException}.
 */
@InterfaceAudience.Private
@InterfaceStability.Stable
public class DatanodeProtocolClientSideTranslatorPB implements
    ProtocolMetaInterface, DatanodeProtocol, Closeable {

  /** Underlying protobuf RPC proxy to the NameNode; closed by {@link #close()}. */
  private final DatanodeProtocolPB rpcProxy;

  /** Shared empty request message for {@link #versionRequest()}. */
  private static final VersionRequestProto VOID_VERSION_REQUEST =
      VersionRequestProto.newBuilder().build();

  /** RpcController is not used and hence is set to null. */
  private final static RpcController NULL_CONTROLLER = null;

  /**
   * Creates a translator around an existing proxy.
   *
   * @param rpcProxy the protobuf proxy to delegate to
   */
  @VisibleForTesting
  public DatanodeProtocolClientSideTranslatorPB(DatanodeProtocolPB rpcProxy) {
    this.rpcProxy = rpcProxy;
  }

  /**
   * Creates a translator that connects to the NameNode at the given address
   * as the current user.
   *
   * @param nameNodeAddr address of the NameNode RPC endpoint
   * @param conf configuration used for the RPC engine and socket factory
   * @throws IOException if the proxy cannot be created
   */
  public DatanodeProtocolClientSideTranslatorPB(InetSocketAddress nameNodeAddr,
      Configuration conf) throws IOException {
    RPC.setProtocolEngine(conf, DatanodeProtocolPB.class,
        ProtobufRpcEngine.class);
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    rpcProxy = createNamenode(nameNodeAddr, conf, ugi);
  }

  /** Builds the protobuf RPC proxy to the NameNode for the given user. */
  private static DatanodeProtocolPB createNamenode(
      InetSocketAddress nameNodeAddr, Configuration conf,
      UserGroupInformation ugi) throws IOException {
    return RPC.getProtocolProxy(DatanodeProtocolPB.class,
        RPC.getProtocolVersion(DatanodeProtocolPB.class), nameNodeAddr, ugi,
        conf, NetUtils.getSocketFactory(conf, DatanodeProtocolPB.class),
        org.apache.hadoop.ipc.Client.getPingInterval(conf), null).getProxy();
  }

  /** Stops the underlying RPC proxy. */
  @Override
  public void close() throws IOException {
    RPC.stopProxy(rpcProxy);
  }

  /**
   * Registers this datanode with the NameNode and returns the (possibly
   * updated) registration issued by the server.
   */
  @Override
  public DatanodeRegistration registerDatanode(DatanodeRegistration registration
      ) throws IOException {
    RegisterDatanodeRequestProto.Builder builder = RegisterDatanodeRequestProto
        .newBuilder().setRegistration(PBHelper.convert(registration));
    RegisterDatanodeResponseProto resp;
    try {
      resp = rpcProxy.registerDatanode(NULL_CONTROLLER, builder.build());
    } catch (ServiceException se) {
      throw ProtobufHelper.getRemoteException(se);
    }
    return PBHelper.convert(resp.getRegistration());
  }

  /**
   * Sends a heartbeat with the current storage reports and resource usage,
   * returning the commands the NameNode wants this datanode to execute.
   */
  @Override
  public HeartbeatResponse sendHeartbeat(DatanodeRegistration registration,
      StorageReport[] reports, long cacheCapacity, long cacheUsed,
      int xmitsInProgress, int xceiverCount, int failedVolumes,
      VolumeFailureSummary volumeFailureSummary,
      boolean requestFullBlockReportLease) throws IOException {
    HeartbeatRequestProto.Builder builder = HeartbeatRequestProto.newBuilder()
        .setRegistration(PBHelper.convert(registration))
        .setXmitsInProgress(xmitsInProgress).setXceiverCount(xceiverCount)
        .setFailedVolumes(failedVolumes)
        .setRequestFullBlockReportLease(requestFullBlockReportLease);
    builder.addAllReports(PBHelper.convertStorageReports(reports));
    // Cache fields are only set when non-zero, leaving the proto fields at
    // their defaults otherwise (presumably to keep messages small — confirm
    // against the .proto defaults).
    if (cacheCapacity != 0) {
      builder.setCacheCapacity(cacheCapacity);
    }
    if (cacheUsed != 0) {
      builder.setCacheUsed(cacheUsed);
    }
    if (volumeFailureSummary != null) {
      builder.setVolumeFailureSummary(PBHelper.convertVolumeFailureSummary(
          volumeFailureSummary));
    }
    HeartbeatResponseProto resp;
    try {
      resp = rpcProxy.sendHeartbeat(NULL_CONTROLLER, builder.build());
    } catch (ServiceException se) {
      throw ProtobufHelper.getRemoteException(se);
    }
    // Convert each returned command proto back into a DatanodeCommand.
    DatanodeCommand[] cmds = new DatanodeCommand[resp.getCmdsList().size()];
    int index = 0;
    for (DatanodeCommandProto p : resp.getCmdsList()) {
      cmds[index] = PBHelper.convert(p);
      index++;
    }
    // Rolling-upgrade status is optional in the response.
    RollingUpgradeStatus rollingUpdateStatus = null;
    if (resp.hasRollingUpgradeStatus()) {
      rollingUpdateStatus = PBHelper.convert(resp.getRollingUpgradeStatus());
    }
    return new HeartbeatResponse(cmds, PBHelper.convert(resp.getHaStatus()),
        rollingUpdateStatus, resp.getFullBlockReportLeaseId());
  }

  /**
   * Sends a full block report for each storage. When the NameNode advertises
   * the STORAGE_BLOCK_REPORT_BUFFERS capability, the block list is shipped as
   * pre-encoded buffers; otherwise it falls back to the legacy repeated-long
   * encoding.
   *
   * @return the command the NameNode returned, or {@code null} if none
   */
  @Override
  public DatanodeCommand blockReport(DatanodeRegistration registration,
      String poolId, StorageBlockReport[] reports, BlockReportContext context)
      throws IOException {
    BlockReportRequestProto.Builder builder = BlockReportRequestProto
        .newBuilder().setRegistration(PBHelper.convert(registration))
        .setBlockPoolId(poolId);
    boolean useBlocksBuffer = registration.getNamespaceInfo()
        .isCapabilitySupported(Capability.STORAGE_BLOCK_REPORT_BUFFERS);
    for (StorageBlockReport r : reports) {
      StorageBlockReportProto.Builder reportBuilder = StorageBlockReportProto
          .newBuilder().setStorage(PBHelper.convert(r.getStorage()));
      BlockListAsLongs blocks = r.getBlocks();
      if (useBlocksBuffer) {
        reportBuilder.setNumberOfBlocks(blocks.getNumberOfBlocks());
        reportBuilder.addAllBlocksBuffers(blocks.getBlocksBuffers());
      } else {
        // Legacy encoding: one long per entry.
        for (long value : blocks.getBlockListAsLongs()) {
          reportBuilder.addBlocks(value);
        }
      }
      builder.addReports(reportBuilder.build());
    }
    builder.setContext(PBHelper.convert(context));
    BlockReportResponseProto resp;
    try {
      resp = rpcProxy.blockReport(NULL_CONTROLLER, builder.build());
    } catch (ServiceException se) {
      throw ProtobufHelper.getRemoteException(se);
    }
    return resp.hasCmd() ? PBHelper.convert(resp.getCmd()) : null;
  }

  /**
   * Reports the blocks currently cached by this datanode for the given pool.
   *
   * @return the command the NameNode returned, or {@code null} if none
   */
  @Override
  public DatanodeCommand cacheReport(DatanodeRegistration registration,
      String poolId, List<Long> blockIds) throws IOException {
    CacheReportRequestProto.Builder builder =
        CacheReportRequestProto.newBuilder()
        .setRegistration(PBHelper.convert(registration))
        .setBlockPoolId(poolId);
    for (Long blockId : blockIds) {
      builder.addBlocks(blockId);
    }
    CacheReportResponseProto resp;
    try {
      resp = rpcProxy.cacheReport(NULL_CONTROLLER, builder.build());
    } catch (ServiceException se) {
      throw ProtobufHelper.getRemoteException(se);
    }
    if (resp.hasCmd()) {
      return PBHelper.convert(resp.getCmd());
    }
    return null;
  }

  /**
   * Notifies the NameNode of blocks recently received or deleted, grouped by
   * storage. Fire-and-forget: the response carries no payload.
   */
  @Override
  public void blockReceivedAndDeleted(DatanodeRegistration registration,
      String poolId, StorageReceivedDeletedBlocks[] receivedAndDeletedBlocks)
      throws IOException {
    BlockReceivedAndDeletedRequestProto.Builder builder =
        BlockReceivedAndDeletedRequestProto.newBuilder()
        .setRegistration(PBHelper.convert(registration))
        .setBlockPoolId(poolId);
    for (StorageReceivedDeletedBlocks storageBlock : receivedAndDeletedBlocks) {
      StorageBlockReportProto ignoredPlaceholder = null; // (no-op; see below)
      StorageReceivedDeletedBlocksProto.Builder repBuilder =
          StorageReceivedDeletedBlocksProto.newBuilder();
      repBuilder.setStorageUuid(storageBlock.getStorage().getStorageID());
      // Set for wire compatibility.
      repBuilder.setStorage(PBHelper.convert(storageBlock.getStorage()));
      for (ReceivedDeletedBlockInfo rdBlock : storageBlock.getBlocks()) {
        repBuilder.addBlocks(PBHelper.convert(rdBlock));
      }
      builder.addBlocks(repBuilder.build());
    }
    try {
      rpcProxy.blockReceivedAndDeleted(NULL_CONTROLLER, builder.build());
    } catch (ServiceException se) {
      throw ProtobufHelper.getRemoteException(se);
    }
  }

  /**
   * Reports an error condition on this datanode to the NameNode.
   *
   * <p>NOTE(review): {@code setRegistartion} [sic] appears to be the
   * generated protobuf setter for a misspelled field name; do not "fix" the
   * spelling here without changing the .proto — TODO confirm.
   */
  @Override
  public void errorReport(DatanodeRegistration registration, int errorCode,
      String msg) throws IOException {
    ErrorReportRequestProto req = ErrorReportRequestProto.newBuilder()
        .setRegistartion(PBHelper.convert(registration))
        .setErrorCode(errorCode).setMsg(msg).build();
    try {
      rpcProxy.errorReport(NULL_CONTROLLER, req);
    } catch (ServiceException se) {
      throw ProtobufHelper.getRemoteException(se);
    }
  }

  /** Fetches the NameNode's namespace/version information. */
  @Override
  public NamespaceInfo versionRequest() throws IOException {
    try {
      return PBHelper.convert(rpcProxy.versionRequest(NULL_CONTROLLER,
          VOID_VERSION_REQUEST).getInfo());
    } catch (ServiceException e) {
      throw ProtobufHelper.getRemoteException(e);
    }
  }

  /** Reports corrupt block replicas to the NameNode. */
  @Override
  public void reportBadBlocks(LocatedBlock[] blocks) throws IOException {
    ReportBadBlocksRequestProto.Builder builder = ReportBadBlocksRequestProto
        .newBuilder();
    for (int i = 0; i < blocks.length; i++) {
      builder.addBlocks(i, PBHelper.convert(blocks[i]));
    }
    ReportBadBlocksRequestProto req = builder.build();
    try {
      rpcProxy.reportBadBlocks(NULL_CONTROLLER, req);
    } catch (ServiceException se) {
      throw ProtobufHelper.getRemoteException(se);
    }
  }

  /**
   * Commits the synchronization of a block after lease recovery.
   *
   * <p>NOTE(review): {@code addNewTaragets} [sic] matches the generated
   * protobuf adder for a misspelled repeated field; renaming would require a
   * .proto change — TODO confirm. {@code newtargets} and
   * {@code newtargetstorages} are assumed to be parallel arrays of equal
   * length — verify against callers.
   */
  @Override
  public void commitBlockSynchronization(ExtendedBlock block,
      long newgenerationstamp, long newlength, boolean closeFile,
      boolean deleteblock, DatanodeID[] newtargets, String[] newtargetstorages
      ) throws IOException {
    CommitBlockSynchronizationRequestProto.Builder builder =
        CommitBlockSynchronizationRequestProto.newBuilder()
        .setBlock(PBHelper.convert(block)).setNewGenStamp(newgenerationstamp)
        .setNewLength(newlength).setCloseFile(closeFile)
        .setDeleteBlock(deleteblock);
    for (int i = 0; i < newtargets.length; i++) {
      builder.addNewTaragets(PBHelper.convert(newtargets[i]));
      builder.addNewTargetStorages(newtargetstorages[i]);
    }
    CommitBlockSynchronizationRequestProto req = builder.build();
    try {
      rpcProxy.commitBlockSynchronization(NULL_CONTROLLER, req);
    } catch (ServiceException se) {
      throw ProtobufHelper.getRemoteException(se);
    }
  }

  /** Checks whether the server side supports the named RPC method. */
  @Override // ProtocolMetaInterface
  public boolean isMethodSupported(String methodName) throws IOException {
    return RpcClientUtil.isMethodSupported(rpcProxy, DatanodeProtocolPB.class,
        RPC.RpcKind.RPC_PROTOCOL_BUFFER,
        RPC.getProtocolVersion(DatanodeProtocolPB.class), methodName);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.indexing.common.task.batch.parallel;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.client.indexing.IndexingServiceClient;
import org.apache.druid.data.input.InputFormat;
import org.apache.druid.data.input.InputSource;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.JsonInputFormat;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.indexer.partitions.HashedPartitionsSpec;
import org.apache.druid.indexer.partitions.PartitionsSpec;
import org.apache.druid.indexer.partitions.SingleDimensionPartitionsSpec;
import org.apache.druid.indexing.common.TestUtils;
import org.apache.druid.indexing.common.task.IndexTaskClientFactory;
import org.apache.druid.indexing.common.task.TaskResource;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec;
import org.apache.druid.segment.indexing.granularity.GranularitySpec;
import org.apache.druid.segment.realtime.appenderator.AppenderatorsManager;
import org.apache.druid.segment.transform.TransformSpec;
import org.apache.druid.timeline.partition.BuildingHashBasedNumberedShardSpec;
import org.apache.druid.timeline.partition.HashPartitionFunction;
import org.easymock.EasyMock;
import org.joda.time.Duration;
import org.joda.time.Interval;

import javax.annotation.Nullable;
import java.io.File;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Helper for creating objects for testing parallel indexing.
 *
 * <p>Holds canned task identifiers, a no-op {@link ShuffleClient}, shared
 * Jackson/{@code IndexIO} test wiring, and small builders for the spec
 * objects that parallel-indexing tests construct repeatedly.
 */
class ParallelIndexTestingFactory
{
  // Canned identifiers and resources used when constructing test tasks.
  static final String AUTOMATIC_ID = null;
  static final String ID = "id";
  static final String GROUP_ID = "group-id";
  static final TaskResource TASK_RESOURCE = null;
  static final String SUPERVISOR_TASK_ID = "supervisor-task-id";
  static final String SUBTASK_SPEC_ID = "subtask-spec-id";
  static final int NUM_ATTEMPTS = 1;
  static final Map<String, Object> CONTEXT = Collections.emptyMap();
  static final IndexingServiceClient INDEXING_SERVICE_CLIENT = TestUtils.INDEXING_SERVICE_CLIENT;
  static final IndexTaskClientFactory<ParallelIndexSupervisorTaskClient> TASK_CLIENT_FACTORY =
      TestUtils.TASK_CLIENT_FACTORY;
  static final AppenderatorsManager APPENDERATORS_MANAGER = TestUtils.APPENDERATORS_MANAGER;

  // Shuffle client stub: tests never fetch real segment files, so always
  // returns null.
  static final ShuffleClient SHUFFLE_CLIENT = new ShuffleClient()
  {
    @Override
    public File fetchSegmentFile(
        File partitionDir,
        String supervisorTaskId,
        PartitionLocation location
    )
    {
      return null;
    }
  };

  static final List<Interval> INPUT_INTERVALS = Collections.singletonList(Intervals.ETERNITY);
  static final String TASK_EXECUTOR_HOST = "task-executor-host";
  static final int TASK_EXECUTOR_PORT = 1;
  static final boolean USE_HTTPS = true;
  static final Interval INTERVAL = Intervals.ETERNITY;
  static final int NUM_ROWS = 2;
  static final long SIZE_BYTES = 3;
  static final int PARTITION_ID = 4;
  static final String HOST = "host";
  static final int PORT = 1;
  static final String SUBTASK_ID = "subtask-id";

  // Shared test wiring: object mapper and schema field names used by the
  // row/schema factory methods below.
  private static final TestUtils TEST_UTILS = new TestUtils();
  private static final ObjectMapper NESTED_OBJECT_MAPPER = TEST_UTILS.getTestObjectMapper();
  private static final String SCHEMA_TIME = "time";
  private static final String SCHEMA_DIMENSION = "dim";
  private static final String DATASOURCE = "datasource";

  // Shard spec derived from PARTITION_ID so that partition id, bucket id and
  // bucket count are deterministic in tests.
  static final BuildingHashBasedNumberedShardSpec HASH_BASED_NUMBERED_SHARD_SPEC =
      new BuildingHashBasedNumberedShardSpec(
          PARTITION_ID,
          PARTITION_ID,
          PARTITION_ID + 1,
          Collections.singletonList("dim"),
          HashPartitionFunction.MURMUR3_32_ABS,
          ParallelIndexTestingFactory.NESTED_OBJECT_MAPPER
      );

  /** Returns the shared test {@link ObjectMapper}. */
  static ObjectMapper createObjectMapper()
  {
    return TEST_UTILS.getTestObjectMapper();
  }

  /** Returns the shared test {@link IndexIO}. */
  static IndexIO getIndexIO()
  {
    return TEST_UTILS.getTestIndexIO();
  }

  /**
   * Builder for {@link ParallelIndexTuningConfig} with test-friendly
   * defaults (hashed partitioning, guaranteed rollup, unlimited parse
   * exceptions).
   */
  @SuppressWarnings("SameParameterValue")
  static class TuningConfigBuilder
  {
    private PartitionsSpec partitionsSpec = new HashedPartitionsSpec(null, 2, null);
    private boolean forceGuaranteedRollup = true;
    private boolean logParseExceptions = false;
    private int maxParseExceptions = Integer.MAX_VALUE;

    TuningConfigBuilder partitionsSpec(PartitionsSpec partitionsSpec)
    {
      this.partitionsSpec = partitionsSpec;
      return this;
    }

    TuningConfigBuilder forceGuaranteedRollup(boolean forceGuaranteedRollup)
    {
      this.forceGuaranteedRollup = forceGuaranteedRollup;
      return this;
    }

    TuningConfigBuilder logParseExceptions(boolean logParseExceptions)
    {
      this.logParseExceptions = logParseExceptions;
      return this;
    }

    TuningConfigBuilder maxParseExceptions(int maxParseExceptions)
    {
      this.maxParseExceptions = maxParseExceptions;
      return this;
    }

    /**
     * Builds the tuning config. The positional literals are arbitrary
     * distinct test values; only the four builder fields are meaningful.
     * NOTE(review): argument order must match the
     * {@code ParallelIndexTuningConfig} constructor exactly — verify against
     * its declaration before reordering.
     */
    ParallelIndexTuningConfig build()
    {
      return new ParallelIndexTuningConfig(
          1,
          null,
          null,
          3,
          4L,
          null,
          5L,
          6,
          null,
          partitionsSpec,
          null,
          null,
          10,
          forceGuaranteedRollup,
          false,
          14L,
          null,
          null,
          16,
          17,
          18L,
          Duration.ZERO,
          20,
          21,
          22,
          logParseExceptions,
          maxParseExceptions,
          25,
          null,
          null
      );
    }
  }

  /**
   * Creates a minimal {@link DataSchema} with one timestamp column
   * ({@code "time"}, auto-detected format), one string dimension
   * ({@code "dim"}), no aggregators, and DAY granularity over the given
   * intervals.
   */
  static DataSchema createDataSchema(List<Interval> granularitySpecInputIntervals)
  {
    GranularitySpec granularitySpec =
        new ArbitraryGranularitySpec(Granularities.DAY, granularitySpecInputIntervals);
    TimestampSpec timestampSpec = new TimestampSpec(SCHEMA_TIME, "auto", null);
    DimensionsSpec dimensionsSpec = new DimensionsSpec(
        DimensionsSpec.getDefaultSchemas(ImmutableList.of(SCHEMA_DIMENSION)),
        null,
        null
    );
    return new DataSchema(
        DATASOURCE,
        timestampSpec,
        dimensionsSpec,
        new AggregatorFactory[]{},
        granularitySpec,
        TransformSpec.NONE,
        null,
        NESTED_OBJECT_MAPPER
    );
  }

  /**
   * Wraps the given source/format/tuning/schema into a
   * {@link ParallelIndexIngestionSpec}.
   */
  static ParallelIndexIngestionSpec createIngestionSpec(
      InputSource inputSource,
      InputFormat inputFormat,
      ParallelIndexTuningConfig tuningConfig,
      DataSchema dataSchema
  )
  {
    ParallelIndexIOConfig ioConfig =
        new ParallelIndexIOConfig(null, inputSource, inputFormat, false, false);
    return new ParallelIndexIngestionSpec(dataSchema, ioConfig, tuningConfig);
  }

  /**
   * Builder for {@link SingleDimensionPartitionsSpec} with the test
   * dimension as the default partition dimension.
   */
  static class SingleDimensionPartitionsSpecBuilder
  {
    @Nullable
    private String partitionDimension = SCHEMA_DIMENSION;
    private boolean assumeGrouped = false;

    SingleDimensionPartitionsSpecBuilder partitionDimension(@Nullable String partitionDimension)
    {
      this.partitionDimension = partitionDimension;
      return this;
    }

    SingleDimensionPartitionsSpecBuilder assumeGrouped(boolean assumeGrouped)
    {
      this.assumeGrouped = assumeGrouped;
      return this;
    }

    SingleDimensionPartitionsSpec build()
    {
      return new SingleDimensionPartitionsSpec(
          1,
          null,
          partitionDimension,
          assumeGrouped
      );
    }
  }

  /**
   * Returns a factory whose clients are EasyMock nice mocks, ignoring the
   * provider/timeout/retry arguments.
   */
  static IndexTaskClientFactory<ParallelIndexSupervisorTaskClient> createTaskClientFactory()
  {
    return (taskInfoProvider, callerId, numThreads, httpTimeout, numRetries) -> createTaskClient();
  }

  /** Creates a replayed EasyMock nice mock of the supervisor task client. */
  private static ParallelIndexSupervisorTaskClient createTaskClient()
  {
    ParallelIndexSupervisorTaskClient taskClient =
        EasyMock.niceMock(ParallelIndexSupervisorTaskClient.class);
    EasyMock.replay(taskClient);
    return taskClient;
  }

  /**
   * Serializes a single {time, dim} row to JSON.
   *
   * @throws RuntimeException wrapping any Jackson serialization failure
   */
  static String createRow(long timestamp, Object dimensionValue)
  {
    try {
      return NESTED_OBJECT_MAPPER.writeValueAsString(ImmutableMap.of(
          SCHEMA_TIME, timestamp,
          SCHEMA_DIMENSION, dimensionValue
      ));
    }
    catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Serializes the given fields plus the timestamp column to JSON. The
   * timestamp overwrites any existing {@code "time"} key in {@code fields}.
   */
  static String createRowFromMap(long timestamp, Map<String, Object> fields)
  {
    HashMap<String, Object> row = new HashMap<>(fields);
    row.put(SCHEMA_TIME, timestamp);
    try {
      return NESTED_OBJECT_MAPPER.writeValueAsString(row);
    }
    catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }

  /** Returns a default-configured JSON input format. */
  static InputFormat getInputFormat()
  {
    return new JsonInputFormat(null, null, null);
  }
}
/*
 * Copyright 2013 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.template.soy.soytree;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;

import com.google.common.base.CharMatcher;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.template.soy.base.SourceLocation;
import com.google.template.soy.base.internal.Identifier;
import com.google.template.soy.basetree.SyntaxVersion;
import com.google.template.soy.basetree.SyntaxVersionUpperBound;
import com.google.template.soy.data.SanitizedContent.ContentKind;
import com.google.template.soy.error.ErrorReporter;
import com.google.template.soy.error.SoyErrorKind;
import com.google.template.soy.soytree.TemplateNode.SoyFileHeaderInfo;
import com.google.template.soy.soytree.defn.SoyDocParam;
import com.google.template.soy.soytree.defn.TemplateParam;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Nullable;

/**
 * Builder for TemplateNode.
 *
 * <p>Accumulates the pieces of a template declaration (id, command text, SoyDoc, params,
 * autoescape/content-kind info, names) and validates them as they are set, reporting problems
 * through the supplied {@link ErrorReporter}. Subclasses implement {@link #setCommandValues} and
 * {@link #build()} for the concrete template kinds.
 *
 * <p>Important: Do not use outside of Soy code (treat as superpackage-private).
 *
 */
public abstract class TemplateNodeBuilder {
  private static final SoyErrorKind INVALID_SOYDOC_PARAM =
      SoyErrorKind.of("Found invalid soydoc param name ''{0}''");
  private static final SoyErrorKind INVALID_PARAM_NAMED_IJ =
      SoyErrorKind.of("Invalid param name ''ij'' (''ij'' is for injected data).");
  private static final SoyErrorKind KIND_BUT_NOT_STRICT =
      SoyErrorKind.of("kind=\"...\" attribute is only valid with autoescape=\"strict\".");
  private static final SoyErrorKind LEGACY_COMPATIBLE_PARAM_TAG =
      SoyErrorKind.of(
          "Found invalid SoyDoc param tag ''{0}'', tags like this are only allowed in "
              + "legacy templates marked ''deprecatedV1=\"true\"''.  The proper soydoc @param "
              + "syntax is: ''@param <name> <optional comment>''. Soy does not understand JsDoc "
              + "style type declarations in SoyDoc.");
  private static final SoyErrorKind PARAM_ALREADY_DECLARED =
      SoyErrorKind.of("Param ''{0}'' already declared");

  /** Info from the containing Soy file's header declarations. */
  protected final SoyFileHeaderInfo soyFileHeaderInfo;

  /** For reporting parse errors. */
  protected final ErrorReporter errorReporter;

  /** The id for this node. */
  protected Integer id;

  /** The lowest known syntax version bound. Value may be adjusted multiple times. */
  @Nullable protected SyntaxVersionUpperBound syntaxVersionBound;

  /** The command text. */
  protected String cmdText;

  /**
   * This template's name. This is private instead of protected to enforce use of
   * setTemplateNames().
   */
  private String templateName;

  /**
   * This template's partial name. Only applicable for V2. This is private instead of protected to
   * enforce use of setTemplateNames().
   */
  private String partialTemplateName;

  /** A string suitable for display in user msgs as the template name. */
  protected String templateNameForUserMsgs;

  /** This template's visibility level. */
  protected Visibility visibility;

  /**
   * The mode of autoescaping for this template. This is private instead of protected to enforce use
   * of setAutoescapeInfo().
   */
  private AutoescapeMode autoescapeMode;

  /** Required CSS namespaces. */
  private ImmutableList<String> requiredCssNamespaces = ImmutableList.of();

  /** Base CSS namespace for package-relative CSS selectors. */
  private String cssBaseNamespace;

  /**
   * Strict mode context. Nonnull iff autoescapeMode is strict. This is private instead of protected
   * to enforce use of setAutoescapeInfo().
   */
  private ContentKind contentKind;

  /** The full SoyDoc, including the start/end tokens, or null. */
  protected String soyDoc;

  /** The description portion of the SoyDoc (before declarations), or null. */
  protected String soyDocDesc;

  /** The params from template header and/or SoyDoc. Null if no decls and no SoyDoc. */
  @Nullable protected ImmutableList<TemplateParam> params;

  /** Whether the template is marked deprecatedV1="true". */
  protected boolean isMarkedV1;

  /** Strict-HTML mode; starts UNSET until explicitly configured. */
  protected StrictHtmlMode strictHtmlMode;

  /** Source location of the template; set once via setSourceLocation(). */
  SourceLocation sourceLocation;

  /** @param soyFileHeaderInfo Info from the containing Soy file's header declarations. */
  protected TemplateNodeBuilder(SoyFileHeaderInfo soyFileHeaderInfo, ErrorReporter errorReporter) {
    this.soyFileHeaderInfo = soyFileHeaderInfo;
    this.errorReporter = errorReporter;
    this.syntaxVersionBound = null;
    this.strictHtmlMode = StrictHtmlMode.UNSET;
    // All other fields default to null.
  }

  /**
   * Sets the id for the node to be built. May only be called once.
   *
   * @return This builder.
   */
  public TemplateNodeBuilder setId(int id) {
    Preconditions.checkState(this.id == null);
    this.id = id;
    return this;
  }

  /** Sets the source location. May only be called once. */
  public TemplateNodeBuilder setSourceLocation(SourceLocation location) {
    checkState(sourceLocation == null);
    this.sourceLocation = checkNotNull(location);
    return this;
  }

  /**
   * Set the parsed data from the command tag.
   *
   * @param name The template name
   * @param attrs The attributes that are set on the tag {e.g. {@code kind="strict"}}
   */
  public abstract TemplateNodeBuilder setCommandValues(
      Identifier name, List<CommandTagAttribute> attrs);

  /**
   * Returns a template name suitable for display in user msgs.
   *
   * <p>Note: This public getter exists because this info is needed by SoyFileParser for error
   * reporting before the TemplateNode is ready to be built.
   */
  public String getTemplateNameForUserMsgs() {
    return templateNameForUserMsgs;
  }

  /**
   * Sets the SoyDoc for the node to be built. The SoyDoc will be parsed to fill in SoyDoc param
   * info. Must be called after setCommandValues() (cmdText must be set) and at most once.
   *
   * @return This builder.
   */
  public TemplateNodeBuilder setSoyDoc(String soyDoc, SourceLocation soyDocLocation) {
    Preconditions.checkState(this.soyDoc == null);
    Preconditions.checkState(cmdText != null);
    this.soyDoc = soyDoc;
    Preconditions.checkArgument(soyDoc.startsWith("/**") && soyDoc.endsWith("*/"));
    String cleanedSoyDoc = cleanSoyDocHelper(soyDoc);
    this.soyDocDesc = parseSoyDocDescHelper(cleanedSoyDoc);
    this.addParams(parseSoyDocDeclsHelper(soyDoc, cleanedSoyDoc, soyDocLocation));
    return this;
  }

  /**
   * Helper for {@code setSoyDoc()} and {@code setHeaderDecls()}. This method is intended to be
   * called at most once for SoyDoc params and at most once for header params.
   *
   * <p>Duplicates are detected against previously-added params AND within the new batch itself
   * (seenParamKeys.add returns false on a repeat); a param named "ij" is always rejected since
   * that name is reserved for injected data.
   *
   * @param params The params to add.
   */
  public TemplateNodeBuilder addParams(Iterable<? extends TemplateParam> params) {
    Set<String> seenParamKeys = new HashSet<>();
    if (this.params == null) {
      this.params = ImmutableList.copyOf(params);
    } else {
      // Seed the seen-set with the earlier batch so cross-batch duplicates are caught.
      for (TemplateParam oldParam : this.params) {
        seenParamKeys.add(oldParam.name());
      }
      this.params =
          ImmutableList.<TemplateParam>builder().addAll(this.params).addAll(params).build();
    }
    // Check new params.
    for (TemplateParam param : params) {
      if (param.name().equals("ij")) {
        errorReporter.report(param.nameLocation(), INVALID_PARAM_NAMED_IJ);
      }
      if (!seenParamKeys.add(param.name())) {
        errorReporter.report(param.nameLocation(), PARAM_ALREADY_DECLARED, param.name());
      }
    }
    return this;
  }

  /** Builds the template node. Will error if not enough info as been set on this builder. */
  public abstract TemplateNode build();

  // -----------------------------------------------------------------------------------------------
  // Protected helpers for fields that need extra logic when being set.

  /**
   * Records the deprecatedV1 marker; marking a template V1 also lowers the syntax version upper
   * bound to below V2_0.
   */
  protected final void markDeprecatedV1(boolean isDeprecatedV1) {
    isMarkedV1 = isDeprecatedV1;
    if (isDeprecatedV1) {
      SyntaxVersionUpperBound newSyntaxVersionBound =
          new SyntaxVersionUpperBound(SyntaxVersion.V2_0, "Template is marked as deprecatedV1.");
      this.syntaxVersionBound =
          SyntaxVersionUpperBound.selectLower(this.syntaxVersionBound, newSyntaxVersionBound);
    }
  }

  /**
   * Sets the autoescape mode and content kind together so their invariant holds: contentKind is
   * nonnull iff the mode is strict (strict without an explicit kind defaults to HTML; a kind with
   * a non-strict mode is reported as an error).
   */
  protected void setAutoescapeInfo(
      AutoescapeMode autoescapeMode,
      @Nullable ContentKind contentKind,
      @Nullable SourceLocation kindLocation) {
    Preconditions.checkArgument(autoescapeMode != null);
    this.autoescapeMode = autoescapeMode;
    if (contentKind == null && autoescapeMode == AutoescapeMode.STRICT) {
      // Default mode is HTML.
      contentKind = ContentKind.HTML;
    } else if (contentKind != null && autoescapeMode != AutoescapeMode.STRICT) {
      // TODO: Perhaps this could imply strict escaping?
      errorReporter.report(kindLocation, KIND_BUT_NOT_STRICT);
    }
    this.contentKind = contentKind;
  }

  /** @return the id for this node. */
  Integer getId() {
    return id;
  }

  /** @return The lowest known syntax version bound. */
  SyntaxVersionUpperBound getSyntaxVersionBound() {
    return syntaxVersionBound;
  }

  /** @return The command text. */
  String getCmdText() {
    return cmdText;
  }

  /** @return The full SoyDoc, including the start/end tokens, or null. */
  String getSoyDoc() {
    return soyDoc;
  }

  /** @return The description portion of the SoyDoc (before declarations), or null. */
  String getSoyDocDesc() {
    return soyDocDesc;
  }

  /** @return The mode of autoescaping for this template. */
  protected AutoescapeMode getAutoescapeMode() {
    Preconditions.checkState(autoescapeMode != null);
    return autoescapeMode;
  }

  /** @return Strict mode context. Nonnull iff autoescapeMode is strict. */
  @Nullable
  public ContentKind getContentKind() {
    checkState(autoescapeMode != null); // make sure setAutoescapeInfo was called
    return contentKind;
  }

  /** @return Required CSS namespaces. */
  protected ImmutableList<String> getRequiredCssNamespaces() {
    return Preconditions.checkNotNull(requiredCssNamespaces);
  }

  protected void setRequiredCssNamespaces(ImmutableList<String> requiredCssNamespaces) {
    this.requiredCssNamespaces = Preconditions.checkNotNull(requiredCssNamespaces);
  }

  /** @return Base CSS namespace for package-relative CSS selectors. */
  protected String getCssBaseNamespace() {
    return cssBaseNamespace;
  }

  protected void setCssBaseNamespace(String cssBaseNamespace) {
    this.cssBaseNamespace = cssBaseNamespace;
  }

  /**
   * Sets the full and partial template names.
   *
   * <p>NOTE(review): {@code nameLocation} is currently unused here — presumably kept for
   * subclasses or future error reporting; confirm before removing.
   */
  protected final void setTemplateNames(
      String templateName, SourceLocation nameLocation, @Nullable String partialTemplateName) {
    this.templateName = templateName;
    this.partialTemplateName = partialTemplateName;
  }

  protected StrictHtmlMode getStrictHtmlMode() {
    return strictHtmlMode;
  }

  protected String getTemplateName() {
    return templateName;
  }

  @Nullable
  protected String getPartialTemplateName() {
    return partialTemplateName;
  }

  // -----------------------------------------------------------------------------------------------
  // Private static helpers for parsing template SoyDoc.

  /** Pattern for a newline. */
  private static final Pattern NEWLINE = Pattern.compile("\\n|\\r\\n?");

  /** Pattern for a SoyDoc start token, including spaces up to the first newline. */
  private static final Pattern SOY_DOC_START =
      Pattern.compile("^ [/][*][*] [\\ ]* \\r?\\n?", Pattern.COMMENTS);

  /** Pattern for a SoyDoc end token, including preceding spaces up to the last newline. */
  private static final Pattern SOY_DOC_END =
      Pattern.compile("\\r?\\n? [\\ ]* [*][/] $", Pattern.COMMENTS);

  /** Pattern for a SoyDoc declaration. */
  // group(1) = declaration keyword; group(2) = declaration text.
  private static final Pattern SOY_DOC_DECL_PATTERN =
      Pattern.compile("( @param[?]? ) \\s+ ( \\S+ )", Pattern.COMMENTS);

  /** Pattern for SoyDoc parameter declaration text. */
  private static final Pattern SOY_DOC_PARAM_TEXT_PATTERN =
      Pattern.compile("[a-zA-Z_]\\w*", Pattern.COMMENTS);

  /**
   * Private helper for the constructor to clean the SoyDoc. (1) Changes all newlines to "\n". (2)
   * Escapes deprecated javadoc tags. (3) Strips the start/end tokens and spaces (including newlines
   * if they occupy their own lines). (4) Removes common indent from all lines (e.g.
   * space-star-space).
   *
   * @param soyDoc The SoyDoc to clean.
   * @return The cleaned SoyDoc.
   */
  private static String cleanSoyDocHelper(String soyDoc) {
    // Change all newlines to "\n".
    soyDoc = NEWLINE.matcher(soyDoc).replaceAll("\n");

    // Escape all @deprecated javadoc tags.
    // TODO(cushon): add this to the specification and then also generate @Deprecated annotations
    soyDoc = soyDoc.replace("@deprecated", "&#64;deprecated");

    // Strip start/end tokens and spaces (including newlines if they occupy their own lines).
    soyDoc = SOY_DOC_START.matcher(soyDoc).replaceFirst("");
    soyDoc = SOY_DOC_END.matcher(soyDoc).replaceFirst("");

    // Split into lines.
    List<String> lines = Lists.newArrayList(Splitter.on(NEWLINE).split(soyDoc));

    // Remove indent common to all lines. Note that SoyDoc indents often include a star
    // (specifically the most common indent is space-star-space). Thus, we first remove common
    // spaces, then remove one common star, and finally, if we did remove a star, then we once again
    // remove common spaces.
    removeCommonStartCharHelper(lines, ' ', true);
    if (removeCommonStartCharHelper(lines, '*', false) == 1) {
      removeCommonStartCharHelper(lines, ' ', true);
    }

    return Joiner.on('\n').join(lines);
  }

  /**
   * Private helper for {@code cleanSoyDocHelper()}. Removes a common character at the start of all
   * lines, either once or as many times as possible.
   *
   * <p>Special case: Empty lines count as if they do have the common character for the purpose of
   * deciding whether all lines have the common character.
   *
   * @param lines The list of lines. If removal happens, then the list elements will be modified.
   * @param charToRemove The char to remove from the start of all lines.
   * @param shouldRemoveMultiple Whether to remove the char as many times as possible.
   * @return The number of chars removed from the start of each line.
   */
  private static int removeCommonStartCharHelper(
      List<String> lines, char charToRemove, boolean shouldRemoveMultiple) {

    int numCharsToRemove = 0;

    // Count num chars to remove.
    boolean isStillCounting = true;
    do {
      boolean areAllLinesEmpty = true;
      for (String line : lines) {
        if (line.length() == 0) {
          continue; // empty lines are okay
        }
        areAllLinesEmpty = false;
        if (line.length() <= numCharsToRemove || line.charAt(numCharsToRemove) != charToRemove) {
          isStillCounting = false;
          break;
        }
      }
      if (areAllLinesEmpty) {
        // All-empty input would loop forever otherwise; stop counting here.
        isStillCounting = false;
      }
      if (isStillCounting) {
        numCharsToRemove += 1;
      }
    } while (isStillCounting && shouldRemoveMultiple);

    // Perform the removal.
    if (numCharsToRemove > 0) {
      for (int i = 0; i < lines.size(); i++) {
        String line = lines.get(i);
        if (line.length() == 0) {
          continue; // don't change empty lines
        }
        lines.set(i, line.substring(numCharsToRemove));
      }
    }

    return numCharsToRemove;
  }

  /**
   * Private helper for the constructor to parse the SoyDoc description.
   *
   * @param cleanedSoyDoc The cleaned SoyDoc text. Must not be null.
   * @return The description (with trailing whitespace removed).
   */
  private static String parseSoyDocDescHelper(String cleanedSoyDoc) {
    // The description runs up to the first @param declaration (or the whole doc if none).
    Matcher paramMatcher = SOY_DOC_DECL_PATTERN.matcher(cleanedSoyDoc);
    int endOfDescPos = (paramMatcher.find()) ? paramMatcher.start() : cleanedSoyDoc.length();
    String soyDocDesc = cleanedSoyDoc.substring(0, endOfDescPos);
    return CharMatcher.whitespace().trimTrailingFrom(soyDocDesc);
  }

  /**
   * Private helper for the constructor to parse the SoyDoc declarations.
   *
   * <p>Walks the cleaned SoyDoc for {@code @param}/{@code @param?} declarations. Each param's
   * description runs from the end of its declaration to the start of the next one, so the matcher
   * must find the NEXT declaration before the current one's desc can be extracted — hence the
   * look-ahead structure of the loop. Source locations are computed against the original
   * (uncleaned) SoyDoc via a temporary RawTextNode for accuracy.
   *
   * @param cleanedSoyDoc The cleaned SoyDoc text. Must not be null.
   * @return A SoyDocDeclsInfo object with the parsed info.
   */
  private List<SoyDocParam> parseSoyDocDeclsHelper(
      String originalSoyDoc, String cleanedSoyDoc, SourceLocation soyDocSourceLocation) {
    // Note: local 'params' intentionally shadows the builder field; only the parsed SoyDoc
    // params are collected here.
    List<SoyDocParam> params = new ArrayList<>();

    RawTextNode originalSoyDocAsNode = new RawTextNode(-1, originalSoyDoc, soyDocSourceLocation);
    Matcher matcher = SOY_DOC_DECL_PATTERN.matcher(cleanedSoyDoc);
    // Important: This statement finds the param for the first iteration of the loop.
    boolean isFound = matcher.find();
    while (isFound) {

      // Save the match groups.
      String declKeyword = matcher.group(1);
      String declText = matcher.group(2);

      String fullMatch = matcher.group();
      // find the param in the original soy doc and use the RawTextNode support for
      // calculating substring locations to get a more accurate location
      int indexOfParamName = originalSoyDoc.indexOf(declText, originalSoyDoc.indexOf(fullMatch));
      SourceLocation paramLocation =
          originalSoyDocAsNode.substringLocation(
              indexOfParamName, indexOfParamName + declText.length());
      // Find the next declaration in the SoyDoc and extract this declaration's desc string.
      int descStart = matcher.end();
      // Important: This statement finds the param for the next iteration of the loop.
      // We must find the next param now in order to know where the current param's desc ends.
      isFound = matcher.find();
      int descEnd = (isFound) ? matcher.start() : cleanedSoyDoc.length();
      String desc = cleanedSoyDoc.substring(descStart, descEnd).trim();

      if (declKeyword.equals("@param") || declKeyword.equals("@param?")) {

        if (SOY_DOC_PARAM_TEXT_PATTERN.matcher(declText).matches()) {
          params.add(new SoyDocParam(declText, declKeyword.equals("@param"), desc, paramLocation));

        } else {
          if (declText.startsWith("{")) {
            // v1 is allowed for compatibility reasons
            if (!isMarkedV1) {
              errorReporter.report(paramLocation, LEGACY_COMPATIBLE_PARAM_TAG, declText);
            }
          } else {
            errorReporter.report(paramLocation, INVALID_SOYDOC_PARAM, declText);
          }
        }

      } else {
        // The decl pattern only matches @param/@param?, so any other keyword is impossible.
        throw new AssertionError();
      }
    }

    return params;
  }
}
/*
 * Copyright 2007 Sascha Weinreuter
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.intellij.plugins.relaxNG.compact.psi.impl;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import com.intellij.codeInsight.CodeInsightUtilCore;
import com.intellij.codeInsight.daemon.EmptyResolveMessageProvider;
import com.intellij.codeInsight.lookup.LookupItem;
import com.intellij.codeInsight.template.*;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.LocalQuickFixProvider;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFileFactory;
import com.intellij.psi.PsiReference;
import com.intellij.psi.ResolveState;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.scope.BaseScopeProcessor;
import com.intellij.psi.tree.IElementType;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import org.intellij.plugins.relaxNG.compact.RncElementTypes;
import org.intellij.plugins.relaxNG.compact.RncFileType;
import org.intellij.plugins.relaxNG.compact.RncTokenTypes;
import org.intellij.plugins.relaxNG.compact.psi.*;
import org.intellij.plugins.relaxNG.compact.psi.util.EscapeUtil;
import org.intellij.plugins.relaxNG.compact.psi.util.RenameUtil;

/**
 * PSI element for a (possibly prefixed) name in a RELAX-NG compact-syntax file.
 *
 * <p>When the name has a prefix (e.g. {@code xsd:string}) this element acts as its
 * own {@link PsiReference}: the prefix part resolves to the {@code namespace} or
 * {@code datatypes} declaration that introduced it. An unresolved prefix can be
 * repaired via the {@link CreateDeclFix} quick fix, which inserts a matching
 * declaration and lets the user fill in the URI through a live template.
 *
 * <p>Created by IntelliJ IDEA. User: sweinreuter, Date: 14.08.2007
 */
public class RncNameImpl extends RncElementImpl implements RncName, PsiReference,
        EmptyResolveMessageProvider, LocalQuickFixProvider {

  /** Which kind of declaration the prefix must resolve against. */
  private enum Kind {
    NAMESPACE, DATATYPES
  }

  public RncNameImpl(ASTNode node) {
    super(node);
  }

  /**
   * Returns the prefix part of the name, or {@code null} if the name is unprefixed.
   * The node text is unescaped first, then split at the first ':'.
   */
  @Override
  @Nullable
  public String getPrefix() {
    final String[] parts = EscapeUtil.unescapeText(getNode()).split(":", 2);
    return parts.length == 2 ? parts[0] : null;
  }

  /**
   * Returns the local part of the name (the whole text when there is no prefix).
   */
  @Override
  @Nonnull
  public String getLocalPart() {
    final String[] parts = EscapeUtil.unescapeText(getNode()).split(":", 2);
    return parts.length == 1 ? parts[0] : parts[1];
  }

  @Override
  public void accept(@Nonnull RncElementVisitor visitor) {
    visitor.visitName(this);
  }

  /** Only prefixed names are references; a bare local name has nothing to resolve. */
  @Override
  public PsiReference getReference() {
    return getPrefix() == null ? null : this;
  }

  @Override
  public PsiElement getElement() {
    return this;
  }

  /** The reference range covers only the prefix, i.e. the text before the ':'. */
  @Override
  public TextRange getRangeInElement() {
    return TextRange.from(0, getText().indexOf(':'));
  }

  /**
   * Resolves the prefix to the matching {@code namespace}/{@code datatypes}
   * declaration by scanning the declarations of the containing file.
   */
  @Override
  @Nullable
  public PsiElement resolve() {
    final MyResolver resolver = new MyResolver(getPrefix(), getKind());
    getContainingFile().processDeclarations(resolver, ResolveState.initial(), this, this);
    return resolver.getResult();
  }

  /**
   * Names inside a datatype pattern resolve against {@code datatypes} declarations;
   * everything else resolves against {@code namespace} declarations.
   */
  private Kind getKind() {
    final IElementType parent = getNode().getTreeParent().getElementType();
    if (parent == RncElementTypes.DATATYPE_PATTERN) {
      return Kind.DATATYPES;
    } else {
      return Kind.NAMESPACE;
    }
  }

  @Override
  @Nonnull
  public String getCanonicalText() {
    return getRangeInElement().substring(getText());
  }

  /**
   * Renames the prefix by replacing this node with a freshly built
   * {@code newPrefix:localPart} node.
   */
  @Override
  public PsiElement handleElementRename(String newElementName) throws IncorrectOperationException {
    final ASTNode node = getNode();
    final ASTNode child = RenameUtil.createPrefixedNode(getManager(), newElementName, getLocalPart());
    node.getTreeParent().replaceChild(node, child);
    return child.getPsi();
  }

  @Override
  public PsiElement bindToElement(@Nonnull PsiElement element) throws IncorrectOperationException {
    // Not supported for prefix references.
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean isReferenceTo(PsiElement element) {
    return element instanceof RncElement && Comparing.equal(resolve(), element);
  }

  @Override
  @Nonnull
  public Object[] getVariants() {
    return ArrayUtil.EMPTY_OBJECT_ARRAY;
  }

  /**
   * The predefined "xsd" and "xml" prefixes are soft references: they are
   * legal even without an explicit declaration, so no error is reported.
   */
  @Override
  public boolean isSoft() {
    final String prefix = getPrefix();
    return "xsd".equals(prefix) || "xml".equals(prefix);
  }

  @Override
  @Nonnull
  public String getUnresolvedMessagePattern() {
    return "Unresolved namespace prefix ''{0}''";
  }

  /** Offers "create declaration" for any prefixed name. */
  @Nullable
  @Override
  public LocalQuickFix[] getQuickFixes() {
    if (getPrefix() != null) {
      return new LocalQuickFix[]{ new CreateDeclFix(this) };
    }
    return LocalQuickFix.EMPTY_ARRAY;
  }

  /**
   * Scope processor that stops at the first {@code namespace}/{@code datatypes}
   * declaration whose prefix matches. Declarations appear at the top of the file,
   * so processing is aborted (return {@code false}) as soon as a non-declaration
   * element is encountered.
   */
  private static class MyResolver extends BaseScopeProcessor {
    private final String myPrefix;
    private final Kind myKind;
    private PsiElement myResult;

    public MyResolver(String prefix, Kind kind) {
      myPrefix = prefix;
      myKind = kind;
    }

    @Override
    public boolean execute(@Nonnull PsiElement element, @Nonnull ResolveState substitutor) {
      final ASTNode node = element.getNode();
      if (node == null) return true;

      if (!(element instanceof RncDecl)) {
        // Past the declaration section: no match can follow, stop processing.
        return false;
      }
      final IElementType type = node.getElementType();
      if (myKind == Kind.NAMESPACE && type == RncElementTypes.NS_DECL) {
        if (checkDecl(element)) return false;
      } else if (myKind == Kind.DATATYPES && type == RncElementTypes.DATATYPES_DECL) {
        if (checkDecl(element)) return false;
      }
      return true;
    }

    /** Records the declaration and returns true when its prefix matches ours. */
    private boolean checkDecl(PsiElement element) {
      if (myPrefix.equals(((RncDecl)element).getPrefix())) {
        myResult = element;
        return true;
      }
      return false;
    }

    public PsiElement getResult() {
      return myResult;
    }
  }

  /**
   * Quick fix that inserts a {@code namespace <prefix> = "..."} or
   * {@code datatypes <prefix> = "..."} declaration for the unresolved prefix,
   * then starts a live template so the user can type the URI in place.
   */
  public static class CreateDeclFix implements LocalQuickFix {
    private final RncNameImpl myReference;

    public CreateDeclFix(RncNameImpl reference) {
      myReference = reference;
    }

    @Override
    @Nonnull
    public String getName() {
      return getFamilyName() + " '" + myReference.getPrefix() + "'";
    }

    @Override
    @Nonnull
    public String getFamilyName() {
      // NOTE(review): toLowerCase() uses the default locale — presumably safe for
      // the fixed enum names NAMESPACE/DATATYPES, but Locale.ROOT would be stricter.
      return "Create " + myReference.getKind().name().toLowerCase() + " declaration";
    }

    @Override
    public void applyFix(@Nonnull Project project, @Nonnull ProblemDescriptor descriptor) {
      final String prefix = myReference.getPrefix();
      // Build a throwaway RNC file containing just the declaration to insert;
      // "###" is a placeholder literal that is deleted and re-typed via template below.
      final PsiFileFactory factory = PsiFileFactory.getInstance(myReference.getProject());
      final RncFile psiFile = (RncFile)factory.createFileFromText("dummy.rnc", RncFileType.getInstance(), myReference.getKind().name().toLowerCase() + " " + prefix + " = \"###\"");
      final RncFile rncFile = (RncFile)myReference.getContainingFile();
      final RncDecl[] declarations = rncFile.getDeclarations();
      final RncDecl decl = psiFile.getDeclarations()[0];
      final RncDecl e;
      if (declarations.length > 0) {
        // Append after the last existing declaration.
        e = (RncDecl)rncFile.addAfter(decl, declarations[declarations.length - 1]);
      } else {
        final RncGrammar rncGrammar = rncFile.getGrammar();
        if (rncGrammar != null) {
          // No declarations yet: insert before the grammar body.
          e = (RncDecl)rncFile.addBefore(decl, rncGrammar);
        } else {
          e = (RncDecl)rncFile.add(decl);
        }
      }

      final ASTNode blockNode = e.getParent().getNode();
      assert blockNode != null;

      final ASTNode newNode = e.getNode();
      assert newNode != null;
      CodeStyleManager.getInstance(e.getManager().getProject()).reformatNewlyAddedElement(blockNode, newNode);

      // Remove the "###" placeholder literal and remember where it was,
      // so the caret/template can be placed at that offset.
      final PsiElement literal = e.getLastChild();
      assert literal != null;
      final ASTNode literalNode = literal.getNode();
      assert literalNode != null;

      assert literalNode.getElementType() == RncTokenTypes.LITERAL;
      final int offset = literal.getTextRange().getStartOffset();

      literal.delete();

      VirtualFile virtualFile = myReference.getElement().getContainingFile().getVirtualFile();
      if (virtualFile != null) {
        Editor editor = FileEditorManager.getInstance(project).openTextEditor(new OpenFileDescriptor(project, virtualFile, offset), true);
        if (editor != null) {
          RncDecl rncDecl = CodeInsightUtilCore.forcePsiPostprocessAndRestoreElement(e);
          final TemplateManager manager = TemplateManager.getInstance(project);
          final Template t = manager.createTemplate("", "");

          t.addTextSegment(" \"");
          // Empty-result expression: the "uri" variable starts blank and is typed by the user.
          final Expression expression = new Expression() {
            @Override
            public Result calculateResult(ExpressionContext context) {
              return new TextResult("");
            }

            @Override
            public Result calculateQuickResult(ExpressionContext context) {
              return calculateResult(context);
            }

            @Override
            public LookupItem[] calculateLookupItems(ExpressionContext context) {
              return LookupItem.EMPTY_ARRAY;
            }
          };
          t.addVariable("uri", expression, expression, true);
          t.addTextSegment("\"");
          t.addEndVariable();

          editor.getCaretModel().moveToOffset(rncDecl.getTextRange().getEndOffset());
          manager.startTemplate(editor, t);
        }
      }
    }
  }
}
package datastruct;

import static org.junit.Assert.*;

import java.util.Iterator;

import javax.swing.JButton;

import org.junit.Before;
import org.junit.Test;

/**
 * Unit tests for {@link LinkedSortedList}.
 *
 * <p>Fixes over the previous revision:
 * <ul>
 *   <li>no longer extends {@code junit.framework.TestCase} — mixing JUnit 3
 *       inheritance with JUnit 4 annotations makes the runner fall back to
 *       JUnit 3 semantics and silently ignore the annotations;</li>
 *   <li>{@code testNextOnNormalList} was missing its {@code @Test} annotation;</li>
 *   <li>deprecated {@code new Integer(n)} replaced by {@link Integer#valueOf(int)};</li>
 *   <li>{@code assertEquals(true/false, …)} replaced by
 *       {@code assertTrue}/{@code assertFalse} for clearer failure messages.</li>
 * </ul>
 */
public class LinkedSortedListTest {

    // Fixtures: lists under test and the elements inserted into them.
    private LinkedSortedList emptyList;
    private LinkedSortedList normalList;
    private LinkedSortedList anotherList;
    private LinkedSortedList listOfString;
    private LinkedSortedList listNonComparable;
    private Integer un;
    private Integer deux;
    private Integer trois;
    private Integer troisBis;
    private Integer cinq;
    private JButton nonComparable;
    private String comparableString;

    /** Builds a fresh set of lists before each test; {@code normalList} holds 6 elements. */
    @Before
    public void setUp() {
        emptyList = new LinkedSortedList(Integer.class);
        normalList = new LinkedSortedList(Integer.class);
        anotherList = new LinkedSortedList(Integer.class);
        listOfString = new LinkedSortedList(String.class); // String is Comparable
        un = Integer.valueOf(1);
        deux = Integer.valueOf(2);
        trois = Integer.valueOf(3);
        troisBis = Integer.valueOf(3);
        cinq = Integer.valueOf(5);
        nonComparable = new JButton("I'm not comparable!");
        comparableString = "I'm comparable!";
        // The inserts were already tested
        normalList.insert(deux);
        normalList.insert(trois);
        normalList.insert(un);
        normalList.insert(troisBis);
        normalList.insert(un);
        normalList.insert(cinq);
    }

    /** Constructing a list of a non-Comparable element type must throw. */
    @Test
    public void testCreationOfNonComparableList() {
        System.out.println("\n" + "Test of creating a list of non comparable objects:");
        boolean thrown = false;
        try {
            listNonComparable = new LinkedSortedList(JButton.class);
        } catch (RuntimeException e) {
            thrown = true;
        }
        assertTrue(thrown);
    }

    /** insert() on an empty list. */
    @Test
    public void testInsertInEmptyList() {
        System.out.println("\n" + "Test of insert() on an empty list:");
        System.out.println("Before:" + "\n" + emptyList.toString());
        emptyList.insert(trois);
        System.out.println("Then:" + "\n" + emptyList.toString());
    }

    /** insert() of the required element type into a populated list. */
    @Test
    public void testInsertInNormalListRequiredType() {
        System.out.println("\n" + "Test of insert() an object with the type required in a normal list:");
        System.out.println("Before:" + "\n" + normalList.toString());
        normalList.insert(trois);
        System.out.println("Then:" + "\n" + normalList.toString());
    }

    /** Inserting a non-Comparable object is rejected at compile time. */
    @Test
    public void testInsertInNormalListNonComparableObject() {
        System.out.println("\n" + "Test of insert() a non comparable object in a normal list:");
        /* normalList.insert(nonComparable); */
        System.out.println("The compilator doesn't accept the type. OK");
    }

    /** Inserting a Comparable of the wrong runtime type. */
    @Test
    public void testInsertWrongTypeOfObject() {
        System.out.println("\n" + "Test of insert() a comparable object but with the wrong type:");
        System.out.println("Insertion of a String in a list of Integer:");
        normalList.insert(comparableString);
        System.out.println("Insertion of an Integer in a list of String:");
        listOfString.insert(un);
    }

    /** hasNext() on an empty list. */
    @Test
    public void testHasNextOnEmptyList() {
        System.out.println("\n" + "Test of hasNext() on an empty list:");
        assertFalse(emptyList.getIterator().hasNext());
        System.out.println("OK");
    }

    /** hasNext() on a populated list. */
    @Test
    public void testHasNextOnNormalList() {
        System.out.println("\n" + "Test of hasNext() on a normal list:");
        assertTrue(normalList.getIterator().hasNext());
        System.out.println("OK");
    }

    /** hasNext() through a sequence of inserts and iterator moves. */
    @Test
    @SuppressWarnings("rawtypes")
    public void testHasNextOnManipulatedList() {
        System.out.println("\n" + "Test of hasNext() on a manipulated list:");
        // When empty
        assertFalse(anotherList.getIterator().hasNext());
        // After adding another element
        anotherList.insert(un);
        assertTrue(anotherList.getIterator().hasNext());
        // After adding another element
        anotherList.insert(deux);
        assertTrue(anotherList.getIterator().hasNext());
        // Then when moving
        Iterator itr = anotherList.getIterator();
        assertTrue(itr.hasNext());
        // Moving one time
        itr.next();
        assertTrue(itr.hasNext());
        // Moving a second time
        itr.next();
        assertFalse(itr.hasNext());
    }

    /** next() on an empty list — exercises whatever the implementation does past the end. */
    // NOTE(review): no exception is expected here; confirm whether the iterator is
    // supposed to throw NoSuchElementException when exhausted.
    @Test
    @SuppressWarnings("rawtypes")
    public void testNextOnEmptyList() {
        System.out.println("\n" + "Test of next() on an empty list:");
        Iterator itr = emptyList.getIterator();
        itr.next();
    }

    /** next() through and past the end of a 6-element list. */
    @Test // was missing: the method was silently skipped by the JUnit 4 runner
    @SuppressWarnings("rawtypes")
    public void testNextOnNormalList() {
        System.out.println("\n" + "Test of next() on a normal list:");
        Iterator itr = normalList.getIterator();
        // Normally, we can do it 6 times
        itr.next();
        itr.next();
        itr.next();
        itr.next();
        itr.next();
        itr.next();
        // Then we can't move anymore
        itr.next();
        itr.next();
    }

    /** toString() on an empty list. */
    @Test
    public void testToStringEmpty() {
        System.out.println("\n" + "Test of toString() on an empty list:");
        System.out.println(emptyList.toString());
    }

    /** toString() on a populated list. */
    @Test
    public void testToStringNormal() {
        System.out.println("\n" + "Test of toString() on a normal list:");
        System.out.println(normalList.toString());
    }

    /** isEmpty() on an empty list. */
    @Test
    public void testIsEmptyOnEmptyList() {
        System.out.println("\n" + "Test of isEmpty() on an empty list:");
        assertTrue(emptyList.isEmpty());
        System.out.println("OK");
    }

    /** isEmpty() on a populated list. */
    @Test
    public void testIsEmptyOnNormalList() {
        System.out.println("\n" + "Test of isEmpty() on a normal list:");
        assertFalse(normalList.isEmpty());
        System.out.println("OK");
    }

    /** getSize() on an empty list. */
    @Test
    public void testGetSizeOnEmptyList() {
        System.out.println("\n" + "Test of getSize() on an empty list:");
        assertEquals(0, emptyList.getSize());
        System.out.println("OK");
    }

    /** getSize() on a populated list. */
    @Test
    public void testGetSizeOnNormalList() {
        System.out.println("\n" + "Test of getSize() on a normal list:");
        assertEquals(6, normalList.getSize());
        System.out.println("OK");
    }

    /** remove() on an empty list's iterator. */
    @Test
    @SuppressWarnings("rawtypes")
    public void testRemoveOnEmptyList() {
        System.out.println("\n" + "Test of remove() on an empty list:");
        Iterator itr = emptyList.getIterator();
        System.out.println("Before:" + "\n" + emptyList.toString());
        itr.remove();
        System.out.println("Then:" + "\n" + emptyList.toString());
        assertTrue(emptyList.isEmpty());
        assertEquals(0, emptyList.getSize());
    }

    /** remove() on a populated list: before moving, after moving, and twice in a row. */
    @Test
    @SuppressWarnings("rawtypes")
    public void testRemoveOnNormalList() {
        // Without moving (the iterator is still on the sentinel): nothing is removed
        System.out.println("\n" + "Test of remove() on a normal list without moving (the iterator is on the sentinel):");
        Iterator itr = normalList.getIterator();
        System.out.println("Before:" + "\n" + normalList.toString());
        itr.remove();
        System.out.println("Then:" + "\n" + normalList.toString());
        assertFalse(normalList.isEmpty());
        assertEquals(6, normalList.getSize());
        // After moving 2 times: removes the current element
        System.out.println("\n" + "After moving 2 times:");
        itr.next();
        itr.next();
        System.out.println("Before:" + "\n" + normalList.toString());
        itr.remove();
        System.out.println("Then:" + "\n" + normalList.toString());
        assertFalse(normalList.isEmpty());
        assertEquals(5, normalList.getSize());
        // Then moving one time
        System.out.println("\n" + "Then moving one time:");
        itr.next();
        System.out.println("Before:" + "\n" + normalList.toString());
        itr.remove();
        System.out.println("Then:" + "\n" + normalList.toString());
        assertFalse(normalList.isEmpty());
        assertEquals(4, normalList.getSize());
        // A second remove() without an intervening next() must be a no-op
        System.out.println("\n" + "Finally, try to remove without moving:");
        System.out.println("Before:" + "\n" + normalList.toString());
        itr.remove();
        System.out.println("Then:" + "\n" + normalList.toString());
        assertFalse(normalList.isEmpty());
        assertEquals(4, normalList.getSize());
    }
}
package algorithms.packing;

import algorithms.MultiArrayMergeSort;
import algorithms.util.PairInt;
import algorithms.util.PixelHelper;
import gnu.trove.iterator.TIntIterator;
import gnu.trove.set.TIntSet;
import gnu.trove.set.hash.TIntHashSet;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

/**
 * Class to return a set of points for the intersection of 2 point sets where
 * the set of points are separated by a given interval in x and y, that is,
 * the bins are squares.
 * The two point sets can be irregular in shape.
 * The solution is not guaranteed to be optimal.
 *
 * The runtime complexity is expected to be:
 * O(min(N_points) * lg_2(N_points))
 *
 * Considering implementing these:
 *
 * Option 1 (naiveStripPacking):
 * resembles 2D strips:
 * -- find the intersection of the point sets by iterating over the smallest
 * point set.
 * -- sort the intersection data structure by x, then y
 * -- fill the intersection space with rectangles of given x and y size.
 *
 * Option 2:
 * find the intersection of the point sets by iterating over the smallest
 * point set.
 * use a medial axis built from a voronoi diagram
 * (see algorithms.compGeometry.MedialAxis),
 * then fill the space using order based upon the most connected medial
 * axis points and then the space connected to those assigned bins,
 * iteratively.
 * It can use IntervalRangeSearch for collision checks.
 *
 * Option 3:
 * find the intersection of the point sets by iterating over the smallest
 * point set.
 * create ordered, connected boundary points of the
 * intersection.
 * then walk along the border, filling in bins
 * at cell x,y spacings and adding the connected
 * intervals to a stack iteratively to continue filling
 * the intersection with bins.
 *
 * @author nichole
 */
public class Intersection2DPacking {

    /**
     * uses 2-D strip packing and cell sizes of cellSize to place points
     * throughout the intersection of the 2-D points in the most naive greedy
     * placement order in x, then y. Note that the result is not guaranteed
     * to be optimal.
     *
     * The runtime complexity is O(N * log_2(N))
     * where N is min(N_points1, N_points2).
     *
     * @param points1 first point set, as pixel indexes
     * @param points2 second point set, as pixel indexes
     * @param imageWidth width used to convert pixel index &lt;-&gt; (x, y)
     * @param cellSize square bin side length
     * @return pixel indexes of the placed bin corners
     */
    public TIntSet naiveStripPacking(TIntSet points1, TIntSet points2,
        int imageWidth, int cellSize) {

        // O(N)
        TIntSet intersection = intersection(points1, points2);

        return naiveStripPacking(intersection, imageWidth, cellSize);
    }

    /**
     * uses 2-D strip packing and cell sizes of cellSize to place points
     * throughout the intersection of the 2-D points in the most naive greedy
     * placement order in x, then y. Note that the result is not guaranteed
     * to be optimal.
     *
     * The runtime complexity is O(N * log_2(N))
     * where N is min(N_points1, N_points2).
     *
     * @param points1 first point set
     * @param points2 second point set
     * @param cellSize square bin side length
     * @return the placed bin corner points
     */
    public Set<PairInt> naiveStripPacking(Collection<PairInt> points1,
        Collection<PairInt> points2, int cellSize) {

        // O(N)
        Set<PairInt> intersection = intersection(points1, points2);

        return naiveStripPacking(intersection, cellSize);
    }

    /**
     * uses 2-D strip packing and cell sizes of cellSize to place bins
     * throughout the 2-D points in the most naive greedy
     * placement order in x, then y. Note that the result is not guaranteed
     * to be optimal.
     *
     * The runtime complexity is O(N * log_2(N))
     * where N is min(N_points1, N_points2).
     *
     * @param points point set, as pixel indexes
     * @param imageWidth width used to convert pixel index &lt;-&gt; (x, y)
     * @param cellSize square bin side length
     * @return pixel indexes of the placed bin corners
     */
    public TIntSet naiveStripPacking(TIntSet points, int imageWidth, int cellSize) {

        // O(N)
        TIntSet intersection = points;

        //O(N)
        int[] xs = new int[intersection.size()];
        int[] ys = new int[intersection.size()];
        _populate(intersection, imageWidth, xs, ys);

        //O(N*lg_2(N)); sorted primarily by y, secondarily by x (row-major scan order)
        MultiArrayMergeSort.sortBy1stArgThen2nd(ys, xs);

        TIntSet out = new TIntHashSet();

        PixelHelper ph = new PixelHelper();

        // (lX, lY) is the corner of the last placed bin; MIN_VALUE means "none yet"
        int lX = Integer.MIN_VALUE;
        int lY = Integer.MIN_VALUE;
        int x, y;
        long pixIdx;
        for (int i = 0; i < xs.length; ++i) {
            x = xs[i];
            y = ys[i];
            if (x < lX) {
                // x went backwards => a new row started; reset the x cursor only,
                // lY still enforces the vertical spacing against the previous row
                lX = Integer.MIN_VALUE;
            }
            // place a bin when it is at least cellSize to the right of the last
            // one, and either on the same row or at least cellSize below it
            if ((x >= (lX + cellSize)) && ((y == lY) || (y >= (lY + cellSize)))) {
                pixIdx = ph.toPixelIndex(x, y, imageWidth);
                out.add((int)pixIdx);
                lX = x;
                lY = y;
            }
        }
        return out;
    }

    /**
     * uses 2-D strip packing and cell sizes of cellSize to place bins
     * throughout the 2-D points in the most naive greedy
     * placement order in x, then y. Note that the result is not guaranteed
     * to be optimal.
     *
     * The runtime complexity is O(N * log_2(N))
     * where N is min(N_points1, N_points2).
     *
     * @param points point set
     * @param cellSize square bin side length
     * @return the placed bin corner points
     */
    public Set<PairInt> naiveStripPacking(Collection<PairInt> points, int cellSize) {

        // O(N)
        Collection<PairInt> intersection = points;

        //O(N)
        int[] xs = new int[intersection.size()];
        int[] ys = new int[intersection.size()];
        _populate(intersection, xs, ys);

        //O(N*lg_2(N)); sorted primarily by y, secondarily by x (row-major scan order)
        MultiArrayMergeSort.sortBy1stArgThen2nd(ys, xs);

        Set<PairInt> out = new HashSet<PairInt>();

        // (lX, lY) is the corner of the last placed bin; MIN_VALUE means "none yet"
        int lX = Integer.MIN_VALUE;
        int lY = Integer.MIN_VALUE;
        int x, y;
        for (int i = 0; i < xs.length; ++i) {
            x = xs[i];
            y = ys[i];
            if (x < lX) {
                // x went backwards => a new row started; reset the x cursor only
                lX = Integer.MIN_VALUE;
            }
            if ((x >= (lX + cellSize)) && ((y == lY) || (y >= (lY + cellSize)))) {
                PairInt p = new PairInt(x, y);
                out.add(p);
                lX = x;
                lY = y;
            }
        }
        return out;
    }

    /**
     * Find the intersection of the 2 point sets.
     * The runtime complexity is O(N) where N is min(N_points1, N_points2).
     *
     * @param points1 first point set, as pixel indexes
     * @param points2 second point set, as pixel indexes
     * @return pixel indexes present in both sets
     */
    public TIntSet intersection(TIntSet points1, TIntSet points2) {

        TIntSet out = new TIntHashSet();

        // iterate over the smaller set, probe the larger
        TIntSet p1, p2;
        if (points1.size() <= points2.size()) {
            p1 = points1;
            p2 = points2;
        } else {
            p1 = points2;
            p2 = points1;
        }

        TIntIterator iter = p1.iterator();
        while (iter.hasNext()) {
            int pixIdx = iter.next();
            if (p2.contains(pixIdx)) {
                out.add(pixIdx);
            }
        }
        return out;
    }

    /**
     * Find the intersection of the 2 point sets.
     * The runtime complexity is O(N) where N is min(N_points1, N_points2).
     *
     * @param points1 first point set
     * @param points2 second point set
     * @return points present in both sets
     */
    public Set<PairInt> intersection(Collection<PairInt> points1,
        Collection<PairInt> points2) {

        Set<PairInt> out = new HashSet<PairInt>();

        // iterate over the smaller collection, probe the larger
        // NOTE(review): containment checks assume points2/points1 have O(1)
        // contains (i.e. are Sets); a List argument degrades this to O(N*M)
        Collection<PairInt> p1, p2;
        if (points1.size() <= points2.size()) {
            p1 = points1;
            p2 = points2;
        } else {
            p1 = points2;
            p2 = points1;
        }

        for (PairInt p : p1) {
            if (p2.contains(p)) {
                out.add(p);
            }
        }
        return out;
    }

    // Decodes each pixel index into parallel x/y arrays (xs and ys must be
    // pre-sized to points.size()).
    void _populate(TIntSet points, int imageWidth, int[] xs, int[] ys) {

        PixelHelper ph = new PixelHelper();
        int[] xy = new int[2];

        int i = 0;
        TIntIterator iter = points.iterator();
        while (iter.hasNext()) {
            int pixIdx = iter.next();
            ph.toPixelCoords(pixIdx, imageWidth, xy);
            xs[i] = xy[0];
            ys[i] = xy[1];
            ++i;
        }
    }

    // Copies point coordinates into parallel x/y arrays (xs and ys must be
    // pre-sized to points.size()).
    void _populate(Collection<PairInt> points, int[] xs, int[] ys) {

        int i = 0;
        for (PairInt p : points) {
            xs[i] = p.getX();
            ys[i] = p.getY();
            ++i;
        }
    }
}
// Copyright (c) Committed Software 2018, opensource@committed.io
package gnu.trove;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.verify;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.HashMap;
import java.util.Map;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;

import gnu.trove.map.TObjectIntMap;

/**
 * Tests for the {@code TObjectIntHashMap} compatibility wrapper.
 *
 * <p>Most tests verify pure delegation to the mocked backing map; the tests
 * using a real delegate check behavior of the callback-style methods.
 *
 * <p>Fix over the previous revision: {@code testRetainEntries} asserted
 * {@code get(0)} on a {@code String}-keyed map — the integer key is never
 * present, so the assertion passed vacuously against the no-entry value (0).
 * It now asserts {@code get(KEY)}.
 */
@RunWith(MockitoJUnitRunner.class)
public class TObjectIntHashMapTest {

  private static final int ZERO = 0;
  private static final String KEY = "key";

  // Wrapper under test, backed by the mocked delegate.
  private TObjectIntHashMap<String> tObjectIntHashMap;

  @Mock private gnu.trove.map.hash.TObjectIntHashMap<String> delegate;

  @Mock private ObjectOutput objectOutput;

  @Mock private ObjectInput objectInput;

  @Before
  public void setUp() throws Exception {
    tObjectIntHashMap = new TObjectIntHashMap<>(delegate);
  }

  // NOTE(review): this and testClone exercise TIntObjectHashMap rather than
  // TObjectIntHashMap — presumably copy-pasted from the sibling test class;
  // confirm whether TObjectIntHashMap needs equivalent coverage.
  @Test
  public void testMapConstructor() {
    gnu.trove.map.hash.TIntObjectHashMap<String> original =
        new gnu.trove.map.hash.TIntObjectHashMap<>();
    original.put(0, "zero");
    TIntObjectHashMap<String> tIntObjectHashMapFromMap = new TIntObjectHashMap<>(original);
    assertEquals("zero", tIntObjectHashMapFromMap.get(0));
  }

  @Test
  public void testClone() throws CloneNotSupportedException {
    TIntObjectHashMap<String> original = new TIntObjectHashMap<>();
    assertEquals(original, original.clone());
  }

  @Test
  public void testCapacity() {
    tObjectIntHashMap.capacity();
    verify(delegate).capacity();
  }

  @Test
  public void testNoEntryValue() {
    tObjectIntHashMap.getNoEntryValue();
    verify(delegate).getNoEntryValue();
  }

  @Test
  public void testIsEmpty() {
    tObjectIntHashMap.isEmpty();
    verify(delegate).isEmpty();
  }

  @Test
  public void testContains() {
    tObjectIntHashMap.contains(0);
    verify(delegate).contains(0);
  }

  @Test
  public void testSize() {
    tObjectIntHashMap.size();
    verify(delegate).size();
  }

  @Test
  public void testForEach() {
    TObjectIntHashMap<String> tIntObjectHashMapWithRealDelegate = new TObjectIntHashMap<>();
    tIntObjectHashMapWithRealDelegate.put("key", ZERO);
    assertFalse(tIntObjectHashMapWithRealDelegate.forEach(i -> i.equals("other")));
  }

  @Test
  public void testForEachEntry() {
    TObjectIntHashMap<String> tIntObjectHashMapWithRealDelegate = new TObjectIntHashMap<>();
    tIntObjectHashMapWithRealDelegate.put("key", ZERO);
    assertTrue(
        tIntObjectHashMapWithRealDelegate.forEachEntry((a, b) -> a.equals("key") && b == ZERO));
    assertFalse(
        tIntObjectHashMapWithRealDelegate.forEachEntry(
            (a, b) -> a.equals("notequal") && b == ZERO));
  }

  @Test
  public void testForEachKey() {
    TObjectIntHashMap<String> tIntObjectHashMapWithRealDelegate = new TObjectIntHashMap<>();
    tIntObjectHashMapWithRealDelegate.put(KEY, ZERO);
    assertTrue(tIntObjectHashMapWithRealDelegate.forEachKey(k -> k.equals(KEY)));
    assertFalse(tIntObjectHashMapWithRealDelegate.forEachKey(k -> k.equals("otherValue")));
  }

  @Test
  public void testForEachValue() {
    TObjectIntHashMap<String> tIntObjectHashMapWithRealDelegate = new TObjectIntHashMap<>();
    tIntObjectHashMapWithRealDelegate.put(KEY, ZERO);
    assertTrue(tIntObjectHashMapWithRealDelegate.forEachValue(v -> v == ZERO));
    assertFalse(tIntObjectHashMapWithRealDelegate.forEachValue(v -> v == 100));
  }

  @Test
  public void testEnsureCapacity() {
    tObjectIntHashMap.ensureCapacity(1);
    verify(delegate).ensureCapacity(1);
  }

  @Test
  public void testGetNoEntryValue() {
    tObjectIntHashMap.getNoEntryValue();
    verify(delegate).getNoEntryValue();
  }

  @Test
  public void testContainsKey() {
    tObjectIntHashMap.containsKey(0);
    verify(delegate).containsKey(0);
  }

  @Test
  public void testContainsValue() {
    tObjectIntHashMap.containsValue(ZERO);
    verify(delegate).containsValue(ZERO);
  }

  @Test
  public void testCompact() {
    tObjectIntHashMap.compact();
    verify(delegate).compact();
  }

  @Test
  public void testGet() {
    tObjectIntHashMap.get(0);
    verify(delegate).get(0);
  }

  @Test
  public void testPut() {
    tObjectIntHashMap.put(KEY, ZERO);
    verify(delegate).put(KEY, ZERO);
  }

  @Test
  public void testPutIfAbsent() {
    tObjectIntHashMap.putIfAbsent(KEY, ZERO);
    verify(delegate).putIfAbsent(KEY, ZERO);
  }

  @Test
  public void testSetAutoCompactionFactor() {
    tObjectIntHashMap.setAutoCompactionFactor(1.0F);
    verify(delegate).setAutoCompactionFactor(1.0F);
  }

  @Test
  public void testRemove() {
    tObjectIntHashMap.remove(0);
    verify(delegate).remove(0);
  }

  @Test
  public void testPutAll() {
    Map<String, Integer> map = new HashMap<>();
    tObjectIntHashMap.putAll(map);
    verify(delegate).putAll(map);
  }

  @Test
  public void testPutAllTObjectIntMap() {
    TObjectIntMap<String> map = new gnu.trove.map.hash.TObjectIntHashMap<>();
    tObjectIntHashMap.putAll(map);
    verify(delegate).putAll(map);
  }

  @Test
  public void testGetAutoCompactionFactor() {
    tObjectIntHashMap.getAutoCompactionFactor();
    verify(delegate).getAutoCompactionFactor();
  }

  @Test
  public void testTrimToSize() {
    tObjectIntHashMap.trimToSize();
    verify(delegate).trimToSize();
  }

  @Test
  public void testClear() {
    tObjectIntHashMap.clear();
    verify(delegate).clear();
  }

  @Test
  public void testKeySet() {
    tObjectIntHashMap.keySet();
    verify(delegate).keySet();
  }

  @Test
  public void testKeys() {
    tObjectIntHashMap.keys();
    verify(delegate).keys();
  }

  @Test
  public void testKeysDestinationArray() {
    String[] strings = new String[3];
    tObjectIntHashMap.keys(strings);
    verify(delegate).keys(strings);
  }

  @Test
  public void testValueCollection() {
    tObjectIntHashMap.valueCollection();
    verify(delegate).valueCollection();
  }

  @Test
  public void testValues() {
    tObjectIntHashMap.values();
    verify(delegate).values();
  }

  @Test
  public void testValuesDestinationArray() {
    int[] ints = new int[3];
    tObjectIntHashMap.values(ints);
    verify(delegate).values(ints);
  }

  @Test
  public void testTempDisableAutoCompaction() {
    tObjectIntHashMap.tempDisableAutoCompaction();
    verify(delegate).tempDisableAutoCompaction();
  }

  @Test
  public void testReEnableAutoCompaction() {
    tObjectIntHashMap.reenableAutoCompaction(true);
    verify(delegate).reenableAutoCompaction(true);
  }

  @Test
  public void testIterator() {
    tObjectIntHashMap.iterator();
    verify(delegate).iterator();
  }

  @Test
  public void testRetainEntries() {
    TObjectIntHashMap<String> tIntObjectHashMapWithRealDelegate = new TObjectIntHashMap<>();
    tIntObjectHashMapWithRealDelegate.put(KEY, ZERO);
    tIntObjectHashMapWithRealDelegate.put("otherKey", 1);
    assertEquals(2, tIntObjectHashMapWithRealDelegate.size());
    tIntObjectHashMapWithRealDelegate.retainEntries((k, v) -> k.equals(KEY) && v == ZERO);
    assertEquals(1, tIntObjectHashMapWithRealDelegate.size());
    // FIX: was get(0) — an int key can never be present in a String-keyed map,
    // so the old assertion only compared against the no-entry value.
    assertEquals(ZERO, tIntObjectHashMapWithRealDelegate.get(KEY));
  }

  @Test
  public void testTransformValues() {
    TObjectIntHashMap<String> tObjectIntHashMapWithRealDelegate = new TObjectIntHashMap<>();
    tObjectIntHashMapWithRealDelegate.put(KEY, ZERO);
    tObjectIntHashMapWithRealDelegate.transformValues(v -> 1);
    assertEquals("New value should be 1", 1, tObjectIntHashMapWithRealDelegate.get(KEY));
    assertEquals("Size should be 1", 1, tObjectIntHashMapWithRealDelegate.size());
  }

  @Test
  public void testEquals() {
    assertEquals(tObjectIntHashMap, new TObjectIntHashMap<>(delegate));
    assertNotEquals(tObjectIntHashMap, tObjectIntHashMap.toString());
  }

  @Test
  public void testHashCode() {
    assertEquals(new TIntIntHashMap().hashCode(), new TObjectIntHashMap<Object>().hashCode());
    assertNotEquals(new TIntIntHashMap().hashCode(), "hello".hashCode());
  }

  @Test
  public void testReadExternal() throws IOException, ClassNotFoundException {
    tObjectIntHashMap.readExternal(objectInput);
    verify(delegate).readExternal(objectInput);
  }

  @Test
  public void testWriteExternal() throws IOException {
    tObjectIntHashMap.writeExternal(objectOutput);
    verify(delegate).writeExternal(objectOutput);
  }
}
/* * Copyright (c) 2012-2015 The original author or authors * ------------------------------------------------------ * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Apache License v2.0 which accompanies this distribution. * * The Eclipse Public License is available at * http://www.eclipse.org/legal/epl-v10.html * * The Apache License v2.0 is available at * http://www.opensource.org/licenses/apache2.0.php * * You may elect to redistribute this code under either of these licenses. */ package org.eclipse.moquette.spi.persistence; import org.eclipse.moquette.proto.MQTTException; import org.eclipse.moquette.spi.IMatchingCondition; import org.eclipse.moquette.spi.IMessagesStore; import org.eclipse.moquette.spi.ISessionsStore; import org.eclipse.moquette.spi.impl.events.PublishEvent; import org.eclipse.moquette.spi.impl.storage.StoredPublishEvent; import org.eclipse.moquette.spi.impl.subscriptions.Subscription; import org.eclipse.moquette.proto.messages.AbstractMessage; import static org.eclipse.moquette.spi.impl.Utils.defaultGet; import org.mapdb.DB; import org.mapdb.DBMaker; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.util.*; import java.util.concurrent.ConcurrentMap; /** * MapDB main persistence implementation */ public class MapDBPersistentStore implements IMessagesStore, ISessionsStore { private static final Logger LOG = LoggerFactory.getLogger(MapDBPersistentStore.class); private ConcurrentMap<String, StoredMessage> m_retainedStore; //maps clientID to the list of pending messages stored private ConcurrentMap<String, List<StoredPublishEvent>> m_persistentMessageStore; //bind clientID+MsgID -> evt message published private ConcurrentMap<String, StoredPublishEvent> m_inflightStore; //map clientID <-> set of currently in flight packet identifiers Map<String, Set<Integer>> 
m_inFlightIds; //bind clientID+MsgID -> evt message published private ConcurrentMap<String, StoredPublishEvent> m_qos2Store; //persistent Map of clientID, set of Subscriptions private ConcurrentMap<String, Set<Subscription>> m_persistentSubscriptions; private DB m_db; private String m_storePath; /* * The default constructor will create an in memory store as no file path was specified */ public MapDBPersistentStore() { this.m_storePath = ""; } public MapDBPersistentStore(String storePath) { this.m_storePath = storePath; } @Override public void initStore() { if (m_storePath == null || m_storePath.isEmpty()) { m_db = DBMaker.newMemoryDB().make(); } else { File tmpFile; try { tmpFile = new File(m_storePath); tmpFile.createNewFile(); } catch (IOException ex) { LOG.error(null, ex); throw new MQTTException("Can't create temp file for subscriptions storage [" + m_storePath + "]", ex); } m_db = DBMaker.newFileDB(tmpFile).make(); } m_retainedStore = m_db.getHashMap("retained"); m_persistentMessageStore = m_db.getHashMap("persistedMessages"); m_inflightStore = m_db.getHashMap("inflight"); m_inFlightIds = m_db.getHashMap("inflightPacketIDs"); m_persistentSubscriptions = m_db.getHashMap("subscriptions"); m_qos2Store = m_db.getHashMap("qos2Store"); } @Override public void cleanRetained(String topic) { m_retainedStore.remove(topic); } @Override public void storeRetained(String topic, ByteBuffer message, AbstractMessage.QOSType qos) { if (!message.hasRemaining()) { //clean the message from topic m_retainedStore.remove(topic); } else { //store the message to the topic byte[] raw = new byte[message.remaining()]; message.get(raw); m_retainedStore.put(topic, new StoredMessage(raw, qos, topic)); } m_db.commit(); } @Override public Collection<StoredMessage> searchMatching(IMatchingCondition condition) { LOG.debug("searchMatching scanning all retained messages, presents are {}", m_retainedStore.size()); List<StoredMessage> results = new ArrayList<StoredMessage>(); for (Map.Entry<String, 
StoredMessage> entry : m_retainedStore.entrySet()) { StoredMessage storedMsg = entry.getValue(); if (condition.match(entry.getKey())) { results.add(storedMsg); } } return results; } @Override public void storePublishForFuture(PublishEvent evt) { List<StoredPublishEvent> storedEvents; String clientID = evt.getClientID(); if (!m_persistentMessageStore.containsKey(clientID)) { storedEvents = new ArrayList<>(); } else { storedEvents = m_persistentMessageStore.get(clientID); } storedEvents.add(convertToStored(evt)); m_persistentMessageStore.put(clientID, storedEvents); m_db.commit(); //NB rewind the evt message content LOG.debug("Stored published message for client <{}> on topic <{}>", clientID, evt.getTopic()); } @Override public List<PublishEvent> listMessagesInSession(String clientID) { List<PublishEvent> liveEvts = new ArrayList<>(); List<StoredPublishEvent> storedEvts = defaultGet(m_persistentMessageStore, clientID, Collections.<StoredPublishEvent>emptyList()); for (StoredPublishEvent storedEvt : storedEvts) { liveEvts.add(convertFromStored(storedEvt)); } return liveEvts; } @Override public void removeMessageInSession(String clientID, Integer messageID) { List<StoredPublishEvent> events = m_persistentMessageStore.get(clientID); if (events == null) { return; } StoredPublishEvent toRemoveEvt = null; for (StoredPublishEvent evt : events) { if (evt.getMessageID() == null && messageID == null) { //was a qos0 message (no ID) toRemoveEvt = evt; } if (evt.getMessageID() == messageID) { toRemoveEvt = evt; } } events.remove(toRemoveEvt); m_persistentMessageStore.put(clientID, events); m_db.commit(); } public void dropMessagesInSession(String clientID) { m_persistentMessageStore.remove(clientID); m_db.commit(); } //----------------- In flight methods ----------------- @Override public void cleanInFlight(String clientID, int packetID) { String publishKey = String.format("%s%d", clientID, packetID); m_inflightStore.remove(publishKey); Set<Integer> inFlightForClient = 
this.m_inFlightIds.get(clientID); if (inFlightForClient != null) { inFlightForClient.remove(packetID); } m_db.commit(); } @Override public void addInFlight(PublishEvent evt, String clientID, int packetID) { String publishKey = String.format("%s%d", clientID, packetID); StoredPublishEvent storedEvt = convertToStored(evt); m_inflightStore.put(publishKey, storedEvt); m_db.commit(); } /** * Return the next valid packetIdentifier for the given client session. * */ @Override public int nextPacketID(String clientID) { Set<Integer> inFlightForClient = this.m_inFlightIds.get(clientID); if (inFlightForClient == null) { int nextPacketId = 1; inFlightForClient = new HashSet<>(); inFlightForClient.add(nextPacketId); this.m_inFlightIds.put(clientID, inFlightForClient); return nextPacketId; } int maxId = inFlightForClient.isEmpty() ? 0 : Collections.max(inFlightForClient); int nextPacketId = (maxId + 1) % 0xFFFF; inFlightForClient.add(nextPacketId); return nextPacketId; } @Override public void removeSubscription(String topic, String clientID) { LOG.debug("removeSubscription topic filter: {} for clientID: {}", topic, clientID); if (!m_persistentSubscriptions.containsKey(clientID)) { return; } Set<Subscription> clientSubscriptions = m_persistentSubscriptions.get(clientID); //search for the subscription to remove Subscription toBeRemoved = null; for (Subscription sub : clientSubscriptions) { if (sub.getTopicFilter().equals(topic)) { toBeRemoved = sub; break; } } if (toBeRemoved != null) { clientSubscriptions.remove(toBeRemoved); } m_persistentSubscriptions.put(clientID, clientSubscriptions); m_db.commit(); } public void addNewSubscription(Subscription newSubscription) { LOG.debug("addNewSubscription invoked with subscription {}", newSubscription); final String clientID = newSubscription.getClientId(); if (!m_persistentSubscriptions.containsKey(clientID)) { LOG.debug("clientID {} is a newcome, creating it's subscriptions set", clientID); m_persistentSubscriptions.put(clientID, new 
HashSet<Subscription>()); } Set<Subscription> subs = m_persistentSubscriptions.get(clientID); if (!subs.contains(newSubscription)) { LOG.debug("updating clientID {} subscriptions set with new subscription", clientID); //TODO check the subs doesn't contain another subscription to the same topic with different Subscription existingSubscription = null; for (Subscription scanSub : subs) { if (newSubscription.getTopicFilter().equals(scanSub.getTopicFilter())) { existingSubscription = scanSub; break; } } if (existingSubscription != null) { subs.remove(existingSubscription); } subs.add(newSubscription); m_persistentSubscriptions.put(clientID, subs); LOG.debug("clientID {} subscriptions set now is {}", clientID, subs); } m_db.commit(); } public void wipeSubscriptions(String clientID) { m_persistentSubscriptions.remove(clientID); m_db.commit(); } @Override public void updateSubscriptions(String clientID, Set<Subscription> subscriptions) { m_persistentSubscriptions.put(clientID, subscriptions); m_db.commit(); } public List<Subscription> listAllSubscriptions() { List<Subscription> allSubscriptions = new ArrayList<Subscription>(); for (Map.Entry<String, Set<Subscription>> entry : m_persistentSubscriptions.entrySet()) { allSubscriptions.addAll(entry.getValue()); } LOG.debug("retrieveAllSubscriptions returning subs {}", allSubscriptions); return allSubscriptions; } @Override public boolean contains(String clientID) { return m_persistentSubscriptions.containsKey(clientID); } public void close() { this.m_db.commit(); LOG.debug("persisted subscriptions {}", m_persistentSubscriptions); this.m_db.close(); LOG.debug("closed disk storage"); } /*-------- QoS 2 storage management --------------*/ public void persistQoS2Message(String publishKey, PublishEvent evt) { LOG.debug("persistQoS2Message store pubKey: {}, evt: {}", publishKey, evt); m_qos2Store.put(publishKey, convertToStored(evt)); } public void removeQoS2Message(String publishKey) { LOG.debug("Removing stored Q0S2 message <{}>", 
publishKey); m_qos2Store.remove(publishKey); } public PublishEvent retrieveQoS2Message(String publishKey) { StoredPublishEvent storedEvt = m_qos2Store.get(publishKey); return convertFromStored(storedEvt); } private StoredPublishEvent convertToStored(PublishEvent evt) { StoredPublishEvent storedEvt = new StoredPublishEvent(evt); return storedEvt; } private PublishEvent convertFromStored(StoredPublishEvent evt) { byte[] message = evt.getMessage(); ByteBuffer bbmessage = ByteBuffer.wrap(message); //bbmessage.flip(); PublishEvent liveEvt = new PublishEvent(evt.getTopic(), evt.getQos(), bbmessage, evt.isRetain(), evt.getClientID(), evt.getMessageID()); return liveEvt; } }
how to discover and interact with one or more printers via one or more protocols.
When a new print job targeted to a printer managed by this print service is queued,
after which {@link PrintJob#isCancelled() PrintJob.isCancelled()} would return true.
manual addition of printers, vendor name, etc.
 * </p>
 */
public abstract class PrintService extends Service {

    private static final String LOG_TAG = "PrintService";

    // compile-time switch for the verbose per-message logging in ServiceHandler
    private static final boolean DEBUG = false;

    /**
     * The {@link Intent} action that must be declared as handled by a service
     * in its manifest for the system to recognize it as a print service.
     */
    public static final String SERVICE_INTERFACE = "android.printservice.PrintService";

    /**
     * Name under which a {@link PrintService} component publishes additional information
     * about itself. This meta-data must reference an XML resource containing a <code>
     * &lt;{@link android.R.styleable#PrintService print-service}&gt;</code> tag. This is
     * a sample XML file configuring a print service:
     * <pre> &lt;print-service
     *     android:vendor="SomeVendor"
     *     android:settingsActivity="foo.bar.MySettingsActivity"
     *     android:addPrintersActivity="foo.bar.MyAddPrintersActivity."
     *     . . .
     * /&gt;</pre>
     * <p>
     * For detailed configuration options that can be specified via the meta-data
     * refer to {@link android.R.styleable#PrintService android.R.styleable.PrintService}.
     * </p>
     * <p>
     * If you declare a settings or add a printers activity, they have to be exported,
     * by setting the {@link android.R.attr#exported} activity attribute to <code>true
     * </code>. Also in case you want only the system to be able to start any of these
     * activities you can specify that they request the android.permission
     * .START_PRINT_SERVICE_CONFIG_ACTIVITY permission by setting the
     * {@link android.R.attr#permission} activity attribute.
     * </p>
     */
    public static final String SERVICE_META_DATA = "android.printservice";

    /**
     * If you declared an optional activity with advanced print options via the
     * {@link android.R.attr#advancedPrintOptionsActivity advancedPrintOptionsActivity} attribute,
     * this extra is used to pass in the currently constructed {@link PrintJobInfo} to your activity
     * allowing you to modify it. After you are done, you must return the modified
     * {@link PrintJobInfo} via the same extra.
     * <p>
     * You cannot modify the passed in {@link PrintJobInfo} directly, rather you should build
     * another one using the {@link android.print.PrintJobInfo.Builder PrintJobInfo.Builder} class.
     * You can specify any standard properties and add advanced, printer specific, ones via
     * {@link android.print.PrintJobInfo.Builder#putAdvancedOption(String, String)
     * PrintJobInfo.Builder.putAdvancedOption(String, String)} and
     * {@link android.print.PrintJobInfo.Builder#putAdvancedOption(String, int)
     * PrintJobInfo.Builder.putAdvancedOption(String, int)}. The advanced options are not
     * interpreted by the system, they will not be visible to applications, and can only be accessed
     * by your print service via {@link PrintJob#getAdvancedStringOption(String)
     * PrintJob.getAdvancedStringOption(String)} and {@link PrintJob#getAdvancedIntOption(String)
     * PrintJob.getAdvancedIntOption(String)}.
     * </p>
     * <p>
     * If the advanced print options activity offers changes to the standard print options, you can
     * get the current {@link android.print.PrinterInfo PrinterInfo} using the
     * {@link #EXTRA_PRINTER_INFO} extra which will allow you to present the user with UI options
     * supported by the current printer. For example, if the current printer does not support a
     * given media size, you should not offer it in the advanced print options UI.
     * </p>
     *
     * @see #EXTRA_PRINTER_INFO
     */
    public static final String EXTRA_PRINT_JOB_INFO = "android.intent.extra.print.PRINT_JOB_INFO";

    /**
     * If you declared an optional activity with advanced print options via the
     * {@link android.R.attr#advancedPrintOptionsActivity advancedPrintOptionsActivity}
     * attribute, this extra is used to pass in the currently selected printer's
     * {@link android.print.PrinterInfo} to your activity allowing you to inspect it.
     *
     * @see #EXTRA_PRINT_JOB_INFO
     */
    public static final String EXTRA_PRINTER_INFO = "android.intent.extra.print.EXTRA_PRINTER_INFO";

    /**
     * If you declared an optional activity with advanced print options via the
     * {@link android.R.attr#advancedPrintOptionsActivity advancedPrintOptionsActivity}
     * attribute, this extra is used to pass in the meta-data for the currently printed
     * document as a {@link android.print.PrintDocumentInfo} to your activity allowing
     * you to inspect it.
     *
     * @see #EXTRA_PRINT_JOB_INFO
     * @see #EXTRA_PRINTER_INFO
     */
    public static final String EXTRA_PRINT_DOCUMENT_INFO =
            "android.printservice.extra.PRINT_DOCUMENT_INFO";

    // Handler bound to the main looper; all IPrintService calls are re-posted to it so
    // that every callback of this class runs on the main application thread.
    private Handler mHandler;

    // Binder-supplied callback into the system's print manager; null while disconnected.
    private IPrintServiceClient mClient;

    // Id of the most recently created discovery session, used to reject reused instances.
    private int mLastSessionId = -1;

    private PrinterDiscoverySession mDiscoverySession;

    @Override
    protected final void attachBaseContext(Context base) {
        super.attachBaseContext(base);
        mHandler = new ServiceHandler(base.getMainLooper());
    }

    /**
     * The system has connected to this service.
     */
    protected void onConnected() {
        /* do nothing */
    }

    /**
     * The system has disconnected from this service.
     */
    protected void onDisconnected() {
        /* do nothing */
    }

    /**
     * Callback asking you to create a new {@link PrinterDiscoverySession}.
     *
     * @return The created session.
     * @see PrinterDiscoverySession
     */
    protected abstract @Nullable PrinterDiscoverySession onCreatePrinterDiscoverySession();

    /**
     * Called when cancellation of a print job is requested. The service
     * should do best effort to fulfill the request. After the cancellation
     * is performed, the print job should be marked as cancelled state by
     * calling {@link PrintJob#cancel()}.
     *
     * @param printJob The print job to cancel.
     *
     * @see PrintJob#cancel() PrintJob.cancel()
     * @see PrintJob#isCancelled() PrintJob.isCancelled()
     */
    protected abstract void onRequestCancelPrintJob(PrintJob printJob);

    /**
     * Called when there is a queued print job for one of the printers
     * managed by this print service.
     *
     * @param printJob The new queued print job.
     *
     * @see PrintJob#isQueued() PrintJob.isQueued()
     * @see #getActivePrintJobs()
     */
    protected abstract void onPrintJobQueued(PrintJob printJob);

    /**
     * Gets the active print jobs for the printers managed by this service.
     * Active print jobs are ones that are not in a final state, i.e. whose
     * state is queued or started.
     *
     * @return The active print jobs.
     *
     * @see PrintJob#isQueued() PrintJob.isQueued()
     * @see PrintJob#isStarted() PrintJob.isStarted()
     */
    public final List<PrintJob> getActivePrintJobs() {
        throwIfNotCalledOnMainThread();
        if (mClient == null) {
            // not connected to the system yet - there can be no jobs
            return Collections.emptyList();
        }
        try {
            List<PrintJob> printJobs = null;
            List<PrintJobInfo> printJobInfos = mClient.getPrintJobInfos();
            if (printJobInfos != null) {
                final int printJobInfoCount = printJobInfos.size();
                printJobs = new ArrayList<PrintJob>(printJobInfoCount);
                for (int i = 0; i < printJobInfoCount; i++) {
                    printJobs.add(new PrintJob(this, printJobInfos.get(i), mClient));
                }
            }
            if (printJobs != null) {
                return printJobs;
            }
        } catch (RemoteException re) {
            Log.e(LOG_TAG, "Error calling getPrintJobs()", re);
        }
        return Collections.emptyList();
    }

    /**
     * Generates a global printer id given the printer's locally unique one.
     *
     * @param localId A locally unique id in the context of your print service.
     * @return Global printer id.
     */
    public @NonNull final PrinterId generatePrinterId(String localId) {
        throwIfNotCalledOnMainThread();
        localId = Preconditions.checkNotNull(localId, "localId cannot be null");
        // the global id is the service component plus the service-local id
        return new PrinterId(new ComponentName(getPackageName(),
                getClass().getName()), localId);
    }

    static void throwIfNotCalledOnMainThread() {
        if (!Looper.getMainLooper().isCurrentThread()) {
            throw new IllegalAccessError("must be called from the main thread");
        }
    }

    @Override
    public final IBinder onBind(Intent intent) {
        // AIDL entry point: every incoming binder call is forwarded to mHandler so the
        // subclass callbacks run on the main thread, never on a binder thread.
        return new IPrintService.Stub() {
            @Override
            public void createPrinterDiscoverySession() {
                mHandler.sendEmptyMessage(ServiceHandler.MSG_CREATE_PRINTER_DISCOVERY_SESSION);
            }

            @Override
            public void destroyPrinterDiscoverySession() {
                mHandler.sendEmptyMessage(ServiceHandler.MSG_DESTROY_PRINTER_DISCOVERY_SESSION);
            }

            @Override
            public void startPrinterDiscovery(List<PrinterId> priorityList) {
                mHandler.obtainMessage(ServiceHandler.MSG_START_PRINTER_DISCOVERY,
                        priorityList).sendToTarget();
            }

            @Override
            public void stopPrinterDiscovery() {
                mHandler.sendEmptyMessage(ServiceHandler.MSG_STOP_PRINTER_DISCOVERY);
            }

            @Override
            public void validatePrinters(List<PrinterId> printerIds) {
                mHandler.obtainMessage(ServiceHandler.MSG_VALIDATE_PRINTERS,
                        printerIds).sendToTarget();
            }

            @Override
            public void startPrinterStateTracking(PrinterId printerId) {
                mHandler.obtainMessage(ServiceHandler.MSG_START_PRINTER_STATE_TRACKING,
                        printerId).sendToTarget();
            }

            @Override
            public void requestCustomPrinterIcon(PrinterId printerId) {
                mHandler.obtainMessage(ServiceHandler.MSG_REQUEST_CUSTOM_PRINTER_ICON,
                        printerId).sendToTarget();
            }

            @Override
            public void stopPrinterStateTracking(PrinterId printerId) {
                mHandler.obtainMessage(ServiceHandler.MSG_STOP_PRINTER_STATE_TRACKING,
                        printerId).sendToTarget();
            }

            @Override
            public void setClient(IPrintServiceClient client) {
                mHandler.obtainMessage(ServiceHandler.MSG_SET_CLIENT, client)
                        .sendToTarget();
            }

            @Override
            public void requestCancelPrintJob(PrintJobInfo printJobInfo) {
                mHandler.obtainMessage(ServiceHandler.MSG_ON_REQUEST_CANCEL_PRINTJOB,
                        printJobInfo).sendToTarget();
            }

            @Override
            public void onPrintJobQueued(PrintJobInfo printJobInfo) {
                mHandler.obtainMessage(ServiceHandler.MSG_ON_PRINTJOB_QUEUED,
                        printJobInfo).sendToTarget();
            }
        };
    }

    // Main-thread dispatcher for the binder calls posted by the stub in onBind().
    private final class ServiceHandler extends Handler {
        public static final int MSG_CREATE_PRINTER_DISCOVERY_SESSION = 1;
        public static final int MSG_DESTROY_PRINTER_DISCOVERY_SESSION = 2;
        public static final int MSG_START_PRINTER_DISCOVERY = 3;
        public static final int MSG_STOP_PRINTER_DISCOVERY = 4;
        public static final int MSG_VALIDATE_PRINTERS = 5;
        public static final int MSG_START_PRINTER_STATE_TRACKING = 6;
        public static final int MSG_REQUEST_CUSTOM_PRINTER_ICON = 7;
        public static final int MSG_STOP_PRINTER_STATE_TRACKING = 8;
        public static final int MSG_ON_PRINTJOB_QUEUED = 9;
        public static final int MSG_ON_REQUEST_CANCEL_PRINTJOB = 10;
        public static final int MSG_SET_CLIENT = 11;

        public ServiceHandler(Looper looper) {
            super(looper, null, true);
        }

        @Override
        @SuppressWarnings("unchecked")
        public void handleMessage(Message message) {
            final int action = message.what;
            switch (action) {
                case MSG_CREATE_PRINTER_DISCOVERY_SESSION: {
                    if (DEBUG) {
                        Log.i(LOG_TAG, "MSG_CREATE_PRINTER_DISCOVERY_SESSION "
                                + getPackageName());
                    }
                    PrinterDiscoverySession session = onCreatePrinterDiscoverySession();
                    if (session == null) {
                        throw new NullPointerException("session cannot be null");
                    }
                    if (session.getId() == mLastSessionId) {
                        throw new IllegalStateException("cannot reuse session instances");
                    }
                    mDiscoverySession = session;
                    mLastSessionId = session.getId();
                    session.setObserver(mClient);
                } break;

                case MSG_DESTROY_PRINTER_DISCOVERY_SESSION: {
                    if (DEBUG) {
                        Log.i(LOG_TAG, "MSG_DESTROY_PRINTER_DISCOVERY_SESSION "
                                + getPackageName());
                    }
                    if (mDiscoverySession != null) {
                        mDiscoverySession.destroy();
                        mDiscoverySession = null;
                    }
                } break;

                case MSG_START_PRINTER_DISCOVERY: {
                    if (DEBUG) {
                        Log.i(LOG_TAG, "MSG_START_PRINTER_DISCOVERY "
                                + getPackageName());
                    }
                    if (mDiscoverySession != null) {
                        List<PrinterId> priorityList = (ArrayList<PrinterId>) message.obj;
                        mDiscoverySession.startPrinterDiscovery(priorityList);
                    }
                } break;

                case MSG_STOP_PRINTER_DISCOVERY: {
                    if (DEBUG) {
                        Log.i(LOG_TAG, "MSG_STOP_PRINTER_DISCOVERY "
                                + getPackageName());
                    }
                    if (mDiscoverySession != null) {
                        mDiscoverySession.stopPrinterDiscovery();
                    }
                } break;

                case MSG_VALIDATE_PRINTERS: {
                    if (DEBUG) {
                        Log.i(LOG_TAG, "MSG_VALIDATE_PRINTERS "
                                + getPackageName());
                    }
                    if (mDiscoverySession != null) {
                        List<PrinterId> printerIds = (List<PrinterId>) message.obj;
                        mDiscoverySession.validatePrinters(printerIds);
                    }
                } break;

                case MSG_START_PRINTER_STATE_TRACKING: {
                    if (DEBUG) {
                        Log.i(LOG_TAG, "MSG_START_PRINTER_STATE_TRACKING "
                                + getPackageName());
                    }
                    if (mDiscoverySession != null) {
                        PrinterId printerId = (PrinterId) message.obj;
                        mDiscoverySession.startPrinterStateTracking(printerId);
                    }
                } break;

                case MSG_REQUEST_CUSTOM_PRINTER_ICON: {
                    if (DEBUG) {
                        Log.i(LOG_TAG, "MSG_REQUEST_CUSTOM_PRINTER_ICON "
                                + getPackageName());
                    }
                    if (mDiscoverySession != null) {
                        PrinterId printerId = (PrinterId) message.obj;
                        mDiscoverySession.requestCustomPrinterIcon(printerId);
                    }
                } break;

                case MSG_STOP_PRINTER_STATE_TRACKING: {
                    if (DEBUG) {
                        Log.i(LOG_TAG, "MSG_STOP_PRINTER_STATE_TRACKING "
                                + getPackageName());
                    }
                    if (mDiscoverySession != null) {
                        PrinterId printerId = (PrinterId) message.obj;
                        mDiscoverySession.stopPrinterStateTracking(printerId);
                    }
                } break;

                case MSG_ON_REQUEST_CANCEL_PRINTJOB: {
                    if (DEBUG) {
                        Log.i(LOG_TAG, "MSG_ON_REQUEST_CANCEL_PRINTJOB "
                                + getPackageName());
                    }
                    PrintJobInfo printJobInfo = (PrintJobInfo) message.obj;
                    onRequestCancelPrintJob(new PrintJob(PrintService.this, printJobInfo, mClient));
                } break;

                case MSG_ON_PRINTJOB_QUEUED: {
                    if (DEBUG) {
                        Log.i(LOG_TAG, "MSG_ON_PRINTJOB_QUEUED "
                                + getPackageName());
                    }
                    PrintJobInfo printJobInfo = (PrintJobInfo) message.obj;
                    if (DEBUG) {
                        Log.i(LOG_TAG, "Queued: " + printJobInfo);
                    }
                    onPrintJobQueued(new PrintJob(PrintService.this, printJobInfo, mClient));
                } break;

                case MSG_SET_CLIENT: {
                    if (DEBUG) {
                        Log.i(LOG_TAG, "MSG_SET_CLIENT "
                                + getPackageName());
                    }
                    mClient = (IPrintServiceClient) message.obj;
                    // a non-null client means the system connected; null means disconnected
                    if (mClient != null) {
                        onConnected();
                    } else {
                        onDisconnected();
                    }
                } break;

                default: {
                    throw new IllegalArgumentException("Unknown message: " + action);
                }
            }
        }
    }
}
/*
 * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License. See accompanying
 * LICENSE file.
 */
package com.pivotal.gemfirexd.internal.engine.distributed;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.pivotal.gemfirexd.DistributedSQLTestBase;
import com.pivotal.gemfirexd.TestUtil;

/**
 * DUnit tests for persistent replicated tables, in particular the behaviour of
 * SQL operations when all data stores hosting the replicated table are down
 * (expected SQLState: X0Z08 - insufficient data stores).
 */
@SuppressWarnings("serial")
public class PersistentReplicateTableDUnit extends DistributedSQLTestBase {

  private final static String DISKSTORE = "TestPersistenceDiskStore";

  public PersistentReplicateTableDUnit(String name) {
    super(name);
  }

  /** DDL suffix making the test table persistent on the shared disk store. */
  public String getSuffix() throws Exception {
    String suffix = " PERSISTENT " + "'" + DISKSTORE + "'";
    return suffix;
  }

  /**
   * Creates the disk store on the given VM.
   *
   * @param useClient whether the target VM is a client (vmNum == 1 means this VM)
   * @param vmNum the VM number to execute on
   */
  public void createDiskStore(boolean useClient, int vmNum) throws Exception {
    CacheSerializableRunnable csr = getDiskStoreCreator(DISKSTORE);
    if (useClient) {
      if (vmNum == 1) {
        csr.run2();
      } else {
        clientExecute(vmNum, csr);
      }
    } else {
      serverExecute(vmNum, csr);
    }
  }

  @Override
  protected String[] testSpecificDirectoriesForDeletion() {
    return new String[] { "test_dir" };
  }

  /**
   * Test insufficient data store behaviour for distributed/update/delete/select
   * and for primary key based select/update/delete
   *
   * @throws Exception
   */
  public void testInsufficientDatastoreBehaviourBug42447() throws Exception {
    startVMs(1, 3);
    createDiskStore(true, 1);
    // Create a schema
    clientSQLExecute(1, "create schema trade");
    clientSQLExecute(1, "create table trade.customers (cid int not null, "
        + "cust_name varchar(100), tid int, primary key (cid)) replicate "
        + getSuffix());
    Connection conn = TestUtil.getConnection();
    PreparedStatement psInsert = conn
        .prepareStatement("insert into trade.customers values (?,?,?)");
    for (int i = 1; i < 31; ++i) {
      psInsert.setInt(1, i);
      psInsert.setString(2, "name" + i);
      psInsert.setInt(3, i);
      psInsert.executeUpdate();
    }
    // stop all data stores, then restart only the client so no store hosts the table
    stopVMNums(-3, -2, -1);
    stopVMNum(1);
    restartVMNums(1);

    try {
      conn = TestUtil.getConnection();
      Statement stmt = conn.createStatement();

      // Test bulk operations
      try {
        stmt.executeQuery("select * from trade.customers");
        fail("Test should fail due to insufficient data stores");
      } catch (SQLException sqle) {
        assertEquals("X0Z08", sqle.getSQLState());
      }

      try {
        PreparedStatement ps = conn
            .prepareStatement("select * from trade.customers " + "where tid > ?");
        ps.setInt(1, 0);
        ps.executeQuery();
        fail("Test should fail due to insufficient data stores");
      } catch (SQLException sqle) {
        assertEquals("X0Z08", sqle.getSQLState());
      }

      try {
        stmt.executeUpdate("update trade.customers set tid = 5 where tid > 3");
        fail("Test should fail due to insufficient data stores");
      } catch (SQLException sqle) {
        assertEquals("X0Z08", sqle.getSQLState());
      }

      try {
        PreparedStatement ps = conn
            .prepareStatement("update trade.customers set tid = ?"
                + " where tid > ?");
        ps.setInt(1, 5);
        ps.setInt(2, 3);
        ps.executeUpdate();
        fail("Test should fail due to insufficient data stores");
      } catch (SQLException sqle) {
        assertEquals("X0Z08", sqle.getSQLState());
      }

      try {
        stmt.executeUpdate(" delete from trade.customers where tid > 3");
        fail("Test should fail due to insufficient data stores");
      } catch (SQLException sqle) {
        assertEquals("X0Z08", sqle.getSQLState());
      }

      try {
        PreparedStatement ps = conn
            .prepareStatement(" delete from trade.customers where tid > ?");
        ps.setInt(1, 3);
        ps.executeUpdate();
        fail("Test should fail due to insufficient data stores");
      } catch (SQLException sqle) {
        assertEquals("X0Z08", sqle.getSQLState());
      }

      // Test PK based operations
      try {
        ResultSet rs = stmt
            .executeQuery("select * from trade.customers where cid = 1");
        rs.next();
        fail("Test should fail due to insufficient data stores");
      } catch (SQLException sqle) {
        // FIX: expected/actual were swapped here relative to every other
        // assertion in this test; JUnit's assertEquals takes expected first
        assertEquals("X0Z08", sqle.getSQLState());
      }

      try {
        PreparedStatement ps = conn
            .prepareStatement("select * from trade.customers " + "where cid = ?");
        ps.setInt(1, 1);
        ResultSet rs = ps.executeQuery();
        rs.next();
        fail("Test should fail due to insufficient data stores");
      } catch (SQLException sqle) {
        assertEquals("X0Z08", sqle.getSQLState());
      }

      try {
        stmt.executeUpdate("update trade.customers set tid = 5 where cid = 3");
        fail("Test should fail due to insufficient data stores");
      } catch (SQLException sqle) {
        assertEquals("X0Z08", sqle.getSQLState());
      }

      // NOTE(review): unlike the other blocks this insert intentionally(?) has no
      // fail() call, so a successful insert would pass silently - confirm whether
      // inserts are expected to fail here as well
      try {
        psInsert = conn
            .prepareStatement("insert into trade.customers values (?,?,?)");
        psInsert.setInt(1, 40);
        psInsert.setString(2, "name40");
        psInsert.setInt(3, 40);
        psInsert.executeUpdate();
      } catch (SQLException sqle) {
        assertEquals("X0Z08", sqle.getSQLState());
      }

      try {
        PreparedStatement ps = conn
            .prepareStatement("update trade.customers set tid = ?"
                + " where cid = ?");
        ps.setInt(1, 5);
        ps.setInt(2, 3);
        ps.executeUpdate();
        fail("Test should fail due to insufficient data stores");
      } catch (SQLException sqle) {
        assertEquals("X0Z08", sqle.getSQLState());
      }

      try {
        stmt.executeUpdate(" delete from trade.customers where cid = 3");
        fail("Test should fail due to insufficient data stores");
      } catch (SQLException sqle) {
        assertEquals("X0Z08", sqle.getSQLState());
      }

      try {
        PreparedStatement ps = conn
            .prepareStatement(" delete from trade.customers where cid = ?");
        ps.setInt(1, 3);
        ps.executeUpdate();
        fail("Test should fail due to insufficient data stores");
      } catch (SQLException sqle) {
        assertEquals("X0Z08", sqle.getSQLState());
      }
    } finally {
      // restartVMNums(-1, -2, -3);
      // restarting a VM so that DB cleanup can go on without problems else
      // subsequent tests start seeing stale datadictionary
      restartVMNums(-1);
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.azure.storage.blob.integration;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;
import java.util.LinkedList;
import java.util.List;

import com.azure.storage.blob.BlobContainerClient;
import com.azure.storage.blob.models.PageRange;
import org.apache.camel.EndpointInject;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.azure.storage.blob.BlobBlock;
import org.apache.camel.component.azure.storage.blob.BlobConstants;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.commons.lang3.RandomStringUtils;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Integration tests for the Azure Storage Blob producer operations
 * (block, append and page blob uploads) routed through Camel.
 *
 * <p>Requires a live/emulated Azure Blob service supplied by {@code Base}
 * ({@code serviceClient} and {@code containerName} come from there —
 * TODO confirm against the base class, which is outside this file).
 * A fresh container is created before the tests and deleted afterwards.</p>
 */
class BlobProducerIT extends Base {

    @EndpointInject
    private ProducerTemplate template;

    @EndpointInject("mock:result")
    private MockEndpoint result;

    /** Endpoint URI every producer route forwards to for assertions. */
    private String resultName = "mock:result";
    private BlobContainerClient containerClient;

    /**
     * Creates the test container all blob operations run against.
     */
    @BeforeAll
    public void prepare() {
        // create test container
        containerClient = serviceClient.getBlobContainerClient(containerName);
        containerClient.create();
    }

    /**
     * Uploads a simple block blob and verifies one exchange reaches the mock.
     */
    @Test
    void testUploadBlockBlob() throws InterruptedException {
        final String blobName = RandomStringUtils.randomAlphabetic(10);

        result.expectedMessageCount(1);

        template.send("direct:uploadBlockBlob", exchange -> {
            exchange.getIn().setHeader(BlobConstants.BLOB_NAME, blobName);
            exchange.getIn().setBody("Block Blob");
        });

        result.assertIsSatisfied();
    }

    /**
     * Stages a list of blocks and commits them immediately
     * ({@code COMMIT_BLOCK_LIST_LATER=false}); the committed blob must carry an ETag.
     */
    @Test
    void testCommitAndStageBlockBlob() throws InterruptedException, IOException {
        final String blobName = RandomStringUtils.randomAlphabetic(10);

        result.expectedMessageCount(1);
        result.expectedBodiesReceived(true);

        template.send("direct:stageBlockBlobList", exchange -> {
            exchange.getIn().setHeader(BlobConstants.BLOB_NAME, blobName);
            exchange.getIn().setHeader(BlobConstants.COMMIT_BLOCK_LIST_LATER, false);

            final List<BlobBlock> blocks = new LinkedList<>();
            blocks.add(BlobBlock.createBlobBlock(new ByteArrayInputStream("Hello".getBytes())));
            blocks.add(BlobBlock.createBlobBlock(new ByteArrayInputStream("From".getBytes())));
            blocks.add(BlobBlock.createBlobBlock(new ByteArrayInputStream("Camel".getBytes())));

            exchange.getIn().setBody(blocks);
        });

        result.assertIsSatisfied();

        assertNotNull(result.getExchanges().get(0).getMessage().getHeader(BlobConstants.E_TAG));
    }

    /**
     * Appending with {@code CREATE_APPEND_BLOB=false} against a blob that does not
     * exist must fail, so no exchange ever reaches the mock endpoint.
     */
    @Test
    void testCommitAppendBlobWithError() throws InterruptedException {
        final String blobName = RandomStringUtils.randomAlphabetic(10);

        template.send("direct:commitAppendBlobWithError", exchange -> {
            exchange.getIn().setHeader(BlobConstants.BLOB_NAME, blobName);
            exchange.getIn().setHeader(BlobConstants.CREATE_APPEND_BLOB, false);

            final String data = "Hello world from my awesome tests!";
            final InputStream dataStream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));

            exchange.getIn().setBody(dataStream);
        });

        result.assertIsSatisfied();

        // append blob not created because of the flag
        assertTrue(result.getExchanges().isEmpty());
    }

    /**
     * Creates an append blob (default behavior) and verifies the response
     * carries both an ETag and a committed-block count.
     */
    @Test
    void testCreateAndUpdateAppendBlob() throws InterruptedException {
        final String blobName = RandomStringUtils.randomAlphabetic(10);

        result.expectedMessageCount(1);
        result.expectedBodiesReceived(true);

        template.send("direct:commitAppendBlob", exchange -> {
            exchange.getIn().setHeader(BlobConstants.BLOB_NAME, blobName);

            final String data = "Hello world from my awesome tests!";
            final InputStream dataStream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));

            exchange.getIn().setBody(dataStream);
        });

        result.assertIsSatisfied();

        assertNotNull(result.getExchanges().get(0).getMessage().getHeader(BlobConstants.E_TAG));
        assertNotNull(result.getExchanges().get(0).getMessage().getHeader(BlobConstants.COMMITTED_BLOCK_COUNT));
    }

    /**
     * Uploads one 512-byte page (page blobs require 512-byte-aligned ranges)
     * and verifies the resulting ETag header.
     */
    @Test
    void testCreateAndUploadPageBlob() throws InterruptedException {
        final String blobName = RandomStringUtils.randomAlphabetic(10);

        result.expectedMessageCount(1);
        result.expectedBodiesReceived(true);

        template.send("direct:uploadPageBlob", exchange -> {
            exchange.getIn().setHeader(BlobConstants.BLOB_NAME, blobName);

            byte[] dataBytes = new byte[512]; // we set range for the page from 0-511
            new SecureRandom().nextBytes(dataBytes);
            final InputStream dataStream = new ByteArrayInputStream(dataBytes);
            final PageRange pageRange = new PageRange().setStart(0).setEnd(511);

            exchange.getIn().setHeader(BlobConstants.PAGE_BLOB_RANGE, pageRange);
            exchange.getIn().setBody(dataStream);
        });

        result.assertIsSatisfied();

        assertNotNull(result.getExchanges().get(0).getMessage().getHeader(BlobConstants.E_TAG));
    }

    /**
     * Uploads a block blob whose name comes from the endpoint URI
     * ({@code blobName=uploadBlockName}) instead of a header.
     */
    @Test
    void testUploadBlockBlobWithConfigUri() throws InterruptedException {
        result.expectedMessageCount(1);

        template.send("direct:uploadBlockBlobWithConfigUri", exchange -> exchange.getIn().setBody("Block Blob"));

        result.assertIsSatisfied();
    }

    /**
     * Verifies that unrelated inbound headers survive the producer call.
     *
     * <p>Fix: the original asserted on {@code result.getExchanges()} <em>before</em>
     * {@code assertIsSatisfied()}, racing the mock endpoint and potentially reading
     * an empty exchange list. The mock must be satisfied first.</p>
     */
    @Test
    void testHeaderPreservation() throws InterruptedException {
        result.expectedMessageCount(1);

        template.send("direct:uploadBlockBlobWithConfigUri", exchange -> {
            exchange.getIn().setBody("Block Blob");
            exchange.getIn().setHeader("DoNotDelete", "keep me");
        });

        // Wait for the exchange to arrive before inspecting it.
        result.assertIsSatisfied();

        assertEquals("keep me", result.getExchanges().get(0).getMessage().getHeader("DoNotDelete"));
    }

    /**
     * Removes the test container and everything uploaded into it.
     */
    @AfterAll
    public void tearDown() {
        containerClient.delete();
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:uploadBlockBlob")
                        .to(componentUri("uploadBlockBlob"))
                        .to(resultName);

                from("direct:stageBlockBlobList")
                        .to(componentUri("stageBlockBlobList"))
                        .to(resultName);

                from("direct:commitAppendBlob")
                        .to(componentUri("commitAppendBlob"))
                        .to(resultName);

                from("direct:commitAppendBlobWithError")
                        .to(componentUri("commitAppendBlob"))
                        .to(resultName);

                from("direct:uploadPageBlob")
                        .to(componentUri("uploadPageBlob"))
                        .to(resultName);

                from("direct:uploadBlockBlobWithConfigUri")
                        .to(componentUri("uploadBlockBlob") + "&blobName=uploadBlockName")
                        .to(resultName);
            }
        };
    }

    /**
     * Builds the azure-storage-blob endpoint URI for the given operation
     * against the shared test container.
     */
    private String componentUri(final String operation) {
        return String.format("azure-storage-blob://cameldev/%s?operation=%s", containerName, operation);
    }
}
package nkarasch.repeatingreminder.gui;
/*
 * Copyright (C) 2015-2016 Nick Karasch <nkarasch@gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import android.app.AlertDialog;
import android.app.NotificationManager;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.media.RingtoneManager;
import android.net.Uri;
import android.support.annotation.NonNull;
import android.support.v4.app.FragmentActivity;
import android.support.v4.content.ContextCompat;
import android.support.v7.widget.SwitchCompat;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.Transformation;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;

import com.codetroopers.betterpickers.hmspicker.HmsPickerBuilder;
import com.codetroopers.betterpickers.hmspicker.HmsPickerDialogFragment;

import butterknife.ButterKnife;
import butterknife.BindView;
import butterknife.OnCheckedChanged;
import butterknife.OnClick;
import nkarasch.repeatingreminder.scheduling.AlarmHandler;
import nkarasch.repeatingreminder.Alert;
import nkarasch.repeatingreminder.R;
import nkarasch.repeatingreminder.utils.ColorUtils;

import static nkarasch.repeatingreminder.gui.ProgrammableStyleableRadialTimePickerDialogFragment.*;

/**
 * List-row view that renders and edits a single {@link Alert}: an on/off
 * switch, repeat frequency, optional day-of-week schedule with start/end
 * times, and per-alert options (vibrate, wake, mute, ringtone).
 *
 * <p>Rows are recycled by {@link AlertListAdapter}; {@link #update} rebinds
 * this view to a new {@link Alert} each time. Any change to the alert's
 * configuration calls {@code stopAlert()} so a running alert never keeps
 * stale settings. The hosting {@link Context} must be a
 * {@link FragmentActivity} (the constructors cast unconditionally).</p>
 */
public class AlertView extends LinearLayout {

    private final FragmentActivity mContext;
    private AlertListAdapter mAdapter;
    private AlarmHandler mAlarmHandler;
    private Alert mAlert;
    // Position of the bound alert in the adapter; passed to the ringtone
    // picker via the activity intent so onActivityResult can find the alert.
    private int mPosition;
    // True while update()/setState() programmatically sets checkbox/switch
    // state; @OnCheckedChanged handlers bail out so rebinding a recycled
    // row does not mutate the alert or fire start/stop side effects.
    private boolean mSettingState;

    //main display components
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.text_frequency)
    TextView textFrequency;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.switch_on_off)
    SwitchCompat switchOnOff;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.text_label_display)
    TextView textLabel;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.text_days_display)
    TextView textDays;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.text_times_display)
    TextView textTimes;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.iv_expand_down)
    ImageView imageDownArrow;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.rl_schedule_expansion)
    RelativeLayout layoutSchedule;

    //expansion components
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.checkbox_vibrate)
    CheckBox checkVibrate;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.text_ringtone)
    TextView textRingtone;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.checkbox_schedule)
    CheckBox checkSchedule;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.checkbox_schedule_text)
    TextView checkScheduleText;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.text_alarm_on)
    TextView textStartTime;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.text_alarm_off)
    TextView textEndTime;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.ll_schedule_days)
    LinearLayout layoutScheduleDays;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.rl_expansion)
    RelativeLayout layoutExpansion;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.checkbox_wake)
    CheckBox checkWake;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.checkbox_mute)
    CheckBox checkMute;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.sunday_toggle)
    CircleToggleButton sundayButton;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.monday_toggle)
    CircleToggleButton mondayButton;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.tuesday_toggle)
    CircleToggleButton tuesdayButton;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.wednesday_toggle)
    CircleToggleButton wednesdayButton;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.thursday_toggle)
    CircleToggleButton thursdayButton;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.friday_toggle)
    CircleToggleButton fridayButton;
    @SuppressWarnings("WeakerAccess")
    @BindView(R.id.saturday_toggle)
    CircleToggleButton saturdayButton;

    // Day toggles indexed 0=Sunday .. 6=Saturday, matching Alert.isDayEnabled(i).
    private final CircleToggleButton[] mDayOfWeekButtons = new CircleToggleButton[7];

    public AlertView(final Context context) {
        super(context);
        this.mContext = (FragmentActivity) context;
    }

    public AlertView(final Context context, AttributeSet attrs) {
        super(context, attrs);
        this.mContext = (FragmentActivity) context;
    }

    public AlertView(final Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        this.mContext = (FragmentActivity) context;
    }

    /**
     * Re-applies expanded/collapsed styling whenever visibility changes,
     * so recycled rows show the correct state without animating.
     * NOTE(review): reads mAlert without a null check — assumes update()
     * has already run by the time this fires; TODO confirm.
     */
    @Override
    protected void onVisibilityChanged(@NonNull View changedView, int visibility) {
        super.onVisibilityChanged(changedView, visibility);
        if (mAlert.isExpanded()) {
            setBackgroundColor(ColorUtils.getTintedBackgroundColor());
            layoutExpansion.setVisibility(View.VISIBLE);
            imageDownArrow.setVisibility(View.GONE);
        } else {
            setBackgroundColor(ColorUtils.getCurrentHourColor());
            layoutExpansion.setVisibility(View.GONE);
            imageDownArrow.setVisibility(View.VISIBLE);
        }
    }

    /**
     * Binds this row to the alert at {@code position} in the adapter.
     * Runs inside the mSettingState guard so programmatic state changes
     * don't trigger the checked-change handlers. A newly created alert
     * with an invalid (<5s) frequency immediately opens the frequency
     * picker so the user must choose one.
     *
     * @param adapter      owning adapter (also used to persist changes)
     * @param alarmHandler scheduler used to start/stop the alert
     * @param position     index of the alert in the adapter
     */
    public void update(AlertListAdapter adapter, AlarmHandler alarmHandler, int position) {
        this.mAdapter = adapter;
        this.mAlarmHandler = alarmHandler;
        this.mAlert = adapter.getItem(position);
        this.mPosition = position;
        ButterKnife.bind(this);

        mSettingState = true;
        mDayOfWeekButtons[0] = sundayButton;
        mDayOfWeekButtons[1] = mondayButton;
        mDayOfWeekButtons[2] = tuesdayButton;
        mDayOfWeekButtons[3] = wednesdayButton;
        mDayOfWeekButtons[4] = thursdayButton;
        mDayOfWeekButtons[5] = fridayButton;
        mDayOfWeekButtons[6] = saturdayButton;
        createDayOfWeekButtons();
        setState();
        if (mAlert.getFrequency() < 5 && mAlert.isNewlyCreated()) {
            frequencyDisplayOnClick();
            mAlert.disableNewlyCreated();
        }
        mSettingState = false;
    }

    /**
     * Pushes every field of the bound Alert into the widgets: expansion
     * styling, frequency/label/day/time text, option checkboxes, day
     * toggles, and schedule-section visibility.
     */
    private void setState() {
        if (mAlert.isExpanded()) {
            setBackgroundColor(ColorUtils.getTintedBackgroundColor());
            layoutExpansion.setVisibility(View.VISIBLE);
            imageDownArrow.setVisibility(View.GONE);
        } else {
            setBackgroundColor(ColorUtils.getCurrentHourColor());
            layoutExpansion.setVisibility(View.GONE);
            imageDownArrow.setVisibility(View.VISIBLE);
        }

        textFrequency.setText(mAlert.getFrequencyDisplay());
        switchOnOff.setChecked(mAlert.isOn());
        textLabel.bringToFront();
        textLabel.setText(mAlert.getLabel());
        textDays.setText(mAlert.getDaysDisplay());
        textTimes.setText(mAlert.getTimeDisplay());
        checkWake.setChecked(mAlert.isWake());
        checkMute.setChecked(mAlert.isMute());

        for (int i = 0; i < 7; i++) {
            mDayOfWeekButtons[i].setActivated(mAlert.isDayEnabled(i));
        }

        checkVibrate.setChecked(mAlert.isVibrate());
        textRingtone.setText(mAlert.getToneDisplay());
        checkSchedule.setChecked(mAlert.isSchedule());
        if (mAlert.isSchedule()) {
            layoutScheduleDays.setVisibility(View.VISIBLE);
            layoutSchedule.setVisibility(View.VISIBLE);
        } else {
            layoutScheduleDays.setVisibility(View.GONE);
            layoutSchedule.setVisibility(View.GONE);
        }
        textDays.setText(mAlert.getDaysDisplay());
        textStartTime.setText(mAlert.getStartTimeDisplay());
        textEndTime.setText(mAlert.getEndTimeDisplay());
    }

    /**
     * Wires a touch listener to each day toggle. ACTION_UP flips the day
     * in the Alert, refreshes the days text, stops a running alert, and
     * toggles the button's activated state; DOWN/CANCEL drive the
     * button's own press animation. Returns true to consume the event.
     */
    private void createDayOfWeekButtons() {
        for (int i = 0; i < 7; i++) {
            final CircleToggleButton button = mDayOfWeekButtons[i];
            final int iterator = i; // effectively-final copy for the listener
            button.setOnTouchListener(new OnTouchListener() {
                @Override
                public boolean onTouch(View v, MotionEvent event) {
                    v.onTouchEvent(event);
                    switch (event.getAction()) {
                        case MotionEvent.ACTION_UP:
                            if (mAlert.isDayEnabled(iterator)) {
                                mAlert.setDayEnabled(false, iterator);
                            } else {
                                mAlert.setDayEnabled(true, iterator);
                            }
                            textDays.setText(mAlert.getDaysDisplay());
                            stopAlert();
                            button.setActivated(!button.isActivated());
                            button.actionUp();
                            break;
                        case MotionEvent.ACTION_DOWN:
                            button.actionDown(event);
                            break;
                        case MotionEvent.ACTION_CANCEL:
                            button.actionCancel();
                            break;
                    }
                    invalidate();
                    return true;
                }
            });
        }
    }

    /**
     * Opens a themed dialog for editing the alert's label; OK stores the
     * text, refreshes the display, and stops a running alert.
     */
    @OnClick(R.id.text_label_display)
    public void labelOnClick() {
        final EditText input = new EditText(mContext);
        input.setSingleLine();
        final int accentColor = ContextCompat.getColor(mContext, R.color.accent);
        final int textColor = Color.WHITE;

        final AlertDialog labelDialog = new DialogBuilder(mContext)
                .setTitle("Set Label")
                .setTitleColor(accentColor)
                .setDividerColor(accentColor)
                .setView(input)
                .setPositiveButton("Ok", new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int whichButton) {
                        mAlert.setLabel(input.getText().toString());
                        textLabel.setText(mAlert.getLabel());
                        stopAlert();
                    }
                }).setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int whichButton) {
                        // cancel: no changes
                    }
                }).create();

        // Button colors can only be set after the dialog is shown.
        labelDialog.setOnShowListener(new DialogInterface.OnShowListener() {
            @Override
            public void onShow(DialogInterface dialog) {
                labelDialog.getButton(AlertDialog.BUTTON_POSITIVE).setTextColor(textColor);
                labelDialog.getButton(AlertDialog.BUTTON_NEGATIVE).setTextColor(textColor);
            }
        });
        labelDialog.show();
    }

    /**
     * Opens the H/M/S picker for the repeat frequency. Values under
     * 5 seconds are rejected with a toast; valid values update the alert
     * and stop it if running.
     */
    @SuppressWarnings("WeakerAccess")
    @OnClick(R.id.text_frequency)
    public void frequencyDisplayOnClick() {
        new HmsPickerBuilder()
                .setFragmentManager(mContext.getSupportFragmentManager())
                .setStyleResId(R.style.frequency_picker_dialog).addHmsPickerDialogHandler(new HmsPickerDialogFragment.HmsPickerDialogHandlerV2() {
            @Override
            public void onDialogHmsSet(int reference, boolean isNegative, int hours, int minutes, int seconds) {
                int frequency = hours * 3600 + minutes * 60 + seconds;
                if (frequency >= 5) {
                    mAlert.setFrequency(frequency);
                    if (mAlert.getFrequencyDisplay() != null) {
                        textFrequency.setText(mAlert.getFrequencyDisplay());
                    }
                    stopAlert();
                } else {
                    Toast.makeText(mContext, "Repeating actions at intervals under 5 seconds is disabled.", Toast.LENGTH_LONG).show();
                }
            }
        }).show();
    }

    /**
     * Toggles the expanded options panel with an expand/collapse animation
     * and persists the new expansion state.
     */
    @OnClick({R.id.rl_display, R.id.rl_expansion})
    public void onDisplayLayoutClicked() {
        if (mAlert.isExpanded()) {
            setBackgroundColor(ColorUtils.getCurrentHourColor());
            mAlert.setExpanded(false);
            collapseView(layoutExpansion);
            imageDownArrow.setVisibility(View.VISIBLE);
        } else {
            setBackgroundColor(ColorUtils.getTintedBackgroundColor());
            mAlert.setExpanded(true);
            expandView(layoutExpansion);
            imageDownArrow.setVisibility(View.GONE);
        }
        mAdapter.saveData();
    }

    /**
     * Starts/stops the alert when the user flips the switch; ignored while
     * update() is programmatically setting state.
     */
    @OnCheckedChanged(R.id.switch_on_off)
    public void onOffSwitchChanged(boolean isChecked) {
        if (mSettingState) {
            return;
        }
        if (isChecked) {
            startAlert();
        } else {
            stopAlert();
        }
    }

    @OnCheckedChanged(R.id.checkbox_vibrate)
    public void vibrateOnCheckChanged(boolean isChecked) {
        if (mSettingState) {
            return;
        }
        mAlert.setVibrate(isChecked);
        stopAlert();
    }

    /** Tapping the label text toggles the vibrate checkbox. */
    @OnClick(R.id.checkbox_vibrate_text)
    public void vibrateCheck() {
        checkVibrate.setChecked(!checkVibrate.isChecked());
    }

    @OnCheckedChanged(R.id.checkbox_wake)
    public void wakeOnCheckChanged(boolean isChecked) {
        if (mSettingState) {
            return;
        }
        mAlert.setWake(isChecked);
        stopAlert();
    }

    /** Tapping the label text toggles the wake checkbox. */
    @OnClick(R.id.checkbox_wake_text)
    public void ledCheck() {
        checkWake.setChecked(!checkWake.isChecked());
    }

    @OnCheckedChanged(R.id.checkbox_mute)
    public void muteOnCheckChanged(boolean isChecked) {
        if (mSettingState) {
            return;
        }
        mAlert.setMute(isChecked);
        stopAlert();
    }

    /** Tapping the label text toggles the mute checkbox. */
    @OnClick(R.id.checkbox_mute_text)
    public void muteCheck() {
        checkMute.setChecked(!checkMute.isChecked());
    }

    /**
     * Enables/disables the day+time schedule. While rebinding, only the
     * section visibility is synced; user toggles also update the alert,
     * refresh the schedule text, and stop a running alert.
     */
    @OnCheckedChanged(R.id.checkbox_schedule)
    public void scheduleOnCheckChanged(boolean isChecked) {
        if (mSettingState) {
            expandScheduleDays(isChecked);
            return;
        }
        mAlert.setSchedule(isChecked);
        expandScheduleDays(isChecked);
        textDays.setText(mAlert.getDaysDisplay());
        textStartTime.setText(mAlert.getStartTimeDisplay());
        textEndTime.setText(mAlert.getEndTimeDisplay());
        textTimes.setText(mAlert.getTimeDisplay());
        stopAlert();
    }

    /**
     * Shows or hides the schedule rows and adjusts checkbox padding to
     * keep row spacing consistent in both states.
     */
    private void expandScheduleDays(boolean isSchedule) {
        if (isSchedule) {
            layoutSchedule.setVisibility(View.VISIBLE);
            layoutScheduleDays.setVisibility(View.VISIBLE);
            checkSchedule.setPadding(0, 0, 0, (int) getResources().getDimension(R.dimen.alarm_bottom_padding) / 2);
            checkScheduleText.setPadding((int) getResources().getDimension(R.dimen.alarm_text_left_padding), 0, 0,
                    (int) getResources().getDimension(R.dimen.alarm_bottom_padding) / 2);
        } else {
            // NOTE(review): the two VISIBLE calls below are immediately
            // overwritten by GONE and appear to be dead code — confirm and
            // consider removing.
            layoutSchedule.setVisibility(View.VISIBLE);
            layoutScheduleDays.setVisibility(View.VISIBLE);
            layoutSchedule.setVisibility(View.GONE);
            layoutScheduleDays.setVisibility(View.GONE);
            checkSchedule.setPadding(0, 0, 0, (int) getResources().getDimension(R.dimen.alarm_bottom_padding));
            checkScheduleText.setPadding((int) getResources().getDimension(R.dimen.alarm_text_left_padding), 0, 0,
                    (int) getResources().getDimension(R.dimen.alarm_bottom_padding));
        }
    }

    /** Tapping the label text toggles the schedule checkbox. */
    @OnClick(R.id.checkbox_schedule_text)
    public void checkSchedule() {
        checkSchedule.setChecked(!checkSchedule.isChecked());
    }

    /**
     * Opens a radial time picker for the schedule start time. The chosen
     * time is only accepted if it falls before the current end time.
     */
    @OnClick({R.id.img_alarm_on, R.id.text_alarm_on})
    public void startTimeOnClick() {
        final ProgrammableStyleableRadialTimePickerDialogFragment timePickerDialog =
                new ProgrammableStyleableRadialTimePickerDialogFragment();
        timePickerDialog.setOnTimeSetListener(new OnTimeSetListener() {
            @Override
            public void onTimeSet(ProgrammableStyleableRadialTimePickerDialogFragment dialog, int hourOfDay, int minute) {
                if (isTimeBefore(hourOfDay, minute, mAlert.getEndHour(), mAlert.getEndMinute())) {
                    mAlert.setStartTime(hourOfDay, minute);
                    textStartTime.setText(mAlert.getStartTimeDisplay());
                    textTimes.setText(mAlert.getTimeDisplay());
                    stopAlert();
                    timePickerDialog.dismiss();
                } else {
                    Toast.makeText(mContext, "Scheduled start time must be before the end time ("
                            + mAlert.getEndTimeDisplay() + ").", Toast.LENGTH_SHORT).show();
                }
            }
        });
        timePickerDialog.setStartTime(mAlert.getStartHour(), mAlert.getStartMinute());

        // Style the picker to match the app's dynamic color scheme.
        ProgrammableStyleableRadialTimePickerDialogFragment.ProgrammableStyle style = timePickerDialog.new ProgrammableStyle();
        style.headerBgColor = ContextCompat.getColor(mContext, R.color.primary);
        style.bodyBgColor = ColorUtils.getTintedBackgroundColor();
        style.buttonBgColor = ColorUtils.getCurrentHourColor();
        style.buttonTextColor = ContextCompat.getColor(mContext, android.R.color.white);
        style.selectedColor = ContextCompat.getColor(mContext, android.R.color.white);
        style.unselectedColor = ContextCompat.getColor(mContext, android.R.color.white);
        timePickerDialog.setStyleProgramatically(style);

        timePickerDialog.show(mContext.getSupportFragmentManager(), null);
    }

    /**
     * Opens a radial time picker for the schedule end time. The chosen
     * time is only accepted if it falls after the current start time.
     */
    @OnClick({R.id.img_alarm_off, R.id.text_alarm_off})
    public void endTimeOnClick() {
        final ProgrammableStyleableRadialTimePickerDialogFragment timePickerDialog =
                new ProgrammableStyleableRadialTimePickerDialogFragment();
        timePickerDialog.setOnTimeSetListener(new OnTimeSetListener() {
            @Override
            public void onTimeSet(ProgrammableStyleableRadialTimePickerDialogFragment dialog, int hourOfDay, int minute) {
                if (isTimeBefore(mAlert.getStartHour(), mAlert.getStartMinute(), hourOfDay, minute)) {
                    mAlert.setEndTime(hourOfDay, minute);
                    textEndTime.setText(mAlert.getEndTimeDisplay());
                    textTimes.setText(mAlert.getTimeDisplay());
                    stopAlert();
                    timePickerDialog.dismiss();
                } else {
                    Toast.makeText(mContext, "Scheduled end time must be after the start time ("
                            + mAlert.getStartTimeDisplay() + ").", Toast.LENGTH_SHORT).show();
                }
            }
        });
        // Pre-select the current end time in the picker.
        timePickerDialog.setStartTime(mAlert.getEndHour(), mAlert.getEndMinute());

        ProgrammableStyleableRadialTimePickerDialogFragment.ProgrammableStyle style = timePickerDialog.new ProgrammableStyle();
        style.headerBgColor = ContextCompat.getColor(mContext, R.color.primary);
        style.bodyBgColor = ColorUtils.getTintedBackgroundColor();
        style.buttonBgColor = ColorUtils.getCurrentHourColor();
        style.buttonTextColor = ContextCompat.getColor(mContext, android.R.color.white);
        style.selectedColor = ContextCompat.getColor(mContext, android.R.color.white);
        style.unselectedColor = ContextCompat.getColor(mContext, android.R.color.white);
        timePickerDialog.setStyleProgramatically(style);

        timePickerDialog.show(mContext.getSupportFragmentManager(), null);
    }

    /**
     * Launches the system ringtone picker; the alert's adapter position is
     * stashed on the activity intent so onActivityResult (request code 5,
     * handled in the activity) can route the result back to this alert.
     */
    @OnClick({R.id.img_ringtone, R.id.text_ringtone})
    public void toneOnClick() {
        Intent intent = new Intent(RingtoneManager.ACTION_RINGTONE_PICKER);
        intent.putExtra(RingtoneManager.EXTRA_RINGTONE_TYPE, RingtoneManager.TYPE_NOTIFICATION);
        intent.putExtra(RingtoneManager.EXTRA_RINGTONE_TITLE, "Select Tone");
        intent.putExtra(RingtoneManager.EXTRA_RINGTONE_EXISTING_URI, (Uri) null);
        mContext.getIntent().putExtra("array_index", mPosition);
        mContext.startActivityForResult(intent, 5);
        stopAlert();
    }

    /** Removes this alert from the adapter and stops it if running. */
    @OnClick(R.id.ib_delete)
    public void deleteOnClick() {
        mAdapter.remove(mAlert);
        stopAlert();
    }

    /**
     * Starts the alert via the AlarmHandler if it is off and its frequency
     * is valid (>= 5s); otherwise resets the switch. Always persists.
     */
    private void startAlert() {
        if (!mAlert.isOn()) {
            if (mAlert.getFrequency() >= 5) {
                switchOnOff.setChecked(true);
                mAlert.setOn(true);
                mAlarmHandler.startAlert(mAlert);
                Toast.makeText(mContext, "Started", Toast.LENGTH_SHORT).show();
            } else {
                switchOnOff.setChecked(false);
                stopAlert();
            }
        }
        mAdapter.saveData();
    }

    /**
     * Stops a running alert: flips the switch off, cancels scheduling and
     * the alert's notification, refreshes the frequency text, and persists.
     * No-op (apart from persisting) when the alert is already off.
     */
    private void stopAlert() {
        if (mAlert.isOn()) {
            switchOnOff.setChecked(false);
            mAlert.setOn(false);
            mAlarmHandler.stopAlert(mAlert);
            if (mAlert.getFrequencyDisplay() != null) {
                textFrequency.setText(mAlert.getFrequencyDisplay());
            }
            NotificationManager mNotificationManager = (NotificationManager) mContext.getSystemService(Context.NOTIFICATION_SERVICE);
            mNotificationManager.cancel(mAlert.getId());
            Toast.makeText(mContext, "Stopped", Toast.LENGTH_SHORT).show();
        }
        mAdapter.saveData();
    }

    /**
     * Animates a view from zero height to its measured height; duration
     * scales with the pixel height (1ms per dp).
     */
    private static void expandView(final View view) {
        view.measure(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT);
        final int targetHeight = view.getMeasuredHeight();

        view.getLayoutParams().height = 0;
        view.setVisibility(View.VISIBLE);
        Animation animation = new Animation() {
            @Override
            protected void applyTransformation(float interpolatedTime, Transformation t) {
                view.getLayoutParams().height = interpolatedTime == 1
                        ? ViewGroup.LayoutParams.WRAP_CONTENT
                        : (int) (targetHeight * interpolatedTime);
                view.requestLayout();
            }

            @Override
            public boolean willChangeBounds() {
                return true;
            }
        };

        animation.setDuration((int) (targetHeight / view.getContext().getResources().getDisplayMetrics().density));
        view.startAnimation(animation);
    }

    /**
     * Animates a view from its current height down to zero, then hides it;
     * duration scales with the pixel height (1ms per dp).
     */
    private static void collapseView(final View view) {
        final int initialHeight = view.getMeasuredHeight();

        Animation animation = new Animation() {
            @Override
            protected void applyTransformation(float interpolatedTime, Transformation transformation) {
                if (interpolatedTime == 1) {
                    view.setVisibility(View.GONE);
                } else {
                    view.getLayoutParams().height = initialHeight - (int) (initialHeight * interpolatedTime);
                    view.requestLayout();
                }
            }

            @Override
            public boolean willChangeBounds() {
                return true;
            }
        };

        animation.setDuration((int) (initialHeight / view.getContext().getResources().getDisplayMetrics().density));
        view.startAnimation(animation);
    }

    /**
     * Strict "before" comparison of two clock times.
     * Returns false when the times are equal.
     */
    private static boolean isTimeBefore(int isThisHour, int isThisMinute, int beforeThisHour, int beforeThisMinute) {
        return (isThisHour < beforeThisHour) || (isThisHour == beforeThisHour && isThisMinute < beforeThisMinute);
    }
}
/** * Copyright 2005-2015 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.core.api.util.collect; import org.junit.Before; import org.junit.Test; import org.springframework.beans.BeanUtils; import java.lang.reflect.Method; import java.util.Collection; import java.util.Iterator; import java.util.Map; import java.util.Properties; import java.util.Set; import static org.junit.Assert.*; /** * This class tests the PropertyTreeTest methods. */ public class PropertyTreeTest { private static final String KNOWN_SIMPLE_KEY = "simple"; private static final String KNOWN_SIMPLE_VALUE = "simple value"; private static final String KNOWN_COMPLEX_KEY = "known.complex.key"; private static final String KNOWN_COMPLEX_VALUE = "known complex value"; private static final int MIXED_COUNT = 13; PropertiesMap.PropertyTree tree; @Before public void setUp() throws Exception { tree = new PropertiesMap.PropertyTree(); } // entrySet @Test public void testEntrySet_readwrite() { Set entrySet = tree.entrySet(); boolean failedAsExpected = false; try { entrySet.clear(); } catch (UnsupportedOperationException e) { failedAsExpected = true; } assertTrue(failedAsExpected); } @Test public void testEntrySet_emptyTree() { Set entrySet = tree.entrySet(); assertTrue(entrySet.isEmpty()); } @Test public void testEntrySet_oneSimpleKey() { setOneSimpleKey(); Set entrySet = tree.entrySet(); assertEquals(1, entrySet.size()); boolean foundSimple = false; for (Iterator i 
// NOTE(review): the fragment below completes a test method that opens earlier in the
// file; it walks the entry set looking for the known simple key and checks its value.
// NOTE(review): throughout this class assertEquals is called as (actual, expected) —
// JUnit's contract is (expected, actual); messages on failure will be swapped. Worth
// normalizing in a dedicated cleanup change.
= entrySet.iterator(); i.hasNext();) { Map.Entry e = (Map.Entry) i.next(); if (e.getKey().equals(KNOWN_SIMPLE_KEY)) { foundSimple = true; assertEquals(e.getValue(), KNOWN_SIMPLE_VALUE); } } assertTrue(foundSimple); }

// entrySet(): one complex (dotted) key still yields exactly one entry.
@Test public void testEntrySet_oneComplexKey() { setOneComplexKey(); Set entrySet = tree.entrySet(); assertEquals(1, entrySet.size()); boolean foundComplex = false; for (Iterator i = entrySet.iterator(); i.hasNext();) { Map.Entry e = (Map.Entry) i.next(); if (e.getKey().equals(KNOWN_COMPLEX_KEY)) { foundComplex = true; assertEquals(e.getValue(), KNOWN_COMPLEX_VALUE); } } assertTrue(foundComplex); }

// entrySet(): a mixed population exposes both known keys among MIXED_COUNT entries.
@Test public void testEntrySet_manyMixedKeys() { setManyMixedKeys(); Set entrySet = tree.entrySet(); assertEquals(MIXED_COUNT, entrySet.size()); boolean foundSimple = false; boolean foundComplex = false; for (Iterator i = entrySet.iterator(); i.hasNext();) { Map.Entry e = (Map.Entry) i.next(); if (e.getKey().equals(KNOWN_SIMPLE_KEY)) { foundSimple = true; assertEquals(e.getValue(), KNOWN_SIMPLE_VALUE); } else if (e.getKey().equals(KNOWN_COMPLEX_KEY)) { foundComplex = true; assertEquals(e.getValue(), KNOWN_COMPLEX_VALUE); } } assertTrue(foundSimple); assertTrue(foundComplex); }

// size()
@Test public void testSize_emptyTree() { assertEquals(0, tree.size()); }
@Test public void testSize_oneSimpleKey() { setOneSimpleKey(); assertEquals(1, tree.size()); }
@Test public void testSize_oneComplexKey() { setOneComplexKey(); assertEquals(1, tree.size()); }
@Test public void testSize_manyMixedKeys() { setManyMixedKeys(); assertEquals(MIXED_COUNT, tree.size()); }

// isEmpty
@Test public void testIsEmpty_emptyTree() { assertTrue(tree.isEmpty()); }
@Test public void testIsEmpty_oneSimpleKey() { setOneSimpleKey(); assertFalse(tree.isEmpty()); }
@Test public void testIsEmpty_oneComplexKey() { setOneComplexKey(); assertFalse(tree.isEmpty()); }
@Test public void testIsEmpty_manyMixedKeys() { setManyMixedKeys(); assertFalse(tree.isEmpty()); }

// values
// values(): the returned collection must reject mutation (read-only view).
@Test public void testValues_readwrite() { Collection values = tree.values(); boolean failedAsExpected = false; try { values.clear(); } catch (UnsupportedOperationException e) { failedAsExpected = true; } assertTrue(failedAsExpected); }
@Test public void testValues_emptyTree() { Collection values = tree.values(); assertTrue(values.isEmpty()); }
@Test public void testValues_oneSimpleKey() { setOneSimpleKey(); Collection values = tree.values(); assertEquals(1, values.size()); assertTrue(values.contains(KNOWN_SIMPLE_VALUE)); }
@Test public void testValues_oneComplexKey() { setOneComplexKey(); Collection values = tree.values(); assertEquals(1, values.size()); assertTrue(values.contains(KNOWN_COMPLEX_VALUE)); }
@Test public void testValues_manyMixedKeys() { setManyMixedKeys(); Collection values = tree.values(); assertEquals(MIXED_COUNT, values.size()); assertTrue(values.contains(KNOWN_SIMPLE_VALUE)); assertTrue(values.contains(KNOWN_COMPLEX_VALUE)); }

// keySet
// keySet(): the returned set must reject mutation (read-only view).
@Test public void testKeySet_readwrite() { Set keys = tree.keySet(); boolean failedAsExpected = false; try { keys.clear(); } catch (UnsupportedOperationException e) { failedAsExpected = true; } assertTrue(failedAsExpected); }
@Test public void testKeySet_emptyTree() { Set keys = tree.keySet(); assertTrue(keys.isEmpty()); }
@Test public void testKeySet_oneSimpleKey() { setOneSimpleKey(); Set keys = tree.keySet(); assertEquals(1, keys.size()); assertTrue(keys.contains(KNOWN_SIMPLE_KEY)); }
@Test public void testKeySet_oneComplexKey() { setOneComplexKey(); Set keys = tree.keySet(); assertEquals(1, keys.size()); assertTrue(keys.contains(KNOWN_COMPLEX_KEY)); }
@Test public void testKeySet_manyMixedKeys() { setManyMixedKeys(); Set keys = tree.keySet(); assertEquals(MIXED_COUNT, keys.size()); assertTrue(keys.contains(KNOWN_SIMPLE_KEY)); assertTrue(keys.contains(KNOWN_COMPLEX_KEY)); }

// containsKey
// containsKey(null) is expected to raise IllegalArgumentException, not return false.
@Test public void testContainsKey_invalidKey() { boolean failedAsExpected = false; try { assertFalse(tree.containsKey(null)); } catch (IllegalArgumentException e) { failedAsExpected = true; } assertTrue(failedAsExpected); }
@Test public void testContainsKey_emptyTree() { assertFalse(tree.containsKey(KNOWN_SIMPLE_KEY)); }
@Test public void testContainsKey_unknownKey() { setManyMixedKeys(); assertFalse(tree.containsKey("hopefully unknown key")); }
@Test public void testContainsKey_oneSimpleKey() { setOneSimpleKey(); assertTrue(tree.containsKey(KNOWN_SIMPLE_KEY)); }
@Test public void testContainsKey_oneComplexKey() { setOneComplexKey(); assertTrue(tree.containsKey(KNOWN_COMPLEX_KEY)); }
@Test public void testContainsKey_manyMixedKeys() { setManyMixedKeys(); assertTrue(tree.containsKey(KNOWN_SIMPLE_KEY)); assertTrue(tree.containsKey(KNOWN_COMPLEX_KEY)); }

// containsValue
// containsValue(null) is likewise expected to raise IllegalArgumentException.
@Test public void testContainsValue_invalidValue() { boolean failedAsExpected = false; try { assertFalse(tree.containsValue(null)); } catch (IllegalArgumentException e) { failedAsExpected = true; } assertTrue(failedAsExpected); }
@Test public void testContainsValue_emptyTree() { assertFalse(tree.containsValue(KNOWN_SIMPLE_VALUE)); }
@Test public void testContainsValue_unknownValue() { setManyMixedKeys(); assertFalse(tree.containsValue("hopefully unknown value")); }
@Test public void testContainsValue_oneSimpleKey() { setOneSimpleKey(); assertTrue(tree.containsValue(KNOWN_SIMPLE_VALUE)); }
@Test public void testContainsValue_oneComplexKey() { setOneComplexKey(); assertTrue(tree.containsValue(KNOWN_COMPLEX_VALUE)); }
@Test public void testContainsValue_manyMixedKeys() { setManyMixedKeys(); assertTrue(tree.containsValue(KNOWN_SIMPLE_VALUE)); assertTrue(tree.containsValue(KNOWN_COMPLEX_VALUE)); }

// get
@Test public void testGet_invalidKey() { boolean failedAsExpected = false; try { tree.get(null); } catch (IllegalArgumentException e) { failedAsExpected = true; } assertTrue(failedAsExpected); }
@Test public void testGet_unknownKey() { setManyMixedKeys(); assertNull(tree.get("hopefully unknown key")); }
@Test public void testGet_oneSimpleKey() { setOneSimpleKey(); assertEquals(KNOWN_SIMPLE_VALUE, tree.get(KNOWN_SIMPLE_KEY).toString()); }
@Test public void testGet_oneComplexKey() { setOneComplexKey(); assertEquals(KNOWN_COMPLEX_VALUE, tree.get(KNOWN_COMPLEX_KEY).toString()); }
@Test public void testGet_manyMixedKeys() { setManyMixedKeys(); assertEquals(KNOWN_SIMPLE_VALUE, tree.get(KNOWN_SIMPLE_KEY).toString()); assertEquals(KNOWN_COMPLEX_VALUE, tree.get(KNOWN_COMPLEX_KEY).toString()); }
// get(): a dotted key can be resolved one segment at a time via nested trees.
@Test public void testGet_chainedGet() throws Exception { setManyMixedKeys(); String value = ((PropertiesMap.PropertyTree) ((PropertiesMap.PropertyTree) tree.get("known")).get("complex")).get("key").toString(); assertNotNull(value); assertEquals(KNOWN_COMPLEX_VALUE, value); }
/* * As close a simulation as possible of how the JSTL variable-reference will actually be implemented. */
@Test public void testGet_jstlGet() throws Exception { setManyMixedKeys(); Class[] getParamTypes = { Object.class }; Object level1 = tree.get("known"); Method m1 = BeanUtils.findMethod(level1.getClass(), "get", getParamTypes); Object level2 = m1.invoke(level1, new Object[] { "complex" }); Method m2 = BeanUtils.findMethod(level2.getClass(), "get", getParamTypes); Object level3 = m2.invoke(level2, new Object[] { "key" }); String value = level3.toString(); assertNotNull(value); assertEquals(KNOWN_COMPLEX_VALUE, value); }

// unsupported operations
// The map is immutable: clear/put/putAll/remove must all throw UnsupportedOperationException.
@Test public void testClear() { setManyMixedKeys(); boolean failedAsExpected = false; try { tree.clear(); } catch (UnsupportedOperationException e) { failedAsExpected = true; } assertTrue(failedAsExpected); }
@Test public void testPut() { setManyMixedKeys(); boolean failedAsExpected = false; try { tree.put("meaningless", "entry"); } catch (UnsupportedOperationException e) { failedAsExpected = true; } assertTrue(failedAsExpected); }
@Test public void testPutAll() { setManyMixedKeys(); Properties p = new Properties(); p.setProperty("meaningless", "value"); boolean failedAsExpected = false; try { tree.putAll(p); } catch (UnsupportedOperationException e) { failedAsExpected = true; } assertTrue(failedAsExpected); }
@Test public void testRemove() { setManyMixedKeys(); boolean failedAsExpected = false; try { tree.remove(KNOWN_SIMPLE_KEY); } catch (UnsupportedOperationException e) { failedAsExpected = true; } assertTrue(failedAsExpected); }

// support methods
// Fixture: a single undotted key/value pair.
private void setOneSimpleKey() { Properties p = new Properties(); p.setProperty(KNOWN_SIMPLE_KEY, KNOWN_SIMPLE_VALUE); tree = new PropertiesMap.PropertyTree(p); }
// Fixture: a single dotted ("complex") key/value pair.
private void setOneComplexKey() { Properties p = new Properties(); p.setProperty(KNOWN_COMPLEX_KEY, KNOWN_COMPLEX_VALUE); tree = new PropertiesMap.PropertyTree(p); }
// Fixture: both known keys plus assorted simple and nested keys.
// NOTE(review): MIXED_COUNT presumably equals the number of distinct top-level
// segments produced here — confirm against the constant's declaration.
private void setManyMixedKeys() { Properties p = new Properties(); p.setProperty(KNOWN_SIMPLE_KEY, KNOWN_SIMPLE_VALUE); p.setProperty(KNOWN_COMPLEX_KEY, KNOWN_COMPLEX_VALUE); p.setProperty("a", "a"); p.setProperty("b.b", "bb"); p.setProperty("b.c", "cb"); p.setProperty("c.b.c", "cbc"); p.setProperty("c.b.d", "dbc"); p.setProperty("c.c.a", "acc"); p.setProperty("a.c.b", "bca"); p.setProperty("a.c.c", "cca"); p.setProperty("b.a", "ab"); p.setProperty("b", "b"); p.setProperty("d", "d"); tree = new PropertiesMap.PropertyTree(p); }
}
/*
 * Copyright 2014 Miles Chaston
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.chaston.oakfunds.model;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import org.chaston.oakfunds.account.AccountCode;
import org.chaston.oakfunds.account.AccountCodeManager;
import org.chaston.oakfunds.account.AccountCodeModule;
import org.chaston.oakfunds.bootstrap.BootstrapModule;
import org.chaston.oakfunds.jdbc.DatabaseTearDown;
import org.chaston.oakfunds.ledger.ExpenseAccount;
import org.chaston.oakfunds.ledger.LedgerManager;
import org.chaston.oakfunds.ledger.LedgerModule;
import org.chaston.oakfunds.ledger.RevenueAccount;
import org.chaston.oakfunds.security.AuthenticationScope;
import org.chaston.oakfunds.security.TestUserAuthenticatorModule;
import org.chaston.oakfunds.security.UserAuthenticationManager;
import org.chaston.oakfunds.security.UserSecurityModule;
import org.chaston.oakfunds.storage.Report;
import org.chaston.oakfunds.storage.ReportDateGranularity;
import org.chaston.oakfunds.storage.ReportEntry;
import org.chaston.oakfunds.storage.ReportRow;
import org.chaston.oakfunds.storage.StorageException;
import org.chaston.oakfunds.storage.Store;
import org.chaston.oakfunds.storage.TestStorageModule;
import org.chaston.oakfunds.storage.Transaction;
import org.chaston.oakfunds.storage.mgmt.SchemaDeploymentTask;
import org.chaston.oakfunds.system.SystemModule;
import org.chaston.oakfunds.system.TestSystemBootstrapModuleBuilder;
import org.chaston.oakfunds.util.BigDecimalUtil;
import org.chaston.oakfunds.util.DateUtil;
import org.joda.time.DateTimeFieldType;
import org.joda.time.Instant;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

import java.sql.SQLException;
import java.util.Iterator;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

/**
 * Integration tests for {@code ModelManager}: model CRUD, recurring/ad-hoc model
 * events, and the distribution report. Each test runs against an injected
 * in-memory test store that is torn down after every test.
 *
 * TODO(mchaston): write JavaDocs
 */
@RunWith(JUnit4.class) public class ModelManagerTest {
// Report-range year boundaries used by the reporting tests.
private static final int YEAR_2014 = Instant.parse("2014-01-01").get(DateTimeFieldType.year());
private static final int YEAR_2015 = Instant.parse("2015-01-01").get(DateTimeFieldType.year());
@Inject private AccountCodeManager accountCodeManager;
@Inject private UserAuthenticationManager userAuthenticationManager;
@Inject private LedgerManager ledgerManager;
@Inject private ModelManager modelManager;
@Inject private Store store;
// Injected so schema deployment runs as part of injector construction.
@Inject private SchemaDeploymentTask schemaDeploymentTask;
@Inject private DatabaseTearDown databaseTearDown;
// Open for the duration of each test; closed in teardown().
private AuthenticationScope authenticationScope;
// Builds the Guice injector (test modules, current year 2014, 10-year horizon)
// and authenticates a test user for the duration of the test.
@Before public void setUp() throws Exception { Injector injector = Guice.createInjector( new AccountCodeModule(), new LedgerModule(), new BootstrapModule(), new ModelModule(), new UserSecurityModule(), new SystemModule(), new TestSystemBootstrapModuleBuilder() .setCurrentYear(Instant.parse("2014-01-01").get(DateTimeFieldType.year())) .setTimeHorizon(10) .build(), new TestStorageModule(), new TestUserAuthenticatorModule()); injector.injectMembers(this); authenticationScope = userAuthenticationManager.authenticateUser(); }
@After public void teardown() throws SQLException { authenticationScope.close(); databaseTearDown.teardown(); }
// A model created inside a committed transaction can be read back by id.
@Test public void createModel() throws StorageException { Transaction transaction = store.startTransaction(); Model model = modelManager.createNewModel("New Model"); assertEquals("New Model", model.getTitle()); transaction.commit(); assertEquals("New Model", modelManager.getModel(model.getId()).getTitle()); }
@Test public void getBaseModel() throws StorageException { assertNotNull(modelManager.getBaseModel()); }
// Updating a title persists; updating the base model keeps its base-model flag.
@Test public void updateModel() throws StorageException { Transaction transaction = store.startTransaction(); Model model = modelManager.createNewModel("New Model"); assertEquals("New Model", model.getTitle()); transaction.commit(); transaction = store.startTransaction(); model = modelManager.updateModel(model, "Old Model"); assertEquals("Old Model", model.getTitle()); assertFalse(model.isBaseModel()); transaction.commit(); assertEquals("Old Model", modelManager.getModel(model.getId()).getTitle()); transaction = store.startTransaction(); model = modelManager.updateModel(modelManager.getBaseModel(), "Base Model"); assertEquals("Base Model", model.getTitle()); assertTrue(model.isBaseModel()); transaction.commit(); assertEquals("Base Model", modelManager.getBaseModel().getTitle()); assertTrue(model.isBaseModel()); }
// A monthly recurring event yields 12 account transactions in 2014 and no
// distribution transactions (monthly events are not spread over time).
@Test public void setMonthlyRecurringEventDetails() throws StorageException { Transaction transaction = store.startTransaction(); AccountCode accountCode = accountCodeManager.createAccountCode(5000, "Utilities"); ExpenseAccount expenseAccount = ledgerManager.createExpenseAccount(accountCode, "Electricity / Water", null); MonthlyRecurringEvent monthlyRecurringEvent = modelManager.setMonthlyRecurringEventDetails(modelManager.getBaseModel(), expenseAccount, DateUtil.BEGINNING_OF_TIME, DateUtil.END_OF_TIME, BigDecimalUtil.valueOf(3000)); transaction.commit(); assertEquals(BigDecimalUtil.valueOf(3000), monthlyRecurringEvent.getAmount()); Iterable<ModelAccountTransaction> modelTransactions = modelManager.getModelTransactions(modelManager.getBaseModel(), expenseAccount, Instant.parse("2014-01-01"), Instant.parse("2015-01-01")); assertEquals(12, Iterables.size(modelTransactions)); ModelAccountTransaction firstModelTransaction = Iterables.getFirst(modelTransactions, null); assertNotNull(firstModelTransaction); assertEquals(modelManager.getBaseModel().getId(), firstModelTransaction.getModelId()); assertEquals(expenseAccount.getId(), firstModelTransaction.getAccountId()); assertEquals(BigDecimalUtil.valueOf(3000), firstModelTransaction.getAmount()); assertEquals(Instant.parse("2014-01-01"), firstModelTransaction.getInstant()); Iterable<ModelDistributionTransaction> modelDistributionTransactions = modelManager.getModelDistributionTransactions(modelManager.getBaseModel(), expenseAccount, DateUtil.endOfYear(2013), Instant.parse("2015-01-01")); assertEquals(0, Iterables.size(modelDistributionTransactions)); }
// An annual recurring event (paid in March) yields one account transaction and
// 13 distribution transactions (year-end roll-up + 12 monthly distributions).
@Test public void setAnnualRecurringEventDetails() throws StorageException { Transaction transaction = store.startTransaction(); AccountCode accountCode = accountCodeManager.createAccountCode(6000, "Insurance"); ExpenseAccount expenseAccount = ledgerManager.createExpenseAccount(accountCode, "Insurance", null); AnnualRecurringEvent annualRecurringEvent = modelManager.setAnnualRecurringEventDetails(modelManager.getBaseModel(), expenseAccount, DateUtil.BEGINNING_OF_TIME, DateUtil.END_OF_TIME, 3, BigDecimalUtil.valueOf(12000)); transaction.commit(); assertEquals(BigDecimalUtil.valueOf(12000), annualRecurringEvent.getAmount()); assertEquals(3, annualRecurringEvent.getPaymentMonth()); Iterable<ModelAccountTransaction> modelTransactions = modelManager.getModelTransactions(modelManager.getBaseModel(), expenseAccount, DateUtil.endOfYear(2013), Instant.parse("2015-01-01")); assertEquals(1, Iterables.size(modelTransactions)); ModelAccountTransaction modelTransaction = Iterables.getOnlyElement(modelTransactions); assertEquals(modelManager.getBaseModel().getId(), modelTransaction.getModelId()); assertEquals(expenseAccount.getId(), modelTransaction.getAccountId()); assertEquals(BigDecimalUtil.valueOf(12000), modelTransaction.getAmount()); assertEquals(Instant.parse("2014-03-01"), modelTransaction.getInstant()); Iterable<ModelDistributionTransaction> modelDistributionTransactions = modelManager.getModelDistributionTransactions(modelManager.getBaseModel(), expenseAccount, DateUtil.endOfYear(2013), Instant.parse("2015-01-01")); assertEquals(13, Iterables.size(modelDistributionTransactions)); ModelDistributionTransaction firstModelDistributionTransaction = Iterables.getFirst(modelDistributionTransactions, null); assertNotNull(firstModelDistributionTransaction); assertEquals(modelManager.getBaseModel().getId(), firstModelDistributionTransaction.getModelId()); assertEquals(expenseAccount.getId(), firstModelDistributionTransaction.getAccountId());
// This is the sum of distributions until the end of the previous year.
assertEquals(BigDecimalUtil.valueOf(9000), firstModelDistributionTransaction.getAmount()); assertEquals(DateUtil.endOfYear(2013), firstModelDistributionTransaction.getInstant());
// This is the distribution that cancels out previous distributions.
ModelDistributionTransaction thirdModelDistributionTransaction = Iterables.get(modelDistributionTransactions, 3, null); assertNotNull(thirdModelDistributionTransaction); assertEquals(modelManager.getBaseModel().getId(), thirdModelDistributionTransaction.getModelId()); assertEquals(expenseAccount.getId(), thirdModelDistributionTransaction.getAccountId()); assertEquals(BigDecimalUtil.valueOf(-11000), thirdModelDistributionTransaction.getAmount()); assertEquals(Instant.parse("2014-03-01"), thirdModelDistributionTransaction.getInstant()); assertEquals(firstModelDistributionTransaction.getModelAccountTransactionId(), thirdModelDistributionTransaction.getModelAccountTransactionId()); ModelDistributionTransaction fourthModelDistributionTransaction = Iterables.get(modelDistributionTransactions, 4, null); assertNotNull(fourthModelDistributionTransaction); assertEquals(modelManager.getBaseModel().getId(), fourthModelDistributionTransaction.getModelId()); assertEquals(expenseAccount.getId(), fourthModelDistributionTransaction.getAccountId()); assertEquals(BigDecimalUtil.valueOf(1000), fourthModelDistributionTransaction.getAmount()); assertEquals(Instant.parse("2014-04-01"), fourthModelDistributionTransaction.getInstant());
// Different transaction from the previous ones.
assertNotEquals(firstModelDistributionTransaction.getModelAccountTransactionId(), fourthModelDistributionTransaction.getModelAccountTransactionId()); }
// An ad-hoc event distributed over 5 years produces a year-end roll-up followed
// by regular monthly distributions of 1000.
@Test public void createAdHocEvent() throws StorageException { Transaction transaction = store.startTransaction(); AccountCode accountCode = accountCodeManager.createAccountCode(7000, "Maintenance"); ExpenseAccount expenseAccount = ledgerManager.createExpenseAccount(accountCode, "House Painting", null); ModelAccountTransaction modelAccountTransaction = modelManager.createAdHocEvent(modelManager.getBaseModel(), expenseAccount, Instant.parse("2017-01-01"), 5, DistributionTimeUnit.YEARS, BigDecimalUtil.valueOf(60000)); transaction.commit(); assertNotNull(modelAccountTransaction); Iterable<ModelDistributionTransaction> modelDistributionTransactions = modelManager.getModelDistributionTransactions(modelManager.getBaseModel(), expenseAccount, DateUtil.endOfYear(2013), Instant.parse("2015-01-01")); assertEquals(13, Iterables.size(modelDistributionTransactions)); ModelDistributionTransaction firstModelDistributionTransaction = Iterables.getFirst( modelDistributionTransactions, null); assertNotNull(firstModelDistributionTransaction); assertEquals(modelManager.getBaseModel().getId(), firstModelDistributionTransaction.getModelId()); assertEquals(expenseAccount.getId(), firstModelDistributionTransaction.getAccountId());
// First one is a roll up of the previous distributions.
assertEquals(BigDecimalUtil.valueOf(23000), firstModelDistributionTransaction.getAmount()); assertEquals(DateUtil.endOfYear(2013), firstModelDistributionTransaction.getInstant());
// Regular ones are a normal size.
ModelDistributionTransaction secondModelDistributionTransaction = Iterables.get(modelDistributionTransactions, 2, null); assertNotNull(secondModelDistributionTransaction); assertEquals(modelManager.getBaseModel().getId(), secondModelDistributionTransaction.getModelId()); assertEquals(expenseAccount.getId(), secondModelDistributionTransaction.getAccountId()); assertEquals(BigDecimalUtil.valueOf(1000), secondModelDistributionTransaction.getAmount()); assertEquals(Instant.parse("2014-02-01"), secondModelDistributionTransaction.getInstant()); }
// Same as above but the event lands beyond the 10-year time horizon; the
// distributions are proportionally smaller (100/month).
@Test public void createAdHocEventBeyondTimeHorizon() throws StorageException { Transaction transaction = store.startTransaction(); AccountCode accountCode = accountCodeManager.createAccountCode(7000, "Maintenance"); ExpenseAccount expenseAccount = ledgerManager.createExpenseAccount(accountCode, "House Painting", null); ModelAccountTransaction modelAccountTransaction = modelManager.createAdHocEvent(modelManager.getBaseModel(), expenseAccount, Instant.parse("2027-01-01"), 50, DistributionTimeUnit.YEARS, BigDecimalUtil.valueOf(60000)); transaction.commit(); assertNotNull(modelAccountTransaction); Iterable<ModelDistributionTransaction> modelDistributionTransactions = modelManager.getModelDistributionTransactions(modelManager.getBaseModel(), expenseAccount, DateUtil.endOfYear(2013), Instant.parse("2015-01-01")); assertEquals(13, Iterables.size(modelDistributionTransactions)); ModelDistributionTransaction firstModelDistributionTransaction = Iterables.getFirst( modelDistributionTransactions, null); assertNotNull(firstModelDistributionTransaction); assertEquals(modelManager.getBaseModel().getId(), firstModelDistributionTransaction.getModelId()); assertEquals(expenseAccount.getId(), firstModelDistributionTransaction.getAccountId());
// First one is a roll up of the previous distributions.
assertEquals(BigDecimalUtil.valueOf(44300), firstModelDistributionTransaction.getAmount()); assertEquals(DateUtil.endOfYear(2013), firstModelDistributionTransaction.getInstant());
// Regular ones are a normal size.
ModelDistributionTransaction secondModelDistributionTransaction = Iterables.get(modelDistributionTransactions, 2, null); assertNotNull(secondModelDistributionTransaction); assertEquals(modelManager.getBaseModel().getId(), secondModelDistributionTransaction.getModelId()); assertEquals(expenseAccount.getId(), secondModelDistributionTransaction.getAccountId()); assertEquals(BigDecimalUtil.valueOf(100), secondModelDistributionTransaction.getAmount()); assertEquals(Instant.parse("2014-02-01"), secondModelDistributionTransaction.getInstant()); }
// Moving an ad-hoc event's date changes only the roll-up distribution (index 0);
// the regular monthly distributions are unchanged.
@Test public void updateAdHocEvent() throws StorageException {
// Create the initial event.
Transaction transaction = store.startTransaction(); AccountCode accountCode = accountCodeManager.createAccountCode(7000, "Maintenance"); ExpenseAccount expenseAccount = ledgerManager.createExpenseAccount(accountCode, "House Painting", null); modelManager.createAdHocEvent(modelManager.getBaseModel(), expenseAccount, Instant.parse("2017-01-01"), 5, DistributionTimeUnit.YEARS, BigDecimalUtil.valueOf(60000)); transaction.commit(); Iterable<ModelDistributionTransaction> oldModelDistributionTransactions = modelManager.getModelDistributionTransactions(modelManager.getBaseModel(), expenseAccount, DateUtil.endOfYear(2013), Instant.parse("2015-01-01"));
// Get the event back.
ModelAccountTransaction modelAccountTransaction = Iterables.getOnlyElement( modelManager.getModelTransactions(modelManager.getBaseModel(), expenseAccount, Instant.parse("2016-01-01"), Instant.parse("2017-02-01"))); transaction = store.startTransaction(); modelManager.updateAdHocEvent(modelAccountTransaction, Instant.parse("2017-06-01"), 5, DistributionTimeUnit.YEARS, BigDecimalUtil.valueOf(60000)); transaction.commit(); Iterable<ModelDistributionTransaction> newModelDistributionTransactions = modelManager.getModelDistributionTransactions(modelManager.getBaseModel(), expenseAccount, DateUtil.endOfYear(2013), Instant.parse("2015-01-01")); Iterator<ModelDistributionTransaction> oldTransactionsIter = oldModelDistributionTransactions.iterator(); Iterator<ModelDistributionTransaction> newTransactionsIter = newModelDistributionTransactions.iterator(); for (int i = 0; i < 12; i++) { ModelDistributionTransaction oldTransaction = oldTransactionsIter.next(); ModelDistributionTransaction newTransaction = newTransactionsIter.next(); assertEquals(oldTransaction.getInstant(), newTransaction.getInstant()); if (i == 0) { assertNotEquals(oldTransaction.getAmount(), newTransaction.getAmount()); } else { assertEquals(oldTransaction.getAmount(), newTransaction.getAmount()); } } }
// Deleting an ad-hoc event removes all of its distribution transactions.
@Test public void deleteAdHocEvent() throws StorageException {
// Create the initial event.
Transaction transaction = store.startTransaction(); AccountCode accountCode = accountCodeManager.createAccountCode(7000, "Maintenance"); ExpenseAccount expenseAccount = ledgerManager.createExpenseAccount(accountCode, "House Painting", null); modelManager.createAdHocEvent(modelManager.getBaseModel(), expenseAccount, Instant.parse("2017-01-01"), 5, DistributionTimeUnit.YEARS, BigDecimalUtil.valueOf(60000)); transaction.commit();
// Get the event back.
ModelAccountTransaction modelAccountTransaction = Iterables.getOnlyElement( modelManager.getModelTransactions(modelManager.getBaseModel(), expenseAccount, Instant.parse("2016-01-01"), Instant.parse("2017-02-01"))); transaction = store.startTransaction(); modelManager.deleteAdHocEvent(modelAccountTransaction); transaction.commit(); Iterable<ModelDistributionTransaction> newModelDistributionTransactions = modelManager.getModelDistributionTransactions(modelManager.getBaseModel(), expenseAccount, DateUtil.endOfYear(2013), Instant.parse("2015-01-01")); assertTrue(Iterables.isEmpty(newModelDistributionTransactions)); }
// End-to-end check of the monthly distribution report over the shared dataset.
// NOTE(review): the first assertion passes (actual, expected) in reversed order.
@Test public void runDistributionReport() throws StorageException { ReportingAccounts accounts = initReportingDataset(); Report report = modelManager.runDistributionReport(accounts.model1, YEAR_2014, YEAR_2015, ReportDateGranularity.MONTH); assertEquals(Iterables.size(report.getRows()), 2); ReportRow reportRow = report.getRow( ImmutableMap.of(ModelManager.DIMENSION_ACCOUNT_ID, (Object) accounts.longTermExpenseAccount.getId())); assertEquals(13, Iterables.size(reportRow.getEntries())); ReportEntry entry = Iterables.get(reportRow.getEntries(), 0); assertEquals(DateUtil.endOfYear(2013), entry.getInstant()); assertEquals(BigDecimalUtil.valueOf(11500), entry.getMeasure(ModelDistributionTransaction.ATTRIBUTE_AMOUNT)); for (int i = 1; i <= 12; i++) { entry = Iterables.get(reportRow.getEntries(), i); assertEquals(DateUtil.endOfMonth(2014, i), entry.getInstant()); assertEquals(BigDecimalUtil.valueOf(11500 + (i * 250)), entry.getMeasure(ModelDistributionTransaction.ATTRIBUTE_AMOUNT)); } reportRow = report.getRow( ImmutableMap.of(ModelManager.DIMENSION_ACCOUNT_ID, (Object) accounts.annualExpenseAccount.getId())); assertEquals(13, Iterables.size(reportRow.getEntries()));
// Starts off with the amount owed from the beginning of the year.
entry = Iterables.get(reportRow.getEntries(), 0); assertEquals(DateUtil.endOfYear(2013), entry.getInstant()); assertEquals(BigDecimalUtil.valueOf(5249.99997), entry.getMeasure(ModelDistributionTransaction.ATTRIBUTE_AMOUNT)); entry = Iterables.get(reportRow.getEntries(), 1); assertEquals(DateUtil.endOfMonth(2014, 1), entry.getInstant()); assertEquals(BigDecimalUtil.valueOf(5833.33330), entry.getMeasure(ModelDistributionTransaction.ATTRIBUTE_AMOUNT)); entry = Iterables.get(reportRow.getEntries(), 2); assertEquals(DateUtil.endOfMonth(2014, 2), entry.getInstant()); assertEquals(BigDecimalUtil.valueOf(6416.66663), entry.getMeasure(ModelDistributionTransaction.ATTRIBUTE_AMOUNT));
// The March distribution resets the sum to zero (as the event would happen).
for (int i = 3; i <= 12; i++) { entry = Iterables.get(reportRow.getEntries(), i); assertEquals(DateUtil.endOfMonth(2014, i), entry.getInstant()); assertEquals(BigDecimalUtil.valueOf(0), entry.getMeasure(ModelDistributionTransaction.ATTRIBUTE_AMOUNT)); } }
// Builds the shared reporting fixture: two alternative models, one revenue
// account, and three expense accounts with different event cadences.
private ReportingAccounts initReportingDataset() throws StorageException { ReportingAccounts reportingAccounts = new ReportingAccounts(); Transaction transaction = store.startTransaction(); reportingAccounts.model1 = modelManager.createNewModel("Alternative Model 1"); reportingAccounts.model2 = modelManager.createNewModel("Alternative Model 2"); AccountCode accountCode = accountCodeManager.createAccountCode(1000, "Unimportant"); reportingAccounts.revenueAccount = ledgerManager.createRevenueAccount(accountCode, "Revenue", null); reportingAccounts.monthlyExpenseAccount = ledgerManager.createExpenseAccount(accountCode, "Monthly Expense", null); reportingAccounts.annualExpenseAccount = ledgerManager.createExpenseAccount(accountCode, "Annual Expense", null); reportingAccounts.longTermExpenseAccount = ledgerManager.createExpenseAccount(accountCode, "Long Term Expense", null);
// Revenue with model 1.
modelManager.setMonthlyRecurringEventDetails(reportingAccounts.model1, reportingAccounts.revenueAccount, Instant.parse("2014-01-01"), Instant.parse("2015-01-01"), BigDecimalUtil.valueOf(1000));
// Revenue with model 2.
modelManager.setMonthlyRecurringEventDetails(reportingAccounts.model2, reportingAccounts.revenueAccount, Instant.parse("2014-01-01"), Instant.parse("2015-01-01"), BigDecimalUtil.valueOf(1100)); modelManager.setMonthlyRecurringEventDetails(modelManager.getBaseModel(), reportingAccounts.monthlyExpenseAccount, Instant.parse("2014-01-01"), Instant.parse("2015-01-01"), BigDecimalUtil.valueOf(200)); modelManager.setAnnualRecurringEventDetails(modelManager.getBaseModel(), reportingAccounts.annualExpenseAccount, Instant.parse("2014-01-01"), Instant.parse("2015-01-01"), 3, BigDecimalUtil.valueOf(7000));
// Long term expense in model 1 paid in 2020.
modelManager.createAdHocEvent(reportingAccounts.model1, reportingAccounts.longTermExpenseAccount, Instant.parse("2020-02-01"), 10, DistributionTimeUnit.YEARS, BigDecimalUtil.valueOf(30000));
// Long term expense in model 2 paid in 2021.
modelManager.createAdHocEvent(reportingAccounts.model2, reportingAccounts.longTermExpenseAccount, Instant.parse("2021-02-01"), 10, DistributionTimeUnit.YEARS, BigDecimalUtil.valueOf(30000)); transaction.commit(); return reportingAccounts; }
// Simple value holder for the fixture's models and accounts.
private static class ReportingAccounts { public Model model1; public Model model2; public RevenueAccount revenueAccount; public ExpenseAccount monthlyExpenseAccount; public ExpenseAccount annualExpenseAccount; public ExpenseAccount longTermExpenseAccount; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.plugins.index.solr.configuration.nodestate;

import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;

import com.google.common.collect.Iterables;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.index.solr.configuration.OakSolrConfiguration;
import org.apache.jackrabbit.oak.plugins.index.solr.configuration.OakSolrConfigurationDefaults;
import org.apache.jackrabbit.oak.plugins.index.solr.query.SolrQueryIndex;
import org.apache.jackrabbit.oak.spi.query.Filter;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * An {@link OakSolrConfiguration} specified via a given {@link org.apache.jackrabbit.oak.spi.state.NodeState}.
 * For each of the supported properties a default is provided if either the
 * property doesn't exist in the node or if the value is <code>null</code>
 */
public class OakSolrNodeStateConfiguration implements OakSolrConfiguration {
// Backing node state holding all configuration properties; read-only here.
private final NodeState definition;
// Rejects definitions whose 'type' property is missing or is not the Solr index type.
public OakSolrNodeStateConfiguration(NodeState definition) { this.definition = definition; if (!definition.hasProperty("type") || !(SolrQueryIndex.TYPE.equals(definition.getProperty("type").getValue(Type.STRING)))) { throw new IllegalArgumentException("missing or wrong 'type' property in " + definition); } }
// Resolves a Solr field for a property type from "type=field" mappings;
// matches on the type tag, returns null when no mapping applies.
@Override public String getFieldNameFor(Type<?> propertyType) { Iterable<String> typeMappings = getStringValuesFor(Properties.TYPE_MAPPINGS); if (typeMappings != null) { for (String typeMapping : typeMappings) { String[] mapping = typeMapping.split("="); if (mapping.length == 2 && mapping[0] != null && mapping[1] != null) { Type<?> type = Type.fromString(mapping[0]); if (type != null && type.tag() == propertyType.tag()) { return mapping[1]; } } } } return null; }
@NotNull @Override public String getPathField() { return getStringValueFor(Properties.PATH_FIELD, OakSolrConfigurationDefaults.PATH_FIELD_NAME); }
// Maps each path restriction kind to its configured field (or default);
// returns null for NO_RESTRICTION.
@Nullable @Override public String getFieldForPathRestriction(Filter.PathRestriction pathRestriction) { String fieldName = null; switch (pathRestriction) { case ALL_CHILDREN: { fieldName = getStringValueFor(Properties.DESCENDANTS_FIELD, OakSolrConfigurationDefaults.DESC_FIELD_NAME); break; } case DIRECT_CHILDREN: { fieldName = getStringValueFor(Properties.CHILDREN_FIELD, OakSolrConfigurationDefaults.CHILD_FIELD_NAME); break; } case EXACT: { fieldName = getStringValueFor(Properties.PATH_FIELD, OakSolrConfigurationDefaults.PATH_FIELD_NAME); break; } case PARENT: { fieldName = getStringValueFor(Properties.PARENT_FIELD, OakSolrConfigurationDefaults.ANC_FIELD_NAME); break; } case NO_RESTRICTION: break; default: break; } return fieldName; }
@Override public String getCatchAllField() { return getStringValueFor(Properties.CATCHALL_FIELD, OakSolrConfigurationDefaults.CATCHALL_FIELD); }
// Resolves a Solr field for a named property from "name=field" mappings;
// null when the property has no explicit mapping.
@Override public String getFieldForPropertyRestriction(Filter.PropertyRestriction propertyRestriction) { Iterable<String> propertyMappings = getStringValuesFor(Properties.PROPERTY_MAPPINGS); if (propertyMappings != null) { for (String propertyMapping : propertyMappings) { String[] mapping = propertyMapping.split("="); if (mapping.length == 2 && mapping[0] != null && mapping[1] != null) { if (propertyRestriction.propertyName.equals(mapping[0])) { return mapping[1]; } } } } return null; }
// Parses the configured commit policy name (case-insensitive); defaults to SOFT.
@NotNull @Override public CommitPolicy getCommitPolicy() { return CommitPolicy.valueOf(getStringValueFor(Properties.COMMIT_POLICY, CommitPolicy.SOFT.toString()).toUpperCase()); }
@Override public int getRows() { return getIntValueFor(Properties.ROWS, OakSolrConfigurationDefaults.ROWS); }
// NOTE(review): the constant name PROPERTY_RESTRICIONS (declared elsewhere)
// appears to be a typo for PROPERTY_RESTRICTIONS — renaming it would touch
// stored configurations, so it is only flagged here.
@Override public boolean useForPropertyRestrictions() { return getBooleanValueFor(Properties.PROPERTY_RESTRICIONS, OakSolrConfigurationDefaults.PROPERTY_RESTRICTIONS); }
@Override public boolean useForPrimaryTypes() { return getBooleanValueFor(Properties.PRIMARY_TYPES, OakSolrConfigurationDefaults.PRIMARY_TYPES); }
@Override public boolean useForPathRestrictions() { return getBooleanValueFor(Properties.PATH_RESTRICTIONS, OakSolrConfigurationDefaults.PATH_RESTRICTIONS); }
// Copies the configured multi-valued property into a fresh list, falling back
// to the built-in ignored-properties default when unset.
@NotNull @Override public Collection<String> getIgnoredProperties() { Collection<String> ignoredProperties; Iterable<String> ignoredPropertiesValues = getStringValuesFor(Properties.IGNORED_PROPERTIES); if (ignoredPropertiesValues != null) { ignoredProperties = new LinkedList<String>(); for (String ignoredProperty : ignoredPropertiesValues) { ignoredProperties.add(ignoredProperty); } } else { ignoredProperties = OakSolrConfigurationDefaults.IGNORED_PROPERTIES; } return ignoredProperties; }
// Same pattern as getIgnoredProperties(), but the fallback is an empty list.
@NotNull @Override public Collection<String> getUsedProperties() { Collection<String> usedProperties; Iterable<String> usedPropertiesValues = getStringValuesFor(Properties.USED_PROPERTIES); if (usedPropertiesValues != null) { usedProperties = new LinkedList<String>(); for (String usedProperty : usedPropertiesValues) { usedProperties.add(usedProperty); } } else { usedProperties = Collections.emptyList(); } return usedProperties; }
@Override public boolean collapseJcrContentNodes() { return getBooleanValueFor(Properties.COLLAPSE_JCR_CONTENT_NODES, OakSolrConfigurationDefaults.COLLAPSE_JCR_CONTENT_NODES); }
@NotNull @Override public String getCollapsedPathField() { return getStringValueFor(Properties.COLLAPSED_PATH_FIELD, OakSolrConfigurationDefaults.COLLAPSED_PATH_FIELD); }
@NotNull @Override public String getPathDepthField() { return getStringValueFor(Properties.DEPTH_FIELD, OakSolrConfigurationDefaults.PATH_DEPTH_FIELD); }
// Typed accessors: each returns the node-state property value when present,
// otherwise the supplied default.
private boolean getBooleanValueFor(String propertyName, boolean defaultValue) { boolean value = defaultValue; PropertyState property = definition.getProperty(propertyName); if (property != null) { value = property.getValue(Type.BOOLEAN); } return value; }
// Reads as LONG and narrows to int — values beyond int range would truncate.
private int getIntValueFor(String propertyName, int defaultValue) { long value = defaultValue; PropertyState property = definition.getProperty(propertyName); if (property != null) { value = property.getValue(Type.LONG); } return (int) value; }
private String getStringValueFor(String propertyName, String defaultValue) { String value = defaultValue; PropertyState property = definition.getProperty(propertyName); if (property != null) { value = property.getValue(Type.STRING); } return value; }
// Returns null (not empty) when the property is absent or not multi-valued;
// callers treat null as "not configured".
private Iterable<String> getStringValuesFor(String propertyName) { Iterable<String> values = null; PropertyState property = definition.getProperty(propertyName); if (property != null && property.isArray()) { values = property.getValue(Type.STRINGS); } return values; }
@Override public String toString() { return "OakSolrNodeStateConfiguration{" + "definitionChildren=" + Iterables.toString(definition.getChildNodeNames()) + '}'; }
/**
 * Properties that
may be retrieved from the configuration {@link org.apache.jackrabbit.oak.spi.state.NodeState}. */ public final class Properties { // --> oak solr config properties <-- public static final String PATH_FIELD = "pathField"; public static final String COLLAPSED_PATH_FIELD = "pathField"; public static final String PARENT_FIELD = "parentField"; public static final String CHILDREN_FIELD = "childrenField"; public static final String DESCENDANTS_FIELD = "descendantsField"; public static final String CATCHALL_FIELD = "catchAllField"; public static final String COMMIT_POLICY = "commitPolicy"; public static final String ROWS = "rows"; public static final String PROPERTY_RESTRICIONS = "propertyRestrictions"; public static final String PRIMARY_TYPES = "primaryTypes"; public static final String PATH_RESTRICTIONS = "pathRestrictions"; public static final String IGNORED_PROPERTIES = "ignoredProperties"; public static final String TYPE_MAPPINGS = "typeMappings"; public static final String PROPERTY_MAPPINGS = "propertyMappings"; public static final String USED_PROPERTIES = "usedProperties"; public static final String COLLAPSE_JCR_CONTENT_NODES = "collapseJcrContentNodes"; public static final String DEPTH_FIELD= "depthField"; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.hadoop;

import org.apache.ignite.*;
import org.apache.ignite.cluster.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.hadoop.mapreduce.*;
import org.apache.ignite.igfs.*;
import org.apache.ignite.igfs.mapreduce.*;
import org.apache.ignite.igfs.secondary.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.cluster.*;
import org.apache.ignite.internal.processors.cache.*;
import org.apache.ignite.internal.processors.igfs.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.lang.*;
import org.apache.ignite.testframework.*;
import org.jetbrains.annotations.*;

import java.net.*;
import java.util.*;

/**
 * Self-test for {@link IgniteHadoopMapReducePlanner}: feeds the planner
 * hand-built splits (IGFS-backed and plain HDFS) over a mocked 3-node
 * topology and asserts that mappers land on the nodes hosting the data and
 * that reducers are spread across the mapper nodes. Uses plain {@code assert}
 * statements, so it must run with {@code -ea}.
 */
public class HadoopDefaultMapReducePlannerSelfTest extends HadoopAbstractSelfTest {
    /** */
    private static final UUID ID_1 = new UUID(0, 1);

    /** */
    private static final UUID ID_2 = new UUID(0, 2);

    /** */
    private static final UUID ID_3 = new UUID(0, 3);

    /** */
    private static final String HOST_1 = "host1";

    /** */
    private static final String HOST_2 = "host2";

    /** */
    private static final String HOST_3 = "host3";

    /** Host names that resolve to no node in the mocked topology. */
    private static final String INVALID_HOST_1 = "invalid_host1";

    /** */
    private static final String INVALID_HOST_2 = "invalid_host2";

    /** */
    private static final String INVALID_HOST_3 = "invalid_host3";

    /** Mocked Grid. */
    private static final MockIgnite GRID = new MockIgnite();

    /** Mocked IGFS. */
    private static final IgniteFileSystem IGFS = new MockIgfs();

    /** Planner. */
    private static final HadoopMapReducePlanner PLANNER = new IgniteHadoopMapReducePlanner();

    /** Block locations. */
    private static final Map<Block, Collection<IgfsBlockLocation>> BLOCK_MAP = new HashMap<>();

    /** Proxy map. */
    private static final Map<URI, Boolean> PROXY_MAP = new HashMap<>();

    /** Last created plan. */
    private static final ThreadLocal<HadoopMapReducePlan> PLAN = new ThreadLocal<>();

    /**
     * Injects the mocked grid into the planner once for all tests.
     */
    static {
        GridTestUtils.setFieldValue(PLANNER, "ignite", GRID);
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        GridTestUtils.setFieldValue(PLANNER, "log", log());

        // Reset shared mock state so tests don't leak blocks/proxies into each other.
        BLOCK_MAP.clear();
        PROXY_MAP.clear();
    }

    /**
     * Each IGFS split is hosted by exactly one node; mappers must be pinned there.
     *
     * @throws IgniteCheckedException If failed.
     */
    public void testIgfsOneBlockPerNode() throws IgniteCheckedException {
        HadoopFileBlock split1 = split(true, "/file1", 0, 100, HOST_1);
        HadoopFileBlock split2 = split(true, "/file2", 0, 100, HOST_2);
        HadoopFileBlock split3 = split(true, "/file3", 0, 100, HOST_3);

        mapIgfsBlock(split1.file(), 0, 100, location(0, 100, ID_1));
        mapIgfsBlock(split2.file(), 0, 100, location(0, 100, ID_2));
        mapIgfsBlock(split3.file(), 0, 100, location(0, 100, ID_3));

        plan(1, split1);
        assert ensureMappers(ID_1, split1);
        assert ensureReducers(ID_1, 1);
        assert ensureEmpty(ID_2);
        assert ensureEmpty(ID_3);

        plan(2, split1);
        assert ensureMappers(ID_1, split1);
        assert ensureReducers(ID_1, 2);
        assert ensureEmpty(ID_2);
        assert ensureEmpty(ID_3);

        plan(1, split1, split2);
        assert ensureMappers(ID_1, split1);
        assert ensureMappers(ID_2, split2);
        // The single reducer may legitimately land on either mapper node.
        assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 0) || ensureReducers(ID_1, 0) && ensureReducers(ID_2, 1);
        assert ensureEmpty(ID_3);

        plan(2, split1, split2);
        assert ensureMappers(ID_1, split1);
        assert ensureMappers(ID_2, split2);
        assert ensureReducers(ID_1, 1);
        assert ensureReducers(ID_2, 1);
        assert ensureEmpty(ID_3);

        plan(3, split1, split2);
        assert ensureMappers(ID_1, split1);
        assert ensureMappers(ID_2, split2);
        assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 2) || ensureReducers(ID_1, 2) && ensureReducers(ID_2, 1);
        assert ensureEmpty(ID_3);

        plan(3, split1, split2, split3);
        assert ensureMappers(ID_1, split1);
        assert ensureMappers(ID_2, split2);
        assert ensureMappers(ID_3, split3);
        assert ensureReducers(ID_1, 1);
        assert ensureReducers(ID_2, 1);
        assert ensureReducers(ID_3, 1);

        plan(5, split1, split2, split3);
        assert ensureMappers(ID_1, split1);
        assert ensureMappers(ID_2, split2);
        assert ensureMappers(ID_3, split3);
        // 5 reducers over 3 nodes: exactly one node gets 1, the other two get 2.
        assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 2) ||
            ensureReducers(ID_1, 2) && ensureReducers(ID_2, 1) && ensureReducers(ID_3, 2) ||
            ensureReducers(ID_1, 2) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 1);
    }

    /**
     * Same scenarios as {@link #testIgfsOneBlockPerNode()} but with plain
     * (non-IGFS) splits, so placement is driven by host names only.
     *
     * @throws IgniteCheckedException If failed.
     */
    public void testNonIgfsOneBlockPerNode() throws IgniteCheckedException {
        HadoopFileBlock split1 = split(false, "/file1", 0, 100, HOST_1);
        HadoopFileBlock split2 = split(false, "/file2", 0, 100, HOST_2);
        HadoopFileBlock split3 = split(false, "/file3", 0, 100, HOST_3);

        plan(1, split1);
        assert ensureMappers(ID_1, split1);
        assert ensureReducers(ID_1, 1);
        assert ensureEmpty(ID_2);
        assert ensureEmpty(ID_3);

        plan(2, split1);
        assert ensureMappers(ID_1, split1);
        assert ensureReducers(ID_1, 2);
        assert ensureEmpty(ID_2);
        assert ensureEmpty(ID_3);

        plan(1, split1, split2);
        assert ensureMappers(ID_1, split1);
        assert ensureMappers(ID_2, split2);
        assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 0) || ensureReducers(ID_1, 0) && ensureReducers(ID_2, 1);
        assert ensureEmpty(ID_3);

        plan(2, split1, split2);
        assert ensureMappers(ID_1, split1);
        assert ensureMappers(ID_2, split2);
        assert ensureReducers(ID_1, 1);
        assert ensureReducers(ID_2, 1);
        assert ensureEmpty(ID_3);

        plan(3, split1, split2);
        assert ensureMappers(ID_1, split1);
        assert ensureMappers(ID_2, split2);
        assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 2) || ensureReducers(ID_1, 2) && ensureReducers(ID_2, 1);
        assert ensureEmpty(ID_3);

        plan(3, split1, split2, split3);
        assert ensureMappers(ID_1, split1);
        assert ensureMappers(ID_2, split2);
        assert ensureMappers(ID_3, split3);
        assert ensureReducers(ID_1, 1);
        assert ensureReducers(ID_2, 1);
        assert ensureReducers(ID_3, 1);

        plan(5, split1, split2, split3);
        assert ensureMappers(ID_1, split1);
        assert ensureMappers(ID_2, split2);
        assert ensureMappers(ID_3, split3);
        assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 2) ||
            ensureReducers(ID_1, 2) && ensureReducers(ID_2, 1) && ensureReducers(ID_3, 2) ||
            ensureReducers(ID_1, 2) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 1);
    }

    /**
     * Each IGFS split is replicated on two nodes; the planner may pick either
     * replica, so assertions accept both placements.
     *
     * @throws IgniteCheckedException If failed.
     */
    public void testIgfsSeveralBlocksPerNode() throws IgniteCheckedException {
        HadoopFileBlock split1 = split(true, "/file1", 0, 100, HOST_1, HOST_2);
        HadoopFileBlock split2 = split(true, "/file2", 0, 100, HOST_1, HOST_2);
        HadoopFileBlock split3 = split(true, "/file3", 0, 100, HOST_1, HOST_3);

        mapIgfsBlock(split1.file(), 0, 100, location(0, 100, ID_1, ID_2));
        mapIgfsBlock(split2.file(), 0, 100, location(0, 100, ID_1, ID_2));
        mapIgfsBlock(split3.file(), 0, 100, location(0, 100, ID_1, ID_3));

        plan(1, split1);
        assert ensureMappers(ID_1, split1) && ensureReducers(ID_1, 1) && ensureEmpty(ID_2) ||
            ensureEmpty(ID_1) && ensureMappers(ID_2, split1) && ensureReducers(ID_2, 1);
        assert ensureEmpty(ID_3);

        plan(2, split1);
        assert ensureMappers(ID_1, split1) && ensureReducers(ID_1, 2) && ensureEmpty(ID_2) ||
            ensureEmpty(ID_1) && ensureMappers(ID_2, split1) && ensureReducers(ID_2, 2);
        assert ensureEmpty(ID_3);

        plan(1, split1, split2);
        assert ensureMappers(ID_1, split1) && ensureMappers(ID_2, split2) ||
            ensureMappers(ID_1, split2) && ensureMappers(ID_2, split1);
        assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 0) || ensureReducers(ID_1, 0) && ensureReducers(ID_2, 1);
        assert ensureEmpty(ID_3);

        plan(2, split1, split2);
        assert ensureMappers(ID_1, split1) && ensureMappers(ID_2, split2) ||
            ensureMappers(ID_1, split2) && ensureMappers(ID_2, split1);
        assert ensureReducers(ID_1, 1);
        assert ensureReducers(ID_2, 1);
        assert ensureEmpty(ID_3);

        plan(3, split1, split2, split3);
        assert ensureReducers(ID_1, 1);
        assert ensureReducers(ID_2, 1);
        assert ensureReducers(ID_3, 1);

        plan(5, split1, split2, split3);
        assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 2) ||
            ensureReducers(ID_1, 2) && ensureReducers(ID_2, 1) && ensureReducers(ID_3, 2) ||
            ensureReducers(ID_1, 2) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 1);
    }

    /**
     * Same replication scenarios as {@link #testIgfsSeveralBlocksPerNode()}
     * but with non-IGFS splits.
     *
     * @throws IgniteCheckedException If failed.
     */
    public void testNonIgfsSeveralBlocksPerNode() throws IgniteCheckedException {
        HadoopFileBlock split1 = split(false, "/file1", 0, 100, HOST_1, HOST_2);
        HadoopFileBlock split2 = split(false, "/file2", 0, 100, HOST_1, HOST_2);
        HadoopFileBlock split3 = split(false, "/file3", 0, 100, HOST_1, HOST_3);

        plan(1, split1);
        assert ensureMappers(ID_1, split1) && ensureReducers(ID_1, 1) && ensureEmpty(ID_2) ||
            ensureEmpty(ID_1) && ensureMappers(ID_2, split1) && ensureReducers(ID_2, 1);
        assert ensureEmpty(ID_3);

        plan(2, split1);
        assert ensureMappers(ID_1, split1) && ensureReducers(ID_1, 2) && ensureEmpty(ID_2) ||
            ensureEmpty(ID_1) && ensureMappers(ID_2, split1) && ensureReducers(ID_2, 2);
        assert ensureEmpty(ID_3);

        plan(1, split1, split2);
        assert ensureMappers(ID_1, split1) && ensureMappers(ID_2, split2) ||
            ensureMappers(ID_1, split2) && ensureMappers(ID_2, split1);
        assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 0) || ensureReducers(ID_1, 0) && ensureReducers(ID_2, 1);
        assert ensureEmpty(ID_3);

        plan(2, split1, split2);
        assert ensureMappers(ID_1, split1) && ensureMappers(ID_2, split2) ||
            ensureMappers(ID_1, split2) && ensureMappers(ID_2, split1);
        assert ensureReducers(ID_1, 1);
        assert ensureReducers(ID_2, 1);
        assert ensureEmpty(ID_3);

        plan(3, split1, split2, split3);
        assert ensureReducers(ID_1, 1);
        assert ensureReducers(ID_2, 1);
        assert ensureReducers(ID_3, 1);

        plan(5, split1, split2, split3);
        assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 2) ||
            ensureReducers(ID_1, 2) && ensureReducers(ID_2, 1) && ensureReducers(ID_3, 2) ||
            ensureReducers(ID_1, 2) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 1);
    }

    /**
     * A single split spans two IGFS blocks with different owners; the mapper
     * must go to the node owning the larger portion of the split.
     *
     * @throws IgniteCheckedException If failed.
     */
    public void testIgfsSeveralComplexBlocksPerNode() throws IgniteCheckedException {
        HadoopFileBlock split1 = split(true, "/file1", 0, 100, HOST_1, HOST_2, HOST_3);
        HadoopFileBlock split2 = split(true, "/file2", 0, 100, HOST_1, HOST_2, HOST_3);

        mapIgfsBlock(split1.file(), 0, 100, location(0, 50, ID_1, ID_2), location(51, 100, ID_1, ID_3));
        mapIgfsBlock(split2.file(), 0, 100, location(0, 50, ID_1, ID_2), location(51, 100, ID_2, ID_3));

        plan(1, split1);
        assert ensureMappers(ID_1, split1);
        assert ensureReducers(ID_1, 1);
        assert ensureEmpty(ID_2);
        assert ensureEmpty(ID_3);

        plan(1, split2);
        assert ensureMappers(ID_2, split2);
        assert ensureReducers(ID_2, 1);
        assert ensureEmpty(ID_1);
        assert ensureEmpty(ID_3);

        plan(1, split1, split2);
        assert ensureMappers(ID_1, split1);
        assert ensureMappers(ID_2, split2);
        assert ensureReducers(ID_1, 0) && ensureReducers(ID_2, 1) || ensureReducers(ID_1, 1) && ensureReducers(ID_2, 0);
        assert ensureEmpty(ID_3);

        plan(2, split1, split2);
        assert ensureMappers(ID_1, split1);
        assert ensureMappers(ID_2, split2);
        assert ensureReducers(ID_1, 1);
        assert ensureReducers(ID_2, 1);
        assert ensureEmpty(ID_3);
    }

    /**
     * Splits whose hosts resolve to no topology node ("orphans") must still be
     * assigned — to arbitrary nodes, one split per node when counts allow.
     *
     * @throws IgniteCheckedException If failed.
     */
    public void testNonIgfsOrphans() throws IgniteCheckedException {
        HadoopFileBlock split1 = split(false, "/file1", 0, 100, INVALID_HOST_1, INVALID_HOST_2);
        HadoopFileBlock split2 = split(false, "/file2", 0, 100, INVALID_HOST_1, INVALID_HOST_3);
        HadoopFileBlock split3 = split(false, "/file3", 0, 100, INVALID_HOST_2, INVALID_HOST_3);

        plan(1, split1);
        assert ensureMappers(ID_1, split1) && ensureReducers(ID_1, 1) && ensureEmpty(ID_2) && ensureEmpty(ID_3) ||
            ensureEmpty(ID_1) && ensureMappers(ID_2, split1) && ensureReducers(ID_2, 1) && ensureEmpty(ID_3) ||
            ensureEmpty(ID_1) && ensureEmpty(ID_2) && ensureMappers(ID_3, split1) && ensureReducers(ID_3, 1);

        plan(2, split1);
        assert ensureMappers(ID_1, split1) && ensureReducers(ID_1, 2) && ensureEmpty(ID_2) && ensureEmpty(ID_3) ||
            ensureEmpty(ID_1) && ensureMappers(ID_2, split1) && ensureReducers(ID_2, 2) && ensureEmpty(ID_3) ||
            ensureEmpty(ID_1) && ensureEmpty(ID_2) && ensureMappers(ID_3, split1) && ensureReducers(ID_3, 2);

        plan(1, split1, split2, split3);
        // Any permutation of the three splits over the three nodes is acceptable.
        assert ensureMappers(ID_1, split1) && ensureMappers(ID_2, split2) && ensureMappers(ID_3, split3) ||
            ensureMappers(ID_1, split1) && ensureMappers(ID_2, split3) && ensureMappers(ID_3, split2) ||
            ensureMappers(ID_1, split2) && ensureMappers(ID_2, split1) && ensureMappers(ID_3, split3) ||
            ensureMappers(ID_1, split2) && ensureMappers(ID_2, split3) && ensureMappers(ID_3, split1) ||
            ensureMappers(ID_1, split3) && ensureMappers(ID_2, split1) && ensureMappers(ID_3, split2) ||
            ensureMappers(ID_1, split3) && ensureMappers(ID_2, split2) && ensureMappers(ID_3, split1);
        assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 0) && ensureReducers(ID_3, 0) ||
            ensureReducers(ID_1, 0) && ensureReducers(ID_2, 1) && ensureReducers(ID_3, 0) ||
            ensureReducers(ID_1, 0) && ensureReducers(ID_2, 0) && ensureReducers(ID_3, 1);

        plan(3, split1, split2, split3);
        assert ensureMappers(ID_1, split1) && ensureMappers(ID_2, split2) && ensureMappers(ID_3, split3) ||
            ensureMappers(ID_1, split1) && ensureMappers(ID_2, split3) && ensureMappers(ID_3, split2) ||
            ensureMappers(ID_1, split2) && ensureMappers(ID_2, split1) && ensureMappers(ID_3, split3) ||
            ensureMappers(ID_1, split2) && ensureMappers(ID_2, split3) && ensureMappers(ID_3, split1) ||
            ensureMappers(ID_1, split3) && ensureMappers(ID_2, split1) && ensureMappers(ID_3, split2) ||
            ensureMappers(ID_1, split3) && ensureMappers(ID_2, split2) && ensureMappers(ID_3, split1);
        assert ensureReducers(ID_1, 1);
        assert ensureReducers(ID_2, 1);
        assert ensureReducers(ID_3, 1);

        plan(5, split1, split2, split3);
        assert ensureMappers(ID_1, split1) && ensureMappers(ID_2, split2) && ensureMappers(ID_3, split3) ||
            ensureMappers(ID_1, split1) && ensureMappers(ID_2, split3) && ensureMappers(ID_3, split2) ||
            ensureMappers(ID_1, split2) && ensureMappers(ID_2, split1) && ensureMappers(ID_3, split3) ||
            ensureMappers(ID_1, split2) && ensureMappers(ID_2, split3) && ensureMappers(ID_3, split1) ||
            ensureMappers(ID_1, split3) && ensureMappers(ID_2, split1) && ensureMappers(ID_3, split2) ||
            ensureMappers(ID_1, split3) && ensureMappers(ID_2, split2) && ensureMappers(ID_3, split1);
        assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 2) ||
            ensureReducers(ID_1, 2) && ensureReducers(ID_2, 1) && ensureReducers(ID_3, 2) ||
            ensureReducers(ID_1, 2) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 1);
    }

    /**
     * Create plan over a fixed 3-node topology (host1..host3) and publish it
     * to the thread-local {@link #PLAN} for the ensure* helpers.
     *
     * @param reducers Reducers count.
     * @param splits Splits.
     * @return Plan.
     * @throws IgniteCheckedException If failed.
     */
    private static HadoopMapReducePlan plan(int reducers, HadoopInputSplit... splits) throws IgniteCheckedException {
        assert reducers > 0;
        assert splits != null && splits.length > 0;

        Collection<HadoopInputSplit> splitList = new ArrayList<>(splits.length);

        Collections.addAll(splitList, splits);

        Collection<ClusterNode> top = new ArrayList<>();

        GridTestNode node1 = new GridTestNode(ID_1);
        GridTestNode node2 = new GridTestNode(ID_2);
        GridTestNode node3 = new GridTestNode(ID_3);

        node1.setHostName(HOST_1);
        node2.setHostName(HOST_2);
        node3.setHostName(HOST_3);

        top.add(node1);
        top.add(node2);
        top.add(node3);

        HadoopMapReducePlan plan = PLANNER.preparePlan(new MockJob(reducers, splitList), top, null);

        PLAN.set(plan);

        return plan;
    }

    /**
     * Ensure that node contains the given mappers.
     *
     * @param nodeId Node ID.
     * @param expSplits Expected splits.
     * @return {@code True} if this assumption is valid.
     */
    private static boolean ensureMappers(UUID nodeId, HadoopInputSplit... expSplits) {
        Collection<HadoopInputSplit> expSplitsCol = new ArrayList<>();

        Collections.addAll(expSplitsCol, expSplits);

        Collection<HadoopInputSplit> splits = PLAN.get().mappers(nodeId);

        return F.eq(expSplitsCol, splits);
    }

    /**
     * Ensure that node contains the given amount of reducers.
     *
     * @param nodeId Node ID.
     * @param reducers Reducers.
     * @return {@code True} if this assumption is valid.
     */
    private static boolean ensureReducers(UUID nodeId, int reducers) {
        int[] reducersArr = PLAN.get().reducers(nodeId);

        // reducers == 0 accepts both null and an empty array.
        return reducers == 0 ? F.isEmpty(reducersArr) : (reducersArr != null && reducersArr.length == reducers);
    }

    /**
     * Ensure that no mappers and reducers is located on this node.
     *
     * @param nodeId Node ID.
     * @return {@code True} if this assumption is valid.
     */
    private static boolean ensureEmpty(UUID nodeId) {
        return F.isEmpty(PLAN.get().mappers(nodeId)) && F.isEmpty(PLAN.get().reducers(nodeId));
    }

    /**
     * Create split. The {@code igfs} flag selects the URI scheme the planner
     * uses to decide whether IGFS affinity applies.
     *
     * @param igfs IGFS flag.
     * @param file File.
     * @param start Start.
     * @param len Length.
     * @param hosts Hosts.
     * @return Split.
     */
    private static HadoopFileBlock split(boolean igfs, String file, long start, long len, String... hosts) {
        URI uri = URI.create((igfs ? "igfs://igfs@" : "hdfs://") + file);

        return new HadoopFileBlock(hosts, uri, start, len);
    }

    /**
     * Create block location.
     *
     * @param start Start.
     * @param len Length.
     * @param nodeIds Node IDs.
     * @return Block location.
     */
    private static IgfsBlockLocation location(long start, long len, UUID... nodeIds) {
        assert nodeIds != null && nodeIds.length > 0;

        Collection<ClusterNode> nodes = new ArrayList<>(nodeIds.length);

        for (UUID id : nodeIds)
            nodes.add(new GridTestNode(id));

        return new IgfsBlockLocationImpl(start, len, nodes);
    }

    /**
     * Map IGFS block to nodes. Registered locations are served back through
     * {@link MockIgfs#affinity(IgfsPath, long, long)}.
     *
     * @param file File.
     * @param start Start.
     * @param len Length.
     * @param locations Locations.
     */
    private static void mapIgfsBlock(URI file, long start, long len, IgfsBlockLocation... locations) {
        assert locations != null && locations.length > 0;

        IgfsPath path = new IgfsPath(file);

        Block block = new Block(path, start, len);

        Collection<IgfsBlockLocation> locationsList = new ArrayList<>();

        Collections.addAll(locationsList, locations);

        BLOCK_MAP.put(block, locationsList);
    }

    /**
     * Block. Value object keying {@link #BLOCK_MAP}: path + offset range.
     */
    private static class Block {
        /** */
        private final IgfsPath path;

        /** */
        private final long start;

        /** */
        private final long len;

        /**
         * Constructor.
         *
         * @param path Path.
         * @param start Start.
         * @param len Length.
         */
        private Block(IgfsPath path, long start, long len) {
            this.path = path;
            this.start = start;
            this.len = len;
        }

        /** {@inheritDoc} */
        @SuppressWarnings("RedundantIfStatement")
        @Override public boolean equals(Object o) {
            if (this == o)
                return true;

            if (!(o instanceof Block))
                return false;

            Block block = (Block) o;

            if (len != block.len)
                return false;

            if (start != block.start)
                return false;

            if (!path.equals(block.path))
                return false;

            return true;
        }

        /** {@inheritDoc} */
        @Override public int hashCode() {
            int res = path.hashCode();

            res = 31 * res + (int) (start ^ (start >>> 32));
            res = 31 * res + (int) (len ^ (len >>> 32));

            return res;
        }
    }

    /**
     * Mocked job. Only the reducer count and the split list are meaningful;
     * everything else is stubbed out.
     */
    private static class MockJob implements HadoopJob {
        /** Reducers count. */
        private final int reducers;

        /** */
        private Collection<HadoopInputSplit> splitList;

        /**
         * Constructor.
         *
         * @param reducers Reducers count.
         * @param splitList Splits.
         */
        private MockJob(int reducers, Collection<HadoopInputSplit> splitList) {
            this.reducers = reducers;
            this.splitList = splitList;
        }

        /** {@inheritDoc} */
        @Override public HadoopJobId id() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public HadoopJobInfo info() {
            return new HadoopDefaultJobInfo() {
                @Override public int reducers() {
                    return reducers;
                }
            };
        }

        /** {@inheritDoc} */
        @Override public Collection<HadoopInputSplit> input() throws IgniteCheckedException {
            return splitList;
        }

        /** {@inheritDoc} */
        @Override public HadoopTaskContext getTaskContext(HadoopTaskInfo info) throws IgniteCheckedException {
            return null;
        }

        /** {@inheritDoc} */
        @Override public void initialize(boolean external, UUID nodeId) throws IgniteCheckedException {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void dispose(boolean external) throws IgniteCheckedException {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void prepareTaskEnvironment(HadoopTaskInfo info) throws IgniteCheckedException {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void cleanupTaskEnvironment(HadoopTaskInfo info) throws IgniteCheckedException {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void cleanupStagingDirectory() {
            // No-op.
        }
    }

    /**
     * Mocked IGFS. Only {@link #isProxy(URI)} and
     * {@link #affinity(IgfsPath, long, long)} return meaningful data (backed
     * by {@link #PROXY_MAP} / {@link #BLOCK_MAP}); all other methods are stubs.
     */
    private static class MockIgfs implements IgfsEx {
        /** {@inheritDoc} */
        @Override public boolean isProxy(URI path) {
            return PROXY_MAP.containsKey(path) && PROXY_MAP.get(path);
        }

        /** {@inheritDoc} */
        @Override public Collection<IgfsBlockLocation> affinity(IgfsPath path, long start, long len) {
            return BLOCK_MAP.get(new Block(path, start, len));
        }

        /** {@inheritDoc} */
        @Override public Collection<IgfsBlockLocation> affinity(IgfsPath path, long start, long len, long maxLen) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public void stop(boolean cancel) {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public IgfsContext context() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public IgfsPaths proxyPaths() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public IgfsInputStreamAdapter open(IgfsPath path, int bufSize, int seqReadsBeforePrefetch) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public IgfsInputStreamAdapter open(IgfsPath path) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public IgfsInputStreamAdapter open(IgfsPath path, int bufSize) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public IgfsStatus globalSpace() throws IgniteCheckedException {
            return null;
        }

        /** {@inheritDoc} */
        @Override public void globalSampling(@Nullable Boolean val) throws IgniteCheckedException {
            // No-op.
        }

        /** {@inheritDoc} */
        @Nullable @Override public Boolean globalSampling() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public IgfsLocalMetrics localMetrics() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public long groupBlockSize() {
            return 0;
        }

        /** {@inheritDoc} */
        @Override public IgniteInternalFuture<?> awaitDeletesAsync() throws IgniteCheckedException {
            return null;
        }

        /** {@inheritDoc} */
        @Nullable @Override public String clientLogDirectory() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public void clientLogDirectory(String logDir) {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public boolean evictExclude(IgfsPath path, boolean primary) {
            return false;
        }

        /** {@inheritDoc} */
        @Nullable @Override public String name() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public FileSystemConfiguration configuration() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public boolean exists(IgfsPath path) {
            return false;
        }

        /** {@inheritDoc} */
        @Nullable @Override public IgfsFile info(IgfsPath path) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public IgfsPathSummary summary(IgfsPath path) {
            return null;
        }

        /** {@inheritDoc} */
        @Nullable @Override public IgfsFile update(IgfsPath path, Map<String, String> props) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public void rename(IgfsPath src, IgfsPath dest) {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public boolean delete(IgfsPath path, boolean recursive) {
            return false;
        }

        /** {@inheritDoc} */
        @Override public void mkdirs(IgfsPath path) {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void mkdirs(IgfsPath path, @Nullable Map<String, String> props) {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public Collection<IgfsPath> listPaths(IgfsPath path) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public Collection<IgfsFile> listFiles(IgfsPath path) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public long usedSpaceSize() {
            return 0;
        }

        /** {@inheritDoc} */
        @Override public IgfsOutputStream create(IgfsPath path, boolean overwrite) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public IgfsOutputStream create(IgfsPath path, int bufSize, boolean overwrite, int replication,
            long blockSize, @Nullable Map<String, String> props) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public IgfsOutputStream create(IgfsPath path, int bufSize, boolean overwrite,
            @Nullable IgniteUuid affKey, int replication, long blockSize, @Nullable Map<String, String> props) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public IgfsOutputStream append(IgfsPath path, boolean create) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public IgfsOutputStream append(IgfsPath path, int bufSize, boolean create,
            @Nullable Map<String, String> props) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public void setTimes(IgfsPath path, long accessTime, long modificationTime) {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public IgfsMetrics metrics() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public void resetMetrics() {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public long size(IgfsPath path) {
            return 0;
        }

        /** {@inheritDoc} */
        @Override public void format() {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public <T, R> R execute(IgfsTask<T, R> task, @Nullable IgfsRecordResolver rslvr,
            Collection<IgfsPath> paths, @Nullable T arg) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public <T, R> R execute(IgfsTask<T, R> task, @Nullable IgfsRecordResolver rslvr,
            Collection<IgfsPath> paths, boolean skipNonExistentFiles, long maxRangeLen, @Nullable T arg) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public <T, R> R execute(Class<? extends IgfsTask<T, R>> taskCls, @Nullable IgfsRecordResolver rslvr,
            Collection<IgfsPath> paths, @Nullable T arg) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public <T, R> R execute(Class<? extends IgfsTask<T, R>> taskCls, @Nullable IgfsRecordResolver rslvr,
            Collection<IgfsPath> paths, boolean skipNonExistentFiles, long maxRangeLen, @Nullable T arg) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public IgniteUuid nextAffinityKey() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public IgniteFileSystem withAsync() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public boolean isAsync() {
            return false;
        }

        /** {@inheritDoc} */
        @Override public <R> IgniteFuture<R> future() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public IgfsSecondaryFileSystem asSecondary() {
            return null;
        }
    }

    /**
     * Mocked Grid. Routes {@code igfsx("igfs")} to the shared {@link #IGFS}
     * mock; everything else is stubbed.
     */
    @SuppressWarnings("ExternalizableWithoutPublicNoArgConstructor")
    private static class MockIgnite extends IgniteSpringBean implements IgniteEx {
        /** {@inheritDoc} */
        @Override public IgniteClusterEx cluster() {
            return (IgniteClusterEx)super.cluster();
        }

        /** {@inheritDoc} */
        @Override public IgniteFileSystem igfsx(String name) {
            assert F.eq("igfs", name);

            return IGFS;
        }

        /** {@inheritDoc} */
        @Override public Hadoop hadoop() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public String name() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public <K extends GridCacheUtilityKey, V> IgniteInternalCache<K, V> utilityCache() {
            return null;
        }

        /** {@inheritDoc} */
        @Nullable @Override public <K, V> IgniteInternalCache<K, V> cachex(@Nullable String name) {
            return null;
        }

        /** {@inheritDoc} */
        @Nullable @Override public <K, V> IgniteInternalCache<K, V> cachex() {
            return null;
        }

        /** {@inheritDoc} */
        @SuppressWarnings("unchecked")
        @Override public Collection<IgniteInternalCache<?, ?>> cachesx(
            @Nullable IgnitePredicate<? super IgniteInternalCache<?, ?>>... p) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public boolean eventUserRecordable(int type) {
            return false;
        }

        /** {@inheritDoc} */
        @Override public boolean allEventsUserRecordable(int[] types) {
            return false;
        }

        /** {@inheritDoc} */
        @Override public boolean isJmxRemoteEnabled() {
            return false;
        }

        /** {@inheritDoc} */
        @Override public boolean isRestartEnabled() {
            return false;
        }

        /** {@inheritDoc} */
        @Override public ClusterNode localNode() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public String latestVersion() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public GridKernalContext context() {
            return null;
        }
    }
}
package aynik.core;

import aynik.compiler.AynikCompiler;
import aynik.compiler.CompilerMissingNodeException;
import aynik.map.*;
import aynik.player.action.*;
import aynik.player.Player;
import aynik.player.action.util.ActionUseCondition;
import aynik.player.item.Item;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;

/**
 * Main game controller: wires the singletons (map, player, console, ticker)
 * together, runs the play loop, and applies the consequences of player actions.
 *
 * Created by schaller on 05/02/16.
 */
public class Aynik {

    // Path of the JSON game definition compiled at startup.
    private static final String GAME_FILE_PATH = "data/game-data.json";

    private static AynikCompiler compiler;

    private AynikTicker ticker;
    private AynikMap map;
    private AynikConsole console;
    private Player player;

    /**
     * Creates the controller from already-initialized collaborators.
     *
     * @param ticker  game clock driving turn progression
     * @param map     world map (positions to locations)
     * @param console user interface used for all input/output
     * @param player  the player state
     */
    public Aynik(AynikTicker ticker, AynikMap map, AynikConsole console, Player player) {
        this.ticker = ticker;
        this.map = map;
        this.console = console;
        this.player = player;
    }

    /**
     * Entry point: compiles the game data file, fetches the singleton
     * collaborators, wires them to the console and starts the play loop.
     */
    public static void main (String[] args) {
        try {
            compiler = new AynikCompiler(new File(GAME_FILE_PATH));
        } catch (IOException e) {
            e.printStackTrace();
            System.exit(0);
        } catch (CompilerMissingNodeException e) {
            e.printStackTrace();
            System.exit(0);
        }
        try {
            // NOTE(review): a CompilerMissingNodeException here is only logged,
            // unlike above where it aborts — presumably a partially prepared
            // game is still playable; confirm.
            compiler.prepareGame();
        } catch (CompilerMissingNodeException e) {
            e.printStackTrace();
        }
        AynikMap map = AynikMap.getInstance();
        Player player = Player.getInstance();
        AynikStory story = AynikStory.getInstance();
        AynikTicker ticker = AynikTicker.getInstance();
        AynikConsole console = AynikConsole.getInstance();
        console.setPlayer(player);
        console.setStory(story);
        console.setTicker(ticker);
        console.setMap(map);
        Aynik game = new Aynik(ticker, map, console, player);
        console.setGame(game);
        game.startPlaying();
    }

    /**
     * Runs the outer game loop: drop the player on a random start position,
     * loop turns until death or victory, then offer a retry. The original
     * start position is reused for retries unless it was a death location,
     * in which case a new random start is drawn.
     */
    public void startPlaying () {
        Position originPosition;
        boolean needNewFirstLocation = true;
        this.console.startGame();
        boolean playerWannaPlay = true;
        this.randomizePlayerLocation();
        originPosition = this.player.currentPosition;
        // A death location is a bad respawn point, so remember to re-roll.
        needNewFirstLocation = this.player.currentLocation.type == LocationTypes.death;
        while (playerWannaPlay) {
            this.console.printLanding();
            // true: the player just parachuted onto this location
            this.arrivingLocation(true);
            // Inner turn loop: one console action + one tick until the run ends.
            while (this.player.isAlive() && !this.player.won()) {
                this.console.playerAction();
                this.ticker.next();
            }
            playerWannaPlay = this.console.askPlayerWannaTryAgain();
            if (playerWannaPlay) {
                if (needNewFirstLocation) {
                    this.randomizePlayerLocation();
                    originPosition = this.player.currentPosition;
                    needNewFirstLocation = this.player.currentLocation.type == LocationTypes.death;
                }
                this.player.resetPlayer(originPosition, this.map.get(originPosition));
            }
        }
    }

    /**
     * Reacts to the player's current location: end → win, death → die,
     * otherwise hand out discovered items (if any) and describe the location.
     *
     * @param justParachute true when the player just landed (prints an extra
     *                      "unlucky parachute" message on a death location)
     */
    public void arrivingLocation(boolean justParachute) {
        Location playerLocation = this.player.currentLocation;
        if (playerLocation.type == LocationTypes.end) {
            this.player.win = true;
            this.console.printEnd();
        } else if (this.player.currentLocation.type == LocationTypes.death) {
            LocationDeath deathLocation = (LocationDeath) playerLocation;
            if (justParachute) this.console.printUnluckyParachute();
            this.playerDie();
            this.console.printContext(deathLocation.story);
        } else {
            if (this.player.currentLocation.type == LocationTypes.itemsDiscovery) {
                LocationItemsDiscovery currentLocationItemsDiscovery =
                        (LocationItemsDiscovery) this.player.currentLocation;
                this.player.addItems(currentLocationItemsDiscovery.items);
                this.console.printGotItems(currentLocationItemsDiscovery.items);
            }
            this.console.printNewLocation();
        }
    }

    // Announces the death on the console, then flips the player state.
    private void playerDie() {
        this.console.playerDie();
        this.player.die();
    }

    /**
     * Places the player on a random non-obstacle location of map row 1.
     * NOTE(review): if every location in the row is an obstacle,
     * Random.nextInt(0) throws IllegalArgumentException — presumably the game
     * data guarantees at least one free cell; confirm.
     */
    private void randomizePlayerLocation() {
        HashMap<Position, Location> row = this.map.getRow(1);
        ArrayList<Position> positionCandidates = new ArrayList<>();
        for (Map.Entry<Position, Location> positionLocationEntry : row.entrySet()) {
            Location location = positionLocationEntry.getValue();
            if (location.type == LocationTypes.obstacle) continue;
            positionCandidates.add(positionLocationEntry.getKey());
        }
        int candidatesSize = positionCandidates.size();
        int randomPositionIndex = new Random().nextInt(candidatesSize);
        Position firstPosition = positionCandidates.get(randomPositionIndex);
        this.player.changeLocation(firstPosition, row.get(firstPosition));
    }

    /**
     * Applies the outcome of a resolved {@link Action}: prints its context,
     * kills the player on failure, then handles the action-type-specific
     * effects (attack rewards/teammate rules, jetpack flag, item usage and
     * its conditions) and finally moves the player unless the action was a
     * hide action.
     *
     * @param action the already-resolved action to apply
     */
    public void applyAction(Action action) {
        if (action.hasContext()) {
            this.console.printContext(action.context);
        }
        // A failed action is immediately fatal; nothing else applies.
        if ( ! action.success) {
            this.playerDie();
            return;
        }
        if (action instanceof ActionAttack) {
            ActionAttack actionAttack = (ActionAttack) action;
            if ( ! action.hasContext()) {
                this.console.printSuccessfulAttack();
            }
            if (actionAttack.rewards.size() > 0) {
                for (String reward : actionAttack.rewards) {
                    // "teammate" is the only reward handled here.
                    if (reward.equals("teammate")) {
                        this.console.printGotATeammate();
                        this.player.asTeammate = true;
                    }
                }
            }
            // Attacking without the required teammate is fatal.
            if (actionAttack.needTeammate && ! this.player.asTeammate) {
                this.playerDie();
                return;
            }
            if (actionAttack.loseTeammate) {
                this.console.printTeammateLose();
                this.player.asTeammate = false;
            }
        }
        if (action instanceof ActionJetpack) {
            this.player.usedJetpack = true;
        }
        if (action instanceof ActionUse) {
            Item item = ((ActionUse) action).item;
            this.player.usedItem(item);
            if (! action.hasContext()) this.console.printSuccessfulUse(item);
            if (action instanceof ActionUseConditions) {
                ActionUseConditions actionUseConditions = (ActionUseConditions) action;
                // Each unmet item condition prints its context and kills the player.
                for (ActionUseCondition condition : actionUseConditions.conditions) {
                    if (! this.player.hasItem(condition.item)) {
                        this.console.printContext(condition.context);
                        this.playerDie();
                    }
                }
            }
        }
        // Every action except hiding moves the player.
        if ( ! (action instanceof ActionHide)) {
            try {
                this.console.makeThePlayerMove(action);
            } catch (Exception e) {
                this.console.print("Error in the program...");
                this.playerDie();
                e.printStackTrace();
            }
        }
    }

    /**
     * Moves the player to the given position and processes the arrival
     * (not a parachute landing).
     */
    public void movePlayerTo(Position position) {
        this.player.changeLocation(position, this.map.get(position));
        this.arrivingLocation(false);
    }
}
/* * Copyright 2019 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.process.workitem.email; import org.jbpm.process.workitem.core.AbstractLogOrThrowWorkItemHandler; import org.jbpm.process.workitem.core.util.Wid; import org.jbpm.process.workitem.core.util.WidMavenDepends; import org.jbpm.process.workitem.core.util.WidParameter; import org.jbpm.process.workitem.core.util.WidResult; import org.jbpm.process.workitem.core.util.service.WidAction; import org.jbpm.process.workitem.core.util.service.WidAuth; import org.jbpm.process.workitem.core.util.service.WidService; import org.kie.api.runtime.process.WorkItem; import org.kie.api.runtime.process.WorkItemManager; /** * WorkItemHandler for sending email. * * Expects the following parameters: * - "From" (String): sends an email from the given the email address * - "To" (String): sends the email to the given email address(es), * multiple addresses must be separated using a semi-colon (';') * - "Subject" (String): the subject of the email * - "Body" (String): the body of the email (using HTML) * - "Template" (String): optional template to generate body of the email, template when given will override Body parameter * Is completed immediately and does not return any result parameters. * * Sending an email cannot be aborted. 
* */ @Wid(widfile = "EmailDefinitions.wid", name = "Email", displayName = "Email", defaultHandler = "mvel: new org.jbpm.process.workitem.email.EmailWorkItemHandler()", documentation = "${artifactId}/index.html", category = "${artifactId}", icon = "defaultemailicon.png", parameters = { @WidParameter(name = "From"), @WidParameter(name = "To"), @WidParameter(name = "Reply-To"), @WidParameter(name = "Cc"), @WidParameter(name = "Bcc"), @WidParameter(name = "Body"), @WidParameter(name = "Template"), @WidParameter(name = "Subject"), @WidParameter(name = "Attachments"), @WidParameter(name = "Debug") }, mavenDepends = { @WidMavenDepends(group = "${groupId}", artifact = "${artifactId}", version = "${version}") }, serviceInfo = @WidService(category = "${name}", description = "${description}", keywords = "send,email", action = @WidAction(title = "Send email"), authinfo = @WidAuth(required = true, params = {"host", "port", "username", "password"}, paramsdescription = {"Host", "Port", "User Name", "Password"}) )) public class EmailWorkItemHandler extends AbstractLogOrThrowWorkItemHandler { private Connection connection; private TemplateManager templateManager = TemplateManager.get(); public EmailWorkItemHandler() { } public EmailWorkItemHandler(String host, String port, String userName, String password) { setConnection(host, port, userName, password); } public EmailWorkItemHandler(String host, String port, String userName, String password, String startTls) { setConnection(host, port, userName, password, startTls); } public EmailWorkItemHandler(String handlingProcessId, String handlingStrategy) { this.handlingProcessId = handlingProcessId; this.handlingStrategy = handlingStrategy; } public EmailWorkItemHandler(String host, String port, String userName, String password, String handlingProcessId, String handlingStrategy) { setConnection(host, port, userName, password); this.handlingProcessId = handlingProcessId; this.handlingStrategy = handlingStrategy; } public 
EmailWorkItemHandler(String host, String port, String userName, String password, String startTls, String handlingProcessId, String handlingStrategy) { setConnection(host, port, userName, password, startTls); this.handlingProcessId = handlingProcessId; this.handlingStrategy = handlingStrategy; } public void setConnection(String host, String port, String userName, String password) { connection = new Connection(); connection.setHost(host); connection.setPort(port); connection.setUserName(userName); connection.setPassword(password); } public void setConnection(String host, String port, String userName, String password, String startTls) { connection = new Connection(); connection.setHost(host); connection.setPort(port); connection.setUserName(userName); connection.setPassword(password); connection.setStartTls(Boolean.parseBoolean(startTls)); } public Connection getConnection() { return connection; } public void executeWorkItem(WorkItem workItem, WorkItemManager manager) { try { Email email = createEmail(workItem, connection); SendHtml.sendHtml(email, getDebugFlag(workItem)); // avoid null pointer when used from deadline escalation handler if (manager != null) { manager.completeWorkItem(workItem.getId(), null); } } catch (Exception e) { handleException(e); } } protected Email createEmail(WorkItem workItem, Connection connection) { Email email = new Email(); Message message = new Message(); message.setFrom((String) workItem.getParameter("From")); message.setReplyTo( (String) workItem.getParameter("Reply-To")); message.setDisplayName((String) workItem.getParameter("DisplayName")); // Set recipients Recipients recipients = new Recipients(); String to = (String) workItem.getParameter("To"); if ( to == null || to.trim().length() == 0 ) { throw new RuntimeException( "Email must have one or more to adresses" ); } for (String s: to.split(";")) { if (s != null && !"".equals(s)) { Recipient recipient = new Recipient(); recipient.setEmail(s); recipient.setType( "To" ); 
recipients.addRecipient(recipient); } } // Set cc String cc = (String) workItem.getParameter("Cc"); if ( cc != null && cc.trim().length() > 0 ) { for (String s: cc.split(";")) { if (s != null && !"".equals(s)) { Recipient recipient = new Recipient(); recipient.setEmail(s); recipient.setType( "Cc" ); recipients.addRecipient(recipient); } } } // Set bcc String bcc = (String) workItem.getParameter("Bcc"); if ( bcc != null && bcc.trim().length() > 0 ) { for (String s: bcc.split(";")) { if (s != null && !"".equals(s)) { Recipient recipient = new Recipient(); recipient.setEmail(s); recipient.setType( "Bcc" ); recipients.addRecipient(recipient); } } } // Fill message String body = (String) workItem.getParameter("Body"); String template = (String) workItem.getParameter("Template"); if (template != null) { body = templateManager.render(template, workItem.getParameters()); } message.setRecipients(recipients); message.setSubject((String) workItem.getParameter("Subject")); message.setBody(body); // fill attachments String attachmentList = (String) workItem.getParameter("Attachments"); if (attachmentList != null) { String[] attachments = attachmentList.split(","); message.setAttachments(java.util.Arrays.asList(attachments)); } // setup email email.setMessage(message); email.setConnection(connection); return email; } public void abortWorkItem(WorkItem arg0, WorkItemManager arg1) { // Do nothing, email cannot be aborted } protected boolean getDebugFlag(WorkItem workItem) { Object debugParam = workItem.getParameter("Debug"); if (debugParam == null) { return false; } return Boolean.parseBoolean(debugParam.toString()); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tez.dag.app; import java.io.IOException; import java.net.UnknownHostException; import java.util.Collections; import java.util.Map; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ContainerExitStatus; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.util.Clock; import org.apache.tez.client.TezApiVersionInfo; import org.apache.tez.common.ContainerContext; import org.apache.tez.common.ContainerTask; import org.apache.tez.dag.api.TezUncheckedException; import org.apache.tez.dag.app.dag.event.VertexEventRouteEvent; import org.apache.tez.dag.app.launcher.ContainerLauncher; import 
org.apache.tez.dag.app.rm.NMCommunicatorEvent;
import org.apache.tez.dag.app.rm.NMCommunicatorLaunchRequestEvent;
import org.apache.tez.dag.app.rm.NMCommunicatorStopRequestEvent;
import org.apache.tez.dag.app.rm.container.AMContainerEvent;
import org.apache.tez.dag.app.rm.container.AMContainerEventLaunched;
import org.apache.tez.dag.app.rm.container.AMContainerEventType;
import org.apache.tez.dag.records.TezTaskAttemptID;
import org.apache.tez.dag.records.TezTaskID;
import org.apache.tez.dag.records.TezVertexID;
import org.apache.tez.runtime.api.events.TaskAttemptCompletedEvent;
import org.apache.tez.runtime.api.events.TaskStatusUpdateEvent;
import org.apache.tez.runtime.api.impl.EventMetaData;
import org.apache.tez.runtime.api.impl.TezEvent;
import org.apache.tez.runtime.api.impl.EventMetaData.EventProducerConsumerType;

import com.google.common.collect.Maps;

/**
 * A DAGAppMaster for tests: real AM wiring, but containers and tasks are
 * simulated by {@link MockContainerLauncher} instead of being launched on a
 * cluster, and shutdown is recorded by {@link MockDAGAppMasterShutdownHandler}
 * instead of being performed.
 */
@SuppressWarnings("unchecked")
public class MockDAGAppMaster extends DAGAppMaster {

  private static final Log LOG = LogFactory.getLog(MockDAGAppMaster.class);

  MockContainerLauncher containerLauncher;
  // When set, serviceInit()/serviceStart() throw to simulate AM init/start failure.
  boolean initFailFlag;
  boolean startFailFlag;

  // mock container launcher does not launch real tasks.
  // Upon launch of a container it simulates the container asking for tasks.
  // Upon receiving a task it simulates completion of the tasks.
  // It can be used to preempt the container for a given task.
  public class MockContainerLauncher extends AbstractService implements ContainerLauncher,
      Runnable {

    // NOTE(review): declared but never read in this class — handle() processes
    // events synchronously instead of queueing them here.
    BlockingQueue<NMCommunicatorEvent> eventQueue = new LinkedBlockingQueue<NMCommunicatorEvent>();
    // Thread running run(): the simulated container/task lifecycle loop.
    Thread eventHandlingThread;
    // Currently "launched" (simulated) containers, keyed by container id.
    Map<ContainerId, ContainerData> containers = Maps.newConcurrentMap();
    TaskAttemptListenerImpTezDag taListener;

    // Gate for the simulation loop; tests may pause scheduling via startScheduling(false).
    AtomicBoolean startScheduling = new AtomicBoolean(true);
    // Optional handshake flag: when non-null, run() blocks in waitToGo() until
    // the test thread notifies (see waitToGo()).
    AtomicBoolean goFlag;
    // When false, simulated status updates report 0 progress.
    boolean updateProgress = true;

    // Task id -> highest attempt version to preempt (attempts with id <= version are preempted).
    Map<TezTaskID, Integer> preemptedTasks = Maps.newConcurrentMap();

    // Task attempt id -> number of status updates to send before completing it.
    Map<TezTaskAttemptID, Integer> tasksWithStatusUpdates = Maps.newConcurrentMap();

    public MockContainerLauncher(AtomicBoolean goFlag) {
      super("MockContainerLauncher");
      this.goFlag = goFlag;
    }

    /** Per-container simulation state: the assigned task attempt and its progress. */
    public class ContainerData {
      ContainerId cId;
      TezTaskAttemptID taId;
      String vName;
      ContainerLaunchContext launchContext;
      int numUpdates = 0;
      boolean completed;

      public ContainerData(ContainerId cId, ContainerLaunchContext context) {
        this.cId = cId;
        this.launchContext = context;
      }

      // Resets the container to "idle" so it can pick up another task.
      void clear() {
        taId = null;
        vName = null;
        completed = false;
        launchContext = null;
      }
    }

    @Override
    public void serviceStart() throws Exception {
      taListener = (TaskAttemptListenerImpTezDag) getTaskAttemptListener();
      eventHandlingThread = new Thread(this);
      eventHandlingThread.start();
    }

    @Override
    public void serviceStop() throws Exception {
      if (eventHandlingThread != null) {
        eventHandlingThread.interrupt();
        // Bounded join so stop cannot hang on a stuck simulation loop.
        eventHandlingThread.join(2000l);
      }
    }

    // Handles launch/stop requests synchronously (no real NM communication).
    @Override
    public void handle(NMCommunicatorEvent event) {
      switch (event.getType()) {
      case CONTAINER_LAUNCH_REQUEST:
        launch((NMCommunicatorLaunchRequestEvent) event);
        break;
      case CONTAINER_STOP_REQUEST:
        stop((NMCommunicatorStopRequestEvent)event);
        break;
      }
    }

    // Handshake with the test: set goFlag and notify the waiting test thread,
    // then wait until the test notifies back before starting the simulation.
    void waitToGo() {
      if (goFlag == null) {
        return;
      }
      synchronized (goFlag) {
        goFlag.set(true);
        goFlag.notify();
        try {
          goFlag.wait();
        } catch (InterruptedException e) {
          throw new TezUncheckedException(e);
        }
      }
    }

    /** Pauses/resumes the simulation loop (test control knob). */
    public void startScheduling(boolean value) {
      startScheduling.set(value);
    }

    /** Enables/disables non-zero progress in simulated status updates. */
    public void updateProgress(boolean value) {
      this.updateProgress = value;
    }

    public Map<ContainerId, ContainerData> getContainers() {
      return containers;
    }

    /** Marks attempts of the task with id &lt;= uptoVersion for preemption. */
    public void preemptContainerForTask(TezTaskID tId, int uptoVersion) {
      preemptedTasks.put(tId, uptoVersion);
    }

    /** Simulates preemption: reports PREEMPTED to the scheduler and frees the container. */
    public void preemptContainer(ContainerData cData) {
      getTaskSchedulerEventHandler().containerCompleted(null,
          ContainerStatus.newInstance(cData.cId, null, "Preempted", ContainerExitStatus.PREEMPTED));
      cData.clear();
    }

    /** Requests numUpdates status updates for the attempt before it is completed. */
    public void setStatusUpdatesForTask(TezTaskAttemptID tId, int numUpdates) {
      tasksWithStatusUpdates.put(tId, numUpdates);
    }

    void stop(NMCommunicatorStopRequestEvent event) {
      // remove from simulated container list
      containers.remove(event.getContainerId());
      getContext().getEventHandler().handle(
          new AMContainerEvent(event.getContainerId(), AMContainerEventType.C_NM_STOP_SENT));
    }

    void launch(NMCommunicatorLaunchRequestEvent event) {
      // launch container by putting it in simulated container list
      containers.put(event.getContainerId(),
          new ContainerData(event.getContainerId(), event.getContainerLaunchContext()));
      getContext().getEventHandler().handle(new AMContainerEventLaunched(event.getContainerId()));
    }

    /** Busy-waits (50ms polls) until at least one simulated container is launched. */
    public void waitTillContainersLaunched() throws InterruptedException {
      while (containers.isEmpty()) {
        Thread.sleep(50);
      }
    }

    // Advances the AM clock when it is a MockClock (no-op otherwise).
    void incrementTime(long inc) {
      Clock clock = getContext().getClock();
      if (clock instanceof MockClock) {
        ((MockClock) clock).incrementTime(inc);
      }
    }

    /**
     * Simulation loop. For each simulated container: if idle, pull a task
     * from the task attempt listener; if running, either send a status
     * update, preempt it (when registered in preemptedTasks), or report the
     * attempt completed. Exits when interrupted during the 10ms sleep.
     */
    @Override
    public void run() {
      // wait for test to sync with us and get a reference to us. Go when sync is done
      waitToGo();
      while(true) {
        if (!startScheduling.get()) { // schedule when asked to do so by the test code
          continue;
        }
        incrementTime(1000);
        for (Map.Entry<ContainerId, ContainerData> entry : containers.entrySet()) {
          ContainerData cData = entry.getValue();
          ContainerId cId = entry.getKey();
          if (cData.taId == null) {
            // if container is not assigned a task, ask for a task
            try {
              ContainerTask cTask = taListener.getTask(new ContainerContext(cId.toString()));
              if (cTask == null) {
                continue;
              }
              if (cTask.shouldDie()) {
                containers.remove(cId);
              } else {
                cData.taId = cTask.getTaskSpec().getTaskAttemptID();
                cData.vName = cTask.getTaskSpec().getVertexName();
              }
            } catch (IOException e) {
              e.printStackTrace();
            }
          } else if (!cData.completed) {
            // container is assigned a task and task is not completed
            // complete the task or preempt the task
            Integer version = preemptedTasks.get(cData.taId.getTaskID());
            Integer updatesToMake = tasksWithStatusUpdates.get(cData.taId);
            if (cData.numUpdates == 0 || // do at least one update
                updatesToMake != null && cData.numUpdates < updatesToMake) {
              cData.numUpdates++;
              float maxUpdates = (updatesToMake != null) ? updatesToMake.intValue() : 1;
              float progress = updateProgress ? cData.numUpdates/maxUpdates : 0f;
              TezVertexID vertexId = cData.taId.getTaskID().getVertexID();
              getContext().getEventHandler().handle(
                  new VertexEventRouteEvent(vertexId, Collections.singletonList(new TezEvent(
                      new TaskStatusUpdateEvent(null, progress), new EventMetaData(
                          EventProducerConsumerType.SYSTEM, cData.vName, "", cData.taId)))));
            } else if (version != null && cData.taId.getId() <= version.intValue()) {
              preemptContainer(cData);
            } else {
              // send a done notification
              TezVertexID vertexId = cData.taId.getTaskID().getVertexID();
              cData.completed = true;
              getContext().getEventHandler().handle(
                  new VertexEventRouteEvent(vertexId, Collections.singletonList(new TezEvent(
                      new TaskAttemptCompletedEvent(), new EventMetaData(
                          EventProducerConsumerType.SYSTEM, cData.vName, "", cData.taId)))));
              cData.clear();
            }
          }
        }
        try {
          Thread.sleep(10);
        } catch (InterruptedException e) {
          System.out.println("Interrupted in mock container launcher thread");
          break;
        }
      }
    }
  }

  /**
   * Shutdown handler that only counts invocations instead of shutting the
   * AM down, so tests can assert whether shutdown was requested.
   */
  public class MockDAGAppMasterShutdownHandler extends DAGAppMasterShutdownHandler {
    public AtomicInteger shutdownInvoked = new AtomicInteger(0);
    public AtomicInteger shutdownInvokedWithoutDelay = new AtomicInteger(0);

    @Override
    public void shutdown() {
      shutdownInvokedWithoutDelay.incrementAndGet();
    }

    @Override
    public void shutdown(boolean now) {
      shutdownInvoked.incrementAndGet();
    }

    public boolean wasShutdownInvoked() {
      return shutdownInvoked.get() > 0 || shutdownInvokedWithoutDelay.get() > 0;
    }
  }

  /**
   * Creates the mock AM.
   *
   * @param launcherGoFlag handshake flag handed to the mock launcher (see
   *                       MockContainerLauncher#waitToGo); may be null
   * @param initFailFlag   when true, serviceInit throws to simulate init failure
   * @param startFailFlag  when true, serviceStart throws to simulate start failure
   */
  public MockDAGAppMaster(ApplicationAttemptId applicationAttemptId, ContainerId containerId,
      String nmHost, int nmPort, int nmHttpPort, Clock clock, long appSubmitTime,
      boolean isSession, String workingDirectory, String[] localDirs, String[] logDirs,
      AtomicBoolean launcherGoFlag, boolean initFailFlag, boolean startFailFlag) {
    super(applicationAttemptId, containerId, nmHost, nmPort, nmHttpPort, clock, appSubmitTime,
        isSession, workingDirectory, localDirs, logDirs, new TezApiVersionInfo().getVersion(), 1);
    containerLauncher = new MockContainerLauncher(launcherGoFlag);
    shutdownHandler = new MockDAGAppMasterShutdownHandler();
    this.initFailFlag = initFailFlag;
    this.startFailFlag = startFailFlag;
  }

  // use mock container launcher for tests
  @Override
  protected ContainerLauncher createContainerLauncher(final AppContext context)
      throws UnknownHostException {
    return containerLauncher;
  }

  public MockContainerLauncher getContainerLauncher() {
    return containerLauncher;
  }

  public MockDAGAppMasterShutdownHandler getShutdownHandler() {
    return (MockDAGAppMasterShutdownHandler) this.shutdownHandler;
  }

  @Override
  public synchronized void serviceInit(Configuration conf) throws Exception {
    super.serviceInit(conf);
    if (initFailFlag) {
      throw new Exception("FailInit");
    }
  }

  @Override
  public synchronized void serviceStart() throws Exception {
    super.serviceStart();
    if (startFailFlag) {
      throw new Exception("FailStart");
    }
  }
}
package com.communote.common.image;

import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;

import javax.imageio.IIOImage;
import javax.imageio.ImageIO;
import javax.imageio.ImageWriteParam;
import javax.imageio.ImageWriter;
import javax.imageio.stream.MemoryCacheImageOutputStream;

import org.apache.commons.imaging.ImageReadException;
import org.apache.commons.imaging.ImageWriteException;
import org.apache.commons.imaging.Imaging;
import org.apache.commons.imaging.common.ImageMetadata;
import org.apache.commons.imaging.formats.jpeg.exif.ExifRewriter;
import org.apache.commons.imaging.formats.tiff.TiffField;
import org.apache.commons.imaging.formats.tiff.TiffImageMetadata;
import org.apache.commons.imaging.formats.tiff.constants.TiffTagConstants;
import org.apache.commons.imaging.formats.tiff.write.TiffOutputDirectory;
import org.apache.commons.imaging.formats.tiff.write.TiffOutputSet;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.communote.common.io.IOHelper;
import com.communote.common.io.MimeTypeHelper;

/**
 * Utility class for image transformations.
 *
 * @author Communote GmbH - <a href="http://www.communote.com/">http://www.communote.com/</a>
 */
public class ImageHelper {

    /** Logger. */
    private static final Logger LOGGER = LoggerFactory.getLogger(ImageHelper.class);

    /**
     * The maximum of the image size is 5000 bytes
     */
    public final static int USER_IMAGE_MAX_SIZE = 5000;

    /**
     * Converts the byte array to an image. Tries commons-imaging first and
     * falls back to ImageIO if that fails.
     *
     * @param data
     *            Image as binary data.
     * @return Returns a BufferedImage object or null if the data could not be decoded.
     */
    public static BufferedImage byteToImage(byte[] data) {
        try {
            return Imaging.getBufferedImage(data);
        } catch (Exception e) {
            LOGGER.warn("Error reading image with Imaging: {}", e.getMessage());
            try {
                return ImageIO.read(new ByteArrayInputStream(data));
            } catch (IOException e1) {
                LOGGER.warn("Error reading image with ImageIO: {} ", e1.getMessage());
                LOGGER.debug(e1.getMessage(), e1);
            }
        }
        return null;
    }

    /**
     * This method will copy the Exif "orientation" information to the resulting image, if the
     * original image contains this data too.
     *
     * @param sourceImage
     *            The source image.
     * @param result
     *            The original result.
     * @return The new result containing the Exif orientation, or the unmodified result when the
     *         source has no orientation tag or copying failed.
     */
    public static byte[] copyExifOrientation(byte[] sourceImage, byte[] result) {
        try {
            ImageMetadata imageMetadata = Imaging.getMetadata(sourceImage);
            if (imageMetadata == null) {
                return result;
            }
            List<? extends ImageMetadata.ImageMetadataItem> metadataItems = imageMetadata
                    .getItems();
            for (ImageMetadata.ImageMetadataItem metadataItem : metadataItems) {
                if (metadataItem instanceof TiffImageMetadata.TiffMetadataItem) {
                    TiffField tiffField = ((TiffImageMetadata.TiffMetadataItem) metadataItem)
                            .getTiffField();
                    // only the orientation tag is of interest
                    if (!tiffField.getTagInfo().equals(TiffTagConstants.TIFF_TAG_ORIENTATION)) {
                        continue;
                    }
                    Object orientationValue = tiffField.getValue();
                    if (orientationValue == null) {
                        break;
                    }
                    // rewrite the result with an Exif set containing only the orientation
                    TiffOutputSet outputSet = new TiffOutputSet();
                    TiffOutputDirectory outputDirectory = outputSet.getOrCreateRootDirectory();
                    outputDirectory.add(TiffTagConstants.TIFF_TAG_ORIENTATION,
                            ((Number) orientationValue).shortValue());
                    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
                    new ExifRewriter().updateExifMetadataLossy(result, outputStream, outputSet);
                    return outputStream.toByteArray();
                }
            }
        } catch (IOException | ImageWriteException | ImageReadException e) {
            LOGGER.warn("Error reading image: {}", e.getMessage());
        }
        return result;
    }

    /**
     * Creates an image file and save it to the cache directory
     *
     * @param data
     *            Image binary data
     * @param filename
     *            Destination path of the given file
     * @return If the creation of image file was successful, then returns <code>true</code>, else
     *         <code>false</code>
     */
    public static boolean createImageFile(byte[] data, String filename) {
        FileOutputStream fos = null;
        try {
            File file = new File(filename);
            if (!file.exists()) {
                // Create the missing PARENT directories only. The previous code
                // called mkdirs() on the full file path, which created a
                // directory named like the target file and caused the
                // FileOutputStream below to fail.
                File parentDir = file.getParentFile();
                if (parentDir != null) {
                    parentDir.mkdirs();
                }
            }
            fos = new FileOutputStream(file);
            fos.write(data);
            return true;
        } catch (IOException e) {
            LOGGER.error("Error writing image to file: {} ", filename, e);
        } catch (RuntimeException e) {
            LOGGER.error("Error writing image to file: {}", filename, e);
        } finally {
            IOHelper.close(fos);
        }
        return false;
    }

    /**
     *
     * Method to get the best mime type for the given image.
     *
     * @param image
     *            The image to check.
     * @return The mime type of the image or null if the bytes are not a valid image.
     */
    public static String getMimeType(byte[] image) {
        String mimeType = MimeTypeHelper.getMimeType(image);
        // guard against an undetectable type; NOTE(review): assumes
        // MimeTypeHelper may return null for unknown data — confirm its contract
        if (mimeType != null && mimeType.startsWith("image")) {
            return mimeType;
        }
        return null;
    }

    /**
     * Return the given image file as byte array
     *
     * @param file
     *            Image file
     * @param format
     *            Image format
     * @return Byte array
     */
    public static byte[] imageFileToByteArray(File file, String format) {
        BufferedImage image = loadImage(file);
        return imageToByte(image, format);
    }

    /**
     * Converts an image to a byte array in the given image format. The image is not compressed.
     *
     * @param image
     *            BufferedImage object
     * @param format
     *            Format for the transformation
     * @return Returns the converted byte array
     */
    public static byte[] imageToByte(BufferedImage image, String format) {
        return imageToByte(image, format, null);
    }

    /**
     * Converts an image to a byte array in the given image format
     *
     * @param image
     *            BufferedImage object
     * @param format
     *            Format for the transformation
     * @param compressionQuality
     *            optional parameter to set a compression quality. The value is expected to be
     *            between 0 and 1 where 0 is interpreted as high compression and 1 as high quality.
     *            When passing null the image won't be compressed.
     * @return Returns the converted byte array, or null when writing failed
     * @throws IllegalArgumentException
     *             when image or format is null
     */
    public static byte[] imageToByte(BufferedImage image, String format, Float compressionQuality) {
        if (image == null) {
            throw new IllegalArgumentException("image cannnot be null");
        }
        if (format == null) {
            throw new IllegalArgumentException("format cannnot be null");
        }
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        try {
            if (compressionQuality != null) {
                ImageWriter imageWriter = ImageIO.getImageWritersByFormatName(format).next();
                ImageWriteParam imageWriteParam = imageWriter.getDefaultWriteParam();
                imageWriteParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
                imageWriteParam.setCompressionQuality(compressionQuality);
                MemoryCacheImageOutputStream imageOutput = new MemoryCacheImageOutputStream(stream);
                imageWriter.setOutput(imageOutput);
                imageWriter.write(null, new IIOImage(image, null, null), imageWriteParam);
                imageWriter.dispose();
                // close() flushes the image output stream; without it the final
                // buffered bytes could be missing from the byte array read below
                imageOutput.close();
            } else {
                ImageIO.write(image, format, stream);
            }
        } catch (IOException e) {
            // keep the historic best-effort contract: signal failure with null
            return null;
        }
        byte[] bytesOut = stream.toByteArray();
        try {
            stream.close();
        } catch (IOException e) {
            LOGGER.warn(e.getMessage());
        }
        return bytesOut;
    }

    /**
     * Loads an image from a file.
     *
     * @param file
     *            File
     * @return BufferedImage object, or null when the file could not be read
     */
    public static BufferedImage loadImage(File file) {
        BufferedImage image = null;
        try {
            image = ImageIO.read(file);
        } catch (IOException e) {
            LOGGER.error("Can't read image file '{}'", file.getAbsolutePath(), e);
        }
        return image;
    }

    /**
     * Private constructor to prevent instantiation of this utility class.
     */
    private ImageHelper() {
    }
}