ExiledNarwal28/glo-4003-spamdul
src/test/java/ca/ulaval/glo4003/reports/services/assemblers/ReportMetricDataAssemblerTest.java
package ca.ulaval.glo4003.reports.services.assemblers;

import static com.google.common.truth.Truth.assertThat;
import static org.mockito.Mockito.when;

import ca.ulaval.glo4003.reports.domain.metrics.ReportMetricData;
import ca.ulaval.glo4003.reports.domain.metrics.ReportMetricType;
import ca.ulaval.glo4003.reports.services.dto.ReportMetricDataDto;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;

@RunWith(MockitoJUnitRunner.class)
public class ReportMetricDataAssemblerTest {

  @Mock private ReportMetricData metric;
  @Mock private ReportMetricData otherMetric;

  private ReportMetricDataAssembler reportMetricDataAssembler;

  private final ReportMetricType metricType = ReportMetricType.PROFITS;
  private final ReportMetricType otherMetricType = ReportMetricType.GATE_ENTRIES;
  private final double metricValue = 1.0;
  private final double otherMetricValue = 2.0;
  private List<ReportMetricData> singleMetric;
  private List<ReportMetricData> multipleMetrics;

  @Before
  public void setUp() {
    reportMetricDataAssembler = new ReportMetricDataAssembler();

    when(metric.getType()).thenReturn(metricType);
    when(metric.getValue()).thenReturn(metricValue);
    when(otherMetric.getType()).thenReturn(otherMetricType);
    when(otherMetric.getValue()).thenReturn(otherMetricValue);

    singleMetric = Collections.singletonList(metric);
    multipleMetrics = Arrays.asList(metric, otherMetric);
  }

  @Test
  public void givenSingleMetric_whenAssemblingMany_thenAssembleASingleMetric() {
    List<ReportMetricDataDto> dtos = reportMetricDataAssembler.assembleMany(singleMetric);

    assertThat(dtos).hasSize(1);
  }

  @Test
  public void givenMultipleMetrics_whenAssemblingMany_thenAssembleMultipleMetrics() {
    List<ReportMetricDataDto> dtos = reportMetricDataAssembler.assembleMany(multipleMetrics);

    assertThat(dtos).hasSize(2);
  }

  @Test
  public void givenSingleMetric_whenAssemblingMany_thenSetName() {
    List<ReportMetricDataDto> dtos = reportMetricDataAssembler.assembleMany(singleMetric);

    assertThat(dtos.get(0).name).isEqualTo(metricType.toString());
  }

  @Test
  public void givenMultipleMetrics_whenAssemblingMany_thenSetNames() {
    List<ReportMetricDataDto> dtos = reportMetricDataAssembler.assembleMany(multipleMetrics);

    assertThat(dtos.get(0).name).isEqualTo(metricType.toString());
    assertThat(dtos.get(1).name).isEqualTo(otherMetricType.toString());
  }

  @Test
  public void givenSingleMetric_whenAssemblingMany_thenSetValue() {
    List<ReportMetricDataDto> dtos = reportMetricDataAssembler.assembleMany(singleMetric);

    assertThat(dtos.get(0).value).isEqualTo(metricValue);
  }

  @Test
  public void givenMultipleMetrics_whenAssemblingMany_thenSetValues() {
    List<ReportMetricDataDto> dtos = reportMetricDataAssembler.assembleMany(multipleMetrics);

    assertThat(dtos.get(0).value).isEqualTo(metricValue);
    assertThat(dtos.get(1).value).isEqualTo(otherMetricValue);
  }
}
retverd/python_hse
Week 5 Sorting, sets and dics/Task13.py
# A bank wants to introduce a customer account management system that supports the following operations:
#
# Depositing money into a client's account. Withdrawing money from an account. Querying the balance of an account.
# Transferring money between clients' accounts. Crediting interest to all clients.
#
# You need to implement such a system. Bank clients are identified by names (a unique string containing no spaces).
# Initially the bank has no clients. As soon as a deposit, withdrawal or transfer operation is performed for a client,
# an account with a zero balance is opened for them. All further operations are performed on that account only.
# The account balance can be positive or negative and is always an integer.
#
# The input contains the number of operations followed by the operations themselves. The following operations are
# possible: DEPOSIT name sum - credit the amount sum to the account of client name. If the client has no account,
# one is created. WITHDRAW name sum - withdraw the amount sum from the account of client name. If the client has no
# account, one is created. BALANCE name - report the balance of client name's account. TRANSFER name1 name2 sum -
# transfer the amount sum from the account of client name1 to the account of client name2. If either client has no
# account, one is created for them. INCOME p - credit p% of the account balance to every client with an open account.
# Interest is credited only to clients with a positive balance; if a client's balance is negative, the account is
# left unchanged. After interest is credited the balance remains an integer, i.e. only a whole number of monetary
# units is credited; the fractional part of the interest is discarded.
#
# For each BALANCE query the program must print the balance of that client's account. If no account is open for the
# client with the requested name, print ERROR.


def balance(details):
    global clients
    if details in clients:
        print(clients[details])
    else:
        print("ERROR")
    return


def deposit(details):
    global clients
    my_args = list(details.split())
    if my_args[0] not in clients:
        clients[my_args[0]] = 0
    clients[my_args[0]] += int(my_args[1])
    return


def withdraw(details):
    global clients
    my_args = list(details.split())
    if my_args[0] not in clients:
        clients[my_args[0]] = 0
    clients[my_args[0]] -= int(my_args[1])
    return


def transfer(details):
    global clients
    my_args = list(details.split())
    withdraw(my_args[0] + " " + my_args[2])
    deposit(my_args[1] + " " + my_args[2])
    return


def income(details):
    global clients
    perc = int(details)
    for client in clients:
        if clients[client] > 0:
            clients[client] += clients[client] * perc // 100
    return


total = int(input())
acc_act = {"deposit", "withdraw", "balance", "transfer", "income"}
clients = {}

for i in range(total):
    command = input()
    action = command[:command.find(" ")].lower()
    args = command[command.find(" ") + 1:]
    if action in acc_act:
        locals()[action](args)
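As a rough illustration of the task above (not part of the original file), the following self-contained Python sketch applies the same rules to a few made-up commands and shows what the BALANCE queries would print; the client names and amounts are invented for the example.

# Illustrative sketch only: assumed example data, independent of the stdin-driven script above.
clients = {}

def apply_command(command):
    op, *args = command.split()
    if op in ("DEPOSIT", "WITHDRAW"):
        name, amount = args[0], int(args[1])
        clients.setdefault(name, 0)
        clients[name] += amount if op == "DEPOSIT" else -amount
    elif op == "TRANSFER":
        source, target, amount = args
        apply_command("WITHDRAW " + source + " " + amount)
        apply_command("DEPOSIT " + target + " " + amount)
    elif op == "INCOME":
        percent = int(args[0])
        for name, bal in clients.items():
            if bal > 0:  # only positive balances earn interest; integer division drops the fraction
                clients[name] += bal * percent // 100
    elif op == "BALANCE":
        print(clients.get(args[0], "ERROR"))

for line in ("DEPOSIT alice 100", "WITHDRAW bob 50", "TRANSFER alice bob 30",
             "INCOME 10", "BALANCE alice", "BALANCE bob", "BALANCE carol"):
    apply_command(line)
# Expected output: 77, -20, ERROR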
zzhmark/vaa3d_tools
released_plugins/v3d_plugins/bigneuron_hide_ikeno_SIGEN/doc/html/search/files_6.js
var searchData=
[
  ['main_2ecpp',['main.cpp',['../main_8cpp.html',1,'']]],
  ['math_2eh',['math.h',['../math_8h.html',1,'']]]
];
peterschrammel/jayhorn
soottocfg/src/main/java/soottocfg/cfg/method/CfgEdge.java
/**
 *
 */
package soottocfg.cfg.method;

import org.jgrapht.graph.DefaultEdge;

import com.google.common.base.Optional;

import soottocfg.cfg.expression.Expression;

/**
 * @author schaef
 *
 */
public class CfgEdge extends DefaultEdge {

    /**
     *
     */
    private static final long serialVersionUID = 5682469643400808759L;

    private Optional<Expression> label;

    /**
     *
     */
    public CfgEdge() {
        label = Optional.absent();
    }

    public void setLabel(Expression l) {
        label = Optional.of(l);
    }

    public void removeLabel() {
        label = Optional.absent();
    }

    public Optional<Expression> getLabel() {
        return label;
    }
}
odinshen/ComputeLibrary_17.9_BareMetal
documentation/dir_463ebbf2abb1a8cd3e39ae1d0da97cb5.js
var dir_463ebbf2abb1a8cd3e39ae1d0da97cb5 = [ [ "ActivationLayerFixture.h", "benchmark_2fixtures_2_activation_layer_fixture_8h.xhtml", [ [ "ActivationLayerFixture", "classarm__compute_1_1test_1_1_activation_layer_fixture.xhtml", "classarm__compute_1_1test_1_1_activation_layer_fixture" ] ] ], [ "AlexNetFixture.h", "_alex_net_fixture_8h.xhtml", [ [ "AlexNetFixture", "classarm__compute_1_1test_1_1_alex_net_fixture.xhtml", "classarm__compute_1_1test_1_1_alex_net_fixture" ] ] ], [ "BatchNormalizationLayerFixture.h", "benchmark_2fixtures_2_batch_normalization_layer_fixture_8h.xhtml", [ [ "BatchNormalizationLayerFixture", "classarm__compute_1_1test_1_1_batch_normalization_layer_fixture.xhtml", "classarm__compute_1_1test_1_1_batch_normalization_layer_fixture" ] ] ], [ "ConvolutionLayerFixture.h", "benchmark_2fixtures_2_convolution_layer_fixture_8h.xhtml", [ [ "ConvolutionLayerFixture", "classarm__compute_1_1test_1_1_convolution_layer_fixture.xhtml", "classarm__compute_1_1test_1_1_convolution_layer_fixture" ] ] ], [ "DepthwiseConvolutionFixture.h", "benchmark_2fixtures_2_depthwise_convolution_fixture_8h.xhtml", [ [ "DepthwiseConvolutionFixture", "classarm__compute_1_1test_1_1_depthwise_convolution_fixture.xhtml", "classarm__compute_1_1test_1_1_depthwise_convolution_fixture" ] ] ], [ "DepthwiseSeparableConvolutionLayerFixture.h", "benchmark_2fixtures_2_depthwise_separable_convolution_layer_fixture_8h.xhtml", [ [ "DepthwiseSeparableConvolutionLayerFixture", "classarm__compute_1_1test_1_1_depthwise_separable_convolution_layer_fixture.xhtml", "classarm__compute_1_1test_1_1_depthwise_separable_convolution_layer_fixture" ] ] ], [ "FloorFixture.h", "benchmark_2fixtures_2_floor_fixture_8h.xhtml", [ [ "FloorFixture", "classarm__compute_1_1test_1_1_floor_fixture.xhtml", "classarm__compute_1_1test_1_1_floor_fixture" ] ] ], [ "FullyConnectedLayerFixture.h", "benchmark_2fixtures_2_fully_connected_layer_fixture_8h.xhtml", [ [ "FullyConnectedLayerFixture", "classarm__compute_1_1test_1_1_fully_connected_layer_fixture.xhtml", "classarm__compute_1_1test_1_1_fully_connected_layer_fixture" ] ] ], [ "GEMMFixture.h", "benchmark_2fixtures_2_g_e_m_m_fixture_8h.xhtml", [ [ "GEMMFixture", "classarm__compute_1_1test_1_1_g_e_m_m_fixture.xhtml", "classarm__compute_1_1test_1_1_g_e_m_m_fixture" ] ] ], [ "LeNet5Fixture.h", "_le_net5_fixture_8h.xhtml", [ [ "LeNet5Fixture", "classarm__compute_1_1test_1_1_le_net5_fixture.xhtml", "classarm__compute_1_1test_1_1_le_net5_fixture" ] ] ], [ "NormalizationLayerFixture.h", "benchmark_2fixtures_2_normalization_layer_fixture_8h.xhtml", [ [ "NormalizationLayerFixture", "classarm__compute_1_1test_1_1_normalization_layer_fixture.xhtml", "classarm__compute_1_1test_1_1_normalization_layer_fixture" ] ] ], [ "PoolingLayerFixture.h", "benchmark_2fixtures_2_pooling_layer_fixture_8h.xhtml", [ [ "PoolingLayerFixture", "classarm__compute_1_1test_1_1_pooling_layer_fixture.xhtml", "classarm__compute_1_1test_1_1_pooling_layer_fixture" ] ] ], [ "ROIPoolingLayerFixture.h", "_r_o_i_pooling_layer_fixture_8h.xhtml", [ [ "ROIPoolingLayerFixture", "classarm__compute_1_1test_1_1_r_o_i_pooling_layer_fixture.xhtml", "classarm__compute_1_1test_1_1_r_o_i_pooling_layer_fixture" ] ] ] ];
DolgopolovaMaria/Software-Engineering-SPBU
semester 1/disjointSetUnion/disjointSetUnion/sets.h
#pragma once

typedef struct SETS {
    int *parents;
    int numberOfElements;
} Sets;

Sets *createUnionSets(int numberOfElements);

void deleteSets(Sets *sets);

int makeSet(Sets *sets, int x);

int findSet(Sets *sets, int x);

int unite(Sets *set, int x, int y);
jbox-web/action_form
spec/support/user_form_fixture.rb
class UserFormFixture < ActionForm::Base
  self.main_model = :user

  attributes :name, :age, :gender, required: true

  validates :name, length: { in: 6..20 }
  validates :age, numericality: { only_integer: true }
end
Jzice/spdk
lib/event/reactor.c
/*- * BSD LICENSE * * Copyright (c) Intel Corporation. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * Neither the name of Intel Corporation nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include "spdk/stdinc.h" #include "spdk/likely.h" #include "spdk_internal/event.h" #include "spdk/log.h" #include "spdk/thread.h" #include "spdk/env.h" #include "spdk/util.h" #include "spdk/string.h" #include "spdk/fd_group.h" #ifdef __linux__ #include <sys/prctl.h> #include <sys/eventfd.h> #endif #ifdef __FreeBSD__ #include <pthread_np.h> #endif #define SPDK_EVENT_BATCH_SIZE 8 static struct spdk_reactor *g_reactors; static struct spdk_cpuset g_reactor_core_mask; static enum spdk_reactor_state g_reactor_state = SPDK_REACTOR_STATE_UNINITIALIZED; static bool g_framework_context_switch_monitor_enabled = true; static struct spdk_mempool *g_spdk_event_mempool = NULL; TAILQ_HEAD(, spdk_scheduler) g_scheduler_list = TAILQ_HEAD_INITIALIZER(g_scheduler_list); static struct spdk_scheduler *g_scheduler; static struct spdk_scheduler *g_new_scheduler; static struct spdk_reactor *g_scheduling_reactor; static uint32_t g_scheduler_period; static struct spdk_scheduler_core_info *g_core_infos = NULL; TAILQ_HEAD(, spdk_governor) g_governor_list = TAILQ_HEAD_INITIALIZER(g_governor_list); static int _governor_get_capabilities(uint32_t lcore_id, struct spdk_governor_capabilities *capabilities); static struct spdk_governor g_governor = { .name = "default", .get_core_capabilities = _governor_get_capabilities, }; static int reactor_interrupt_init(struct spdk_reactor *reactor); static void reactor_interrupt_fini(struct spdk_reactor *reactor); static struct spdk_scheduler * _scheduler_find(char *name) { struct spdk_scheduler *tmp; TAILQ_FOREACH(tmp, &g_scheduler_list, link) { if (strcmp(name, tmp->name) == 0) { return tmp; } } return NULL; } int _spdk_scheduler_set(char *name) { struct spdk_scheduler *scheduler; scheduler = _scheduler_find(name); if (scheduler == NULL) { SPDK_ERRLOG("Requested scheduler is missing\n"); return -ENOENT; } if (g_reactors == NULL || g_scheduling_reactor == NULL) { g_new_scheduler = scheduler; g_scheduler = scheduler; return 0; } if 
(g_scheduling_reactor->flags.is_scheduling) { g_new_scheduler = scheduler; } else { if (g_scheduler->deinit != NULL) { g_scheduler->deinit(&g_governor); } g_new_scheduler = scheduler; g_scheduler = scheduler; } if (scheduler->init != NULL) { scheduler->init(&g_governor); } return 0; } void _spdk_scheduler_period_set(uint32_t period) { g_scheduler_period = period; } void _spdk_scheduler_list_add(struct spdk_scheduler *scheduler) { if (_scheduler_find(scheduler->name)) { SPDK_ERRLOG("scheduler named '%s' already registered.\n", scheduler->name); assert(false); return; } TAILQ_INSERT_TAIL(&g_scheduler_list, scheduler, link); } static void reactor_construct(struct spdk_reactor *reactor, uint32_t lcore) { reactor->lcore = lcore; reactor->flags.is_valid = true; TAILQ_INIT(&reactor->threads); reactor->thread_count = 0; reactor->events = spdk_ring_create(SPDK_RING_TYPE_MP_SC, 65536, SPDK_ENV_SOCKET_ID_ANY); if (reactor->events == NULL) { SPDK_ERRLOG("Failed to allocate events ring\n"); assert(false); } if (spdk_interrupt_mode_is_enabled()) { reactor_interrupt_init(reactor); } } struct spdk_reactor * spdk_reactor_get(uint32_t lcore) { struct spdk_reactor *reactor; if (g_reactors == NULL) { SPDK_WARNLOG("Called spdk_reactor_get() while the g_reactors array was NULL!\n"); return NULL; } reactor = &g_reactors[lcore]; if (reactor->flags.is_valid == false) { return NULL; } return reactor; } static int reactor_thread_op(struct spdk_thread *thread, enum spdk_thread_op op); static bool reactor_thread_op_supported(enum spdk_thread_op op); int spdk_reactors_init(void) { int rc; uint32_t i, last_core; char mempool_name[32]; rc = _spdk_scheduler_set("static"); if (rc != 0) { SPDK_ERRLOG("Failed setting up scheduler\n"); return rc; } snprintf(mempool_name, sizeof(mempool_name), "evtpool_%d", getpid()); g_spdk_event_mempool = spdk_mempool_create(mempool_name, 262144 - 1, /* Power of 2 minus 1 is optimal for memory consumption */ sizeof(struct spdk_event), SPDK_MEMPOOL_DEFAULT_CACHE_SIZE, SPDK_ENV_SOCKET_ID_ANY); if (g_spdk_event_mempool == NULL) { SPDK_ERRLOG("spdk_event_mempool creation failed\n"); return -1; } /* struct spdk_reactor must be aligned on 64 byte boundary */ last_core = spdk_env_get_last_core(); rc = posix_memalign((void **)&g_reactors, 64, (last_core + 1) * sizeof(struct spdk_reactor)); if (rc != 0) { SPDK_ERRLOG("Could not allocate array size=%u for g_reactors\n", last_core + 1); spdk_mempool_free(g_spdk_event_mempool); return -1; } g_core_infos = calloc(last_core + 1, sizeof(*g_core_infos)); if (g_core_infos == NULL) { SPDK_ERRLOG("Could not allocate memory for g_core_infos\n"); spdk_mempool_free(g_spdk_event_mempool); free(g_reactors); return -ENOMEM; } memset(g_reactors, 0, (last_core + 1) * sizeof(struct spdk_reactor)); spdk_thread_lib_init_ext(reactor_thread_op, reactor_thread_op_supported, sizeof(struct spdk_lw_thread)); SPDK_ENV_FOREACH_CORE(i) { reactor_construct(&g_reactors[i], i); } g_reactor_state = SPDK_REACTOR_STATE_INITIALIZED; return 0; } void spdk_reactors_fini(void) { uint32_t i; struct spdk_reactor *reactor; if (g_reactor_state == SPDK_REACTOR_STATE_UNINITIALIZED) { return; } if (g_scheduler->deinit != NULL) { g_scheduler->deinit(&g_governor); } spdk_thread_lib_fini(); SPDK_ENV_FOREACH_CORE(i) { reactor = spdk_reactor_get(i); assert(reactor != NULL); assert(reactor->thread_count == 0); if (reactor->events != NULL) { spdk_ring_free(reactor->events); } if (reactor->interrupt_mode) { reactor_interrupt_fini(reactor); } if (g_core_infos != NULL) { free(g_core_infos[i].threads); } } 
spdk_mempool_free(g_spdk_event_mempool); free(g_reactors); g_reactors = NULL; free(g_core_infos); g_core_infos = NULL; } struct spdk_event * spdk_event_allocate(uint32_t lcore, spdk_event_fn fn, void *arg1, void *arg2) { struct spdk_event *event = NULL; struct spdk_reactor *reactor = spdk_reactor_get(lcore); if (!reactor) { assert(false); return NULL; } event = spdk_mempool_get(g_spdk_event_mempool); if (event == NULL) { assert(false); return NULL; } event->lcore = lcore; event->fn = fn; event->arg1 = arg1; event->arg2 = arg2; return event; } void spdk_event_call(struct spdk_event *event) { int rc; struct spdk_reactor *reactor; reactor = spdk_reactor_get(event->lcore); assert(reactor != NULL); assert(reactor->events != NULL); rc = spdk_ring_enqueue(reactor->events, (void **)&event, 1, NULL); if (rc != 1) { assert(false); } if (reactor->interrupt_mode) { uint64_t notify = 1; rc = write(reactor->events_fd, &notify, sizeof(notify)); if (rc < 0) { SPDK_ERRLOG("failed to notify event queue: %s.\n", spdk_strerror(errno)); } } } static inline uint32_t event_queue_run_batch(struct spdk_reactor *reactor) { unsigned count, i; void *events[SPDK_EVENT_BATCH_SIZE]; struct spdk_thread *thread; struct spdk_lw_thread *lw_thread; #ifdef DEBUG /* * spdk_ring_dequeue() fills events and returns how many entries it wrote, * so we will never actually read uninitialized data from events, but just to be sure * (and to silence a static analyzer false positive), initialize the array to NULL pointers. */ memset(events, 0, sizeof(events)); #endif if (reactor->interrupt_mode) { uint64_t notify = 1; int rc; /* There may be race between event_acknowledge and another producer's event_notify, * so event_acknowledge should be applied ahead. And then check for self's event_notify. * This can avoid event notification missing. */ rc = read(reactor->events_fd, &notify, sizeof(notify)); if (rc < 0) { SPDK_ERRLOG("failed to acknowledge event queue: %s.\n", spdk_strerror(errno)); return -errno; } count = spdk_ring_dequeue(reactor->events, events, SPDK_EVENT_BATCH_SIZE); if (spdk_ring_count(reactor->events) != 0) { /* Trigger new notification if there are still events in event-queue waiting for processing. */ rc = write(reactor->events_fd, &notify, sizeof(notify)); if (rc < 0) { SPDK_ERRLOG("failed to notify event queue: %s.\n", spdk_strerror(errno)); return -errno; } } } else { count = spdk_ring_dequeue(reactor->events, events, SPDK_EVENT_BATCH_SIZE); } if (count == 0) { return 0; } /* Execute the events. There are still some remaining events * that must occur on an SPDK thread. To accomodate those, try to * run them on the first thread in the list, if it exists. 
*/ lw_thread = TAILQ_FIRST(&reactor->threads); if (lw_thread) { thread = spdk_thread_get_from_ctx(lw_thread); } else { thread = NULL; } spdk_set_thread(thread); for (i = 0; i < count; i++) { struct spdk_event *event = events[i]; assert(event != NULL); event->fn(event->arg1, event->arg2); } spdk_set_thread(NULL); spdk_mempool_put_bulk(g_spdk_event_mempool, events, count); return count; } /* 1s */ #define CONTEXT_SWITCH_MONITOR_PERIOD 1000000 static int get_rusage(struct spdk_reactor *reactor) { struct rusage rusage; if (getrusage(RUSAGE_THREAD, &rusage) != 0) { return -1; } if (rusage.ru_nvcsw != reactor->rusage.ru_nvcsw || rusage.ru_nivcsw != reactor->rusage.ru_nivcsw) { SPDK_INFOLOG(reactor, "Reactor %d: %ld voluntary context switches and %ld involuntary context switches in the last second.\n", reactor->lcore, rusage.ru_nvcsw - reactor->rusage.ru_nvcsw, rusage.ru_nivcsw - reactor->rusage.ru_nivcsw); } reactor->rusage = rusage; return -1; } void spdk_framework_enable_context_switch_monitor(bool enable) { /* This global is being read by multiple threads, so this isn't * strictly thread safe. However, we're toggling between true and * false here, and if a thread sees the value update later than it * should, it's no big deal. */ g_framework_context_switch_monitor_enabled = enable; } bool spdk_framework_context_switch_monitor_enabled(void) { return g_framework_context_switch_monitor_enabled; } static void _set_thread_name(const char *thread_name) { #if defined(__linux__) prctl(PR_SET_NAME, thread_name, 0, 0, 0); #elif defined(__FreeBSD__) pthread_set_name_np(pthread_self(), thread_name); #else #error missing platform support for thread name #endif } static void _init_thread_stats(struct spdk_reactor *reactor, struct spdk_lw_thread *lw_thread) { struct spdk_thread *thread = spdk_thread_get_from_ctx(lw_thread); lw_thread->lcore = reactor->lcore; spdk_set_thread(thread); spdk_thread_get_stats(&lw_thread->current_stats); } static void _threads_reschedule(struct spdk_scheduler_core_info *cores_info) { struct spdk_scheduler_core_info *core; struct spdk_lw_thread *lw_thread; uint32_t i, j; SPDK_ENV_FOREACH_CORE(i) { core = &cores_info[i]; for (j = 0; j < core->threads_count; j++) { lw_thread = core->threads[j]; if (lw_thread->lcore != lw_thread->new_lcore) { _spdk_lw_thread_set_core(lw_thread, lw_thread->new_lcore); } } } } static void _reactors_scheduler_fini(void *arg1, void *arg2) { struct spdk_reactor *reactor; uint32_t last_core; uint32_t i; if (g_reactor_state == SPDK_REACTOR_STATE_RUNNING) { last_core = spdk_env_get_last_core(); g_scheduler->balance(g_core_infos, last_core + 1, &g_governor); /* Reschedule based on the balancing output */ _threads_reschedule(g_core_infos); SPDK_ENV_FOREACH_CORE(i) { reactor = spdk_reactor_get(i); assert(reactor != NULL); reactor->flags.is_scheduling = false; } } } static void _reactors_scheduler_cancel(void *arg1, void *arg2) { struct spdk_reactor *reactor; uint32_t i; SPDK_ENV_FOREACH_CORE(i) { reactor = spdk_reactor_get(i); assert(reactor != NULL); reactor->flags.is_scheduling = false; } } /* Phase 1 of thread scheduling is to gather metrics on the existing threads */ static void _reactors_scheduler_gather_metrics(void *arg1, void *arg2) { struct spdk_scheduler_core_info *core_info; struct spdk_lw_thread *lw_thread; struct spdk_reactor *reactor; struct spdk_event *evt; uint32_t next_core; uint32_t i; reactor = spdk_reactor_get(spdk_env_get_current_core()); assert(reactor != NULL); reactor->flags.is_scheduling = true; core_info = &g_core_infos[reactor->lcore]; 
core_info->lcore = reactor->lcore; core_info->core_idle_tsc = reactor->idle_tsc; core_info->core_busy_tsc = reactor->busy_tsc; SPDK_DEBUGLOG(reactor, "Gathering metrics on %u\n", reactor->lcore); free(core_info->threads); core_info->threads = NULL; i = 0; TAILQ_FOREACH(lw_thread, &reactor->threads, link) { _init_thread_stats(reactor, lw_thread); i++; } core_info->threads_count = i; if (core_info->threads_count > 0) { core_info->threads = calloc(core_info->threads_count, sizeof(struct spdk_lw_thread *)); if (core_info->threads == NULL) { SPDK_ERRLOG("Failed to allocate memory when gathering metrics on %u\n", reactor->lcore); /* Cancel this round of schedule work */ evt = spdk_event_allocate(g_scheduling_reactor->lcore, _reactors_scheduler_cancel, NULL, NULL); spdk_event_call(evt); return; } i = 0; TAILQ_FOREACH(lw_thread, &reactor->threads, link) { core_info->threads[i] = lw_thread; i++; } } next_core = spdk_env_get_next_core(reactor->lcore); if (next_core == UINT32_MAX) { next_core = spdk_env_get_first_core(); } /* If we've looped back around to the scheduler thread, move to the next phase */ if (next_core == g_scheduling_reactor->lcore) { /* Phase 2 of scheduling is rebalancing - deciding which threads to move where */ evt = spdk_event_allocate(next_core, _reactors_scheduler_fini, NULL, NULL); spdk_event_call(evt); return; } evt = spdk_event_allocate(next_core, _reactors_scheduler_gather_metrics, NULL, NULL); spdk_event_call(evt); } static int _reactor_schedule_thread(struct spdk_thread *thread); static uint64_t g_rusage_period; static bool reactor_post_process_lw_thread(struct spdk_reactor *reactor, struct spdk_lw_thread *lw_thread) { struct spdk_thread *thread = spdk_thread_get_from_ctx(lw_thread); int efd; if (spdk_unlikely(lw_thread->resched)) { lw_thread->resched = false; TAILQ_REMOVE(&reactor->threads, lw_thread, link); assert(reactor->thread_count > 0); reactor->thread_count--; if (reactor->interrupt_mode) { efd = spdk_thread_get_interrupt_fd(thread); spdk_fd_group_remove(reactor->fgrp, efd); } _reactor_schedule_thread(thread); return true; } if (spdk_unlikely(spdk_thread_is_exited(thread) && spdk_thread_is_idle(thread))) { if (reactor->flags.is_scheduling == false) { TAILQ_REMOVE(&reactor->threads, lw_thread, link); assert(reactor->thread_count > 0); reactor->thread_count--; if (reactor->interrupt_mode) { efd = spdk_thread_get_interrupt_fd(thread); spdk_fd_group_remove(reactor->fgrp, efd); } spdk_thread_destroy(thread); return true; } } return false; } static void reactor_interrupt_run(struct spdk_reactor *reactor) { int block_timeout = -1; /* _EPOLL_WAIT_FOREVER */ spdk_fd_group_wait(reactor->fgrp, block_timeout); /* TODO: add tsc records and g_framework_context_switch_monitor_enabled */ } static void _reactor_run(struct spdk_reactor *reactor) { struct spdk_thread *thread; struct spdk_lw_thread *lw_thread, *tmp; uint64_t now; int rc; event_queue_run_batch(reactor); TAILQ_FOREACH_SAFE(lw_thread, &reactor->threads, link, tmp) { thread = spdk_thread_get_from_ctx(lw_thread); rc = spdk_thread_poll(thread, 0, reactor->tsc_last); now = spdk_thread_get_last_tsc(thread); if (rc == 0) { reactor->idle_tsc += now - reactor->tsc_last; } else if (rc > 0) { reactor->busy_tsc += now - reactor->tsc_last; } reactor->tsc_last = now; reactor_post_process_lw_thread(reactor, lw_thread); } if (g_framework_context_switch_monitor_enabled) { if ((reactor->last_rusage + g_rusage_period) < reactor->tsc_last) { get_rusage(reactor); reactor->last_rusage = reactor->tsc_last; } } } static int reactor_run(void 
*arg) { struct spdk_reactor *reactor = arg; struct spdk_thread *thread; struct spdk_lw_thread *lw_thread, *tmp; char thread_name[32]; uint64_t last_sched = 0; SPDK_NOTICELOG("Reactor started on core %u\n", reactor->lcore); /* Rename the POSIX thread because the reactor is tied to the POSIX * thread in the SPDK event library. */ snprintf(thread_name, sizeof(thread_name), "reactor_%u", reactor->lcore); _set_thread_name(thread_name); reactor->tsc_last = spdk_get_ticks(); while (1) { if (spdk_unlikely(reactor->interrupt_mode)) { reactor_interrupt_run(reactor); } else { _reactor_run(reactor); } if (spdk_unlikely((reactor->tsc_last - last_sched) > g_scheduler_period && reactor == g_scheduling_reactor && !reactor->flags.is_scheduling)) { if (spdk_unlikely(g_scheduler != g_new_scheduler)) { if (g_scheduler->deinit != NULL) { g_scheduler->deinit(&g_governor); } g_scheduler = g_new_scheduler; } if (spdk_unlikely(g_scheduler->balance != NULL)) { last_sched = reactor->tsc_last; _reactors_scheduler_gather_metrics(NULL, NULL); } } if (g_reactor_state != SPDK_REACTOR_STATE_RUNNING) { break; } } TAILQ_FOREACH(lw_thread, &reactor->threads, link) { thread = spdk_thread_get_from_ctx(lw_thread); spdk_set_thread(thread); spdk_thread_exit(thread); } while (!TAILQ_EMPTY(&reactor->threads)) { TAILQ_FOREACH_SAFE(lw_thread, &reactor->threads, link, tmp) { thread = spdk_thread_get_from_ctx(lw_thread); spdk_set_thread(thread); if (spdk_thread_is_exited(thread)) { TAILQ_REMOVE(&reactor->threads, lw_thread, link); assert(reactor->thread_count > 0); reactor->thread_count--; if (reactor->interrupt_mode) { int efd = spdk_thread_get_interrupt_fd(thread); spdk_fd_group_remove(reactor->fgrp, efd); } spdk_thread_destroy(thread); } else { spdk_thread_poll(thread, 0, 0); } } } return 0; } int spdk_app_parse_core_mask(const char *mask, struct spdk_cpuset *cpumask) { int ret; const struct spdk_cpuset *validmask; ret = spdk_cpuset_parse(cpumask, mask); if (ret < 0) { return ret; } validmask = spdk_app_get_core_mask(); spdk_cpuset_and(cpumask, validmask); return 0; } const struct spdk_cpuset * spdk_app_get_core_mask(void) { return &g_reactor_core_mask; } void spdk_reactors_start(void) { struct spdk_reactor *reactor; struct spdk_cpuset tmp_cpumask = {}; uint32_t i, current_core; int rc; char thread_name[32]; g_rusage_period = (CONTEXT_SWITCH_MONITOR_PERIOD * spdk_get_ticks_hz()) / SPDK_SEC_TO_USEC; g_reactor_state = SPDK_REACTOR_STATE_RUNNING; current_core = spdk_env_get_current_core(); SPDK_ENV_FOREACH_CORE(i) { if (i != current_core) { reactor = spdk_reactor_get(i); if (reactor == NULL) { continue; } rc = spdk_env_thread_launch_pinned(reactor->lcore, reactor_run, reactor); if (rc < 0) { SPDK_ERRLOG("Unable to start reactor thread on core %u\n", reactor->lcore); assert(false); return; } /* For now, for each reactor spawn one thread. 
*/ snprintf(thread_name, sizeof(thread_name), "reactor_%u", reactor->lcore); spdk_cpuset_zero(&tmp_cpumask); spdk_cpuset_set_cpu(&tmp_cpumask, i, true); spdk_thread_create(thread_name, &tmp_cpumask); } spdk_cpuset_set_cpu(&g_reactor_core_mask, i, true); } /* Start the main reactor */ reactor = spdk_reactor_get(current_core); assert(reactor != NULL); g_scheduling_reactor = reactor; reactor_run(reactor); spdk_env_thread_wait_all(); g_reactor_state = SPDK_REACTOR_STATE_SHUTDOWN; } void spdk_reactors_stop(void *arg1) { uint32_t i; int rc; struct spdk_reactor *reactor; uint64_t notify = 1; g_reactor_state = SPDK_REACTOR_STATE_EXITING; if (spdk_interrupt_mode_is_enabled()) { SPDK_ENV_FOREACH_CORE(i) { reactor = spdk_reactor_get(i); assert(reactor != NULL); rc = write(reactor->events_fd, &notify, sizeof(notify)); if (rc < 0) { SPDK_ERRLOG("failed to notify event queue for reactor(%u): %s.\n", i, spdk_strerror(errno)); continue; } } } } static pthread_mutex_t g_scheduler_mtx = PTHREAD_MUTEX_INITIALIZER; static uint32_t g_next_core = UINT32_MAX; static int thread_process_interrupts(void *arg) { struct spdk_thread *thread = arg; return spdk_thread_poll(thread, 0, 0); } static void _schedule_thread(void *arg1, void *arg2) { struct spdk_lw_thread *lw_thread = arg1; struct spdk_reactor *reactor; uint32_t current_core; int efd; current_core = spdk_env_get_current_core(); reactor = spdk_reactor_get(current_core); assert(reactor != NULL); TAILQ_INSERT_TAIL(&reactor->threads, lw_thread, link); reactor->thread_count++; if (reactor->interrupt_mode) { int rc; struct spdk_thread *thread; thread = spdk_thread_get_from_ctx(lw_thread); efd = spdk_thread_get_interrupt_fd(thread); rc = spdk_fd_group_add(reactor->fgrp, efd, thread_process_interrupts, thread); if (rc < 0) { SPDK_ERRLOG("Failed to schedule spdk_thread: %s.\n", spdk_strerror(-rc)); } } } static int _reactor_schedule_thread(struct spdk_thread *thread) { uint32_t core; struct spdk_lw_thread *lw_thread; struct spdk_event *evt = NULL; struct spdk_cpuset *cpumask; uint32_t i; cpumask = spdk_thread_get_cpumask(thread); lw_thread = spdk_thread_get_ctx(thread); assert(lw_thread != NULL); core = lw_thread->lcore; memset(lw_thread, 0, sizeof(*lw_thread)); pthread_mutex_lock(&g_scheduler_mtx); if (core == SPDK_ENV_LCORE_ID_ANY) { for (i = 0; i < spdk_env_get_core_count(); i++) { if (g_next_core > spdk_env_get_last_core()) { g_next_core = spdk_env_get_first_core(); } core = g_next_core; g_next_core = spdk_env_get_next_core(g_next_core); if (spdk_cpuset_get_cpu(cpumask, core)) { break; } } } evt = spdk_event_allocate(core, _schedule_thread, lw_thread, NULL); pthread_mutex_unlock(&g_scheduler_mtx); assert(evt != NULL); if (evt == NULL) { SPDK_ERRLOG("Unable to schedule thread on requested core mask.\n"); return -1; } lw_thread->tsc_start = spdk_get_ticks(); spdk_event_call(evt); return 0; } static void _reactor_request_thread_reschedule(struct spdk_thread *thread) { struct spdk_lw_thread *lw_thread; struct spdk_reactor *reactor; uint32_t current_core; assert(thread == spdk_get_thread()); lw_thread = spdk_thread_get_ctx(thread); _spdk_lw_thread_set_core(lw_thread, SPDK_ENV_LCORE_ID_ANY); current_core = spdk_env_get_current_core(); reactor = spdk_reactor_get(current_core); assert(reactor != NULL); if (reactor->interrupt_mode) { uint64_t notify = 1; if (write(reactor->resched_fd, &notify, sizeof(notify)) < 0) { SPDK_ERRLOG("failed to notify reschedule: %s.\n", spdk_strerror(errno)); } } } static int reactor_thread_op(struct spdk_thread *thread, enum spdk_thread_op op) 
{ struct spdk_lw_thread *lw_thread; switch (op) { case SPDK_THREAD_OP_NEW: lw_thread = spdk_thread_get_ctx(thread); lw_thread->lcore = SPDK_ENV_LCORE_ID_ANY; return _reactor_schedule_thread(thread); case SPDK_THREAD_OP_RESCHED: _reactor_request_thread_reschedule(thread); return 0; default: return -ENOTSUP; } } static bool reactor_thread_op_supported(enum spdk_thread_op op) { switch (op) { case SPDK_THREAD_OP_NEW: case SPDK_THREAD_OP_RESCHED: return true; default: return false; } } struct call_reactor { uint32_t cur_core; spdk_event_fn fn; void *arg1; void *arg2; uint32_t orig_core; spdk_event_fn cpl; }; static void on_reactor(void *arg1, void *arg2) { struct call_reactor *cr = arg1; struct spdk_event *evt; cr->fn(cr->arg1, cr->arg2); cr->cur_core = spdk_env_get_next_core(cr->cur_core); if (cr->cur_core > spdk_env_get_last_core()) { SPDK_DEBUGLOG(reactor, "Completed reactor iteration\n"); evt = spdk_event_allocate(cr->orig_core, cr->cpl, cr->arg1, cr->arg2); free(cr); } else { SPDK_DEBUGLOG(reactor, "Continuing reactor iteration to %d\n", cr->cur_core); evt = spdk_event_allocate(cr->cur_core, on_reactor, arg1, NULL); } assert(evt != NULL); spdk_event_call(evt); } void spdk_for_each_reactor(spdk_event_fn fn, void *arg1, void *arg2, spdk_event_fn cpl) { struct call_reactor *cr; struct spdk_event *evt; cr = calloc(1, sizeof(*cr)); if (!cr) { SPDK_ERRLOG("Unable to perform reactor iteration\n"); cpl(arg1, arg2); return; } cr->fn = fn; cr->arg1 = arg1; cr->arg2 = arg2; cr->cpl = cpl; cr->orig_core = spdk_env_get_current_core(); cr->cur_core = spdk_env_get_first_core(); SPDK_DEBUGLOG(reactor, "Starting reactor iteration from %d\n", cr->orig_core); evt = spdk_event_allocate(cr->cur_core, on_reactor, cr, NULL); assert(evt != NULL); spdk_event_call(evt); } #ifdef __linux__ static int reactor_schedule_thread_event(void *arg) { struct spdk_reactor *reactor = arg; struct spdk_lw_thread *lw_thread, *tmp; uint32_t count = 0; uint64_t notify = 1; assert(reactor->interrupt_mode); if (read(reactor->resched_fd, &notify, sizeof(notify)) < 0) { SPDK_ERRLOG("failed to acknowledge reschedule: %s.\n", spdk_strerror(errno)); return -errno; } TAILQ_FOREACH_SAFE(lw_thread, &reactor->threads, link, tmp) { count += reactor_post_process_lw_thread(reactor, lw_thread) ? 
1 : 0; } return count; } static int reactor_interrupt_init(struct spdk_reactor *reactor) { int rc; rc = spdk_fd_group_create(&reactor->fgrp); if (rc != 0) { return rc; } reactor->resched_fd = eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC); if (reactor->resched_fd < 0) { rc = -EBADF; goto err; } rc = spdk_fd_group_add(reactor->fgrp, reactor->resched_fd, reactor_schedule_thread_event, reactor); if (rc) { close(reactor->resched_fd); goto err; } reactor->events_fd = eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC); if (reactor->events_fd < 0) { spdk_fd_group_remove(reactor->fgrp, reactor->resched_fd); close(reactor->resched_fd); rc = -EBADF; goto err; } rc = spdk_fd_group_add(reactor->fgrp, reactor->events_fd, (spdk_fd_fn)event_queue_run_batch, reactor); if (rc) { spdk_fd_group_remove(reactor->fgrp, reactor->resched_fd); close(reactor->resched_fd); close(reactor->events_fd); goto err; } reactor->interrupt_mode = true; return 0; err: spdk_fd_group_destroy(reactor->fgrp); return rc; } #else static int reactor_interrupt_init(struct spdk_reactor *reactor) { return -ENOTSUP; } #endif static void reactor_interrupt_fini(struct spdk_reactor *reactor) { struct spdk_fd_group *fgrp = reactor->fgrp; if (!fgrp) { return; } spdk_fd_group_remove(fgrp, reactor->events_fd); spdk_fd_group_remove(fgrp, reactor->resched_fd); close(reactor->events_fd); close(reactor->resched_fd); spdk_fd_group_destroy(fgrp); reactor->fgrp = NULL; } void _spdk_lw_thread_set_core(struct spdk_lw_thread *thread, uint32_t lcore) { assert(thread != NULL); thread->lcore = lcore; thread->resched = true; } void _spdk_lw_thread_get_current_stats(struct spdk_lw_thread *thread, struct spdk_thread_stats *stats) { assert(thread != NULL); *stats = thread->current_stats; } static int _governor_get_capabilities(uint32_t lcore_id, struct spdk_governor_capabilities *capabilities) { capabilities->freq_change = false; capabilities->freq_getset = false; capabilities->freq_up = false; capabilities->freq_down = false; capabilities->freq_max = false; capabilities->freq_min = false; capabilities->turbo_set = false; capabilities->priority = false; capabilities->turbo_available = false; return 0; } static struct spdk_governor * _governor_find(char *name) { struct spdk_governor *governor, *tmp; TAILQ_FOREACH_SAFE(governor, &g_governor_list, link, tmp) { if (strcmp(name, governor->name) == 0) { return governor; } } return NULL; } int _spdk_governor_set(char *name) { struct spdk_governor *governor; uint32_t i; int rc; governor = _governor_find(name); if (governor == NULL) { return -EINVAL; } g_governor = *governor; if (g_governor.init) { rc = g_governor.init(); if (rc != 0) { return rc; } } SPDK_ENV_FOREACH_CORE(i) { if (g_governor.init_core) { rc = g_governor.init_core(i); if (rc != 0) { return rc; } } } return 0; } void _spdk_governor_list_add(struct spdk_governor *governor) { if (_governor_find(governor->name)) { SPDK_ERRLOG("governor named '%s' already registered.\n", governor->name); assert(false); return; } TAILQ_INSERT_TAIL(&g_governor_list, governor, link); } SPDK_LOG_REGISTER_COMPONENT(reactor)
LSTS/imc4j
core/src/main/java/pt/lsts/imc4j/msg/TeleoperationDone.java
package pt.lsts.imc4j.msg;

import java.io.IOException;
import java.lang.Exception;
import java.lang.String;
import java.nio.ByteBuffer;

/**
 * Notification of completion of a Teleoperation maneuver.
 */
public class TeleoperationDone extends Message {
    public static final int ID_STATIC = 460;

    public String abbrev() {
        return "TeleoperationDone";
    }

    public int mgid() {
        return 460;
    }

    public byte[] serializeFields() {
        return new byte[0];
    }

    public void deserializeFields(ByteBuffer buf) throws IOException {
        try {
        } catch (Exception e) {
            throw new IOException(e);
        }
    }
}
2637309949/dolphin
packages/web/core/util.go
package core

// LastChar defined
func LastChar(str string) uint8 {
	if str == "" {
		panic("The length of the string can't be 0")
	}
	return str[len(str)-1]
}
rodrigojv/thisvui
src/mixins/states.js
import utils from "../utils/utils";
import CssArchitect from "../utils/css-architect";

export default {
  props: {
    isHovered: {
      type: [String, Boolean]
    },
    isFocused: {
      type: [String, Boolean]
    },
    isLoading: {
      type: [String, Boolean]
    },
    disabled: {
      type: Boolean
    }
  },
  computed: {
    /**
     * Dynamically adds the modifiers css classes based on mixin props
     * @returns { A String with the chained css classes }
     */
    getStateModifiers: function() {
      const cssArchitect = new CssArchitect();
      cssArchitect.addClass(
        "is-hovered",
        utils.convert.stringToBoolean(this.isHovered)
      );
      cssArchitect.addClass(
        "is-focused",
        utils.convert.stringToBoolean(this.isFocused)
      );
      cssArchitect.addClass(
        "is-loading",
        utils.convert.stringToBoolean(this.isLoading)
      );
      return cssArchitect.getClasses();
    }
  }
};
uwo-openhouse/openhouse-app
src/constants/Colors.js
const tintColor = '#2f95dc';

export default {
  tintColor,
  tabIconDefault: '#ccc',
  tabIconSelected: tintColor,
  tabBar: '#fefefe',
  errorBackground: 'red',
  errorText: '#fff',
  warningBackground: '#EAEB5E',
  warningText: '#666804',
  noticeBackground: tintColor,
  noticeText: '#fff',
  westernPurple: '#4f2683',
  offBlack: '#3d3d3d',
  linkColor: '#2980b9',
  background: '#E5E5E5',
  lineColor: '#999',
};
Andrew8305/open-capacity-platform
open-cxf-service/src/main/java/com/open/capacity/srenewSer/utils/SrenewSerUtils.java
<filename>open-cxf-service/src/main/java/com/open/capacity/srenewSer/utils/SrenewSerUtils.java package com.open.capacity.srenewSer.utils; import java.util.logging.Logger; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.open.capacity.srenewSer.bo.SrenewSerBO; import com.open.capacity.srenewSer.bo.impl.SrenewSerBOImpl; import com.open.capacity.utils.SpringUtil; import cn.chinaunicom.ws.srenewser.unibssbody.QRYCHGPRODOUTPUT; import cn.chinaunicom.ws.srenewser.unibssbody.SRENEWSUBOUTPUT; import cn.chinaunicom.ws.srenewser.unibssbody.SRENEWTRADEOUTPUT; import cn.chinaunicom.ws.srenewser.unibssbody.qrychgprodreq.QRYCHGPRODREQ; import cn.chinaunicom.ws.srenewser.unibssbody.qrychgprodrsp.QRYCHGPRODRSP; import cn.chinaunicom.ws.srenewser.unibssbody.srenewsubrsp.SRENEWSUBRSP; import cn.chinaunicom.ws.srenewser.unibssbody.srenewtradersp.SRENEWTRADERSP; /** * @author 作者 owen E-mail: <EMAIL> * @version 创建时间:2018年04月23日 上午20:01:06 类说明 */ public class SrenewSerUtils { public static SrenewSerUtils instance; private ObjectMapper objectMapper = SpringUtil.getBean(ObjectMapper.class) ; private static final Logger LOG = Logger.getLogger(SrenewSerBOImpl.class.getName()); private SrenewSerUtils() { } public static SrenewSerUtils getInstance() { if (instance == null) { instance = new SrenewSerUtils(); } return instance; } /* * (non-Javadoc) * * @see cn.chinaunicom.ws.srenewser.SrenewSer#qryChgProd(cn.chinaunicom.ws. * srenewser.unibssbody.QRYCHGPRODINPUT parameters )* */ public cn.chinaunicom.ws.srenewser.unibssbody.QRYCHGPRODOUTPUT qryChgProd( cn.chinaunicom.ws.srenewser.unibssbody.QRYCHGPRODINPUT parameters) { try { cn.chinaunicom.ws.srenewser.unibssbody.QRYCHGPRODOUTPUT _return = SpringUtil.getBean(SrenewSerBO.class).qryChgProd(parameters); try { LOG.info("响应的报文"+objectMapper.writeValueAsString(_return)); } catch (JsonProcessingException e) { // TODO Auto-generated catch block e.printStackTrace(); } return _return; } catch (Exception ex) { QRYCHGPRODOUTPUT out = new QRYCHGPRODOUTPUT(); QRYCHGPRODOUTPUT.UNIBSSBODY body = new QRYCHGPRODOUTPUT.UNIBSSBODY(); out.setUNIBSSHEAD(parameters.getUNIBSSHEAD()); out.getUNIBSSHEAD().setACTIONCODE("1"); out.getUNIBSSHEAD().getRESPONSE().setRSPCODE("8888"); out.getUNIBSSHEAD().getRESPONSE().setRSPDESC("程序异常"); out.getUNIBSSHEAD().getRESPONSE().setRSPTYPE("1"); out.setUNIBSSBODY(body); QRYCHGPRODRSP rsp = new QRYCHGPRODRSP(); QRYCHGPRODREQ req = parameters.getUNIBSSBODY().getQRYCHGPRODREQ(); rsp.setRESPCODE("8888"); rsp.setRESPDESC("程序异常"); body.setQRYCHGPRODRSP(rsp); try { LOG.info("响应的报文"+objectMapper.writeValueAsString(out)); } catch (JsonProcessingException e) { // TODO Auto-generated catch block e.printStackTrace(); } return out ; } } /* * (non-Javadoc) * * @see cn.chinaunicom.ws.srenewser.SrenewSer#srenewTrade(cn.chinaunicom.ws. 
* srenewser.unibssbody.SRENEWTRADEINPUT parameters )* */ public cn.chinaunicom.ws.srenewser.unibssbody.SRENEWTRADEOUTPUT srenewTrade( cn.chinaunicom.ws.srenewser.unibssbody.SRENEWTRADEINPUT parameters) { try { cn.chinaunicom.ws.srenewser.unibssbody.SRENEWTRADEOUTPUT _return = SpringUtil.getBean(SrenewSerBO.class) .srenewTrade(parameters); try { LOG.info("响应的报文"+objectMapper.writeValueAsString(_return)); } catch (JsonProcessingException e) { // TODO Auto-generated catch block e.printStackTrace(); } return _return; } catch (Exception ex) { SRENEWTRADEOUTPUT out = new SRENEWTRADEOUTPUT(); SRENEWTRADEOUTPUT.UNIBSSBODY body = new SRENEWTRADEOUTPUT.UNIBSSBODY(); out.setUNIBSSBODY(body); SRENEWTRADERSP resp = new SRENEWTRADERSP(); out.setUNIBSSHEAD(parameters.getUNIBSSHEAD()); out.getUNIBSSHEAD().setACTIONCODE("1"); out.getUNIBSSHEAD().getRESPONSE().setRSPCODE("8888"); out.getUNIBSSHEAD().getRESPONSE().setRSPDESC("程序异常"); out.getUNIBSSHEAD().getRESPONSE().setRSPTYPE("1"); resp.setRESPCODE("8888"); resp.setRESPDESC("程序异常"); out.getUNIBSSBODY().setSRENEWTRADERSP(resp); try { LOG.info("响应的报文"+objectMapper.writeValueAsString(out)); } catch (JsonProcessingException e) { // TODO Auto-generated catch block e.printStackTrace(); } return out; } } /* * (non-Javadoc) * * @see cn.chinaunicom.ws.srenewser.SrenewSer#srenewSub(cn.chinaunicom.ws. * srenewser.unibssbody.SRENEWSUBINPUT parameters )* */ public cn.chinaunicom.ws.srenewser.unibssbody.SRENEWSUBOUTPUT srenewSub( cn.chinaunicom.ws.srenewser.unibssbody.SRENEWSUBINPUT parameters) { try { cn.chinaunicom.ws.srenewser.unibssbody.SRENEWSUBOUTPUT _return = SpringUtil.getBean(SrenewSerBO.class).srenewSub(parameters); try { LOG.info("响应的报文"+objectMapper.writeValueAsString(_return)); } catch (JsonProcessingException e) { // TODO Auto-generated catch block e.printStackTrace(); } return _return; } catch (Exception ex) { SRENEWSUBOUTPUT out = new SRENEWSUBOUTPUT(); SRENEWSUBOUTPUT.UNIBSSBODY body = new SRENEWSUBOUTPUT.UNIBSSBODY(); out.setUNIBSSBODY(body); SRENEWSUBRSP resp = new SRENEWSUBRSP(); out.setUNIBSSHEAD(parameters.getUNIBSSHEAD()); out.getUNIBSSHEAD().setACTIONCODE("1"); out.getUNIBSSHEAD().getRESPONSE().setRSPCODE("8888"); out.getUNIBSSHEAD().getRESPONSE().setRSPDESC("程序异常"); out.getUNIBSSHEAD().getRESPONSE().setRSPTYPE("1"); resp.setRESPCODE("8888"); resp.setRESPDESC("程序异常"); out.getUNIBSSBODY().setSRENEWSUBRSP(resp); try { LOG.info("响应的报文"+objectMapper.writeValueAsString(out)); } catch (JsonProcessingException e) { // TODO Auto-generated catch block e.printStackTrace(); } return out; } } }
Amstee/Go-Blockchain
logic/displayWallets.go
package logic

import (
	"github.com/jinzhu/gorm"
	"github.com/amstee/blockchain/models"
)

func DisplayWallets(db *gorm.DB) {
	wallets := models.GetWallets(db)
	wallets.Display()
}
didizlatkova/PatternShooter
src/zones/common/weapons/Gun.java
package zones.common.weapons;

import elements.abstracts.weapons.Weapon;

public class Gun extends Weapon {

    public Gun() {
        super(40);
    }

    @Override
    public String toString() {
        return "G";
    }

    @Override
    public String getName() {
        return "Gun";
    }
}
ChrisLR/BasicDungeonRL
bflib/spells/magicuser/confusion.py
from bflib import units
from bflib.characters import classes
from bflib.spells import listing
from bflib.spells.base import Spell
from bflib.spells.duration import SpellDuration
from bflib.spells.range import SpellRange


@listing.register_spell
class Confusion(Spell):
    name = "Confusion"
    class_level_map = {
        classes.MagicUser: 4,
    }
    duration = SpellDuration(
        base_duration=units.CombatRound(2),
        duration_per_level=units.CombatRound(1)
    )
    range = SpellRange(base_range=units.Feet(360))
lehaSVV2009/leetcode
Yandex.ATM/Iteration2.js
moneyTypes = [5000, 1000, 500, 100, 50];
limits = {
  5000: 4,
  1000: 3,
  500: 2,
  100: 5,
  50: 100
};

function getMoney(amount) {
  const smallestBanknote = moneyTypes[moneyTypes.length - 1];
  if (amount % smallestBanknote !== 0) {
    // amount < 0? amount == 0?
    throw new Error(`Amount must be divided by ${smallestBanknote}`);
  }

  const result = {
    res: moneyTypes.reduce((map, banknote) => {
      map[banknote] = 0;
      return map;
    }, {}),
    limits: { ...limits }
  };

  for (let banknote of moneyTypes) {
    const neededCount = Math.floor(amount / banknote);
    const countToWithdraw = Math.min(neededCount, limits[banknote]);
    result.res[banknote] += countToWithdraw;
    result.limits[banknote] -= countToWithdraw;
    amount -= banknote * countToWithdraw;
  }

  if (amount > 0) {
    return { res: "warn", limits: { ...limits } };
  }

  return result;
}
hugo3m/service-national-universel
admin/src/scenes/volontaires-head-center/view/wrapper.js
import React from "react";
import styled from "styled-components";
import api from "../../../services/api";
import { useHistory } from "react-router-dom";
import { toastr } from "react-redux-toastr";
import { translate } from "../../../utils";
import Badge from "../../../components/Badge";
import TabList from "../../../components/views/TabList";
import Tab from "../../../components/views/Tab";

export default ({ children, young, tab }) => {
  const history = useHistory();

  const handleDelete = async () => {
    if (!confirm("Êtes-vous sûr(e) de vouloir supprimer ce volontaire ?")) return;
    try {
      const { ok, code } = await api.remove(`/young/${young._id}`);
      if (!ok && code === "OPERATION_UNAUTHORIZED") return toastr.error("Vous n'avez pas les droits pour effectuer cette action");
      if (!ok) return toastr.error("Une erreur s'est produite :", translate(code));
      toastr.success("Ce volontaire a été supprimé.");
      return history.push(`/volontaire`);
    } catch (e) {
      console.log(e);
      return toastr.error("Oups, une erreur est survenue pendant la supression du volontaire :", translate(e.code));
    }
  };

  if (!young) return null;
  return (
    <div style={{ flex: tab === "missions" ? "0%" : 2, position: "relative", padding: "3rem" }}>
      <Header>
        <div style={{ flex: 1 }}>
          <Title>
            {young.firstName} {young.lastName} <Badge text={`Cohorte ${young.cohort}`} />
          </Title>
          <TabList>
            <Tab isActive={tab === "details"} onClick={() => history.push(`/volontaire/${young._id}`)}>
              Détails
            </Tab>
            <Tab isActive={tab === "phase1"} onClick={() => history.push(`/volontaire/${young._id}/phase1`)}>
              Phase 1
            </Tab>
          </TabList>
        </div>
      </Header>
      {children}
    </div>
  );
};

const Title = styled.div`
  display: flex;
  color: rgb(38, 42, 62);
  font-weight: 700;
  font-size: 24px;
  margin-bottom: 10px;
  align-items: center;
`;

const Header = styled.div`
  padding: 0 25px 0;
  display: flex;
  margin-bottom: 1rem;
  align-items: flex-start;
`;
TsvetanNikolov123/JAVA---OOP-Advanced
15 INTERFACE SEGREGATION DEPENDENCY INVERSION PRINCIPLES/p02_services/SmsNotificationService.java
package p02_services;

public class SmsNotificationService implements NotificationService {

    @Override
    public void sendNotification() {
        System.out.println("Sms send");
    }

    @Override
    public boolean isActive() {
        return false;
    }
}
DrDaveD/mytoken-server
internal/db/dbmigrate/migrate.go
package dbmigrate

import (
	"embed"
	"fmt"
	"io/fs"

	log "github.com/sirupsen/logrus"
	"golang.org/x/mod/semver"

	"github.com/oidc-mytoken/server/shared/utils"
)

// Commands is a type for holding sql commands that should run before and after a version update
type Commands struct {
	Before string `yaml:"before"`
	After  string `yaml:"after"`
}

// VersionCommands is type holding the Commands that are related to a mytoken version
type VersionCommands map[string]Commands

// MigrationCommands holds the VersionCommands for mytoken. These commands are used to migrate the database between mytoken
// versions.
var MigrationCommands = VersionCommands{}

// Versions holds all versions for which migration commands are available
var Versions []string

//go:embed scripts
var migrationScripts embed.FS

func init() {
	Versions = []string{}
	if err := fs.WalkDir(fs.FS(migrationScripts), ".", func(path string, d fs.DirEntry, err error) error {
		if d.IsDir() {
			return nil
		}
		name := d.Name()
		Versions = append(Versions, utils.RSplitN(name, ".", 3)[0])
		return nil
	}); err != nil {
		log.WithError(err).Fatal()
	}
	semver.Sort(Versions)
	for _, v := range Versions {
		MigrationCommands[v] = Commands{
			Before: readBeforeFile(v),
			After:  readAfterFile(v),
		}
	}
}

func readBeforeFile(version string) string {
	return _readSQLFile(version, "pre")
}

func readAfterFile(version string) string {
	return _readSQLFile(version, "post")
}

func _readSQLFile(version, typeString string) string {
	data, err := migrationScripts.ReadFile(fmt.Sprintf("scripts/%s.%s.sql", version, typeString))
	if err != nil {
		return ""
	}
	return string(data)
}
amrit92/educron-ssr
app/redux/actions/PostAction.js
import {AWAIT_MARKER} from 'redux-await';
import PostApi from '../../api/post/index';
import {postsList} from '../../configs/index';

export const POST_CREATE = 'post create';
export const POST_UPDATE = 'post update';
export const POST_LISTS = 'posts lists';
export const POST_LISTS_LOAD_MORE = 'post load more';
export const POST_VIEW = 'post view';
export const POST_EDIT = 'post edit';
export const POST_DELETE = 'post delete';
export const POST_RESET = 'post reset';
export const RESULT_VIEW = 'result view';

export function createPost(post) {
  return (dispatch, getState)=> {
    dispatch({
      type: POST_CREATE,
      AWAIT_MARKER,
      payload: {
        createPost: PostApi.createPost(post, getState().auth.authenticated.user.uid)
      }
    });
  }
}

export function updatePost(post, post_id) {
  return (dispatch) => {
    dispatch({
      type: POST_UPDATE,
      AWAIT_MARKER,
      payload: {
        updatePost: PostApi.updatePost(post, post_id)
      }
    })
  }
}

export function getPostsList() {
  return (dispatch) => {
    dispatch({
      type: POST_LISTS,
      AWAIT_MARKER,
      payload: {
        getPosts: PostApi.getPostsList()
      }
    });
  }
}

export function getPostView(id) {
  return (dispatch) => {
    dispatch({
      type: POST_VIEW,
      AWAIT_MARKER,
      payload: {
        getPost: PostApi.getPost(id)
      }
    })
  }
}

export function deletePost(id){
  return (dispatch) => {
    dispatch({
      type: POST_DELETE,
      AWAIT_MARKER,
      payload: {
        deletePost: PostApi.deletePost(id)
      }
    })
  }
}

export function loadMorePosts(loadMore = postsList.perPage){
  return (dispatch) => {
    dispatch({
      type: POST_LISTS_LOAD_MORE,
      loadMore: loadMore
    })
  }
}

export function resetCurrentPost(){
  return (dispatch) => {
    dispatch({
      type: POST_RESET
    })
  }
}

//export function getPostsList() {
//  return (dispatch, getState) => {
//    dispatch({type: POST_LISTS_FETCHING});
//    PostApi.getPostsList().then(posts => {
//      dispatch({type: POST_LISTS_COMPLETED, payload: {posts}})
//    }).catch(error => {
//      dispatch({type: POST_LISTS_FAILED, error: error});
//    })
//  }
//}
Gadreel/dcserver
dcraft.hub/src/main/java/dcraft/locale/index/eng/Index.java
<filename>dcraft.hub/src/main/java/dcraft/locale/index/eng/Index.java package dcraft.locale.index.eng; import dcraft.locale.IndexBase; import dcraft.locale.IndexInfo; import dcraft.locale.analyzer.EnglishFullAnalyzer; import dcraft.locale.analyzer.EnglishSimpleAnalyzer; import dcraft.log.Logger; import dcraft.util.StringUtil; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import java.io.StringReader; import java.util.ArrayList; import java.util.List; public class Index extends IndexBase { @Override public List<IndexInfo> full(String value) { List<IndexInfo> ret = new ArrayList<>(); if (StringUtil.isEmpty(value)) return ret; try (EnglishFullAnalyzer analyzer = new EnglishFullAnalyzer()) { try (TokenStream stream = analyzer.tokenStream(null, new StringReader(value.trim()))) { CharTermAttribute cattr = stream.addAttribute(CharTermAttribute.class); OffsetAttribute offsetAttribute = stream.addAttribute(OffsetAttribute.class); stream.reset(); while (stream.incrementToken()) { IndexInfo info = new IndexInfo(); info.token = cattr.toString(); info.start = offsetAttribute.startOffset(); info.end = offsetAttribute.endOffset(); info.score = 1; ret.add(info); } stream.end(); } } catch (Exception x) { Logger.error("error with full index analyzer: " + x); } return ret; } @Override public List<IndexInfo> simple(String value) { List<IndexInfo> ret = new ArrayList<>(); if (StringUtil.isEmpty(value)) return ret; try (EnglishSimpleAnalyzer analyzer = new EnglishSimpleAnalyzer()) { try (TokenStream stream = analyzer.tokenStream(null, new StringReader(value.trim()))) { CharTermAttribute cattr = stream.addAttribute(CharTermAttribute.class); OffsetAttribute offsetAttribute = stream.addAttribute(OffsetAttribute.class); stream.reset(); while (stream.incrementToken()) { IndexInfo info = new IndexInfo(); info.token = cattr.toString(); info.start = offsetAttribute.startOffset(); info.end = offsetAttribute.endOffset(); info.score = 1; ret.add(info); } stream.end(); } } catch (Exception x) { Logger.error("error with full index analyzer: " + x); } return ret; } }
jser/jser.info
tools/refactor/dump-tags.json.js
<gh_stars>10-100
// MIT © 2017 azu
"use strict";
const fs = require("fs");
const getAllItemFilePath = require("./get-all-item-file-path");
const argv = require('minimist')(process.argv.slice(2));
const from = argv.from;
const to = argv.to;
/**
 * Tool that creates tags.json
 * Usage: node ./ --from "スライド" --to "slide"
 */
const tagSet = new Set();
getAllItemFilePath().forEach(filePath => {
    const article = require(filePath);
    article.list.forEach(item => {
        if (!Array.isArray(item.tags)) {
            return;
        }
        item.tags.forEach(tag => {
            tagSet.add(tag);
        });
    });
});
fs.writeFileSync("tag.json", JSON.stringify(Array.from(tagSet), null, 4), "utf-8");
asdFletcher/data-structures-and-algorithms
src/benchmarking/benchmarker/__tests__/benchmarker.test.js
<filename>src/benchmarking/benchmarker/__tests__/benchmarker.test.js
const benchmarker = require('../benchmarker.js');
const calculateAverageFromSingleDataSet = benchmarker.calculateAverageFromSingleDataSet;

describe('avg data sets', () => {
  it('works with regular data, no duplicates in a single set', () => {
    let data = [
      {'data':[{'x':1,'y':3},{'x':2,'y':5},{'x':3,'y':0},{'x':4,'y':0}]},
      {'data':[{'x':1,'y':4},{'x':2,'y':2},{'x':3,'y':3},{'x':4,'y':2}]},
      {'data':[{'x':1,'y':5},{'x':2,'y':0},{'x':3,'y':3},{'x':4,'y':3}]},
      {'data':[{'x':1,'y':2},{'x':2,'y':3},{'x':3,'y':2},{'x':4,'y':2}]},
    ];
    let expected = [{'x':1,'y':14/4},{'x':2,'y':10/4},{'x':3,'y':8/4},{'x':4,'y':7/4}];
    let result = calculateAverageFromSingleDataSet(data);
    expect(result).toEqual(expected);
  });
  it('works with irregular data, duplicates in a single set', () => {
    let data = [
      {'data':[{'x':1,'y':3},{'x':1,'y':100},{'x':2,'y':5},{'x':3,'y':0},{'x':4,'y':0}]},
      {'data':[{'x':1,'y':4},{'x':2,'y':2},{'x':3,'y':3},{'x':4,'y':2}]},
      {'data':[{'x':1,'y':5},{'x':2,'y':0},{'x':3,'y':3},{'x':4,'y':3}]},
      {'data':[{'x':1,'y':2},{'x':2,'y':3},{'x':3,'y':2},{'x':4,'y':2}]},
    ];
    let expected = [{'x':1,'y':114/5},{'x':2,'y':10/4},{'x':3,'y':8/4},{'x':4,'y':7/4}];
    let result = calculateAverageFromSingleDataSet(data);
    expect(result).toEqual(expected);
  });
});
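The two tests above pin down the averaging rule: points from every data set are pooled, grouped by x, and the y values averaged, with duplicate x values inside one set counted individually (hence 114/5 for x=1 in the second case). The real implementation lives in benchmarker.js and is not shown here; purely as an illustration of that rule, here is a minimal sketch of the same grouping logic written in Java, with made-up class and method names.

import java.util.*;

// Illustrative helper mirroring the behavior the tests specify:
// pool all (x, y) points, group by x, and average y per group.
class AverageSketch {
    static Map<Integer, Double> average(List<List<int[]>> dataSets) {
        Map<Integer, double[]> sums = new TreeMap<>(); // x -> {sum, count}
        for (List<int[]> set : dataSets) {
            for (int[] point : set) {                  // point[0] = x, point[1] = y
                double[] acc = sums.computeIfAbsent(point[0], k -> new double[2]);
                acc[0] += point[1];
                acc[1] += 1;                           // duplicates within a set still count
            }
        }
        Map<Integer, Double> result = new TreeMap<>();
        sums.forEach((x, acc) -> result.put(x, acc[0] / acc[1]));
        return result;
    }
}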
palava/palava-ipc
src/main/java/de/cosmocode/palava/ipc/IpcCallFilterDefinition.java
<filename>src/main/java/de/cosmocode/palava/ipc/IpcCallFilterDefinition.java
/**
 * Copyright 2010 CosmoCode GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package de.cosmocode.palava.ipc;

import java.util.List;

import com.google.common.base.Predicate;
import com.google.inject.TypeLiteral;

/**
 * A filter definition contains a predicate matching commands
 * and a key addressing a filter binding in guice.
 *
 * @author <NAME>
 */
interface IpcCallFilterDefinition {

    TypeLiteral<List<IpcCallFilterDefinition>> LITERAL = new TypeLiteral<List<IpcCallFilterDefinition>>() { };

    /**
     * Provides the matcher predicate.
     *
     * @return the predicate used to match commands
     */
    Predicate<? super IpcCommand> getPredicate();

    /**
     * Provides the associated filter.
     *
     * @return the filter
     */
    IpcCallFilter getFilter();

}
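As the Javadoc says, a definition is just a (predicate, filter) pair. A minimal sketch of what an implementation of this interface could look like, as an illustrative value class living in the same package (this is not the actual palava-ipc implementation, whose filter is resolved from a Guice binding key):

// Illustrative only: a simple value object pairing a command predicate with a filter.
final class SimpleIpcCallFilterDefinition implements IpcCallFilterDefinition {

    private final Predicate<? super IpcCommand> predicate;
    private final IpcCallFilter filter;

    SimpleIpcCallFilterDefinition(Predicate<? super IpcCommand> predicate, IpcCallFilter filter) {
        this.predicate = predicate;
        this.filter = filter;
    }

    @Override
    public Predicate<? super IpcCommand> getPredicate() {
        return predicate;
    }

    @Override
    public IpcCallFilter getFilter() {
        return filter;
    }
}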
saintube/rails_sample_app
db/migrate/20181223030400_add_columns_to_comments.rb
<filename>db/migrate/20181223030400_add_columns_to_comments.rb
class AddColumnsToComments < ActiveRecord::Migration[5.1]
  def change
    add_column :comments, :power, :integer
    add_column :comments, :price, :integer
    add_column :comments, :interior, :integer
    add_column :comments, :configure, :integer
    add_column :comments, :safety, :integer
    add_column :comments, :appearance, :integer
    add_column :comments, :control, :integer
    add_column :comments, :consumption, :integer
    add_column :comments, :space, :integer
    add_column :comments, :comfort, :integer
  end
end
matthiasblaesing/COMTypelibraries
office2/src/main/java/eu/doppel_helix/jna/tlb/office2/MsoWarpFormat.java
<filename>office2/src/main/java/eu/doppel_helix/jna/tlb/office2/MsoWarpFormat.java package eu.doppel_helix.jna.tlb.office2; import com.sun.jna.platform.win32.COM.util.IComEnum; public enum MsoWarpFormat implements IComEnum { /** * (-2) */ msoWarpFormatMixed(-2), /** * (0) */ msoWarpFormat1(0), /** * (1) */ msoWarpFormat2(1), /** * (2) */ msoWarpFormat3(2), /** * (3) */ msoWarpFormat4(3), /** * (4) */ msoWarpFormat5(4), /** * (5) */ msoWarpFormat6(5), /** * (6) */ msoWarpFormat7(6), /** * (7) */ msoWarpFormat8(7), /** * (8) */ msoWarpFormat9(8), /** * (9) */ msoWarpFormat10(9), /** * (10) */ msoWarpFormat11(10), /** * (11) */ msoWarpFormat12(11), /** * (12) */ msoWarpFormat13(12), /** * (13) */ msoWarpFormat14(13), /** * (14) */ msoWarpFormat15(14), /** * (15) */ msoWarpFormat16(15), /** * (16) */ msoWarpFormat17(16), /** * (17) */ msoWarpFormat18(17), /** * (18) */ msoWarpFormat19(18), /** * (19) */ msoWarpFormat20(19), /** * (20) */ msoWarpFormat21(20), /** * (21) */ msoWarpFormat22(21), /** * (22) */ msoWarpFormat23(22), /** * (23) */ msoWarpFormat24(23), /** * (24) */ msoWarpFormat25(24), /** * (25) */ msoWarpFormat26(25), /** * (26) */ msoWarpFormat27(26), /** * (27) */ msoWarpFormat28(27), /** * (28) */ msoWarpFormat29(28), /** * (29) */ msoWarpFormat30(29), /** * (30) */ msoWarpFormat31(30), /** * (31) */ msoWarpFormat32(31), /** * (32) */ msoWarpFormat33(32), /** * (33) */ msoWarpFormat34(33), /** * (34) */ msoWarpFormat35(34), /** * (35) */ msoWarpFormat36(35), /** * (36) */ msoWarpFormat37(36), ; private MsoWarpFormat(long value) { this.value = value; } private long value; public long getValue() { return this.value; } }
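Each constant above simply wraps the numeric value COM reports for MsoWarpFormat, exposed through IComEnum.getValue(). If the reverse mapping is ever needed (raw COM value back to the enum constant), a small helper like the following would do it; this is an illustrative addition, not part of the generated binding.

// Illustrative helper: map a raw COM value back to its MsoWarpFormat constant.
final class MsoWarpFormatLookup {
    static MsoWarpFormat fromValue(long value) {
        for (MsoWarpFormat format : MsoWarpFormat.values()) {
            if (format.getValue() == value) {
                return format;
            }
        }
        throw new IllegalArgumentException("Unknown MsoWarpFormat value: " + value);
    }
}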
tcolgate/traefik
integration/vendor/github.com/docker/libcompose/yaml/external.go
package yaml

import (
	"fmt"
)

// External represents an external network entry in compose file.
// It can be a boolean (true|false) or have a name
type External struct {
	External bool
	Name     string
}

// MarshalYAML implements the Marshaller interface.
func (n External) MarshalYAML() (tag string, value interface{}, err error) {
	if n.Name == "" {
		return "", n.External, nil
	}
	return "", map[string]interface{}{
		"name": n.Name,
	}, nil
}

// UnmarshalYAML implements the Unmarshaller interface.
func (n *External) UnmarshalYAML(tag string, value interface{}) error {
	switch v := value.(type) {
	case bool:
		n.External = v
	case map[interface{}]interface{}:
		for mapKey, mapValue := range v {
			switch mapKey {
			case "name":
				n.Name = mapValue.(string)
			default:
				// Ignore unknown keys
				continue
			}
		}
		n.External = true
	default:
		return fmt.Errorf("Failed to unmarshal External: %#v", value)
	}
	return nil
}
algairim/brooklyn-server
software/base/src/main/java/org/apache/brooklyn/entity/machine/AddMachineMetrics.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.brooklyn.entity.machine; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.Beta; import com.google.common.base.CharMatcher; import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.base.Splitter; import com.google.common.collect.FluentIterable; import com.google.common.primitives.Doubles; import org.apache.brooklyn.api.entity.Entity; import org.apache.brooklyn.api.entity.EntityInitializer; import org.apache.brooklyn.api.entity.EntityLocal; import org.apache.brooklyn.api.sensor.EnricherSpec; import org.apache.brooklyn.core.entity.EntityInternal; import org.apache.brooklyn.enricher.stock.PercentageEnricher; import org.apache.brooklyn.enricher.stock.YamlRollingTimeWindowMeanEnricher; import org.apache.brooklyn.enricher.stock.YamlTimeWeightedDeltaEnricher; import org.apache.brooklyn.entity.software.base.SoftwareProcess; import org.apache.brooklyn.feed.ssh.SshFeed; import org.apache.brooklyn.feed.ssh.SshPollConfig; import org.apache.brooklyn.feed.ssh.SshPollValue; import org.apache.brooklyn.util.text.Strings; import org.apache.brooklyn.util.time.Duration; /** * Adds an {@link SshFeed} feed with sensors returning details about the machine the entity is running on. * <p> * The machine must be SSHable and running Linux. 
* * @since 0.10.0 */ @Beta public class AddMachineMetrics implements EntityInitializer { private static final Logger LOG = LoggerFactory.getLogger(AddMachineMetrics.class); static { MachineAttributes.init(); } @Override public void apply(EntityLocal entity) { SshFeed machineMetricsFeed = createMachineMetricsFeed(entity); ((EntityInternal) entity).feeds().add(machineMetricsFeed); addMachineMetricsEnrichers(entity); LOG.info("Configured machine metrics feed and enrichers on {}", entity); } public static void addMachineMetricsEnrichers(Entity entity) { entity.enrichers().add(EnricherSpec.create(YamlTimeWeightedDeltaEnricher.class) .configure(YamlTimeWeightedDeltaEnricher.SOURCE_SENSOR, MachineAttributes.USED_MEMORY) .configure(YamlTimeWeightedDeltaEnricher.TARGET_SENSOR, MachineAttributes.USED_MEMORY_DELTA_PER_SECOND_LAST)); entity.enrichers().add(EnricherSpec.create(YamlRollingTimeWindowMeanEnricher.class) .configure(YamlRollingTimeWindowMeanEnricher.SOURCE_SENSOR, MachineAttributes.USED_MEMORY_DELTA_PER_SECOND_LAST) .configure(YamlRollingTimeWindowMeanEnricher.TARGET_SENSOR, MachineAttributes.USED_MEMORY_DELTA_PER_SECOND_IN_WINDOW)); entity.enrichers().add(EnricherSpec.create(PercentageEnricher.class) .configure(PercentageEnricher.SOURCE_CURRENT_SENSOR, MachineAttributes.USED_MEMORY) .configure(PercentageEnricher.SOURCE_TOTAL_SENSOR, MachineAttributes.TOTAL_MEMORY) .configure(PercentageEnricher.TARGET_SENSOR, MachineAttributes.USED_MEMORY_PERCENT) .configure(PercentageEnricher.SUPPRESS_DUPLICATES, true)); } public static SshFeed createMachineMetricsFeed(Entity entity) { boolean retrieveUsageMetrics = entity.config().get(SoftwareProcess.RETRIEVE_USAGE_METRICS); return SshFeed.builder() .uniqueTag("machineMetricsFeed") .period(Duration.THIRTY_SECONDS) .entity(entity) .poll(SshPollConfig.forSensor(MachineAttributes.UPTIME) .command("cat /proc/uptime") .enabled(retrieveUsageMetrics) .onFailureOrException(Functions.<Duration>constant(null)) .onSuccess(new Function<SshPollValue, Duration>() { @Override public Duration apply(SshPollValue input) { return Duration.seconds(Double.valueOf(Strings.getFirstWord(input.getStdout()))); } })) .poll(SshPollConfig.forSensor(MachineAttributes.LOAD_AVERAGE) .command("uptime") .enabled(retrieveUsageMetrics) .onFailureOrException(Functions.<Double>constant(null)) .onSuccess(new Function<SshPollValue, Double>() { @Override public Double apply(SshPollValue input) { String loadAverage = Strings.getFirstWordAfter(input.getStdout(), "load average:").replace(",", ""); return Double.valueOf(loadAverage); } })) .poll(SshPollConfig.forSensor(MachineAttributes.CPU_USAGE) .command("ps -A -o pcpu") .enabled(retrieveUsageMetrics) .onFailureOrException(Functions.<Double>constant(null)) .onSuccess(new Function<SshPollValue, Double>() { @Override public Double apply(SshPollValue input) { Double cpu = 0d; Iterable<String> stdout = Splitter.on(CharMatcher.breakingWhitespace()).omitEmptyStrings().split(input.getStdout()); for (Double each : FluentIterable.from(stdout).skip(1).transform(Doubles.stringConverter())) { cpu += each; } return cpu / 100d; } })) .poll(SshPollConfig.forSensor(MachineAttributes.USED_MEMORY) .command("free | grep Mem:") .enabled(retrieveUsageMetrics) .onFailureOrException(Functions.<Long>constant(null)) .onSuccess(new Function<SshPollValue, Long>() { @Override public Long apply(SshPollValue input) { List<String> memoryData = Splitter.on(" ").omitEmptyStrings().splitToList(Strings.getFirstLine(input.getStdout())); return Long.parseLong(memoryData.get(2)); } 
})) .poll(SshPollConfig.forSensor(MachineAttributes.FREE_MEMORY) .command("free | grep Mem:") .enabled(retrieveUsageMetrics) .onFailureOrException(Functions.<Long>constant(null)) .onSuccess(new Function<SshPollValue, Long>() { @Override public Long apply(SshPollValue input) { List<String> memoryData = Splitter.on(" ").omitEmptyStrings().splitToList(Strings.getFirstLine(input.getStdout())); return Long.parseLong(memoryData.get(3)); } })) .poll(SshPollConfig.forSensor(MachineAttributes.TOTAL_MEMORY) .command("free | grep Mem:") .enabled(retrieveUsageMetrics) .onFailureOrException(Functions.<Long>constant(null)) .onSuccess(new Function<SshPollValue, Long>() { @Override public Long apply(SshPollValue input) { List<String> memoryData = Splitter.on(" ").omitEmptyStrings().splitToList(Strings.getFirstLine(input.getStdout())); return Long.parseLong(memoryData.get(1)); } })) .build(); } }
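The feed above derives total, used, and free memory from the same "free | grep Mem:" output line, only picking different whitespace-separated columns (index 1 for total, 2 for used, 3 for free). A standalone sketch of that parsing convention is shown below; the sample line is hypothetical, not captured SSH output.

// Illustrative parsing of one "Mem:" line from `free`, mirroring the column indexes
// used by the poll configs above (1 = total, 2 = used, 3 = free).
public class FreeMemLineParseSketch {
    public static void main(String[] args) {
        String sampleLine = "Mem:       16384256     8123456     4096000"; // hypothetical output, in kB
        String[] columns = sampleLine.trim().split("\\s+");
        long totalKb = Long.parseLong(columns[1]);
        long usedKb = Long.parseLong(columns[2]);
        long freeKb = Long.parseLong(columns[3]);
        System.out.println("total=" + totalKb + " used=" + usedKb + " free=" + freeKb);
    }
}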
mergian/vednn
src/C/vednnActivationBackward.h
<reponame>mergian/vednn
#ifndef _VEDNNACTIVATIONBACWARD_H_
#define _VEDNNACTIVATIONBACWARD_H_

#include "vednn.h"

typedef vednnError_t (*vednnActivationBackward_t) (
    const void     *pDataGradOut,
    const void     *pDataIn,
    void           *pDataGradIn,
    const uint64_t nElements
) ;

vednnError_t vednnActivationBackward_Relu(
    const void     *pDataGradOut,
    const void     *pDataIn,
    void           *pDataGradIn,
    const uint64_t nElements
) ;

#endif /* _VEDNNACTIVATIONBACWARD_H_ */
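The header only declares the signature; judging by the name, vednnActivationBackward_Relu presumably computes the standard ReLU gradient, passing the upstream gradient through wherever the forward input was positive and zeroing it elsewhere. As a language-neutral illustration of that element-wise rule, here is the same computation sketched in Java; this is an assumption about the semantics, not code taken from the vednn sources.

// Illustrative ReLU backward pass: gradIn[i] = dataIn[i] > 0 ? gradOut[i] : 0.
// Plain float arrays stand in for the void* buffers of the C declaration.
public class ReluBackwardSketch {
    static void reluBackward(float[] gradOut, float[] dataIn, float[] gradIn, int nElements) {
        for (int i = 0; i < nElements; i++) {
            gradIn[i] = dataIn[i] > 0.0f ? gradOut[i] : 0.0f;
        }
    }
}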
GooruAdmin/Gooru-Core-API
api/src/main/java/org/ednovo/gooru/domain/cassandra/ApiCassandraFactory.java
<filename>api/src/main/java/org/ednovo/gooru/domain/cassandra/ApiCassandraFactory.java ///////////////////////////////////////////////////////////// // ApiCassandraFactory.java // gooru-api // Created by Gooru on 2014 // Copyright (c) 2014 Gooru. All rights reserved. // http://www.goorulearning.org/ // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ///////////////////////////////////////////////////////////// /** * */ package org.ednovo.gooru.domain.cassandra; import javax.annotation.PostConstruct; import org.ednovo.gooru.cassandra.core.dao.CassandraColumnFamily; import org.ednovo.gooru.cassandra.core.dao.EntityCassandraColumnFamily; import org.ednovo.gooru.cassandra.core.dao.EntityCassandraDaoImpl; import org.ednovo.gooru.cassandra.core.dao.RawCassandraDaoImpl; import org.ednovo.gooru.cassandra.core.factory.SearchCassandraFactory; import org.ednovo.gooru.core.cassandra.model.ReverseIndexColumnSetting; import org.ednovo.gooru.cassandra.core.service.CassandraSettingService; import org.ednovo.gooru.core.api.model.RevisionHistory; import org.ednovo.gooru.core.cassandra.model.DomainCio; import org.ednovo.gooru.core.cassandra.model.ResourceCio; import org.ednovo.gooru.core.cassandra.model.ReverseIndexColumnSetting; import org.ednovo.gooru.core.cassandra.model.TaxonomyCio; import org.ednovo.gooru.core.cassandra.model.UserCio; import org.ednovo.gooru.core.constant.ColumnFamilyConstant; import org.ednovo.gooru.infrastructure.persistence.hibernate.ConfigSettingRepository; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; /** * @author SearchTeam * */ @Component public class ApiCassandraFactory extends SearchCassandraFactory { @Autowired private ConfigSettingRepository configSettingRepository; @Override @PostConstruct public final void init() { super.init(); register(new CassandraColumnFamily(ColumnFamilyConstant.DATA_STORE)); register(new CassandraColumnFamily(ColumnFamilyConstant.SEARCH_SETTING)); register(new CassandraColumnFamily(ColumnFamilyConstant.CONTENT_META)); register(new EntityCassandraColumnFamily<DomainCio>(DomainCio.class, new ReverseIndexColumnSetting().putField("name", "id"))); register(new EntityCassandraColumnFamily<ResourceCio>(ResourceCio.class, new ReverseIndexColumnSetting().putField("type","resourceType").putField("batch", "batchId").putField("categoy", "category").putField("resourceFormat", "resourceFormat").putField("instructional", "instructional"))); register(new 
EntityCassandraColumnFamily<RevisionHistory>(RevisionHistory.class, new ReverseIndexColumnSetting().putField("entity", "entityName"))); register(new EntityCassandraColumnFamily<TaxonomyCio>(TaxonomyCio.class, new ReverseIndexColumnSetting().putField("organization", "organization.partyUid"))); register(new EntityCassandraColumnFamily<UserCio>(UserCio.class, new ReverseIndexColumnSetting().putField("organization", "organization.partyUid"))); register(new RawCassandraDaoImpl(this, ColumnFamilyConstant.DATA_STORE)); register(new RawCassandraDaoImpl(this, ColumnFamilyConstant.SEARCH_SETTING)); register(new EntityCassandraDaoImpl<ResourceCio>(this, ColumnFamilyConstant.RESOURCE)); register(new EntityCassandraDaoImpl<TaxonomyCio>(this, ColumnFamilyConstant.TAXONOMY)); register(new EntityCassandraDaoImpl<UserCio>(this, ColumnFamilyConstant.USER)); register(new EntityCassandraDaoImpl<RevisionHistory>(this, ColumnFamilyConstant.REVISION_HISTORY)); register(new EntityCassandraDaoImpl<DomainCio>(this, ColumnFamilyConstant.DOMAIN)); register(new RawCassandraDaoImpl(this, ColumnFamilyConstant.CONTENT_META)); } @Override public CassandraSettingService getSettingService() { return configSettingRepository; } }
liammews/Briefrr
studio/node_modules/popmotion/dist/es/packages/popmotion/lib/index.js
<filename>studio/node_modules/popmotion/dist/es/packages/popmotion/lib/index.js<gh_stars>1-10 export { animate } from './animations/index.js'; export { inertia } from './animations/inertia.js'; export { decay } from './animations/generators/decay.js'; export { spring } from './animations/generators/spring.js'; export { keyframes } from './animations/generators/keyframes.js'; export { angle } from './utils/angle.js'; export { applyOffset } from './utils/apply-offset.js'; export { attract, attractExpo, createAttractor } from './utils/attract.js'; export { clamp } from './utils/clamp.js'; export { degreesToRadians } from './utils/degrees-to-radians.js'; export { distance } from './utils/distance.js'; export { interpolate } from './utils/interpolate.js'; export { isPoint3D } from './utils/is-point-3d.js'; export { isPoint } from './utils/is-point.js'; export { mixColor } from './utils/mix-color.js'; export { mixComplex } from './utils/mix-complex.js'; export { mix } from './utils/mix.js'; export { pipe } from './utils/pipe.js'; export { pointFromVector } from './utils/point-from-vector.js'; export { progress } from './utils/progress.js'; export { radiansToDegrees } from './utils/radians-to-degrees.js'; export { smoothFrame } from './utils/smooth-frame.js'; export { smooth } from './utils/smooth.js'; export { snap } from './utils/snap.js'; export { toDecimal } from './utils/to-decimal.js'; export { velocityPerFrame } from './utils/velocity-per-frame.js'; export { velocityPerSecond } from './utils/velocity-per-second.js'; export { wrap } from './utils/wrap.js'; export { anticipate, backIn, backInOut, backOut, bounceIn, bounceInOut, bounceOut, circIn, circInOut, circOut, easeIn, easeInOut, easeOut, linear } from './easing/index.js'; export { cubicBezier } from './easing/cubic-bezier.js'; export { steps } from './easing/steps.js'; export { createAnticipate, createBackIn, createExpoIn, mirrorEasing, reverseEasing } from './easing/utils.js';
kppw99/enVAS
dataset/source/NVD/CVE_2013_1772_PATCHED_call_console_drivers.c
static void CVE_2013_1772_PATCHED_call_console_drivers(unsigned start, unsigned end)
{
	unsigned cur_index, start_print;
	static int msg_level = -1;

	BUG_ON(((int)(start - end)) > 0);

	cur_index = start;
	start_print = start;
	while (cur_index != end) {
		if (msg_level < 0 && ((end - cur_index) > 2)) {
			/*
			 * prepare buf_prefix, as a contiguous array,
			 * to be processed by log_prefix function
			 */
			char buf_prefix[SYSLOG_PRI_MAX_LENGTH+1];
			unsigned i;
			for (i = 0; i < ((end - cur_index)) && (i < SYSLOG_PRI_MAX_LENGTH); i++) {
				buf_prefix[i] = LOG_BUF(cur_index + i);
			}
			buf_prefix[i] = '\0'; /* force '\0' as last string character */

			/* strip log prefix */
			cur_index += log_prefix((const char *)&buf_prefix, &msg_level, NULL);
			start_print = cur_index;
		}
		while (cur_index != end) {
			char c = LOG_BUF(cur_index);

			cur_index++;
			if (c == '\n') {
				if (msg_level < 0) {
					/*
					 * printk() has already given us loglevel tags in
					 * the buffer. This code is here in case the
					 * log buffer has wrapped right round and scribbled
					 * on those tags
					 */
					msg_level = default_message_loglevel;
				}
				_call_console_drivers(start_print, cur_index, msg_level);
				msg_level = -1;
				start_print = cur_index;
				break;
			}
		}
	}
	_call_console_drivers(start_print, end, msg_level);
}
lambda666/liteos_in_raspberry_ubuntu20
LiteOS/targets/bsp/drivers/FM33LC0xx_LL_Driver/Src/fm33lc0xx_ll_adc.c
<reponame>lambda666/liteos_in_raspberry_ubuntu20<gh_stars>0 /** **************************************************************************************************** * @file fm33lc0xx_ll_adc.c * @author FMSH Application Team * @brief Src file of ADC LL Module **************************************************************************************************** * @attention * * Copyright (c) [2019] [Fudan Microelectronics] * THIS SOFTWARE is licensed under the Mulan PSL v1. * can use this software according to the terms and conditions of the Mulan PSL v1. * You may obtain a copy of Mulan PSL v1 at: * http://license.coscl.org.cn/MulanPSL * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR * PURPOSE. * See the Mulan PSL v1 for more details. * **************************************************************************************************** */ /* Includes ------------------------------------------------------------------*/ #include "fm33lc0xx_ll_adc.h" #include "fm33lc0xx_ll_rcc.h" #include "fm33lc0xx_ll_rmu.h" #include "fm33lc0xx_ll_vref.h" #include "fm33_assert.h" /** @addtogroup fm33lc0xx_LL_Driver * @{ */ /** *@} */ /* Private macros ------------------------------------------------------------*/ /** @addtogroup LPUART_LL_Private_Macros * @{ */ #define IS_LL_ADC_INSTANCE(INSTANCE) ((INSTANCE) == ADC) #define IS_LL_ADC_COMMON_CLOCKSOURCE(__VALUE__) (((__VALUE__) == LL_RCC_ADC_OPERATION_CLOCK_PRESCALLER_PLL)||\ ((__VALUE__) == LL_RCC_ADC_OPERATION_CLOCK_PRESCALLER_XTHF)||\ ((__VALUE__) == LL_RCC_ADC_OPERATION_CLOCK_PRESCALLER_RCHF)||\ ((__VALUE__) == LL_RCC_ADC_OPERATION_CLOCK_PRESCALLER_RC4M_PSC)) #define IS_LL_ADC_COMMON_PRESCALER(__VALUE__) (((__VALUE__) == LL_RCC_ADC_OPERATION_CLOCK_PRESCALER_DIV1)||\ ((__VALUE__) == LL_RCC_ADC_OPERATION_CLOCK_PRESCALER_DIV2)||\ ((__VALUE__) == LL_RCC_ADC_OPERATION_CLOCK_PRESCALER_DIV4)||\ ((__VALUE__) == LL_RCC_ADC_OPERATION_CLOCK_PRESCALER_DIV8)||\ ((__VALUE__) == LL_RCC_ADC_OPERATION_CLOCK_PRESCALER_DIV16)||\ ((__VALUE__) == LL_RCC_ADC_OPERATION_CLOCK_PRESCALER_DIV32)) #define IS_LL_ADC_CLOCK_SRC(__VALUE__) (((__VALUE__) == LL_ADC_CLOCK_ADCCLK)||\ ((__VALUE__) == LL_ADC_CLOCK_APBCLK)||\ ((__VALUE__) == LL_ADC_CLOCK_APBCLK_DIV2)||\ ((__VALUE__) == LL_ADC_CLOCK_APBCLK_DIV4)) #define IS_LL_ADC_CONTINUOUSCONVMODE(__VALUE__) (((__VALUE__) == LL_ADC_CONV_SINGLE)||\ ((__VALUE__) == LL_ADC_CONV_CONTINUOUS)) #define IS_LL_ADC_AUTO_MODE(__VALUE__) (((__VALUE__) == LL_ADC_SINGLE_CONV_MODE_SEMIAUTO)||\ ((__VALUE__) == LL_ADC_SINGLE_CONV_MODE_AUTO)) #define IS_LL_ADC_SCANDIRECTION(__VALUE__) (((__VALUE__) == LL_ADC_SEQ_SCAN_DIR_FORWARD)||\ ((__VALUE__) == LL_ADC_SEQ_SCAN_DIR_BACKWARD)) #define IS_LL_ADC_EXTERNALTRIGCONV(__VALUE__) (((__VALUE__) == LL_ADC_EXT_TRIGGER_NONE)||\ ((__VALUE__) == LL_ADC_EXT_TRIGGER_RISING)||\ ((__VALUE__) == LL_ADC_EXT_TRIGGER_FALLING)||\ ((__VALUE__) == LL_ADC_EXT_TRIGGER_BOTHEDGE)) #define IS_LL_ADC_SAMPLINGTRIGSOURCE(__VALUE__) (((__VALUE__) == LL_ADC_TRIG_EXT_PA8)||\ ((__VALUE__) == LL_ADC_TRIG_EXT_PB9)||\ ((__VALUE__) == LL_ADC_TRIG_EXT_ATIM_TRGO)||\ ((__VALUE__) == LL_ADC_TRIG_EXT_GPTIM1_TRGO)||\ ((__VALUE__) == LL_ADC_TRIG_EXT_GPTIM2_TRGO)||\ ((__VALUE__) == LL_ADC_TRIG_EXT_RTC_TRGO)||\ ((__VALUE__) == LL_ADC_TRIG_EXT_BSTIM1_TRGO)||\ ((__VALUE__) == LL_ADC_TRIG_EXT_COMP1_TRGO)||\ ((__VALUE__) == LL_ADC_TRIG_EXT_COMP2_TRGO)) #define IS_LL_ADC_CHANNEL_SWAP_WAIT(__VALUE__) (((__VALUE__) == 
LL_ADC_SAMPLEING_INTERVAL_1_CYCLE)||\ ((__VALUE__) == LL_ADC_SAMPLEING_INTERVAL_2_CYCLES)||\ ((__VALUE__) == LL_ADC_SAMPLEING_INTERVAL_3_CYCLES)||\ ((__VALUE__) == LL_ADC_SAMPLEING_INTERVAL_4_CYCLES)||\ ((__VALUE__) == LL_ADC_SAMPLEING_INTERVAL_5_CYCLES)||\ ((__VALUE__) == LL_ADC_SAMPLEING_INTERVAL_6_CYCLES)||\ ((__VALUE__) == LL_ADC_SAMPLEING_INTERVAL_7_CYCLES)||\ ((__VALUE__) == LL_ADC_SAMPLEING_INTERVAL_8_CYCLES)||\ ((__VALUE__) == LL_ADC_SAMPLEING_INTERVAL_9_CYCLES)||\ ((__VALUE__) == LL_ADC_SAMPLEING_INTERVAL_10_CYCLES)||\ ((__VALUE__) == LL_ADC_SAMPLEING_INTERVAL_11_CYCLES)) #define IS_LL_ADC_CHANNEL_FAST_TIME(__VALUE__) (((__VALUE__) == LL_ADC_FAST_CH_SAMPLING_TIME_3_CYCLES_5)||\ ((__VALUE__) == LL_ADC_FAST_CH_SAMPLING_TIME_4_CYCLES_5)||\ ((__VALUE__) == LL_ADC_FAST_CH_SAMPLING_TIME_6_CYCLES_5)||\ ((__VALUE__) == LL_ADC_FAST_CH_SAMPLING_TIME_10_CYCLES_5)) #define IS_LL_ADC_CHANNEL_SLOW_TIME(__VALUE__) (((__VALUE__) == LL_ADC_SLOW_CH_SAMPLING_TIME_3_CYCLES_5)||\ ((__VALUE__) == LL_ADC_SLOW_CH_SAMPLING_TIME_4_CYCLES_5)||\ ((__VALUE__) == LL_ADC_SLOW_CH_SAMPLING_TIME_6_CYCLES_5)||\ ((__VALUE__) == LL_ADC_SLOW_CH_SAMPLING_TIME_10_CYCLES_5)) #define IS_LL_ADC_OVERSAMPCOFIG(__VALUE__) (((__VALUE__) == DISABLE)||\ ((__VALUE__) == ENABLE)) #define IS_LL_ADC_OVERSAMPINGRATIO(__VALUE__) (((__VALUE__) == LL_ADC_OVERSAMPLING_2X)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_4X)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_8X)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_16X)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_32X)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_64X)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_128X)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_256X)) #define IS_LL_ADC_OVERSAMPINGSHIFT(__VALUE__) (((__VALUE__) == LL_ADC_OVERSAMPLING_RESULT_ORIGINAL)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_RESULT_DIV2)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_RESULT_DIV4)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_RESULT_DIV8)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_RESULT_DIV16)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_RESULT_DIV32)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_RESULT_DIV64)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_RESULT_DIV128)||\ ((__VALUE__) == LL_ADC_OVERSAMPLING_RESULT_DIV256)) #define ADC_CALIBRATIN_TIME_OUT (500000) /** * @} */ /** * @brief ADC外设寄存器值为复位值 * @param 外设入口地址 * @retval 返回错误状态,可能值: * -PASS 外设寄存器值恢复复位值 * -FAIL 未成功执行 */ ErrorStatus LL_ADC_CommonDeInit(void) { /* 关闭总线时钟 */ LL_RCC_Group2_DisableBusClock(LL_RCC_BUS2_CLOCK_ADC); /* 关闭操作时钟 */ LL_RCC_Group2_DisableOperationClock(LL_RCC_OPERATION2_CLOCK_ADC); return PASS; } /** * @brief ADC共用寄存器设置以配置外设工作时钟 * * @note 其中LL_LPTIM_OPERATION_MODE_EXTERNAL_ASYN_PAUSE_CNT 模式需要外部脉冲提供给LPTIM模块作为工作时钟,此时 * LPTIM完全工作在异步模式下。 * @param LPTIM 外设入口地址 * @param LPTIM_InitStruct指向LL_LPTIM_TimeInitTypeDef类的结构体,它包含指定LPTIM外设的配置信息 * * @retval ErrorStatus枚举值 * -FAIL 配置过程发生错误 * -PASS LPUART配置成功 */ ErrorStatus LL_ADC_CommonInit(LL_ADC_CommonInitTypeDef *ADC_CommonInitStruct) { ErrorStatus status = PASS; /* 入口参数检查 */ assert_param(IS_LL_ADC_COMMON_CLOCKSOURCE(ADC_CommonInitStruct->AdcClockSource)); assert_param(IS_LL_ADC_COMMON_PRESCALER(ADC_CommonInitStruct->AdcClockPrescaler)); /* 开启总线时钟 */ LL_RCC_Group2_EnableBusClock(LL_RCC_BUS2_CLOCK_ADC); /* 开启操作时钟 */ LL_RCC_Group2_EnableOperationClock(LL_RCC_OPERATION2_CLOCK_ADC); /* 配置ADC时钟预分频 */ LL_RCC_SetADCPrescaler(ADC_CommonInitStruct->AdcClockPrescaler); /* 配置ADC时钟模块时钟源 */ LL_RCC_SetADCClockSource(ADC_CommonInitStruct->AdcClockSource); return status; } /** * @brief 设置 ADC_CommonInitStruct 为默认配置 * @param ADC_CommonInitStruct 指向需要将值设置为默认配置的结构体 @ref 
LL_ADC_CommonInitTypeDef 结构体 * * @retval None */ void LL_ADC_CommonStructInit(LL_ADC_CommonInitTypeDef *ADC_CommonInitStruct) { /*默认使用RCHF作为ADC时钟模块时钟源,预分频系数16*/ ADC_CommonInitStruct->AdcClockSource = LL_RCC_ADC_OPERATION_CLOCK_PRESCALLER_RCHF; ADC_CommonInitStruct->AdcClockPrescaler = LL_RCC_ADC_OPERATION_CLOCK_PRESCALER_DIV16; } /** * @brief 恢复对应的ADC入口地址寄存器为默认值 * * @param ADCx 外设入口地址 * * @retval ErrorStatus枚举值 * -FAIL 配置过程发生错误 * -PASS LPUART配置成功 */ ErrorStatus LL_ADC_DeInit(ADC_Type *ADCx) { ErrorStatus status = PASS; /* 入口合法性检查 */ assert_param(IS_LL_ADC_INSTANCE(ADCx)); /* 外设复位使能 */ LL_RCC_EnablePeripheralReset(); LL_RCC_EnableResetAPB2Peripheral(LL_RCC_APB2_PERIPHERAL_RST_ADC); LL_RCC_DisableResetAPB2Peripheral(LL_RCC_APB2_PERIPHERAL_RST_ADC); LL_RCC_EnableResetAPB2Peripheral(LL_RCC_APB2_PERIPHERAL_RST_ADC_CR); LL_RCC_DisableResetAPB2Peripheral(LL_RCC_APB2_PERIPHERAL_RST_ADC_CR); LL_RCC_DisablePeripheralReset(); return status; } /** * @brief 初始化ADCx指定的入口地址的外设寄存器 * * @note 用户必须检查此函数的返回值,以确保自校准完成,否则转换结果精度无法保证,除此之外ADC使能过采样实际不会增加ADC的 * 转换精度只会提高转换结果的稳定性(同时配置移位寄存器的情况下),同时过采样会降低转换速度。 * @param ADCx 外设入口地址 * @param ADC_InitStruct 向一LL_ADC_InitTypeDef结构体,它包含指定ADC外设的配置信息 * * @retval ErrorStatus枚举值 * -FAIL 配置过程发生错误 * -PASS LPUART配置成功 */ ErrorStatus LL_ADC_Init(ADC_Type *ADCx, LL_ADC_InitTypeDef *ADC_InitStruct) { ErrorStatus status = PASS; /* 入口合法性检查 */ assert_param(IS_LL_ADC_INSTANCE(ADCx)); assert_param(IS_LL_ADC_CONTINUOUSCONVMODE(ADC_InitStruct->ADC_ContinuousConvMode)); assert_param(IS_LL_ADC_AUTO_MODE(ADC_InitStruct->ADC_AutoMode)); assert_param(IS_LL_ADC_SCANDIRECTION(ADC_InitStruct->ADC_ScanDirection)); assert_param(IS_LL_ADC_EXTERNALTRIGCONV(ADC_InitStruct->ADC_ExternalTrigConv)); assert_param(IS_LL_ADC_OVERSAMPCOFIG(ADC_InitStruct->ADC_Oversampling)); assert_param(IS_LL_ADC_OVERSAMPINGRATIO(ADC_InitStruct->ADC_OverSampingRatio)); assert_param(IS_LL_ADC_OVERSAMPINGSHIFT(ADC_InitStruct->ADC_OversamplingShift)); assert_param(IS_LL_ADC_SAMPLINGTRIGSOURCE(ADC_InitStruct->ADC_SamplingTrigSource)); if(DISABLE == LL_VREF_IsEnabledVREF(VREF)) LL_VREF_ClearFlag_VREFIF(VREF); LL_VREF_EnableVREF(VREF);//置位VREF_EN寄存器,使能VREF1p2模块 LL_VREF_EnableTemperatureSensor(VREF);//置位PTAT_EN寄存器 while (LL_VREF_IsActiveFlag_VREFIF(VREF) == 0); //等待VREF建立 LL_RCC_EnablePeripheralReset(); LL_RCC_EnableResetAPB2Peripheral(LL_RCC_APB2_PERIPHERAL_RST_ADC); LL_RCC_DisableResetAPB2Peripheral(LL_RCC_APB2_PERIPHERAL_RST_ADC); LL_RCC_EnableResetAPB2Peripheral(LL_RCC_APB2_PERIPHERAL_RST_ADC_CR); LL_RCC_DisableResetAPB2Peripheral(LL_RCC_APB2_PERIPHERAL_RST_ADC_CR); LL_RCC_DisablePeripheralReset(); if (LL_ADC_IsEnabled(ADCx) == 0U) { /* 选择ADC时钟源 */ //LL_ADC_SetClock(ADCx,ADC_InitStruct->ADC_Clock_Src); /* 连续转换模式 */ LL_ADC_SetContinuousMode(ADCx, ADC_InitStruct->ADC_ContinuousConvMode); /* 自动转换模式 */ LL_ADC_SetSingleConversionAutoMode(ADCx, ADC_InitStruct->ADC_AutoMode); /* 多通道扫描方向 */ LL_ADC_SetSequencerScanDirection(ADCx, ADC_InitStruct->ADC_ScanDirection); /* 触发模式 */ LL_ADC_SetTriggerEdge(ADCx, ADC_InitStruct->ADC_ExternalTrigConv); /* 触发源选择*/ LL_ADC_SetTriggerSource(ADCx, ADC_InitStruct->ADC_SamplingTrigSource); /* 通道等待使能 */ LL_ADC_SetWaitMode(ADCx, ADC_InitStruct->ADC_WaitMode); /*数据冲突模式设置*/ LL_ADC_SetOverrun(ADCx, ADC_InitStruct->ADC_OverrunMode); /*通道采样时间设置*/ LL_ADC_SetSamplingInterval(ADCx, ADC_InitStruct->ADC_Channel_Swap_Wait); LL_ADC_SetSamplingTimeFastCH(ADCx, ADC_InitStruct->ADC_Channel_Fast_Time); LL_ADC_SetSamplingTimeSlowCH(ADCx, ADC_InitStruct->ADC_Channel_Slow_Time); if (ADC_InitStruct->ADC_Oversampling) { 
/*使能过采样倍数后,需要配置移位寄存器进行移位,这一过程是硬件自动完成的最终最大 可输出16位的结果值(即256被采样得到的结果是20bit的,右移4bit结果就是16bit的)*/ LL_ADC_SetOverSamplingRatio(ADCx, ADC_InitStruct->ADC_OverSampingRatio); LL_ADC_SetOverSamplingResult(ADCx, ADC_InitStruct->ADC_OversamplingShift); /* 过采样使能 */ LL_ADC_EnableOverSampling(ADCx); } else { /* 关闭过采样 */ LL_ADC_DisableOverSampling(ADCx); } /* 关闭ADC */ LL_ADC_Disable(ADCx); } else { status = FAIL; } return status; } /** * @brief 校准芯片实际工作的VDDA值,从而进行采样值到实际电压值的准确装换 * * @note 此函数必须将ADC工作时钟频率配置为1M以下(推荐500KHz),才可以保证转换结果的准确性 * * @param ADCx 外设入口地址 * * @retval 当前芯片工作实际的VDDA电压值(mV) * * */ //uint32_t GetActualVddaVoltage(ADC_Type *ADCx) //{ // //} /** * @brief 设置 ADC_InitStruct 为默认配置 * @param ADC_InitStruct 指向需要将值设置为默认配置的结构体 @ref LL_ADC_InitTypeDef 结构体 * * @retval None */ void LL_ADC_StructInit(LL_ADC_InitTypeDef *ADC_InitStruct) { ADC_InitStruct->ADC_ContinuousConvMode = LL_ADC_CONV_CONTINUOUS; ADC_InitStruct->ADC_AutoMode = LL_ADC_SINGLE_CONV_MODE_AUTO; ADC_InitStruct->ADC_ScanDirection = LL_ADC_SEQ_SCAN_DIR_BACKWARD; ADC_InitStruct->ADC_ExternalTrigConv = LL_ADC_EXT_TRIGGER_NONE; ADC_InitStruct->ADC_SamplingTrigSource = LL_ADC_TRIG_EXT_PA8; ADC_InitStruct->ADC_OverrunMode = LL_ADC_OVR_DATA_OVERWRITTEN; ADC_InitStruct->ADC_WaitMode = LL_ADC_WAIT_MODE_NO_WAIT; ADC_InitStruct->ADC_Channel_Swap_Wait = LL_ADC_SAMPLEING_INTERVAL_11_CYCLES; ADC_InitStruct->ADC_Channel_Fast_Time = LL_ADC_FAST_CH_SAMPLING_TIME_384_ADCCLK; ADC_InitStruct->ADC_Channel_Slow_Time = LL_ADC_SLOW_CH_SAMPLING_TIME_384_ADCCLK; ADC_InitStruct->ADC_Oversampling = ENABLE; ADC_InitStruct->ADC_OverSampingRatio = LL_ADC_OVERSAMPLING_256X; ADC_InitStruct->ADC_OversamplingShift = LL_ADC_OVERSAMPLING_RESULT_ORIGINAL; } /*************************************************************END OF FILE************************************************************/
funsonli/spring-boot-demo
spring-boot-demo-522-uploader/src/main/java/com/funsonli/springbootdemo522uploader/component/uploader/Uploader.java
<filename>spring-boot-demo-522-uploader/src/main/java/com/funsonli/springbootdemo522uploader/component/uploader/Uploader.java
package com.funsonli.springbootdemo522uploader.component.uploader;

import java.io.InputStream;

/**
 * Interface for file uploaders
 *
 * @author Funsonli
 * @date 2019/10/30
 */
public interface Uploader {

    /**
     *
     * @author Funsonli
     * @date 2019/10/30
     * @param file input stream of the file to upload
     * @param key storage key for the uploaded file
     * @return location of the stored file
     */
    String uploadInputStream(InputStream file, String key);
}
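A minimal sketch of an implementation of this interface that writes the stream to the local filesystem is shown below. It is illustrative only; the demo project presumably ships its own uploader implementations, which are not included here, and the base directory is made up.

// Illustrative local-disk implementation of Uploader.
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class LocalFileUploader implements Uploader {

    private static final Path BASE_DIR = Paths.get("/tmp/uploads"); // hypothetical target directory

    @Override
    public String uploadInputStream(InputStream file, String key) {
        try {
            Path target = BASE_DIR.resolve(key);
            Files.createDirectories(target.getParent());
            Files.copy(file, target, StandardCopyOption.REPLACE_EXISTING);
            return target.toString(); // return the stored location
        } catch (IOException e) {
            throw new IllegalStateException("Upload failed for key " + key, e);
        }
    }
}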
f-koehler/mlxtk
mlxtk/simulation_set/__init__.py
import argparse import re import sys from pathlib import Path from typing import List, Optional, Tuple, Union from mlxtk import sge from mlxtk.simulation import Simulation from mlxtk.simulation_set import base RE_INDEX = re.compile(r"(\d+)") RE_RANGE = re.compile(r"(\d+)-(\d+)") RE_SLICE = re.compile(r"^([+-]*\d*):([+-]*\d*)(?::([+-]*\d*))?$") def parse_slice(slice_: str) -> Optional[Tuple[int, Optional[int], int]]: m = RE_SLICE.match(slice) if not m: return None start = 0 stop = None step = 1 if m.group(1) != "": start = int(m.group(1)) if m.group(2) != "": stop = int(m.group(2)) try: if m.group(3) != "": step = int(m.group(3)) except IndexError: pass return (start, stop, step) class SimulationSet(base.SimulationSetBase): def __init__( self, name: str, simulations: List[Simulation], working_dir: Union[str, Path] = None, ): super().__init__(name, simulations, working_dir) self.argparser = argparse.ArgumentParser( description="This is a set of mlxtk simulations" ) self.subparsers = self.argparser.add_subparsers() self.argparser_archive = self.subparsers.add_parser("archive") self.argparser_archive.set_defaults(subcommand=self.cmd_archive) self.argparser_archive.add_argument( "-c", "--compression", type=int, default=9, help="compression level [1-9] (1: fastest, 9: best)", ) self.argparser_archive.add_argument( "-j", "--jobs", type=int, default=1, help="number of jobs (when pigz is available)", ) self.argparser_clean = self.subparsers.add_parser("clean") self.argparser_clean.set_defaults(subcommand=self.cmd_clean) self.argparser_clean.add_argument( "-j", "--jobs", type=int, default=1, help="number of parallel workers" ) self.argparser_dry_run = self.subparsers.add_parser("dry-run") self.argparser_dry_run.set_defaults(subcommand=self.cmd_dry_run) self.argparser_list = self.subparsers.add_parser("list") self.argparser_list.set_defaults(subcommand=self.cmd_list) self.argparser_list.add_argument("-d", "--directory", action="store_true") self.argparser_list_tasks = self.subparsers.add_parser("list-tasks") self.argparser_list_tasks.set_defaults(subcommand=self.cmd_list_tasks) self.argparser_list_tasks.add_argument( "index", type=int, help="index of the simulation whose tasks to list" ) self.argparser_lockfiles = self.subparsers.add_parser("lockfiles") self.argparser_lockfiles.set_defaults(subcommand=self.cmd_lockfiles) self.argparser_propagation_status = self.subparsers.add_parser( "propagation-status" ) self.argparser_propagation_status.set_defaults( subcommand=self.cmd_propagation_status ) self.argparser_propagation_status.add_argument( "name", default="propagate", nargs="?", type=str, help="name of the propagation", ) self.argparser_qsub = self.subparsers.add_parser("qsub") self.argparser_qsub.set_defaults(subcommand=self.cmd_qsub_array) sge.add_parser_arguments(self.argparser_qsub) self.argparser_run = self.subparsers.add_parser("run") self.argparser_run.set_defaults(subcommand=self.cmd_run) self.argparser_run.add_argument( "-j", "--jobs", type=int, default=1, help="number of parallel workers" ) self.argparser_run_index = self.subparsers.add_parser("run-index") self.argparser_run_index.set_defaults(subcommand=self.cmd_run_index) self.argparser_run_index.add_argument( "index", type=int, help="index of the simulation to run" ) self.argparser_task_info = self.subparsers.add_parser("task-info") self.argparser_task_info.set_defaults(subcommand=self.cmd_task_info) self.argparser_task_info.add_argument( "index", type=int, help="index of the simulation" ) self.argparser_task_info.add_argument("name", 
type=str, help="name of the task") self.argparser_qdel = self.subparsers.add_parser("qdel") self.argparser_qdel.set_defaults(subcommand=self.cmd_qdel) def parse_selection(self, arg: str) -> List[int]: tokens = arg.strip().split(",") indices = [] for token in tokens: m = RE_INDEX.match(token) if m: indices.append(int(m.group(1))) continue m = RE_RANGE.match(token) if m: start = int(m.group(1)) stop = int(m.group(2)) if stop < start: raise ValueError("stop index is smaller than start index") indices += list(range(start, stop)) continue result = parse_slice(token) if not result: raise ValueError(f"invalid format: {token}") start = result[0] stop = result[1] if stop is None: stop = len(self.simulations) step = result[2] indices += list(range(start, stop, step)) return list(set(indices)) from mlxtk.simulation_set.cmd_archive import cmd_archive from mlxtk.simulation_set.cmd_clean import cmd_clean from mlxtk.simulation_set.cmd_dry_run import cmd_dry_run from mlxtk.simulation_set.cmd_list import cmd_list from mlxtk.simulation_set.cmd_list_tasks import cmd_list_tasks from mlxtk.simulation_set.cmd_lockfiles import cmd_lockfiles from mlxtk.simulation_set.cmd_propagation_status import cmd_propagation_status from mlxtk.simulation_set.cmd_qdel import cmd_qdel from mlxtk.simulation_set.cmd_qsub_array import cmd_qsub_array from mlxtk.simulation_set.cmd_run import cmd_run from mlxtk.simulation_set.cmd_run_index import cmd_run_index from mlxtk.simulation_set.cmd_task_info import cmd_task_info def main(self, argv: List[str]): if argv is None: argv = sys.argv[1:] args = self.argparser.parse_args(argv) args.subcommand(args)
jsuo/XVim2
XVim2/XcodeHeader/IDEKit/IDENavigatorSCMStatusCell.h
// // Generated by class-dump 3.5 (64 bit) (Debug version compiled Mar 30 2018 09:30:25). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by <NAME>. // #import <DVTKit/DVTStructuredLayoutTextFieldCell.h> @class NSAttributedString, NSString; @interface IDENavigatorSCMStatusCell : DVTStructuredLayoutTextFieldCell { NSString *_localStatus; NSString *_serverStatus; NSAttributedString *_localAttrString; NSAttributedString *_serverAttrString; BOOL _hideLocalStatus; } + (id)keyPathsForValuesAffectingHasDisplayableStatus; + (struct CGSize)singleItemMaxSize; + (void)initialize; @property BOOL hideLocalStatus; // @synthesize hideLocalStatus=_hideLocalStatus; @property(copy, nonatomic) NSString *serverStatus; // @synthesize serverStatus=_serverStatus; @property(copy, nonatomic) NSString *localStatus; // @synthesize localStatus=_localStatus; - (void).cxx_destruct; @property(readonly) BOOL hasDisplayableStatus; - (struct CGSize)cellSizeForBounds:(struct CGRect)arg1; - (struct CGSize)cellSize; - (void)_updateTitle; - (void)drawInteriorWithFrame:(struct CGRect)arg1 inView:(id)arg2; - (id)_bezelPathInRect:(struct CGRect)arg1; - (void)_drawStatusString:(id)arg1 inPathRect:(struct CGRect)arg2 view:(id)arg3; - (id)_fontForSize:(double)arg1 bold:(BOOL)arg2; - (id)_createServerStatusAttributedString; - (id)_createLocalStatusAttributedString; - (id)init; @end
cwolsen7905/UbixOS
doc/html/df/dc9/exec_8c.js
var exec_8c = [ [ "AT_BASE", "df/dc9/exec_8c.html#a9b8e51a44a47d7ea827c570ffdca14e5", null ], [ "AT_ENTRY", "df/dc9/exec_8c.html#a80f2ea99241a6afdc4718d3c4318c871", null ], [ "AT_EXECFD", "df/dc9/exec_8c.html#a8a3070773d3f9231c9390172ae3fd2e7", null ], [ "AT_FLAGS", "df/dc9/exec_8c.html#a08c6bdc11224c9d17fbde19666c332cc", null ], [ "AT_IGNORE", "df/dc9/exec_8c.html#ab11dfd4fb71c15a128c3f70a310b53d1", null ], [ "AT_NULL", "df/dc9/exec_8c.html#aa893f7231479240d49c660c5649cac5b", null ], [ "AT_PAGESZ", "df/dc9/exec_8c.html#a7269f0c135d4bd3f9bc074d18fb30f9d", null ], [ "AT_PHDR", "df/dc9/exec_8c.html#a3de7ec7b5cee2d34208387ab97dc0642", null ], [ "AT_PHENT", "df/dc9/exec_8c.html#a56f1f9975d88f768d7928b0ed587ef38", null ], [ "AT_PHNUM", "df/dc9/exec_8c.html#abc712c67bee059574b4e3c096538d5bd", null ], [ "AUXARGS_ENTRY", "df/dc9/exec_8c.html#afa8ddb544ff5a7ec07bbe00b16927559", null ], [ "STACK_ADDR", "df/dc9/exec_8c.html#ad93dccba62fb5241c0b7cebcd097794a", null ], [ "execFile", "df/dc9/exec_8c.html#af80d4412d42ed1b5bf9a1b9774230f33", null ], [ "execThread", "df/dc9/exec_8c.html#a4b86d28d8fd56197042aef174de2fd58", null ], [ "sys_exec", "df/dc9/exec_8c.html#a094c23346b6d226a03866a54b755ed64", null ], [ "sysExec", "df/dc9/exec_8c.html#a219681612352232186a0f568457d851d", null ] ];
staale/mats
mats-websockets/src/test/java/com/stolsvik/mats/websocket/SetupTestMatsAndMatsSocketEndpoints.java
<reponame>staale/mats<gh_stars>0 package com.stolsvik.mats.websocket; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import org.junit.Assert; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.stolsvik.mats.MatsFactory; import com.stolsvik.mats.websocket.DummySessionAuthenticator.DummyAuthPrincipal; import com.stolsvik.mats.websocket.MatsSocketServer.ActiveMatsSocketSession; import com.stolsvik.mats.websocket.MatsSocketServer.MatsSocketEnvelopeWithMetaDto; import com.stolsvik.mats.websocket.MatsSocketServer.SessionEstablishedEvent; import com.stolsvik.mats.websocket.MatsSocketServer.SessionRemovedEvent; /** * Sets up the test endpoints used from the integration tests (and the HTML test-pages). * * @author <NAME> 2020-02-20 18:33 - http://stolsvik.com/, <EMAIL> */ public class SetupTestMatsAndMatsSocketEndpoints { private static final Logger log = LoggerFactory.getLogger(SetupTestMatsAndMatsSocketEndpoints.class); static void setupMatsAndMatsSocketEndpoints(MatsFactory matsFactory, MatsSocketServer matsSocketServer) { // Add listeners setup_AddListeners(matsSocketServer); // "Standard" test endpoint setup_StandardTestSingle(matsSocketServer, matsFactory); setup_SimpleMats(matsSocketServer, matsFactory); // Resolve/Reject/Throws in incomingHandler and replyAdapter setupSocket_IgnoreInIncoming(matsSocketServer); setupSocket_DenyInIncoming(matsSocketServer); setupSocket_ResolveInIncoming(matsSocketServer); setupSocket_RejectInIncoming(matsSocketServer); setupSocket_ThrowsInIncoming(matsSocketServer); setupSocket_IgnoreInReplyAdapter(matsSocketServer); setupSocket_ResolveInReplyAdapter(matsSocketServer); setupSocket_RejectInReplyAdapter(matsSocketServer); setupSocket_ThrowsInReplyAdapter(matsSocketServer); setup_TestSlow(matsSocketServer, matsFactory); setup_ServerPush_Send(matsSocketServer, matsFactory); setup_ServerPush_Request_Via_Mats(matsSocketServer, matsFactory); setup_ServerPush_Request_Direct(matsSocketServer, matsFactory); setupSocket_ReplyWithCookieAuthorization(matsSocketServer); setupSocket_CloseThisSession(matsSocketServer); setupSocket_Publish(matsSocketServer); setupSocket_MatsSocket_renewAuth(matsSocketServer); } private static ConcurrentHashMap<String, SessionEstablishedEvent> __sessionMap = new ConcurrentHashMap<>(); private static CopyOnWriteArrayList<SessionRemovedEvent> __sessionRemovedEvents = new CopyOnWriteArrayList<>(); // ===== Listeners: private static void setup_AddListeners(MatsSocketServer matsSocketServer) { matsSocketServer.addSessionEstablishedEventListener(event -> { ActiveMatsSocketSession session = event.getMatsSocketSession(); log.info("#### LISTENER[SESSION]! +++ESTABLISHED!+++ [" + event.getType() + "] #### SessionId:" + event .getMatsSocketSessionId() + ", appName: " + session.getAppName() + ", appVersion:" + session .getAppVersion() + ", clientLibAndVersions:" + session.getClientLibAndVersions() + ", authorization:" + session .getAuthorization()); __sessionMap.put(event.getMatsSocketSessionId(), event); }); matsSocketServer.addSessionRemovedEventListener(event -> { SessionEstablishedEvent removed = __sessionMap.remove(event.getMatsSocketSessionId()); log.info("#### LISTENER[SESSION]! ---REMOVED!!--- [" + event.getType() + "] [" + (removed != null ? 
"Added session as:" + removed.getType() : "SESSION was already GONE!") + "] #### SessionId:" + event.getMatsSocketSessionId() + ", reason:" + event.getReason() + ", closeCode" + event .getCloseCode()); __sessionRemovedEvents.add(event); }); matsSocketServer.addMessageEventListener(event -> { List<MatsSocketEnvelopeWithMetaDto> envelopes = event.getEnvelopes(); for (MatsSocketEnvelopeWithMetaDto envelope : envelopes) { log.info("==== LISTENER[MESSAGE]! direction: [" + envelope.dir + "], type:[" + envelope.t + "] for Session [" + event.getMatsSocketSession() + "]"); } }); } // ===== "Standard Endpoint". private static final String STANDARD_ENDPOINT = "Test.single"; private static void setup_StandardTestSingle(MatsSocketServer matsSocketServer, MatsFactory matsFactory) { // :: Make default MatsSocket Endpoint matsSocketServer.matsSocketEndpoint(STANDARD_ENDPOINT, MatsSocketRequestDto.class, MatsDataTO.class, MatsSocketReplyDto.class, (ctx, principal, msg) -> { log.info("Got MatsSocket request on MatsSocket EndpointId: " + ctx.getMatsSocketEndpoint()); log.info(" \\- Authorization: " + ctx.getAuthorizationValue()); log.info(" \\- Principal: " + ctx.getPrincipal()); log.info(" \\- UserId: " + ctx.getUserId()); log.info(" \\- Message: " + msg); ctx.forwardCustom(new MatsDataTO(msg.number, msg.string), init -> init.to(ctx.getMatsSocketEndpoint().getMatsSocketEndpointId()) .interactive() .nonPersistent() .setTraceProperty("requestTimestamp", msg.requestTimestamp)); }, (ctx, matsReply) -> { log.info("Adapting message: " + matsReply); MatsSocketReplyDto reply = new MatsSocketReplyDto(matsReply.string.length(), matsReply.number, ctx.getMatsContext().getTraceProperty("requestTimestamp", Long.class)); ctx.resolve(reply); }); // :: Make simple single Mats Endpoint matsFactory.single(STANDARD_ENDPOINT, SetupTestMatsAndMatsSocketEndpoints.MatsDataTO.class, SetupTestMatsAndMatsSocketEndpoints.MatsDataTO.class, (processContext, incomingDto) -> new MatsDataTO( incomingDto.number, incomingDto.string + ":FromSingle", incomingDto.sleepTime)); } private static void setup_SimpleMats(MatsSocketServer matsSocketServer, MatsFactory matsFactory) { // :: Make "Test.simpleMats" MatsSocket Endpoint matsSocketServer.matsSocketEndpoint("Test.simpleMats", MatsDataTO.class, MatsDataTO.class, (ctx, principal, msg) -> ctx.forwardInteractivePersistent(msg)); // :: Make "Test.simpleMats" Endpoint matsFactory.single("Test.simpleMats", SetupTestMatsAndMatsSocketEndpoints.MatsDataTO.class, SetupTestMatsAndMatsSocketEndpoints.MatsDataTO.class, (processContext, incomingDto) -> new MatsDataTO( incomingDto.number, incomingDto.string + ":FromSimpleMats", incomingDto.sleepTime)); } // ===== IncomingHandler private static void setupSocket_IgnoreInIncoming(MatsSocketServer matsSocketServer) { matsSocketServer.matsSocketDirectReplyEndpoint("Test.ignoreInIncomingHandler", MatsSocketRequestDto.class, MatsSocketReplyDto.class, // IGNORE - i.e. 
do nothing (ctx, principal, msg) -> { }); } private static void setupSocket_DenyInIncoming(MatsSocketServer matsSocketServer) { matsSocketServer.matsSocketDirectReplyEndpoint("Test.denyInIncomingHandler", MatsSocketRequestDto.class, MatsSocketReplyDto.class, // DENY (ctx, principal, msg) -> ctx.deny()); } private static void setupSocket_ResolveInIncoming(MatsSocketServer matsSocketServer) { matsSocketServer.matsSocketDirectReplyEndpoint("Test.resolveInIncomingHandler", MatsDataTO.class, MatsDataTO.class, // RESOLVE (ctx, principal, msg) -> ctx.resolve( new MatsDataTO(msg.number, msg.string + ":From_resolveInIncomingHandler", msg.sleepTime))); } private static void setupSocket_RejectInIncoming(MatsSocketServer matsSocketServer) { matsSocketServer.matsSocketDirectReplyEndpoint("Test.rejectInIncomingHandler", MatsSocketRequestDto.class, MatsSocketReplyDto.class, // REJECT (ctx, principal, msg) -> ctx.reject( new MatsSocketReplyDto(3, 4, msg.requestTimestamp))); } private static void setupSocket_ThrowsInIncoming(MatsSocketServer matsSocketServer) { matsSocketServer.matsSocketDirectReplyEndpoint("Test.throwsInIncomingHandler", MatsSocketRequestDto.class, MatsSocketReplyDto.class, // THROW (ctx, principal, msg) -> { throw new IllegalStateException("Exception in IncomingAuthorizationAndAdapter should REJECT"); }); } // ===== ReplyAdapter private static void setupSocket_IgnoreInReplyAdapter(MatsSocketServer matsSocketServer) { matsSocketServer.matsSocketEndpoint("Test.ignoreInReplyAdapter", MatsSocketRequestDto.class, MatsDataTO.class, MatsSocketReplyDto.class, (ctx, principal, msg) -> ctx.forwardCustom(new MatsDataTO(1, "string1"), init -> init.to(STANDARD_ENDPOINT)), // IGNORE - i.e. do nothing (ctx, matsReply) -> { }); } private static void setupSocket_ResolveInReplyAdapter(MatsSocketServer matsSocketServer) { matsSocketServer.matsSocketEndpoint("Test.resolveInReplyAdapter", MatsSocketRequestDto.class, MatsDataTO.class, MatsSocketReplyDto.class, (ctx, principal, msg) -> ctx.forwardCustom(new MatsDataTO(1, "string1"), init -> init.to(STANDARD_ENDPOINT)), // RESOLVE (ctx, matsReply) -> ctx.resolve(new MatsSocketReplyDto(1, 2, 123))); } private static void setupSocket_RejectInReplyAdapter(MatsSocketServer matsSocketServer) { matsSocketServer.matsSocketEndpoint("Test.rejectInReplyAdapter", MatsSocketRequestDto.class, MatsDataTO.class, MatsSocketReplyDto.class, (ctx, principal, msg) -> ctx.forwardCustom(new MatsDataTO(2, "string2"), init -> init.to(STANDARD_ENDPOINT)), // REJECT (ctx, matsReply) -> ctx.reject(new MatsSocketReplyDto(1, 2, 123))); } private static void setupSocket_ThrowsInReplyAdapter(MatsSocketServer matsSocketServer) { matsSocketServer.matsSocketEndpoint("Test.throwsInReplyAdapter", MatsSocketRequestDto.class, MatsDataTO.class, MatsSocketReplyDto.class, (ctx, principal, msg) -> ctx.forwardCustom(new MatsDataTO(3, "string3"), init -> init.to(STANDARD_ENDPOINT)), // THROW (ctx, matsReply) -> { throw new IllegalStateException("Exception in ReplyAdapter should REJECT."); }); } // ===== Slow endpoint private static void setup_TestSlow(MatsSocketServer matsSocketServer, MatsFactory matsFactory) { // :: Forwards directly to Mats, no replyAdapter matsSocketServer.matsSocketEndpoint("Test.slow", MatsDataTO.class, MatsDataTO.class, (ctx, principal, msg) -> { log.info("SLEEPING " + msg.sleepIncoming + " ms BEFORE FORWARDING TO MATS!"); try { Thread.sleep(msg.sleepIncoming); } catch (InterruptedException e) { throw new RuntimeException(e); } ctx.forwardInteractivePersistent(msg); }); // :: 
Simple endpoint that just sleeps a tad, to simulate "long(er) running process". matsFactory.single("Test.slow", MatsDataTO.class, MatsDataTO.class, (processContext, incomingDto) -> { if (incomingDto.sleepTime > 0) { log.info("incoming.sleepTime > 0, sleeping specified [" + incomingDto.sleepTime + "] ms."); try { Thread.sleep(incomingDto.sleepTime); } catch (InterruptedException e) { throw new AssertionError("Got interrupted while slow-sleeping..!"); } } return new MatsDataTO(incomingDto.number, incomingDto.string + ":FromSlow", incomingDto.sleepTime); }); } // ===== Server Push: MatsSocketServer.send(..) and .request(..) private static void setup_ServerPush_Send(MatsSocketServer matsSocketServer, MatsFactory matsFactory) { matsSocketServer.matsSocketTerminator("Test.server.send.matsStage", MatsDataTO.class, (ctx, principal, msg) -> ctx.forwardCustom(new MatsDataTO(msg.number, ctx.getMatsSocketSessionId(), 1), init -> init.to("Test.server.send"))); matsSocketServer.matsSocketTerminator("Test.server.send.thread", MatsDataTO.class, (ctx, principal, msg) -> ctx.forwardCustom(new MatsDataTO(msg.number, ctx.getMatsSocketSessionId(), 2), init -> init.to("Test.server.send"))); // :: Simple endpoint that does a MatsSocketServer.send(..), either inside MatsStage, or in separate thread. matsFactory.terminator("Test.server.send", Void.TYPE, MatsDataTO.class, (processContext, state, incomingDto) -> { if (incomingDto.sleepTime == 1) { // Fire the sending off directly within the MatsStage, to prove that this is possible. matsSocketServer.send(incomingDto.string, processContext.getTraceId() + ":SentFromMatsStage", "ClientSide.terminator", incomingDto); } else if (incomingDto.sleepTime == 2) { // Fire the sending in a new Thread, to prove that this can be done totally outside context. new Thread(() -> { matsSocketServer.send(incomingDto.string, processContext.getTraceId() + ":SentFromThread", "ClientSide.terminator", incomingDto); }, "Test-MatsSocketServer.send()").start(); } }); } private static void setup_ServerPush_Request_Direct(MatsSocketServer matsSocketServer, MatsFactory matsFactory) { // Receives the "start", which starts the cascade - and performs a Server-to-Client Request to Client Endpoint, // setting the replyTo to the following MatsSocket endpoint. matsSocketServer.matsSocketTerminator("Test.server.request.direct", MatsDataTO.class, (ctx, principal, msg) -> { // Send request Server-to-Client, passing the message directly on. // NOTE: Client side will add a bit to it! matsSocketServer.request(ctx.getMatsSocketSessionId(), ctx.getTraceId(), "ClientSide.endpoint", msg, "Test.server.request.replyReceiver.direct", "CorrelationString", "CorrelationBinary".getBytes(StandardCharsets.UTF_8)); }); // .. which gets the Reply from Client, and forwards it to the following Mats endpoint matsSocketServer.matsSocketTerminator("Test.server.request.replyReceiver.direct", MatsDataTO.class, (ctx, principal, msg) -> { // Assert the Correlation information Assert.assertEquals("CorrelationString", ctx.getCorrelationString()); Assert.assertArrayEquals("CorrelationBinary".getBytes(StandardCharsets.UTF_8), ctx.getCorrelationBinary()); // Send to Client, add whether it was a RESOLVE or REJECT in the message. 
matsSocketServer.send(ctx.getMatsSocketSessionId(), ctx.getTraceId(), "ClientSide.terminator", new MatsDataTO(msg.number, msg.string + ':' + ctx.getMessageType().name(), msg.sleepTime)); }); } private static void setup_ServerPush_Request_Via_Mats(MatsSocketServer matsSocketServer, MatsFactory matsFactory) { // Receives the "start", which starts the cascade - and forwards to the following Mats endpoint matsSocketServer.matsSocketTerminator("Test.server.request.viaMats", MatsDataTO.class, // Pass the message directly on (ctx, principal, msg) -> ctx.forwardCustom(msg, init -> init.to("Test.server.requestToClient.viaMats"))); // .. which initiates a request to Client Endpoint, asking for Reply to go to the following MatsSocket Endpoint matsFactory.terminator("Test.server.requestToClient.viaMats", Void.TYPE, MatsDataTO.class, (processContext, state, msg) -> { String matsSocketSessionId = processContext.getString("matsSocketSessionId"); matsSocketServer.request(matsSocketSessionId, processContext.getTraceId(), "ClientSide.endpoint", msg, // Pass the message directly on. NOTE: Client side will add a bit to it! "Test.server.request.replyReceiver.viaMats", "CorrelationString", "CorrelationBinary".getBytes(StandardCharsets.UTF_8)); }); // .. which gets the Reply from Client, and forwards it to the following Mats endpoint matsSocketServer.matsSocketTerminator("Test.server.request.replyReceiver.viaMats", MatsDataTO.class, (ctx, principal, msg) -> { // Assert the Correlation information Assert.assertEquals("CorrelationString", ctx.getCorrelationString()); Assert.assertArrayEquals("CorrelationBinary".getBytes(StandardCharsets.UTF_8), ctx.getCorrelationBinary()); // Forward to the Mats terminator that sends to Client. Pass message directly on. ctx.forwardCustom(msg, init -> { init.addString("resolveReject", ctx.getMessageType().name()); init.to("Test.server.sendReplyBackToClient.viaMats"); }); }); // .. which finally sends the Reply back to the Client Terminator. matsFactory.terminator("Test.server.sendReplyBackToClient.viaMats", Void.TYPE, MatsDataTO.class, (ctx, state, msg) -> { String matsSocketSessionId = ctx.getString("matsSocketSessionId"); // Send to Client, add whether it was a RESOLVE or REJECT in the message. matsSocketServer.send(matsSocketSessionId, ctx.getTraceId(), "ClientSide.terminator", new MatsDataTO(msg.number, msg.string + ':' + ctx.getString("resolveReject"), msg.sleepTime)); }); } private static void setupSocket_ReplyWithCookieAuthorization(MatsSocketServer matsSocketServer) { matsSocketServer.matsSocketDirectReplyEndpoint("Test.replyWithCookieAuthorization", MatsDataTO.class, MatsDataTO.class, // RESOLVE (ctx, principal, msg) -> ctx.resolve( new MatsDataTO(msg.number, ((DummyAuthPrincipal) principal).getAuthorizationHeaderFromCookie(), msg.sleepTime))); } private static void setupSocket_CloseThisSession(MatsSocketServer matsSocketServer) { matsSocketServer.matsSocketTerminator("Test.closeThisSession", MatsDataTO.class, // Perform 'server.closeSession(thisSession)' in a new Thread. 
(ctx, principal, msg) -> { new Thread(() -> { try { Thread.sleep(100); } catch (InterruptedException e) { throw new RuntimeException(e); } matsSocketServer.closeSession(ctx.getMatsSocketSessionId(), "Invoked via MatsSocket server-side Terminator Test.closeThisSession"); }, "Mats CloseThisSession").start(); }); } private static void setupSocket_Publish(MatsSocketServer matsSocketServer) { matsSocketServer.matsSocketTerminator("Test.publish", String.class, (ctx, principal, msg) -> { new Thread(() -> { matsSocketServer.publish("Test 1 2 3", "Test.topic", new MatsDataTO(Math.PI, "Test from Java!", 42)); }, "Send message").start(); }); } private static void setupSocket_MatsSocket_renewAuth(MatsSocketServer matsSocketServer) { matsSocketServer.matsSocketTerminator("Test.renewAuth", MatsDataTO.class, (ctx, principal, msg) -> matsSocketServer.request(ctx.getMatsSocketSessionId(), ctx.getTraceId(), "MatsSocket.renewAuth", "", "Test.renewAuth_reply", "123", new byte[] { 1, 2, 3 })); matsSocketServer.matsSocketTerminator("Test.renewAuth_reply", MatsDataTO.class, (ctx, principal, msg) -> { Assert.assertEquals("123", ctx.getCorrelationString()); Assert.assertArrayEquals(new byte[] { 1, 2, 3 }, ctx.getCorrelationBinary()); matsSocketServer.send(ctx.getMatsSocketSessionId(), ctx.getTraceId(), "Client.renewAuth_terminator", ctx.getAuthorizationValue()); }); } /** * Request DTO class for MatsSocket Endpoint. */ public static class MatsSocketRequestDto { public String string; public double number; public long requestTimestamp; @Override public int hashCode() { return string.hashCode() + (int) Double.doubleToLongBits(number * 99713.80309); } @Override public boolean equals(Object obj) { if (!(obj instanceof MatsSocketRequestDto)) { throw new AssertionError(MatsSocketRequestDto.class.getSimpleName() + " was attempted equalled to [" + obj + "]."); } MatsSocketRequestDto other = (MatsSocketRequestDto) obj; return Objects.equals(this.string, other.string) && (this.number == other.number); } @Override public String toString() { return "MatsSocketRequestDto [string=" + string + ", number=" + number + "]"; } } /** * A DTO for Mats-side endpoint. */ public static class MatsDataTO { public double number; public String string; public int sleepTime; public int sleepIncoming; public MatsDataTO() { } public MatsDataTO(double number, String string) { this.number = number; this.string = string; } public MatsDataTO(double number, String string, int sleepTime) { this.number = number; this.string = string; this.sleepTime = sleepTime; } @Override public boolean equals(Object o) { if (this == o) return true; // NOTICE: Not Class-equals, but "instanceof", since we accept the "SubDataTO" too. if (o == null || !(o instanceof MatsDataTO)) return false; MatsDataTO matsDataTO = (MatsDataTO) o; return Double.compare(matsDataTO.number, number) == 0 && sleepTime == matsDataTO.sleepTime && Objects.equals(string, matsDataTO.string); } @Override public int hashCode() { return Objects.hash(number, string, sleepTime); } @Override public String toString() { return "MatsDataTO [number=" + number + ", string=" + string + (sleepTime != 0 ? ", multiplier=" + sleepTime : "") + "]"; } } /** * Reply DTO class for MatsSocket Endpoint. 
*/ public static class MatsSocketReplyDto { public int number1; public double number2; public long requestTimestamp; public MatsSocketReplyDto() { } public MatsSocketReplyDto(int number1, double number2, long requestTimestamp) { this.number1 = number1; this.number2 = number2; this.requestTimestamp = requestTimestamp; } @Override public int hashCode() { return (number1 * 3539) + (int) Double.doubleToLongBits(number2 * 99713.80309); } @Override public boolean equals(Object obj) { if (!(obj instanceof MatsSocketReplyDto)) { throw new AssertionError(MatsSocketReplyDto.class.getSimpleName() + " was attempted equalled to [" + obj + "]."); } MatsSocketReplyDto other = (MatsSocketReplyDto) obj; return (this.number1 == other.number1) && (this.number2 == other.number2); } @Override public String toString() { return "MatsSocketReplyDto [number1=" + number1 + ", number2=" + number2 + "]"; } } }
disgoorg/disgo
discord/permission_overwrite.go
<filename>discord/permission_overwrite.go package discord import ( "fmt" "github.com/disgoorg/disgo/json" "github.com/disgoorg/snowflake/v2" ) // PermissionOverwriteType is the type of PermissionOverwrite type PermissionOverwriteType int // Constants for PermissionOverwriteType const ( PermissionOverwriteTypeRole PermissionOverwriteType = iota PermissionOverwriteTypeMember ) type PermissionOverwrites []PermissionOverwrite func (p PermissionOverwrites) Get(overwriteType PermissionOverwriteType, id snowflake.ID) (PermissionOverwrite, bool) { for _, v := range p { if v.Type() == overwriteType && v.ID() == id { return v, true } } return nil, false } func (p PermissionOverwrites) Role(id snowflake.ID) (RolePermissionOverwrite, bool) { if overwrite, ok := p.Get(PermissionOverwriteTypeRole, id); ok { return overwrite.(RolePermissionOverwrite), true } return RolePermissionOverwrite{}, false } func (p PermissionOverwrites) Member(id snowflake.ID) (MemberPermissionOverwrite, bool) { if overwrite, ok := p.Get(PermissionOverwriteTypeMember, id); ok { return overwrite.(MemberPermissionOverwrite), true } return MemberPermissionOverwrite{}, false } // PermissionOverwrite is used to determine who can perform particular actions in a GetGuildChannel type PermissionOverwrite interface { Type() PermissionOverwriteType ID() snowflake.ID } type UnmarshalPermissionOverwrite struct { PermissionOverwrite } func (o *UnmarshalPermissionOverwrite) UnmarshalJSON(data []byte) error { var oType struct { Type PermissionOverwriteType `json:"type"` } if err := json.Unmarshal(data, &oType); err != nil { return err } var ( overwrite PermissionOverwrite err error ) switch oType.Type { case PermissionOverwriteTypeRole: var v RolePermissionOverwrite err = json.Unmarshal(data, &v) overwrite = v case PermissionOverwriteTypeMember: var v MemberPermissionOverwrite err = json.Unmarshal(data, &v) overwrite = v default: err = fmt.Errorf("unkown permission overwrite with type %d received", oType.Type) } if err != nil { return err } o.PermissionOverwrite = overwrite return nil } type RolePermissionOverwrite struct { RoleID snowflake.ID `json:"id"` Allow Permissions `json:"allow"` Deny Permissions `json:"deny"` } func (o RolePermissionOverwrite) ID() snowflake.ID { return o.RoleID } func (o RolePermissionOverwrite) MarshalJSON() ([]byte, error) { type rolePermissionOverwrite RolePermissionOverwrite return json.Marshal(struct { Type PermissionOverwriteType rolePermissionOverwrite }{ Type: o.Type(), rolePermissionOverwrite: rolePermissionOverwrite(o), }) } func (o RolePermissionOverwrite) Type() PermissionOverwriteType { return PermissionOverwriteTypeRole } type MemberPermissionOverwrite struct { UserID snowflake.ID `json:"id"` Allow Permissions `json:"allow"` Deny Permissions `json:"deny"` } func (o MemberPermissionOverwrite) ID() snowflake.ID { return o.UserID } func (o MemberPermissionOverwrite) MarshalJSON() ([]byte, error) { type memberPermissionOverwrite MemberPermissionOverwrite return json.Marshal(struct { Type PermissionOverwriteType memberPermissionOverwrite }{ Type: o.Type(), memberPermissionOverwrite: memberPermissionOverwrite(o), }) } func (o MemberPermissionOverwrite) Type() PermissionOverwriteType { return PermissionOverwriteTypeMember } type PermissionOverwriteUpdate interface { Type() PermissionOverwriteType } type RolePermissionOverwriteUpdate struct { Allow Permissions `json:"allow"` Deny Permissions `json:"deny"` } func (u RolePermissionOverwriteUpdate) MarshalJSON() ([]byte, error) { type rolePermissionOverwriteUpdate 
RolePermissionOverwriteUpdate return json.Marshal(struct { Type PermissionOverwriteType rolePermissionOverwriteUpdate }{ Type: u.Type(), rolePermissionOverwriteUpdate: rolePermissionOverwriteUpdate(u), }) } func (RolePermissionOverwriteUpdate) Type() PermissionOverwriteType { return PermissionOverwriteTypeRole } type MemberPermissionOverwriteUpdate struct { Allow Permissions `json:"allow"` Deny Permissions `json:"deny"` } func (u MemberPermissionOverwriteUpdate) MarshalJSON() ([]byte, error) { type memberPermissionOverwriteUpdate MemberPermissionOverwriteUpdate return json.Marshal(struct { Type PermissionOverwriteType memberPermissionOverwriteUpdate }{ Type: u.Type(), memberPermissionOverwriteUpdate: memberPermissionOverwriteUpdate(u), }) } func (MemberPermissionOverwriteUpdate) Type() PermissionOverwriteType { return PermissionOverwriteTypeMember }
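// Illustrative usage sketch (not part of the original file): how the lookup
// helpers above might be used. The overwrites value and role ID are hypothetical.
//
//	var overwrites PermissionOverwrites // e.g. taken from a guild channel
//	roleID := snowflake.ID(1234567890)
//	if ow, ok := overwrites.Role(roleID); ok {
//	    _ = ow.Allow // permissions explicitly allowed for that role
//	    _ = ow.Deny  // permissions explicitly denied for that role
//	}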
game-libgdx-unity/GDX-Engine
gdxengine-android/src/com/me/mygdxgame/CopyOfMainActivity.java
package com.me.mygdxgame; import android.os.Bundle; import com.badlogic.gdx.backends.android.AndroidApplication; import com.badlogic.gdx.backends.android.AndroidApplicationConfiguration; import com.gdxengine.framework.test.towerdefense.TowerDefenseGame; public class CopyOfMainActivity extends AndroidApplication { @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); AndroidApplicationConfiguration cfg = new AndroidApplicationConfiguration(); cfg.useGL20 = false; initialize(new TowerDefenseGame(), cfg); } }
LaEmma/sparrow_cloud
tests/test_consul_service.py
<gh_stars>10-100 # import os # import unittest # from unittest import mock # from sparrow_cloud.registry.service_discovery import consul_service # from django.core.exceptions import ImproperlyConfigured # # # 与 SPARROW_SERVICE_REGISTER_NAME 对应的 _HOST # SPARROW_SERVICE_REGISTER_NAME_HOST = "172.16.31.10:8001" # # # SERVICE_CONF = { # "ENV_NAME": "PERMISSION_REGISTER_NAME_HOST", # "VALUE": "sprrow-permission-svc" # } # # # CONSUL_RETURN_DATA = ( # "name", # [ # {'ServiceAddress': '172.16.31.10', 'ServicePort': 8001}, # {'ServiceAddress': '172.16.31.10', 'ServicePort': 8001}, # {'ServiceAddress': '172.16.58.3', 'ServicePort': 8001}, # ] # ) # # CONSUL_RETURN_DATA1 = ( # "name", # [] # ) # # # class ConsulServiceTest(unittest.TestCase): # # def setUp(self): # os.environ["DJANGO_SETTINGS_MODULE"] = "tests.mock_settings" # # @mock.patch('consul.Consul.Catalog.service', return_value=CONSUL_RETURN_DATA) # def test_consul_parameter_variable(self, mock_consul_service): # """ # 测试未设置环境变量 # """ # from django.conf import settings # os.environ["PERMISSION_REGISTER_NAME_HOST"] = "" # settings.CONSUL_CLIENT_ADDR = { # "HOST": "127.0.0.1", # "PORT": 8500 # } # settings.SERVICE_CONF = SERVICE_CONF # expect_result_list = ['{}:{}'.format( # _["ServiceAddress"], _['ServicePort']) for _ in CONSUL_RETURN_DATA[1]] # addr = consul_service(SERVICE_CONF) # self.assertEqual(addr in expect_result_list, True) # # @mock.patch('consul.Consul.Catalog.service', return_value=CONSUL_RETURN_DATA) # def test_consul_parameter_no_variable(self, mock_consul_service): # """ # 测试设置环境变量: # os.environ["SPARROW_SERVICE_REGISTER_NAME_HOST"] = "127.0.0.1:8001" # """ # from django.conf import settings # os.environ["PERMISSION_REGISTER_NAME_HOST"] = "127.0.0.1:8001" # settings.CONSUL_CLIENT_ADDR = { # "HOST": "127.0.0.1", # "PORT": 8500 # } # settings.SERVICE_CONF = SERVICE_CONF # addr = consul_service(SERVICE_CONF) # self.assertEqual(addr, '127.0.0.1:8001') # # @mock.patch('consul.Consul.Catalog.service', return_value=CONSUL_RETURN_DATA1) # def test_consul_service_empty_list(self, mock_consul_service): # """ # 测试未设置环境变量 # """ # from django.conf import settings # os.environ["PERMISSION_REGISTER_NAME_HOST"] = "" # settings.CONSUL_CLIENT_ADDR = { # "HOST": "127.0.0.1", # "PORT": 8500 # } # settings.SERVICE_CONF = SERVICE_CONF # try: # consul_service(SERVICE_CONF) # except ImproperlyConfigured as ex: # self.assertEqual(type(ex), ImproperlyConfigured) # # def tearDown(self): # del os.environ["DJANGO_SETTINGS_MODULE"]
franzmk/Oregon-State-Schoolwork
cs162/assignments/assignment5/list.h
<gh_stars>0 /********************************************************************* * ** Program Filename: list.h * ** Author: <NAME> * ** Date: 6/9/19 * ** Description: This is the header file for the linked list node and linked list classes * ** Input: N/a * ** Output: N/a * *********************************************************************/ #ifndef LIST_H #define LIST_H #include <iostream> #include <string> #include <cstdlib> using namespace std; class Linked_List_Node { public: int val; // the value that this node stores Linked_List_Node *next; // a pointer to the next node in the list /********************************************************************* * ** Function: Linked_List_Node(const Linked_List_Node&) * ** Description: copy constructor * ** Parameters: const Linked_List_Node& * ** Pre-Conditions: object created with object in parameter field * ** Post-Conditions: object created correctly * *********************************************************************/ Linked_List_Node(const Linked_List_Node&); /********************************************************************* * ** Function: Linked_List_Node& operator=(const Linked_List_Node&) * ** Description: assignment operator overload * ** Parameters: const Linked_List_Node& * ** Pre-Conditions: object assigned to object of same class type * ** Post-Conditions: object successfully copied * *********************************************************************/ Linked_List_Node& operator=(const Linked_List_Node&); /********************************************************************* * ** Function: Linked_List_Node() * ** Description: default constructor * ** Parameters: N/a * ** Pre-Conditions: object created * ** Post-Conditions: created successfully * *********************************************************************/ Linked_List_Node(); /********************************************************************* * ** Function: ~Linked_List_Node() * ** Description: default destructor * ** Parameters: N/a * ** Pre-Conditions: object out of scope and deleted * ** Post-Conditions: success * *********************************************************************/ ~Linked_List_Node(); }; class Linked_List { private: unsigned int length; // the number of nodes contained in the list Linked_List_Node *first; // a pointer to the first node in the list public: /********************************************************************* * ** Function: int get_length() * ** Description: gets length of current linked list (num. 
of nodes) * ** Parameters: N/a * ** Pre-Conditions: called * ** Post-Conditions: length returned * *********************************************************************/ int get_length(); /********************************************************************* * ** Function: void print() * ** Description: prints the values of all nodes * ** Parameters: N/a * ** Pre-Conditions: called * ** Post-Conditions: node values printed * *********************************************************************/ void print(); // output a list of all integers contained within the list /********************************************************************* * ** Function: void clear() * ** Description: clears the entire linked list and sets length to 0 * ** Parameters: N/a * ** Pre-Conditions: called * ** Post-Conditions: nodes deleted * *********************************************************************/ void clear(); // delete the entire list (remove all nodes and reset length to 0) /********************************************************************* * ** Function: unsigned int push_front(int) * ** Description: adds a node to the front of the linked list * ** Parameters: int * ** Pre-Conditions: called * ** Post-Conditions: node added * *********************************************************************/ unsigned int push_front(int); // insert a new value at the front of the list (returns the new length of the list) /********************************************************************* * ** Function: unsigned int push_back(int) * ** Description: adds a node to the back of the linked list * ** Parameters: int * ** Pre-Conditions: called * ** Post-Conditions: node added * *********************************************************************/ unsigned int push_back(int); // insert a new value at the back of the list (returns the new length of the list) /********************************************************************* * ** Function: unsigned int insert(int value, unsigned int index) * ** Description: inserts a node at the specified index * ** Parameters: int value, unsigned int index * ** Pre-Conditions: called * ** Post-Conditions: node inserted * *********************************************************************/ unsigned int insert(int value, unsigned int index);//insert a new value in the list at the specified index(returns the new length of the list) /********************************************************************* * ** Function: void sort_ascending() * ** Description: sorts nodes in ascending order by value * ** Parameters: N/a * ** Pre-Conditions: called * ** Post-Conditions: sorted in ascending order * *********************************************************************/ void sort_ascending(); // sort the nodes in ascending order. You must implement the recursive Merge Sort algorithm // Note: it's okay if sort_ascending() calls a recursive private function to perform the sorting. 
// /********************************************************************* // ** Function: void sort_descending() // ** Description: sorts nodes in descending order by value // ** Parameters: N/a // ** Pre-Conditions: called // ** Post-Conditions: sorted in descending order // *********************************************************************/ void sort_descending(); // sort the nodes in descending order /********************************************************************* * ** Function: Linked_List() * ** Description: default constructor for linked list * ** Parameters: N/a * ** Pre-Conditions: list created * ** Post-Conditions: success * *********************************************************************/ Linked_List(); /********************************************************************* * ** Function: ~Linked_List() * ** Description: default destructor * ** Parameters: N/a * ** Pre-Conditions: object out of scope * ** Post-Conditions: memory deleted * *********************************************************************/ ~Linked_List(); /********************************************************************* * ** Function: void MergeSort(Linked_List_Node**, char) * ** Description: Main function for the merge sort, it calls all the other functions recursively and reconstructs the list when complete * ** Parameters: Linked_List_Node**, char * ** Pre-Conditions: This function is called after the user is done entering values * ** Post-Conditions: Successfully sorts all nodes by value * *********************************************************************/ void MergeSort(Linked_List_Node**, char); /********************************************************************* * ** Function: Linked_List_Node * SortedMerge(Linked_List_Node *a, Linked_List_Node *b, char) * ** Description: This sorts the nodes by value in whichever order is decided by the user (ascending or descending) * ** Parameters: Linked_List_Node *a, Linked_List_Node *b, char * ** Pre-Conditions: This function is called when two nodes are passed to it * ** Post-Conditions: Sorts each node based on value * *********************************************************************/ Linked_List_Node * SortedMerge(Linked_List_Node *a, Linked_List_Node *b, char); /********************************************************************* * ** Function: void FrontBackSplit(Linked_List_Node *source, Linked_List_Node **frontRef, Linked_List_Node **backRef) * ** Description: This splits the given list into two halves * ** Parameters: Linked_List_Node *source, Linked_List_Node **frontRef, Linked_List_Node **backRef * ** Pre-Conditions: The function is called if the given list is more than 0 or 1 items * ** Post-Conditions: The list is split in to two halves * *********************************************************************/ void FrontBackSplit(Linked_List_Node *source, Linked_List_Node **frontRef, Linked_List_Node **backRef); }; #endif
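/* Illustrative usage sketch (not part of the assignment header); assumes the
 * member functions are implemented as documented above:
 *
 *   Linked_List list;
 *   list.push_back(3);
 *   list.push_back(1);
 *   list.push_front(2);      // list is now 2 -> 3 -> 1
 *   list.sort_ascending();   // recursive merge sort, ascending
 *   list.print();            // prints 1 2 3 (exact formatting depends on print())
 */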
richhastings/email-components
site/pliers/watch.js
module.exports = createTask var notify = require('../../pliers/lib/notify') , browserSync = require('browser-sync') function createTask (pliers, config) { pliers('watch', function (done) { browserSync.create('site').init({ logSnippet: false, port: config.browserSyncPort }) pliers.logger.info('Watching for server JS changes') pliers.watch(pliers.filesets.serverJs, function () { notify('Restarting Server...', config.title + ' Site') pliers.run('start', function () { notify('Server Restarted', config.title + ' Site') browserSync.get('site').reload() }) }) pliers.logger.info('Watching for server template changes') pliers.watch(pliers.filesets.serverTemplates, function () { browserSync.get('site').reload() }) pliers.logger.info('Watching for stylus changes') pliers.watch(pliers.filesets.stylus, function () { pliers.run('buildCss', function () { notify('CSS built', config.title + ' Site', config.url) browserSync.get('site').reload(pliers.filesets.css) }) }) pliers.logger.info('Watching for Modernizr changes') pliers.watch(pliers.filesets.modernizrConfig, function () { pliers.run('buildModernizr', function () { notify('Modernizr built', config.title + ' Site', config.url) browserSync.get('site').reload() }) }) pliers.logger.info('Watching for Sprite changes') pliers.watch(pliers.filesets.spriteRaw.concat(pliers.filesets.spriteTemplate), function () { pliers.run('buildSprite', function () { notify('Sprite built', config.title + ' Site', config.url) browserSync.get('site').reload(pliers.filesets.spriteCompiled) }) }) pliers.logger.info('Watching browser JS with watchify') pliers.run('watchBrowserJs') done() }) }
cloudymolecule/shutterlogger
db/migrate/20201122044521_create_photos.rb
class CreatePhotos < ActiveRecord::Migration def change create_table :photos do |t| t.string :name t.string :description t.string :shutter t.string :aperture t.string :focal_length t.string :date t.string :time t.string :location t.integer :rating t.integer :roll_id t.integer :len_id t.integer :camera_id t.integer :user_id t.timestamps null: false end end end
Westlanderz/AI-Plat1
venv/Lib/site-packages/h5py/tests/test_vds/__init__.py
from .test_virtual_source import *
from .test_highlevel_vds import *
from .test_lowlevel_vds import *
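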
GerHobbelt/prosemirror
src/edit/index.js
export {ProseMirror} from "./main"
export {defineOption} from "./options"
export {Range} from "./selection"
export {eventMixin} from "./event"
export {Keymap} from "./keys"
export {MarkedRange} from "./range"
export {defaultKeymap} from "./defaultkeymap"
export {registerCommand} from "./commands"
piyushmoolchandani/Lab_codes
OS_L_3/q5.c
#include<stdio.h>
#include<stdlib.h>
#include<sys/types.h>
#include<sys/wait.h>
#include<unistd.h>

int main()
{
    // Declare x and assign it a value
    int x;
    x = 100;
    printf("Current value of x: %d\n", x);

    // fork the parent process
    int temp_pid = fork();

    // if fork failed, return
    if (temp_pid < 0)
    {
        printf("%s", "Fork statement failed\n");
        return 0;
    }
    // fork() returns 0 in the child process
    else if (temp_pid == 0)
    {
        printf("Current value(Inside Child process): %d\n", x);
        x = x + 7;
        printf("New value after adding 7: %d\n", x);
    }
    // fork() returns the child's PID in the parent process
    else
    {
        // wait for the child to finish before the parent modifies its copy of x
        wait(NULL);
        printf("Current value(Inside Parent process): %d\n", x);
        x = x - 7;
        printf("New value after subtracting 7: %d\n", x);
    }
    return 0;
}
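/* Illustrative build/run note (not part of the submission):
 *   gcc q5.c -o q5 && ./q5
 * wait(NULL) ensures the parent resumes only after the child has exited, so the
 * child's "+7" output appears before the parent's "-7" output. Each process
 * modifies its own copy of x, so the changes do not affect one another.
 */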
AID-Labor/BOSSModeller2
src/main/java/de/snaggly/bossmodellerfx/model/abstraction/AttributeCombinationAbstraction.java
package de.snaggly.bossmodellerfx.model.abstraction;

/**
 * Abstracting around AttributeCombination. Here Attributes have been abstracted.
 *
 * @author <NAME>
 */
public abstract class AttributeCombinationAbstraction implements AbstractedModel {
    private String combinationName;
    private boolean isPrimaryCombination; //Combination of all PrimaryKeys

    public String getCombinationName() {
        return combinationName;
    }

    public void setCombinationName(String combinationName) {
        this.combinationName = combinationName;
    }

    public boolean isPrimaryCombination() {
        return isPrimaryCombination;
    }

    public void setPrimaryCombination(boolean primaryCombination) {
        isPrimaryCombination = primaryCombination;
    }
}
z8432k/feodorov-s4-sadp-cp
src/common/include/client.h
#ifndef CLIENT_H
#define CLIENT_H

#include <glib.h>

typedef struct {
  GString *name;
  GString *license;
  GString *passport;
  GString *address;
} Client_t;

Client_t* new_client();

void free_client(Client_t *client);

void fill_client(
  Client_t *client,
  const gchar *name,
  const gchar *license,
  const gchar *passport,
  const gchar *address
);

#endif /* CLIENT_H */
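/* Illustrative usage sketch (not part of the original header); the data values
 * are made up, and it assumes the corresponding .c file implements these
 * functions as declared:
 *
 *   Client_t *c = new_client();
 *   fill_client(c, "Ivan Petrov", "AB123456", "4509 123456", "Some street 1");
 *   // ... read c->name->str and the other GString fields ...
 *   free_client(c);
 */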
EasyDonate/EasyDonate4J
json-providers/gson-json-provider/src/main/java/ru/easydonate/easydonate4j/api/v3/response/gson/shop/GsonGetPaymentsListResponse.java
package ru.easydonate.easydonate4j.api.v3.response.gson.shop; import org.jetbrains.annotations.NotNull; import ru.easydonate.easydonate4j.api.v3.data.model.shop.payment.PaymentsList; import ru.easydonate.easydonate4j.api.v3.response.gson.GsonApiResponse; import ru.easydonate.easydonate4j.api.v3.response.shop.GetPaymentsListResponse; import ru.easydonate.easydonate4j.json.serialization.Implementing; @Implementing(GetPaymentsListResponse.class) public final class GsonGetPaymentsListResponse extends GsonApiResponse<PaymentsList> implements GetPaymentsListResponse { @Override public @NotNull String toString() { return "GsonGetPaymentsListResponse{" + "success=" + success + ", content=" + content + '}'; } }
BearerPipelineTest/jruby
test/mri/racc/case.rb
verbose = $VERBOSE $VERBOSE = true begin require 'test/unit' require 'racc/static' require 'fileutils' require 'tempfile' require 'timeout' module Racc class TestCase < Test::Unit::TestCase PROJECT_DIR = File.expand_path(File.join(__dir__, '..')) test_dir = File.join(PROJECT_DIR, 'test') test_dir = File.join(PROJECT_DIR, 'racc') unless File.exist?(test_dir) TEST_DIR = test_dir racc = File.join(PROJECT_DIR, 'bin', 'racc') racc = File.join(PROJECT_DIR, '..', 'libexec', 'racc') unless File.exist?(racc) RACC = racc ASSET_DIR = File.join(TEST_DIR, 'assets') # test grammars REGRESS_DIR = File.join(TEST_DIR, 'regress') # known-good generated outputs INC = [ File.join(PROJECT_DIR, 'lib'), File.join(PROJECT_DIR, 'ext'), ].join(':') def setup @TEMP_DIR = Dir.mktmpdir("racc") @OUT_DIR = File.join(@TEMP_DIR, 'out') @TAB_DIR = File.join(@TEMP_DIR, 'tab') # generated parsers go here @LOG_DIR = File.join(@TEMP_DIR, 'log') @ERR_DIR = File.join(@TEMP_DIR, 'err') FileUtils.mkdir_p([@OUT_DIR, @TAB_DIR, @LOG_DIR, @ERR_DIR]) FileUtils.cp File.join(TEST_DIR, "src.intp"), @TEMP_DIR end def teardown FileUtils.rm_f(File.join(@TEMP_DIR, "src.intp")) FileUtils.rm_rf([@OUT_DIR, @TAB_DIR, @LOG_DIR, @ERR_DIR, @TEMP_DIR]) end def assert_compile(asset, args = [], **opt) file = File.basename(asset, '.y') args = ([args].flatten) + [ "#{ASSET_DIR}/#{file}.y", '-Do', "-O#{@OUT_DIR}/#{file}", "-o#{@TAB_DIR}/#{file}", ] racc(*args, **opt) end def assert_debugfile(asset, ok) file = File.basename(asset, '.y') Dir.chdir(@LOG_DIR) do File.foreach("#{file}.y") do |line| line.strip! case line when /sr/ then assert_equal "sr#{ok[0]}", line when /rr/ then assert_equal "rr#{ok[1]}", line when /un/ then assert_equal "un#{ok[2]}", line when /ur/ then assert_equal "ur#{ok[3]}", line when /ex/ then assert_equal "ex#{ok[4]}", line else raise TestFailed, 'racc outputs unknown debug report???' end end end end def assert_exec(asset) lib_path = File.expand_path("../../lib", __FILE__) file = File.basename(asset, '.y') ruby "-I#{lib_path}", "#{@TAB_DIR}/#{file}" end def strip_version(source) source.sub(/This file is automatically generated by Racc \d+\.\d+\.\d+/, '') end def assert_output_unchanged(asset) file = File.basename(asset, '.y') # Code to re-generate the expectation files # File.write("#{REGRESS_DIR}/#{file}", File.read("#{@TAB_DIR}/#{file}")) expected = File.read("#{REGRESS_DIR}/#{file}") actual = File.read("#{@TAB_DIR}/#{file}") result = (strip_version(expected) == strip_version(actual)) assert(result, "Output of test/assets/#{file}.y differed from " \ "expectation. Try compiling it and diff with test/regress/#{file}.") end def racc(*arg, **opt) lib_path = File.expand_path("../../lib", __FILE__) ruby "-I#{lib_path}", "-S", RACC, *arg, **opt end def ruby(*arg, **opt) assert_ruby_status(["-C", @TEMP_DIR, *arg], **opt) end end end ensure $VERBOSE = verbose end
NEHAVERMA-07/testjunittomcat
junit-servers-jetty/src/test/java/com/github/mjeanroy/junit/servers/jetty/junit4/JettyServerJunit4RuleTest.java
<gh_stars>10-100 /** * The MIT License (MIT) * * Copyright (c) 2014-2019 <<EMAIL>> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.github.mjeanroy.junit.servers.jetty.junit4; import com.github.mjeanroy.junit.servers.jetty.EmbeddedJetty; import com.github.mjeanroy.junit.servers.jetty.EmbeddedJettyConfiguration; import com.github.mjeanroy.junit.servers.jetty.tests.EmbeddedJettyConfigurationMockBuilder; import com.github.mjeanroy.junit.servers.jetty.tests.EmbeddedJettyMockBuilder; import org.junit.jupiter.api.Test; import org.junit.runner.Description; import org.junit.runners.model.Statement; import org.mockito.InOrder; import org.mockito.Mockito; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; class JettyServerJunit4RuleTest { @Test void it_should_create_rule_with_server() throws Throwable { final EmbeddedJettyConfiguration config = new EmbeddedJettyConfigurationMockBuilder().build(); final EmbeddedJetty jetty = new EmbeddedJettyMockBuilder().withConfiguration(config).build(); final JettyServerJunit4Rule rule = createRule(jetty); assertThat(rule.getServer()).isSameAs(jetty); assertThat(rule.getScheme()).isEqualTo(jetty.getScheme()); assertThat(rule.getHost()).isEqualTo(jetty.getHost()); assertThat(rule.getPort()).isEqualTo(jetty.getPort()); assertThat(rule.getPath()).isEqualTo(jetty.getPath()); assertThat(rule.getUrl()).isEqualTo(jetty.getUrl()); verify(jetty, never()).start(); verify(jetty, never()).stop(); evaluateRule(rule); InOrder inOrder = Mockito.inOrder(jetty); inOrder.verify(jetty).start(); inOrder.verify(jetty).stop(); } @Test void it_should_create_server_from_configuration() throws Throwable { final EmbeddedJettyConfiguration configuration = EmbeddedJettyConfiguration.defaultConfiguration(); final JettyServerJunit4Rule rule = createRule(configuration); assertThat(rule.getPort()).isZero(); assertRule(rule); } @Test void it_should_create_server_with_default_configuration() throws Throwable { final JettyServerJunit4Rule rule = createRule(); assertThat(rule.getPort()).isZero(); // not started assertRule(rule); } private static JettyServerJunit4Rule createRule() { return new JettyServerJunit4Rule(); } private static JettyServerJunit4Rule createRule(EmbeddedJettyConfiguration configuration) { return new JettyServerJunit4Rule(configuration); } private static JettyServerJunit4Rule createRule(EmbeddedJetty jetty) { 
return new JettyServerJunit4Rule(jetty); } private static void assertRule(final JettyServerJunit4Rule rule) throws Throwable { final Statement statement = spy(new FakeStatement(rule)); evaluateRule(rule, statement); verify(statement).evaluate(); assertThat(rule.getPort()).isZero(); // not started } private static void evaluateRule(JettyServerJunit4Rule rule) throws Throwable { final Statement statement = mock(Statement.class); evaluateRule(rule, statement); } private static void evaluateRule(JettyServerJunit4Rule rule, Statement statement) throws Throwable { final Description description = mock(Description.class); final Statement testStatement = rule.apply(statement, description); testStatement.evaluate(); } private static class FakeStatement extends Statement { private final JettyServerJunit4Rule rule; private FakeStatement(JettyServerJunit4Rule rule) { this.rule = rule; } @Override public void evaluate() { assertThat(rule.getScheme()).isEqualTo("http"); assertThat(rule.getHost()).isEqualTo("localhost"); assertThat(rule.getPath()).isEqualTo("/"); assertThat(rule.getPort()).isGreaterThan(0); assertThat(rule.getUrl()).isEqualTo( rule.getScheme() + "://" + rule.getHost() + ":" + rule.getPort() + rule.getPath() ); } } }
valery1707/techtalks-async
core/src/main/java/ru/relex/techtalks/async/model/HospitalPreset.java
package ru.relex.techtalks.async.model; import java.math.BigDecimal; import java.util.Random; /** * @author <NAME> * @date 04.12.2018 */ public enum HospitalPreset { HOSPITAL1("CAT1", "CAT2", "CAT3"), DR_REF_H2("CAT2", "CAT3"), EX_HOSP("CAT2"), REEE("CAT3", "CAT1"), BACK_S_H("CAT1", "CAT4"); private final String[] categories; HospitalPreset(String... categories) { this.categories = categories; } public String[] getCategories() { return categories; } public BigDecimal nextPrice() { return BigDecimal.valueOf(new Random().nextInt(2500) + 500); } }
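// Illustrative usage sketch (not part of the original file): enum constants carry
// their category codes, and nextPrice() draws a random price in the range [500, 3000).
//
//   String[] cats = HospitalPreset.HOSPITAL1.getCategories(); // {"CAT1", "CAT2", "CAT3"}
//   BigDecimal price = HospitalPreset.EX_HOSP.nextPrice();    // e.g. 1234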
markphip/testing
jira-dvcs-connector-api/src/main/java/com/atlassian/jira/plugins/dvcs/util/ao/query/term/QueryColumn.java
<reponame>markphip/testing package com.atlassian.jira.plugins.dvcs.util.ao.query.term; import com.atlassian.jira.plugins.dvcs.util.ao.query.DefaultQueryNode; import com.atlassian.jira.plugins.dvcs.util.ao.query.QueryContext; import net.java.ao.RawEntity; /** * Defines table column - necessary because of entity alias processing. * * @author <NAME> * */ public class QueryColumn extends DefaultQueryNode implements QueryTerm { /** * @see #QueryColumn(Class, String) */ private final Class<? extends RawEntity<?>> entity; /** * @see #QueryColumn(Class, String) */ private final String columnName; /** * Constructor. * * @param entity * name of entity * @param columnName * name of column */ public QueryColumn(Class<? extends RawEntity<?>> entity, String columnName) { this.entity = entity; this.columnName = columnName; } /** * {@inheritDoc} */ @Override public String joinOn(QueryContext context) { return context.getEntityAlias(entity) + "." + columnName; } /** * {@inheritDoc} */ @Override public String buildOrder(QueryContext context) { return context.getEntityAlias(entity) + "." + columnName; } /** * {@inheritDoc} */ @Override public void buildWhere(QueryContext context, StringBuilder where) { where.append(context.getEntityAlias(entity)).append('.').append(columnName); } }
adambodie/Adventures
src/components/Photo.js
import React, { Component } from 'react' import '../styles/carousel.scss' export default class Photo extends Component { render() { const { id, category, title, color, backgroundColor, index, length } = this.props; const border = `-1px 0 ${backgroundColor}, 0 1px ${backgroundColor}, 1px 0 ${backgroundColor}, 0 -1px ${backgroundColor}`; const colorStyle = { color: color, textShadow: border } return ( <div className='image-caption'> {id !== null ? ( <img src={`/assets/images/${category}/${id}_o.jpg`} alt={title}/> ) : (<div style={{width: '600px', height: '300px'}}></div>) } <h3 style={colorStyle}>{title}</h3> <h6 style={colorStyle}>{index} of {length}</h6> </div> ); } }
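// Illustrative usage sketch (not part of the original file); prop values are made up:
//
//   <Photo
//     id={123} category="oregon" title="Crater Lake"
//     color="#fff" backgroundColor="#000"
//     index={1} length={10}
//   />
//
// With id={123} this renders /assets/images/oregon/123_o.jpg plus the caption
// "Crater Lake" and "1 of 10"; with id={null} it renders an empty 600x300 placeholder.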
rocketbot-cl/genesysCloud
libs/PureCloudPlatformClientV2/apis/scim_api.py
<reponame>rocketbot-cl/genesysCloud # coding: utf-8 """ SCIMApi.py Copyright 2016 SmartBear Software Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from __future__ import absolute_import import sys import os import re # python 2 and python 3 compatibility library from six import iteritems from ..configuration import Configuration from ..api_client import ApiClient class SCIMApi(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): config = Configuration() if api_client: self.api_client = api_client else: if not config.api_client: config.api_client = ApiClient() self.api_client = config.api_client def delete_scim_user(self, user_id, **kwargs): """ Delete a user This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.delete_scim_user(user_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str user_id: The ID of a user. Returned with GET /api/v2/scim/users. (required) :param str if_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/users/{userId}. Example: \"42\". If the ETag is different from the version on the server, returns 400 with a \"scimType\" of \"invalidVers\". :return: Empty If the method is called asynchronously, returns the request thread. 
""" all_params = ['user_id', 'if_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_scim_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'user_id' is set if ('user_id' not in params) or (params['user_id'] is None): raise ValueError("Missing the required parameter `user_id` when calling `delete_scim_user`") resource_path = '/api/v2/scim/users/{userId}'.replace('{format}', 'json') path_params = {} if 'user_id' in params: path_params['userId'] = params['user_id'] query_params = {} header_params = {} if 'if_match' in params: header_params['If-Match'] = params['if_match'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='Empty', auth_settings=auth_settings, callback=params.get('callback')) return response def delete_scim_v2_user(self, user_id, **kwargs): """ Delete a user This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.delete_scim_v2_user(user_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str user_id: The ID of a user. Returned with GET /api/v2/scim/v2/users. (required) :param str if_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/v2/users/{userId}. Example: \"42\". If the ETag is different from the version on the server, returns 400 with a \"scimType\" of \"invalidVers\". :return: Empty If the method is called asynchronously, returns the request thread. 
""" all_params = ['user_id', 'if_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_scim_v2_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'user_id' is set if ('user_id' not in params) or (params['user_id'] is None): raise ValueError("Missing the required parameter `user_id` when calling `delete_scim_v2_user`") resource_path = '/api/v2/scim/v2/users/{userId}'.replace('{format}', 'json') path_params = {} if 'user_id' in params: path_params['userId'] = params['user_id'] query_params = {} header_params = {} if 'if_match' in params: header_params['If-Match'] = params['if_match'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='Empty', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_group(self, group_id, **kwargs): """ Get a group This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_group(group_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str group_id: The ID of a group. Returned with GET /api/v2/scim/groups. (required) :param list[str] attributes: Indicates which attributes to include. Returns these attributes and the \"id\", \"active\", and \"meta\" attributes. Use \"attributes\" to avoid expensive secondary calls for the default attributes. :param list[str] excluded_attributes: Indicates which attributes to exclude. Returns the default attributes minus \"excludedAttributes\". Always returns \"id\", \"active\", and \"meta\" attributes. Use \"excludedAttributes\" to avoid expensive secondary calls for the default attributes. :param str if_none_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/groups/{groupId}. Example: \"42\". If the ETag is different from the version on the server, returns the current configuration of the resource. If the ETag is current, returns 304 Not Modified. :return: ScimV2Group If the method is called asynchronously, returns the request thread. 
""" all_params = ['group_id', 'attributes', 'excluded_attributes', 'if_none_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_group" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'group_id' is set if ('group_id' not in params) or (params['group_id'] is None): raise ValueError("Missing the required parameter `group_id` when calling `get_scim_group`") resource_path = '/api/v2/scim/groups/{groupId}'.replace('{format}', 'json') path_params = {} if 'group_id' in params: path_params['groupId'] = params['group_id'] query_params = {} if 'attributes' in params: query_params['attributes'] = params['attributes'] if 'excluded_attributes' in params: query_params['excludedAttributes'] = params['excluded_attributes'] header_params = {} if 'if_none_match' in params: header_params['If-None-Match'] = params['if_none_match'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2Group', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_groups(self, **kwargs): """ Get a list of groups This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_groups(callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int start_index: The 1-based index of the first query result. :param int count: The requested number of items per page. A value of 0 returns \"totalResults\". A page size over 25 may exceed internal resource limits and return a 429 error. For a page size over 25, use the \"excludedAttributes\" or \"attributes\" query parameters to exclude or only include secondary lookup values such as \"externalId\", \"roles\", \"urn:ietf:params:scim:schemas:extension:genesys:purecloud:2.0:User:routingLanguages\", or \"urn:ietf:params:scim:schemas:extension:genesys:purecloud:2.0:User:routingSkills\". :param list[str] attributes: Indicates which attributes to include. Returns these attributes and the \"id\", \"active\", and \"meta\" attributes. Use \"attributes\" to avoid expensive secondary calls for the default attributes. :param list[str] excluded_attributes: Indicates which attributes to exclude. Returns the default attributes minus \"excludedAttributes\". Always returns \"id\", \"active\", and \"meta\" attributes. Use \"excludedAttributes\" to avoid expensive secondary calls for the default attributes. :param str filter: Filters results. If nothing is specified, returns all groups. Examples of valid values: \"id eq 5f4bc742-a019-4e38-8e2a-d39d5bc0b0f3\", \"displayname eq Sales\". 
:return: ScimGroupListResponse If the method is called asynchronously, returns the request thread. """ all_params = ['start_index', 'count', 'attributes', 'excluded_attributes', 'filter'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_groups" % key ) params[key] = val del params['kwargs'] resource_path = '/api/v2/scim/groups'.replace('{format}', 'json') path_params = {} query_params = {} if 'start_index' in params: query_params['startIndex'] = params['start_index'] if 'count' in params: query_params['count'] = params['count'] if 'attributes' in params: query_params['attributes'] = params['attributes'] if 'excluded_attributes' in params: query_params['excludedAttributes'] = params['excluded_attributes'] if 'filter' in params: query_params['filter'] = params['filter'] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimGroupListResponse', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_resourcetype(self, resource_type, **kwargs): """ Get a resource type This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_resourcetype(resource_type, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str resource_type: The type of resource. Returned with GET /api/v2/scim/resourcetypes. (required) :return: ScimConfigResourceType If the method is called asynchronously, returns the request thread. 
""" all_params = ['resource_type'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_resourcetype" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'resource_type' is set if ('resource_type' not in params) or (params['resource_type'] is None): raise ValueError("Missing the required parameter `resource_type` when calling `get_scim_resourcetype`") resource_path = '/api/v2/scim/resourcetypes/{resourceType}'.replace('{format}', 'json') path_params = {} if 'resource_type' in params: path_params['resourceType'] = params['resource_type'] query_params = {} header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimConfigResourceType', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_resourcetypes(self, **kwargs): """ Get a list of resource types This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_resourcetypes(callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :return: ScimConfigResourceTypesListResponse If the method is called asynchronously, returns the request thread. """ all_params = [] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_resourcetypes" % key ) params[key] = val del params['kwargs'] resource_path = '/api/v2/scim/resourcetypes'.replace('{format}', 'json') path_params = {} query_params = {} header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimConfigResourceTypesListResponse', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_schema(self, schema_id, **kwargs): """ Get a SCIM schema This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. 
>>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_schema(schema_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str schema_id: The ID of a schema. Returned with GET /api/v2/scim/schemas. (required) :return: ScimV2SchemaDefinition If the method is called asynchronously, returns the request thread. """ all_params = ['schema_id'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_schema" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'schema_id' is set if ('schema_id' not in params) or (params['schema_id'] is None): raise ValueError("Missing the required parameter `schema_id` when calling `get_scim_schema`") resource_path = '/api/v2/scim/schemas/{schemaId}'.replace('{format}', 'json') path_params = {} if 'schema_id' in params: path_params['schemaId'] = params['schema_id'] query_params = {} header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2SchemaDefinition', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_schemas(self, **kwargs): """ Get a list of SCIM schemas This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_schemas(callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str filter: Filtered results are invalid and return 403 Unauthorized. :return: ScimV2SchemaListResponse If the method is called asynchronously, returns the request thread. 
""" all_params = ['filter'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_schemas" % key ) params[key] = val del params['kwargs'] resource_path = '/api/v2/scim/schemas'.replace('{format}', 'json') path_params = {} query_params = {} if 'filter' in params: query_params['filter'] = params['filter'] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2SchemaListResponse', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_serviceproviderconfig(self, **kwargs): """ Get a service provider's configuration This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_serviceproviderconfig(callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str if_none_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/serviceproviderconfig. Example: \"42\". If the ETag is different from the version on the server, returns the current configuration of the resource. If the ETag is current, returns 304 Not Modified. :return: ScimServiceProviderConfig If the method is called asynchronously, returns the request thread. """ all_params = ['if_none_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_serviceproviderconfig" % key ) params[key] = val del params['kwargs'] resource_path = '/api/v2/scim/serviceproviderconfig'.replace('{format}', 'json') path_params = {} query_params = {} header_params = {} if 'if_none_match' in params: header_params['If-None-Match'] = params['if_none_match'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimServiceProviderConfig', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_user(self, user_id, **kwargs): """ Get a user This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_user(user_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str user_id: The ID of a user. Returned with GET /api/v2/scim/users. (required) :param list[str] attributes: Indicates which attributes to include. Returns these attributes and the \"id\", \"userName\", \"active\", and \"meta\" attributes. Use \"attributes\" to avoid expensive secondary calls for the default attributes. :param list[str] excluded_attributes: Indicates which attributes to exclude. Returns the default attributes minus \"excludedAttributes\". Always returns the \"id\", \"userName\", \"active\", and \"meta\" attributes. Use \"excludedAttributes\" to avoid expensive secondary calls for the default attributes. :param str if_none_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/users/{userId}. Example: \"42\". If the ETag is different from the version on the server, returns the current configuration of the resource. If the ETag is current, returns 304 Not Modified. :return: ScimV2User If the method is called asynchronously, returns the request thread. """ all_params = ['user_id', 'attributes', 'excluded_attributes', 'if_none_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'user_id' is set if ('user_id' not in params) or (params['user_id'] is None): raise ValueError("Missing the required parameter `user_id` when calling `get_scim_user`") resource_path = '/api/v2/scim/users/{userId}'.replace('{format}', 'json') path_params = {} if 'user_id' in params: path_params['userId'] = params['user_id'] query_params = {} if 'attributes' in params: query_params['attributes'] = params['attributes'] if 'excluded_attributes' in params: query_params['excludedAttributes'] = params['excluded_attributes'] header_params = {} if 'if_none_match' in params: header_params['If-None-Match'] = params['if_none_match'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2User', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_users(self, **kwargs): """ Get a list of users To return all active users, do not use the filter parameter. To return inactive users, set the filter parameter to \"active eq false\". By default, returns SCIM attributes \"externalId\", \"enterprise-user:manager\", and \"roles\". 
To exclude these attributes, set the attributes parameter to \"id,active\" or the excludeAttributes parameter to \"externalId,roles,urn:ietf:params:scim:schemas:extension:enterprise:2.0:User:division\". This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_users(callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int start_index: The 1-based index of the first query result. :param int count: The requested number of items per page. A value of 0 returns \"totalResults\". A page size over 25 may exceed internal resource limits and return a 429 error. For a page size over 25, use the \"excludedAttributes\" or \"attributes\" query parameters to exclude or only include secondary lookup values such as \"externalId\", \"roles\", \"urn:ietf:params:scim:schemas:extension:genesys:purecloud:2.0:User:routingLanguages\", or \"urn:ietf:params:scim:schemas:extension:genesys:purecloud:2.0:User:routingSkills\". :param list[str] attributes: Indicates which attributes to include. Returns these attributes and the \"id\", \"userName\", \"active\", and \"meta\" attributes. Use \"attributes\" to avoid expensive secondary calls for the default attributes. :param list[str] excluded_attributes: Indicates which attributes to exclude. Returns the default attributes minus \"excludedAttributes\". Always returns the \"id\", \"userName\", \"active\", and \"meta\" attributes. Use \"excludedAttributes\" to avoid expensive secondary calls for the default attributes. :param str filter: Filters results. If nothing is specified, returns all active users. Examples of valid values: \"id eq 857449b0-d9e7-4cd0-acbf-a6adfb9ef1e9\", \"userName eq <EMAIL>\", \"manager eq 16e10e2f-1136-43fe-bb84-eac073168a49\", \"email eq <EMAIL>\", \"division eq divisionName\", \"externalId eq 167844\", \"active eq false\", \"employeeNumber eq 9876543210\". :return: ScimUserListResponse If the method is called asynchronously, returns the request thread. 
""" all_params = ['start_index', 'count', 'attributes', 'excluded_attributes', 'filter'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_users" % key ) params[key] = val del params['kwargs'] resource_path = '/api/v2/scim/users'.replace('{format}', 'json') path_params = {} query_params = {} if 'start_index' in params: query_params['startIndex'] = params['start_index'] if 'count' in params: query_params['count'] = params['count'] if 'attributes' in params: query_params['attributes'] = params['attributes'] if 'excluded_attributes' in params: query_params['excludedAttributes'] = params['excluded_attributes'] if 'filter' in params: query_params['filter'] = params['filter'] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimUserListResponse', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_v2_group(self, group_id, **kwargs): """ Get a group This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_v2_group(group_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str group_id: The ID of a group. Returned with GET /api/v2/scim/v2/groups. (required) :param list[str] attributes: Indicates which attributes to include. Returns these attributes and the \"id\", \"active\", and \"meta\" attributes. Use \"attributes\" to avoid expensive secondary calls for the default attributes. :param list[str] excluded_attributes: Indicates which attributes to exclude. Returns the default attributes minus \"excludedAttributes\". Always returns \"id\", \"active\", and \"meta\" attributes. Use \"excludedAttributes\" to avoid expensive secondary calls for the default attributes. :param str if_none_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/v2/groups/{groupId}. Example: \"42\". If the ETag is different from the version on the server, returns the current configuration of the resource. If the ETag is current, returns 304 Not Modified. :return: ScimV2Group If the method is called asynchronously, returns the request thread. 
""" all_params = ['group_id', 'attributes', 'excluded_attributes', 'if_none_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_v2_group" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'group_id' is set if ('group_id' not in params) or (params['group_id'] is None): raise ValueError("Missing the required parameter `group_id` when calling `get_scim_v2_group`") resource_path = '/api/v2/scim/v2/groups/{groupId}'.replace('{format}', 'json') path_params = {} if 'group_id' in params: path_params['groupId'] = params['group_id'] query_params = {} if 'attributes' in params: query_params['attributes'] = params['attributes'] if 'excluded_attributes' in params: query_params['excludedAttributes'] = params['excluded_attributes'] header_params = {} if 'if_none_match' in params: header_params['If-None-Match'] = params['if_none_match'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2Group', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_v2_groups(self, filter, **kwargs): """ Get a list of groups This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_v2_groups(filter, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str filter: Filters results. If nothing is specified, returns all groups. Examples of valid values: \"id eq 5f4bc742-a019-4e38-8e2a-d39d5bc0b0f3\", \"displayname eq Sales\". (required) :param int start_index: The 1-based index of the first query result. :param int count: The requested number of items per page. A value of 0 returns \"totalResults\". A page size over 25 may exceed internal resource limits and return a 429 error. For a page size over 25, use the \"excludedAttributes\" or \"attributes\" query parameters to exclude or only include secondary lookup values such as \"externalId\", \"roles\", \"urn:ietf:params:scim:schemas:extension:genesys:purecloud:2.0:User:routingLanguages\", or \"urn:ietf:params:scim:schemas:extension:genesys:purecloud:2.0:User:routingSkills\". :param list[str] attributes: Indicates which attributes to include. Returns these attributes and the \"id\", \"active\", and \"meta\" attributes. Use \"attributes\" to avoid expensive secondary calls for the default attributes. :param list[str] excluded_attributes: Indicates which attributes to exclude. Returns the default attributes minus \"excludedAttributes\". Always returns \"id\", \"active\", and \"meta\" attributes. Use \"excludedAttributes\" to avoid expensive secondary calls for the default attributes. 
:return: ScimGroupListResponse If the method is called asynchronously, returns the request thread. """ all_params = ['filter', 'start_index', 'count', 'attributes', 'excluded_attributes'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_v2_groups" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'filter' is set if ('filter' not in params) or (params['filter'] is None): raise ValueError("Missing the required parameter `filter` when calling `get_scim_v2_groups`") resource_path = '/api/v2/scim/v2/groups'.replace('{format}', 'json') path_params = {} query_params = {} if 'start_index' in params: query_params['startIndex'] = params['start_index'] if 'count' in params: query_params['count'] = params['count'] if 'attributes' in params: query_params['attributes'] = params['attributes'] if 'excluded_attributes' in params: query_params['excludedAttributes'] = params['excluded_attributes'] if 'filter' in params: query_params['filter'] = params['filter'] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimGroupListResponse', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_v2_resourcetype(self, resource_type, **kwargs): """ Get a resource type This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_v2_resourcetype(resource_type, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str resource_type: The type of resource. Returned with GET /api/v2/scim/v2/resourcetypes. (required) :return: ScimConfigResourceType If the method is called asynchronously, returns the request thread. 
""" all_params = ['resource_type'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_v2_resourcetype" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'resource_type' is set if ('resource_type' not in params) or (params['resource_type'] is None): raise ValueError("Missing the required parameter `resource_type` when calling `get_scim_v2_resourcetype`") resource_path = '/api/v2/scim/v2/resourcetypes/{resourceType}'.replace('{format}', 'json') path_params = {} if 'resource_type' in params: path_params['resourceType'] = params['resource_type'] query_params = {} header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimConfigResourceType', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_v2_resourcetypes(self, **kwargs): """ Get a list of resource types This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_v2_resourcetypes(callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :return: ScimConfigResourceTypesListResponse If the method is called asynchronously, returns the request thread. """ all_params = [] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_v2_resourcetypes" % key ) params[key] = val del params['kwargs'] resource_path = '/api/v2/scim/v2/resourcetypes'.replace('{format}', 'json') path_params = {} query_params = {} header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimConfigResourceTypesListResponse', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_v2_schema(self, schema_id, **kwargs): """ Get a SCIM schema This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. 
>>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_v2_schema(schema_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str schema_id: The ID of a schema. Returned with GET /api/v2/scim/v2/schemas. (required) :return: ScimV2SchemaDefinition If the method is called asynchronously, returns the request thread. """ all_params = ['schema_id'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_v2_schema" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'schema_id' is set if ('schema_id' not in params) or (params['schema_id'] is None): raise ValueError("Missing the required parameter `schema_id` when calling `get_scim_v2_schema`") resource_path = '/api/v2/scim/v2/schemas/{schemaId}'.replace('{format}', 'json') path_params = {} if 'schema_id' in params: path_params['schemaId'] = params['schema_id'] query_params = {} header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2SchemaDefinition', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_v2_schemas(self, **kwargs): """ Get a list of SCIM schemas This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_v2_schemas(callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str filter: Filtered results are invalid and return 403 Unauthorized. :return: ScimV2SchemaListResponse If the method is called asynchronously, returns the request thread. 
""" all_params = ['filter'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_v2_schemas" % key ) params[key] = val del params['kwargs'] resource_path = '/api/v2/scim/v2/schemas'.replace('{format}', 'json') path_params = {} query_params = {} if 'filter' in params: query_params['filter'] = params['filter'] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2SchemaListResponse', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_v2_serviceproviderconfig(self, **kwargs): """ Get a service provider's configuration This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_v2_serviceproviderconfig(callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str if_none_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/v2/serviceproviderconfig. Example: \"42\". If the ETag is different from the version on the server, returns the current configuration of the resource. If the ETag is current, returns 304 Not Modified. :return: ScimServiceProviderConfig If the method is called asynchronously, returns the request thread. 
""" all_params = ['if_none_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_v2_serviceproviderconfig" % key ) params[key] = val del params['kwargs'] resource_path = '/api/v2/scim/v2/serviceproviderconfig'.replace('{format}', 'json') path_params = {} query_params = {} header_params = {} if 'if_none_match' in params: header_params['If-None-Match'] = params['if_none_match'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimServiceProviderConfig', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_v2_user(self, user_id, **kwargs): """ Get a user This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_v2_user(user_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str user_id: The ID of a user. Returned with GET /api/v2/scim/v2/users. (required) :param list[str] attributes: Indicates which attributes to include. Returns these attributes and the \"id\", \"userName\", \"active\", and \"meta\" attributes. Use \"attributes\" to avoid expensive secondary calls for the default attributes. :param list[str] excluded_attributes: Indicates which attributes to exclude. Returns the default attributes minus \"excludedAttributes\". Always returns the \"id\", \"userName\", \"active\", and \"meta\" attributes. Use \"excludedAttributes\" to avoid expensive secondary calls for the default attributes. :param str if_none_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/v2/users/{userId}. Example: \"42\". If the ETag is different from the version on the server, returns the current configuration of the resource. If the ETag is current, returns 304 Not Modified. :return: ScimV2User If the method is called asynchronously, returns the request thread. 
""" all_params = ['user_id', 'attributes', 'excluded_attributes', 'if_none_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_v2_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'user_id' is set if ('user_id' not in params) or (params['user_id'] is None): raise ValueError("Missing the required parameter `user_id` when calling `get_scim_v2_user`") resource_path = '/api/v2/scim/v2/users/{userId}'.replace('{format}', 'json') path_params = {} if 'user_id' in params: path_params['userId'] = params['user_id'] query_params = {} if 'attributes' in params: query_params['attributes'] = params['attributes'] if 'excluded_attributes' in params: query_params['excludedAttributes'] = params['excluded_attributes'] header_params = {} if 'if_none_match' in params: header_params['If-None-Match'] = params['if_none_match'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2User', auth_settings=auth_settings, callback=params.get('callback')) return response def get_scim_v2_users(self, **kwargs): """ Get a list of users To return all active users, do not use the filter parameter. To return inactive users, set the filter parameter to \"active eq false\". By default, returns SCIM attributes \"externalId\", \"enterprise-user:manager\", and \"roles\". To exclude these attributes, set the attributes parameter to \"id,active\" or the excludeAttributes parameter to \"externalId,roles,urn:ietf:params:scim:schemas:extension:enterprise:2.0:User:division\". This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_scim_v2_users(callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int start_index: The 1-based index of the first query result. :param int count: The requested number of items per page. A value of 0 returns \"totalResults\". A page size over 25 may exceed internal resource limits and return a 429 error. For a page size over 25, use the \"excludedAttributes\" or \"attributes\" query parameters to exclude or only include secondary lookup values such as \"externalId\", \"roles\", \"urn:ietf:params:scim:schemas:extension:genesys:purecloud:2.0:User:routingLanguages\", or \"urn:ietf:params:scim:schemas:extension:genesys:purecloud:2.0:User:routingSkills\". :param list[str] attributes: Indicates which attributes to include. Returns these attributes and the \"id\", \"userName\", \"active\", and \"meta\" attributes. Use \"attributes\" to avoid expensive secondary calls for the default attributes. :param list[str] excluded_attributes: Indicates which attributes to exclude. 
Returns the default attributes minus \"excludedAttributes\". Always returns the \"id\", \"userName\", \"active\", and \"meta\" attributes. Use \"excludedAttributes\" to avoid expensive secondary calls for the default attributes. :param str filter: Filters results. If nothing is specified, returns all active users. Examples of valid values: \"id eq 857449b0-d9e7-4cd0-acbf-a6adfb9ef1e9\", \"userName eq <EMAIL>\", \"manager eq 16e10e2f-1136-43fe-bb84-eac073168a49\", \"email eq <EMAIL>\", \"division eq divisionName\", \"externalId eq 167844\", \"active eq false\", \"employeeNumber eq 9876543210\". :return: ScimUserListResponse If the method is called asynchronously, returns the request thread. """ all_params = ['start_index', 'count', 'attributes', 'excluded_attributes', 'filter'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_scim_v2_users" % key ) params[key] = val del params['kwargs'] resource_path = '/api/v2/scim/v2/users'.replace('{format}', 'json') path_params = {} query_params = {} if 'start_index' in params: query_params['startIndex'] = params['start_index'] if 'count' in params: query_params['count'] = params['count'] if 'attributes' in params: query_params['attributes'] = params['attributes'] if 'excluded_attributes' in params: query_params['excludedAttributes'] = params['excluded_attributes'] if 'filter' in params: query_params['filter'] = params['filter'] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimUserListResponse', auth_settings=auth_settings, callback=params.get('callback')) return response def patch_scim_group(self, group_id, body, **kwargs): """ Modify a group This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.patch_scim_group(group_id, body, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str group_id: The ID of a group. Returned with GET /api/v2/scim/groups. (required) :param ScimV2PatchRequest body: The information used to modify a group. (required) :param str if_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/groups/{groupId}. Example: \"42\". If the ETag is different from the version on the server, returns 400 with a \"scimType\" of \"invalidVers\". :return: ScimV2Group If the method is called asynchronously, returns the request thread. 
""" all_params = ['group_id', 'body', 'if_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method patch_scim_group" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'group_id' is set if ('group_id' not in params) or (params['group_id'] is None): raise ValueError("Missing the required parameter `group_id` when calling `patch_scim_group`") # verify the required parameter 'body' is set if ('body' not in params) or (params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `patch_scim_group`") resource_path = '/api/v2/scim/groups/{groupId}'.replace('{format}', 'json') path_params = {} if 'group_id' in params: path_params['groupId'] = params['group_id'] query_params = {} header_params = {} if 'if_match' in params: header_params['If-Match'] = params['if_match'] form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2Group', auth_settings=auth_settings, callback=params.get('callback')) return response def patch_scim_user(self, user_id, body, **kwargs): """ Modify a user This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.patch_scim_user(user_id, body, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str user_id: The ID of a user. Returned with GET /api/v2/scim/users. (required) :param ScimV2PatchRequest body: The information used to modify a user. (required) :param str if_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/users/{userId}. Example: \"42\". If the ETag is different from the version on the server, returns 400 with a \"scimType\" of \"invalidVers\". :return: ScimV2User If the method is called asynchronously, returns the request thread. 
""" all_params = ['user_id', 'body', 'if_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method patch_scim_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'user_id' is set if ('user_id' not in params) or (params['user_id'] is None): raise ValueError("Missing the required parameter `user_id` when calling `patch_scim_user`") # verify the required parameter 'body' is set if ('body' not in params) or (params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `patch_scim_user`") resource_path = '/api/v2/scim/users/{userId}'.replace('{format}', 'json') path_params = {} if 'user_id' in params: path_params['userId'] = params['user_id'] query_params = {} header_params = {} if 'if_match' in params: header_params['If-Match'] = params['if_match'] form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2User', auth_settings=auth_settings, callback=params.get('callback')) return response def patch_scim_v2_group(self, group_id, body, **kwargs): """ Modify a group This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.patch_scim_v2_group(group_id, body, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str group_id: The ID of a group. Returned with GET /api/v2/scim/v2/groups. (required) :param ScimV2PatchRequest body: The information used to modify a group. (required) :param str if_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/v2/groups/{groupId}. Example: \"42\". If the ETag is different from the version on the server, returns 400 with a \"scimType\" of \"invalidVers\". :return: ScimV2Group If the method is called asynchronously, returns the request thread. 
""" all_params = ['group_id', 'body', 'if_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method patch_scim_v2_group" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'group_id' is set if ('group_id' not in params) or (params['group_id'] is None): raise ValueError("Missing the required parameter `group_id` when calling `patch_scim_v2_group`") # verify the required parameter 'body' is set if ('body' not in params) or (params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `patch_scim_v2_group`") resource_path = '/api/v2/scim/v2/groups/{groupId}'.replace('{format}', 'json') path_params = {} if 'group_id' in params: path_params['groupId'] = params['group_id'] query_params = {} header_params = {} if 'if_match' in params: header_params['If-Match'] = params['if_match'] form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2Group', auth_settings=auth_settings, callback=params.get('callback')) return response def patch_scim_v2_user(self, user_id, body, **kwargs): """ Modify a user This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.patch_scim_v2_user(user_id, body, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str user_id: The ID of a user. Returned with GET /api/v2/scim/v2/users. (required) :param ScimV2PatchRequest body: The information used to modify a user. (required) :param str if_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/v2/users/{userId}. Example: \"42\". If the ETag is different from the version on the server, returns 400 with a \"scimType\" of \"invalidVers\". :return: ScimV2User If the method is called asynchronously, returns the request thread. 
""" all_params = ['user_id', 'body', 'if_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method patch_scim_v2_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'user_id' is set if ('user_id' not in params) or (params['user_id'] is None): raise ValueError("Missing the required parameter `user_id` when calling `patch_scim_v2_user`") # verify the required parameter 'body' is set if ('body' not in params) or (params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `patch_scim_v2_user`") resource_path = '/api/v2/scim/v2/users/{userId}'.replace('{format}', 'json') path_params = {} if 'user_id' in params: path_params['userId'] = params['user_id'] query_params = {} header_params = {} if 'if_match' in params: header_params['If-Match'] = params['if_match'] form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2User', auth_settings=auth_settings, callback=params.get('callback')) return response def post_scim_users(self, body, **kwargs): """ Create a user This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.post_scim_users(body, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param ScimV2CreateUser body: The information used to create a user. (required) :return: ScimV2User If the method is called asynchronously, returns the request thread. 
""" all_params = ['body'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method post_scim_users" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'body' is set if ('body' not in params) or (params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `post_scim_users`") resource_path = '/api/v2/scim/users'.replace('{format}', 'json') path_params = {} query_params = {} header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2User', auth_settings=auth_settings, callback=params.get('callback')) return response def post_scim_v2_users(self, body, **kwargs): """ Create a user This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.post_scim_v2_users(body, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param ScimV2CreateUser body: The information used to create a user. (required) :return: ScimV2User If the method is called asynchronously, returns the request thread. """ all_params = ['body'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method post_scim_v2_users" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'body' is set if ('body' not in params) or (params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `post_scim_v2_users`") resource_path = '/api/v2/scim/v2/users'.replace('{format}', 'json') path_params = {} query_params = {} header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2User', auth_settings=auth_settings, callback=params.get('callback')) return response def put_scim_group(self, group_id, body, **kwargs): """ Replace a group This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.put_scim_group(group_id, body, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str group_id: The ID of a group. Returned with GET /api/v2/scim/groups. (required) :param ScimV2Group body: The information used to replace a group. (required) :param str if_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/groups/{groupId}. Example: \"42\". If the ETag is different from the version on the server, returns 400 with a \"scimType\" of \"invalidVers\". :return: ScimV2Group If the method is called asynchronously, returns the request thread. """ all_params = ['group_id', 'body', 'if_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method put_scim_group" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'group_id' is set if ('group_id' not in params) or (params['group_id'] is None): raise ValueError("Missing the required parameter `group_id` when calling `put_scim_group`") # verify the required parameter 'body' is set if ('body' not in params) or (params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `put_scim_group`") resource_path = '/api/v2/scim/groups/{groupId}'.replace('{format}', 'json') path_params = {} if 'group_id' in params: path_params['groupId'] = params['group_id'] query_params = {} header_params = {} if 'if_match' in params: header_params['If-Match'] = params['if_match'] form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2Group', auth_settings=auth_settings, callback=params.get('callback')) return response def put_scim_user(self, user_id, body, **kwargs): """ Replace a user This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.put_scim_user(user_id, body, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str user_id: The ID of a user. Returned with GET /api/v2/scim/users. (required) :param ScimV2User body: The information used to replace a user. (required) :param str if_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/users/{userId}. Example: \"42\". If the ETag is different from the version on the server, returns 400 with a \"scimType\" of \"invalidVers\". 
:return: ScimV2User If the method is called asynchronously, returns the request thread. """ all_params = ['user_id', 'body', 'if_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method put_scim_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'user_id' is set if ('user_id' not in params) or (params['user_id'] is None): raise ValueError("Missing the required parameter `user_id` when calling `put_scim_user`") # verify the required parameter 'body' is set if ('body' not in params) or (params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `put_scim_user`") resource_path = '/api/v2/scim/users/{userId}'.replace('{format}', 'json') path_params = {} if 'user_id' in params: path_params['userId'] = params['user_id'] query_params = {} header_params = {} if 'if_match' in params: header_params['If-Match'] = params['if_match'] form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2User', auth_settings=auth_settings, callback=params.get('callback')) return response def put_scim_v2_group(self, group_id, body, **kwargs): """ Replace a group This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.put_scim_v2_group(group_id, body, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str group_id: The ID of a group. Returned with GET /api/v2/scim/v2/groups. (required) :param ScimV2Group body: The information used to replace a group. (required) :param str if_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/v2/groups/{groupId}. Example: \"42\". If the ETag is different from the version on the server, returns 400 with a \"scimType\" of \"invalidVers\". :return: ScimV2Group If the method is called asynchronously, returns the request thread. 
""" all_params = ['group_id', 'body', 'if_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method put_scim_v2_group" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'group_id' is set if ('group_id' not in params) or (params['group_id'] is None): raise ValueError("Missing the required parameter `group_id` when calling `put_scim_v2_group`") # verify the required parameter 'body' is set if ('body' not in params) or (params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `put_scim_v2_group`") resource_path = '/api/v2/scim/v2/groups/{groupId}'.replace('{format}', 'json') path_params = {} if 'group_id' in params: path_params['groupId'] = params['group_id'] query_params = {} header_params = {} if 'if_match' in params: header_params['If-Match'] = params['if_match'] form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2Group', auth_settings=auth_settings, callback=params.get('callback')) return response def put_scim_v2_user(self, user_id, body, **kwargs): """ Replace a user This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.put_scim_v2_user(user_id, body, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str user_id: The ID of a user. Returned with GET /api/v2/scim/v2/users. (required) :param ScimV2User body: The information used to replace a user. (required) :param str if_match: The ETag of a resource in double quotes. Returned as header and meta.version with initial call to GET /api/v2/scim/v2/users/{userId}. Example: \"42\". If the ETag is different from the version on the server, returns 400 with a \"scimType\" of \"invalidVers\". :return: ScimV2User If the method is called asynchronously, returns the request thread. 
""" all_params = ['user_id', 'body', 'if_match'] all_params.append('callback') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method put_scim_v2_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'user_id' is set if ('user_id' not in params) or (params['user_id'] is None): raise ValueError("Missing the required parameter `user_id` when calling `put_scim_v2_user`") # verify the required parameter 'body' is set if ('body' not in params) or (params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `put_scim_v2_user`") resource_path = '/api/v2/scim/v2/users/{userId}'.replace('{format}', 'json') path_params = {} if 'user_id' in params: path_params['userId'] = params['user_id'] query_params = {} header_params = {} if 'if_match' in params: header_params['If-Match'] = params['if_match'] form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json', 'application/scim+json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 'application/scim+json']) # Authentication setting auth_settings = ['PureCloud OAuth'] response = self.api_client.call_api(resource_path, 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScimV2User', auth_settings=auth_settings, callback=params.get('callback')) return response
AlexanderPetrovv/Java-Fundamentals
JavaAdvanced/14.StreamApiExercise/src/FilterStudentsByEmailDomain.java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.LinkedHashMap;
import java.util.Map;

public class FilterStudentsByEmailDomain {
    public static void main(String[] args) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        Map<String, String> students = new LinkedHashMap<>();
        String targetDomain = "@gmail.com";

        String line = reader.readLine();
        while (!"END".equals(line)) {
            String[] tokens = line.split("\\s+");
            String name = tokens[0] + " " + tokens[1];
            String email = tokens[2];
            students.put(name, email);
            line = reader.readLine();
        }

        students.entrySet().stream()
                .filter(kvp -> kvp.getValue().endsWith(targetDomain))
                .forEach(kvp -> System.out.println(kvp.getKey()));
    }
}
Next-Gen-UI/Code-Dynamics
Leetcode/0633. Sum of Square Numbers/0633.java
class Solution {
  public boolean judgeSquareSum(int c) {
    int l = 0;
    int r = (int) Math.sqrt(c);

    while (l <= r) {
      final int sum = l * l + r * r;
      if (sum == c)
        return true;
      if (sum < c)
        ++l;
      else
        --r;
    }

    return false;
  }
}
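// Illustrative usage sketch (hypothetical driver class, not part of the original
// solution): the two-pointer loop above narrows l and r until l*l + r*r meets c.
class SolutionDemo {
  public static void main(String[] args) {
    Solution solution = new Solution();
    System.out.println(solution.judgeSquareSum(5));  // true: 1*1 + 2*2 == 5
    System.out.println(solution.judgeSquareSum(3));  // false: no such pair
  }
}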
gmachado-janrain/ApiExplorer
src/services/SchemaSvc.js
'use strict';

var pluck = require('lodash/collection/pluck');
var intersect = require('lodash/array/intersection');

module.exports = function(CredentialSvc, HttpSvc, $q) {
  'ngInject';

  var self = this;

  function basePath() {
    var creds = CredentialSvc.get();
    return [ 'config', creds.appId, 'schemas' ];
  }

  // Since lodash 3.10.1 doesn't have intersectionBy, we'll need to create our own
  // Remove this when we move to lodash 4.14.1+
  function intersectionBy(results, property) {
    if(results.length <= 0) return [];

    // pull a list of the specified property and intersect on those
    var propIntersect = intersect.apply(this, results.map(function(result) {
      return result.data.map(function(attr) {
        return attr[property];
      });
    }));

    // Build an array of the matching objects
    var result = [];
    var baseSchema = results[0].data;
    for(var i = 0; i < baseSchema.length; i++) {
      if(propIntersect.indexOf(baseSchema[i][property]) != -1) {
        result = result.concat([baseSchema[i]]);
      }
    }

    return result;
  }

  this.getAll = function() {
    return HttpSvc.get(basePath())
  }

  this.get = function(schema) {
    return HttpSvc.get(basePath().concat([schema]))
  }

  this.getAllIntersect = function(schemas) {
    return $q.all(schemas.map(self.get))
      .then(function(results) {
        return { data: intersectionBy(results, 'schemaAttribute') };
      });
  }

  this.getAttribute = function(schema, attribute) {
    return HttpSvc.get(basePath().concat([schema, attribute]))
  }

  this.addAttribute = function(schema, attribute, attribute_def) {
    return HttpSvc.put(basePath().concat([schema, attribute]), attribute_def)
  }

  this.deleteAttribute = function(schema, attribute) {
    return HttpSvc.delete(basePath().concat([schema, attribute]))
  }

  return this;
}
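// Illustrative sketch of the lodash 4.x replacement that the comment above refers
// to (assumes lodash >= 4.14.1 is available; the helper name is hypothetical and
// is not used by the original service):
var intersectionBy4 = require('lodash/intersectionBy');

function intersectSchemas(results) {
  // spread each result's attribute list and intersect on the named property
  var schemas = results.map(function(result) { return result.data; });
  return intersectionBy4.apply(null, schemas.concat(['schemaAttribute']));
}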
Leoyuseu/Code
LeetCode/c/116.c
#include <stdio.h>
#include <stdlib.h>

struct TreeLinkNode {
    int val;
    struct TreeLinkNode *left, *right, *next;
};

void connectHelper(struct TreeLinkNode *node, struct TreeLinkNode *sibling)
{
    if (node == NULL)
        return;
    if (!node->left || !node->right)
        return;

    /* node has two children */
    node->left->next = node->right;
    node->right->next = (sibling == NULL) ? NULL : sibling->left;

    connectHelper(node->left, node->right);
    connectHelper(node->right, node->right->next);
}

void connect(struct TreeLinkNode *root)
{
    if (root == NULL)
        return;
    root->next = NULL;
    connectHelper(root, NULL);
}

int main()
{
    struct TreeLinkNode *root = (struct TreeLinkNode *)calloc(7, sizeof(struct TreeLinkNode));

    root->val = 1;
    root->left = root + 1;
    root->left->val = 2;
    root->right = root + 2;
    root->right->val = 3;
    root->left->left = root + 3;
    root->left->left->val = 4;
    root->left->right = root + 4;
    root->left->right->val = 5;
    root->right->left = root + 5;
    root->right->left->val = 6;
    root->right->right = root + 6;
    root->right->right->val = 7;

    connect(root);
    return 0;
}
vadim8kiselev/reflection-ui
src/main/java/com/kiselev/reflection/ui/impl/name/NameUtils.java
package com.kiselev.reflection.ui.impl.name;

public class NameUtils {

    public String getTypeName(Class<?> clazz) {
        String typeName = clazz.getSimpleName();

        if ("".equals(typeName)) {
            typeName = clazz.getName().substring(clazz.getName().lastIndexOf('.') + 1);
        }

        return typeName;
    }
}
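// Illustrative usage sketch (hypothetical demo class, not part of the original
// source): anonymous classes return an empty getSimpleName(), so getTypeName
// falls back to the tail of getName(), e.g. "NameUtilsDemo$1".
class NameUtilsDemo {

    public static void main(String[] args) {
        NameUtils utils = new NameUtils();
        System.out.println(utils.getTypeName(String.class));   // "String"

        Runnable anonymous = new Runnable() {
            public void run() {
            }
        };
        System.out.println(utils.getTypeName(anonymous.getClass()));   // "NameUtilsDemo$1"
    }
}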
zhangkn/iOS14Header
System/Library/PrivateFrameworks/Translation.framework/_LTTranslationRequest.h
/*
* This header is generated by classdump-dyld 1.0
* on Sunday, September 27, 2020 at 11:42:11 AM Mountain Standard Time
* Operating System: Version 14.0 (Build 18A373)
* Image Source: /System/Library/PrivateFrameworks/Translation.framework/Translation
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/

@class NSString, _LTLocalePair, NSURL;

@interface _LTTranslationRequest : NSObject {

	BOOL _censorSpeech;
	BOOL _forcedOfflineTranslation;
	BOOL __forcedOnlineTranslation;
	BOOL _autodetectLanguage;
	NSString* _uniqueID;
	NSString* _sessionID;
	long long _taskHint;
	_LTLocalePair* _localePair;
	NSURL* _outputFileURL;
	NSURL* __offlineMTModelURL;
	long long __mtConfidenceThreshold;

}

@property (nonatomic,readonly) NSString * loggingType;
@property (nonatomic,copy) NSString * uniqueID;                          //@synthesize uniqueID=_uniqueID - In the implementation block
@property (nonatomic,copy) NSString * sessionID;                         //@synthesize sessionID=_sessionID - In the implementation block
@property (assign,nonatomic) long long taskHint;                         //@synthesize taskHint=_taskHint - In the implementation block
@property (nonatomic,readonly) _LTLocalePair * localePair;               //@synthesize localePair=_localePair - In the implementation block
@property (nonatomic,retain) NSURL * outputFileURL;                      //@synthesize outputFileURL=_outputFileURL - In the implementation block
@property (assign,nonatomic) BOOL censorSpeech;                          //@synthesize censorSpeech=_censorSpeech - In the implementation block
@property (assign,nonatomic) BOOL forcedOfflineTranslation;              //@synthesize forcedOfflineTranslation=_forcedOfflineTranslation - In the implementation block
@property (assign,nonatomic) BOOL _forcedOnlineTranslation;              //@synthesize _forcedOnlineTranslation=__forcedOnlineTranslation - In the implementation block
@property (nonatomic,retain) NSURL * _offlineMTModelURL;                 //@synthesize _offlineMTModelURL=__offlineMTModelURL - In the implementation block
@property (assign,nonatomic) long long _mtConfidenceThreshold;           //@synthesize _mtConfidenceThreshold=__mtConfidenceThreshold - In the implementation block
@property (assign,nonatomic) BOOL autodetectLanguage;                    //@synthesize autodetectLanguage=_autodetectLanguage - In the implementation block
-(id)requestContext;
-(_LTLocalePair *)localePair;
-(void)setSessionID:(NSString *)arg1 ;
-(id)initWithSourceLocale:(id)arg1 targetLocale:(id)arg2 ;
-(BOOL)autodetectLanguage;
-(void)setAutodetectLanguage:(BOOL)arg1 ;
-(NSString *)sessionID;
-(NSURL *)outputFileURL;
-(void)setOutputFileURL:(NSURL *)arg1 ;
-(void)setTaskHint:(long long)arg1 ;
-(void)setUniqueID:(NSString *)arg1 ;
-(NSString *)uniqueID;
-(BOOL)censorSpeech;
-(long long)taskHint;
-(void)setCensorSpeech:(BOOL)arg1 ;
-(BOOL)forcedOfflineTranslation;
-(BOOL)_forcedOnlineTranslation;
-(void)_startTranslationWithService:(id)arg1 done:(/*^block*/id)arg2 ;
-(id)initWithLocalePair:(id)arg1 ;
-(NSURL *)_offlineMTModelURL;
-(NSString *)loggingType;
-(void)_translationFailedWithError:(id)arg1 ;
-(void)setForcedOfflineTranslation:(BOOL)arg1 ;
-(void)set_forcedOnlineTranslation:(BOOL)arg1 ;
-(void)set_offlineMTModelURL:(NSURL *)arg1 ;
-(long long)_mtConfidenceThreshold;
-(void)set_mtConfidenceThreshold:(long long)arg1 ;
@end
timfel/netbeans
ide/xml.schema.model/src/org/netbeans/modules/xml/schema/model/ElementReference.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.netbeans.modules.xml.schema.model;

import java.util.Set;
import org.netbeans.modules.xml.xam.dom.NamedComponentReference;
import org.netbeans.modules.xml.xam.Nameable;

/**
 * This interface represents a local element using the ref attribute
 * @author <NAME>
 */
public interface ElementReference extends Element, SequenceDefinition, SchemaComponent {

    public static final String MIN_OCCURS_PROPERTY = "minOccurs";
    public static final String MAX_OCCURS_PROPERTY = "maxOccurs";
    public static final String FORM_PROPERTY = "form"; //NOI18N

    Form getForm();
    void setForm(Form form);
    Form getFormDefault();
    Form getFormEffective();

    /**
     * true if #getMaxOccurs() and #getMinOccurs() allow multiplicity outside
     * [0,1], false otherwise. This method is only accurate after the element
     * has been inserted into the model.
     */
    boolean allowsFullMultiplicity();

    String getMaxOccurs();
    void setMaxOccurs(String max);
    String getMaxOccursDefault();
    String getMaxOccursEffective();

    Integer getMinOccurs();
    void setMinOccurs(Integer min);
    int getMinOccursDefault();
    int getMinOccursEffective();

    NamedComponentReference<GlobalElement> getRef();
    void setRef(NamedComponentReference<GlobalElement> ref);
}
OuesFa/integrations-core
teradata/datadog_checks/teradata/utils.py
<reponame>OuesFa/integrations-core # (C) Datadog, Inc. 2022-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import time from typing import Any, AnyStr, Sequence, Set, Tuple from datadog_checks.base import AgentCheck from datadog_checks.teradata.config_models.instance import Table def filter_tables(self, row): # type: (Any, Sequence) -> Sequence tables_to_collect, tables_to_exclude = self._tables_filter table_name = row[3] # No tables filter if not tables_to_collect and not tables_to_exclude: return row # Table filtered out if table_name in tables_to_exclude: return [] # Table included if table_name in tables_to_collect: return row # Table excluded return [] def create_tables_filter(self): # type: (Any) -> Tuple[Set, Set] tables_to_collect = set() tables_to_exclude = set() tables = self.config.tables if isinstance(tables, tuple): tables_to_collect = set(tables) return tables_to_collect, tables_to_exclude if isinstance(tables, Table): if tables.include and tables.exclude: for table in tables.include: if table not in tables.exclude: tables_to_collect.add(table) tables_to_exclude = set(tables.exclude) return tables_to_collect, tables_to_exclude if tables.include: tables_to_collect = set(tables.include) if tables.exclude: tables_to_exclude = set(tables.exclude) return (tables_to_collect, tables_to_exclude) def timestamp_validator(self, row): # type: (Any, Sequence) -> Sequence now = time.time() row_ts = row[0] if type(row_ts) is not int: msg = 'Returned timestamp `{}` is invalid.'.format(row_ts) self.log.warning(msg) self._query_errors += 1 return [] diff = now - row_ts # Valid metrics should be no more than 10 min in the future or 1h in the past if (diff > 3600) or (diff < -600): msg = 'Resource Usage stats are invalid. {}' if diff > 3600: msg = msg.format('Row timestamp is more than 1h in the past. Is `SPMA` Resource Usage Logging enabled?') elif diff < -600: msg = msg.format('Row timestamp is more than 10 min in the future. Try checking system time settings.') self.log.warning(msg) self._query_errors += 1 return [] return row def tags_normalizer(self, row, query_name): # type: (Any, Sequence, AnyStr) -> Sequence base_tags = [{"name": "td_amp", "col": row[0]}, {"name": "td_account", "col": row[1]}] tags_map = [ {"stats_name": "DBC.DiskSpaceV", "tags": base_tags + [{"name": "td_database", "col": row[2]}]}, { "stats_name": "DBC.AllSpaceV", "tags": base_tags + [{"name": "td_database", "col": row[2]}, {"name": "td_table", "col": row[3]}], }, { "stats_name": "DBC.AMPUsageV", "tags": base_tags + [{"name": "td_user", "col": row[2]}], }, ] for stats_type in tags_map: if query_name == stats_type['stats_name']: for idx, tag in enumerate(stats_type['tags']): # tag value may be type int if not len(str(tag['col'])): row[idx] = "undefined" return row @AgentCheck.metadata_entrypoint def submit_version(check, row): # type (Any) -> None """ Example version: 17.10.03.01 https://docs.teradata.com/r/Teradata-VantageTM-Data-Dictionary/July-2021/Views-Reference/DBCInfoV/Example-Using-DBCInfoV """ try: teradata_version = row[0] version_parts = { name: part for name, part in zip(('major', 'minor', 'maintenance', 'patch'), teradata_version.split('.')) } check.set_metadata('version', teradata_version, scheme='parts', final_scheme='semver', part_map=version_parts) except Exception as e: check.log.warning("Could not collect version info: %s", e)
rantler/AdaFruit
adafruit-circuitpython-bundle-py-20201107/examples/stmpe610_simpletest.py
import busio
import board
import digitalio

from adafruit_stmpe610 import Adafruit_STMPE610_SPI

spi = busio.SPI(board.SCK, board.MOSI, board.MISO)
cs = digitalio.DigitalInOut(board.D6)
st = Adafruit_STMPE610_SPI(spi, cs)

print("Go Ahead - Touch the Screen - Make My Day!")

while True:
    if not st.buffer_empty:
        print(st.read_data())
tusharchoudhary0003/Custom-Football-Game
sources/p005cm/aptoide/p006pt/home/apps/C3610Td.java
package p005cm.aptoide.p006pt.home.apps;

import p005cm.aptoide.p006pt.database.realm.Update;
import p026rx.p027b.C0132p;

/* renamed from: cm.aptoide.pt.home.apps.Td */
/* compiled from: lambda */
public final /* synthetic */ class C3610Td implements C0132p {

    /* renamed from: a */
    private final /* synthetic */ SeeMoreAppcManager f6998a;

    public /* synthetic */ C3610Td(SeeMoreAppcManager seeMoreAppcManager) {
        this.f6998a = seeMoreAppcManager;
    }

    public final Object call(Object obj) {
        return this.f6998a.mo14850a((Update) obj);
    }
}
daffinito/dmh
client/src/reducers/dispensaryReducer.js
import { GET_DISPENSARY_BY_ACCOUNT } from "../actions/types";

const DEFAULT_STATE = {
  getDispensaryByAccount: { success: null, message: null },
  name: "",
  place_id: "",
  address: "",
  lat: 0,
  lng: 0,
  zip: ""
};

// refactor this to use spread operator
export default (state = DEFAULT_STATE, action) => {
  switch (action.type) {
    case GET_DISPENSARY_BY_ACCOUNT:
      return Object.assign({}, state, {
        getDispensaryByAccount: {
          success: action.payload.success,
          message: action.payload.message
        },
        name: action.payload.dispensary.name,
        place_id: action.payload.dispensary.place_id,
        address: action.payload.dispensary.address,
        lat: action.payload.dispensary.lat,
        lng: action.payload.dispensary.lng,
        zip: action.payload.dispensary.zip
      });
    default:
      return state;
  }
};
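// Illustrative sketch of the spread-operator refactor suggested by the comment
// above (hypothetical helper, not exported or referenced by the original reducer):
const withDispensary = (state, payload) => ({
  ...state,
  getDispensaryByAccount: {
    success: payload.success,
    message: payload.message
  },
  name: payload.dispensary.name,
  place_id: payload.dispensary.place_id,
  address: payload.dispensary.address,
  lat: payload.dispensary.lat,
  lng: payload.dispensary.lng,
  zip: payload.dispensary.zip
});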
OpenHEC/SNN-simulator-on-PYNQcluster
NEST-14.0-FPGA/pynest/nest/tests/test_onetooneconnect.py
<filename>NEST-14.0-FPGA/pynest/nest/tests/test_onetooneconnect.py # -*- coding: utf-8 -*- # # test_onetooneconnect.py # # This file is part of NEST. # # Copyright (C) 2004 The NEST Initiative # # NEST is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # # NEST is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with NEST. If not, see <http://www.gnu.org/licenses/>. """ UnitTests for the PyNEST connect API. """ import unittest import nest @nest.check_stack class OneToOneConnectTestCase(unittest.TestCase): """Tests of Connect with OneToOne pattern""" def test_ConnectPrePost(self): """Connect pre to post""" # Connect([pre], [post]) nest.ResetKernel() pre = nest.Create("iaf_neuron", 2) post = nest.Create("iaf_neuron", 2) nest.Connect(pre, post, "one_to_one") connections = nest.GetConnections(pre) targets = nest.GetStatus(connections, "target") self.assertEqual(targets, post) def test_ConnectPrePostParams(self): """Connect pre to post with a params dict""" # Connect([pre], [post], params) nest.ResetKernel() pre = nest.Create("iaf_neuron", 2) post = nest.Create("iaf_neuron", 2) nest.Connect(pre, post, "one_to_one", syn_spec={"weight": 2.0}) connections = nest.GetConnections(pre) weights = nest.GetStatus(connections, "weight") self.assertEqual(weights, (2.0, 2.0)) # Connect([pre], [post], [params, params]) nest.ResetKernel() pre = nest.Create("iaf_neuron", 2) post = nest.Create("iaf_neuron", 2) nest.Connect(pre, post, conn_spec={"rule": "one_to_one"}, syn_spec={"weight": [2.0, 3.0]}) connections = nest.GetConnections(pre) weights = nest.GetStatus(connections, "weight") self.assertEqual(weights, (2.0, 3.0)) def test_ConnectPrePostWD(self): """Connect pre to post with a weight and delay""" # Connect([pre], [post], w, d) nest.ResetKernel() pre = nest.Create("iaf_neuron", 2) post = nest.Create("iaf_neuron", 2) nest.Connect(pre, post, conn_spec={"rule": "one_to_one"}, syn_spec={"weight": 2.0, "delay": 2.0}) connections = nest.GetConnections(pre) weights = nest.GetStatus(connections, "weight") delays = nest.GetStatus(connections, "delay") self.assertEqual(weights, (2.0, 2.0)) self.assertEqual(delays, (2.0, 2.0)) # Connect([pre], [post], [w, w], [d, d]) nest.ResetKernel() pre = nest.Create("iaf_neuron", 2) post = nest.Create("iaf_neuron", 2) nest.Connect(pre, post, conn_spec={"rule": "one_to_one"}, syn_spec={"weight": [2.0, 3.0], "delay": [2.0, 3.0]}) connections = nest.GetConnections(pre) weights = nest.GetStatus(connections, "weight") delays = nest.GetStatus(connections, "delay") self.assertEqual(weights, (2.0, 3.0)) self.assertEqual(delays, (2.0, 3.0)) def test_IllegalConnection(self): """Wrong Connections""" nest.ResetKernel() n = nest.Create('iaf_neuron') vm = nest.Create('voltmeter') self.assertRaisesRegex( nest.NESTError, "IllegalConnection", nest.Connect, n, vm) def test_UnexpectedEvent(self): """Unexpected Event""" nest.ResetKernel() n = nest.Create('iaf_neuron') sd = nest.Create('spike_detector') self.assertRaisesRegex( nest.NESTError, "UnexpectedEvent", nest.Connect, sd, n) def suite(): suite = unittest.makeSuite(OneToOneConnectTestCase, 'test') return suite def 
run(): runner = unittest.TextTestRunner(verbosity=2) runner.run(suite()) if __name__ == "__main__": run()
EtsTest-ReactNativeApps/athens-bus-app
src/_commons/style/colors.js
const sharper = 'black';
const sharp = 'steelblue';
const canvas = 'lightgrey';
const smooth = 'skyblue';
const smoother = 'powderblue';

const info = 'green';
const warning = 'orange';
const error = 'tomato';

const bus = '#3333ff';
const trolley = '#ffff33';

const polyline = 'green';
const marker = 'orange';
const specialMarker = 'tomato';

export {
    sharper, sharp, canvas, smooth, smoother,
    info, warning, error,
    bus, trolley,
    polyline, marker, specialMarker
};
Nicolas-Francisco/final-reality-Nicolas-Francisco
src/test/java/com/github/cc3002/finalreality/model/weapon/AxeTest.java
package com.github.cc3002.finalreality.model.weapon;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

/**
 * This testing class tests all the methods of an Axe weapon.
 *
 * @author <NAME>
 * @author <NAME>
 */
public class AxeTest extends AbstractWeaponTest {

    private static final String AXE_NAME = "Test Axe";
    private static final int DAMAGE = 15;
    private static final int SPEED = 10;
    private IWeapon testAxe;
    private IWeapon expectedAxe;
    private IWeapon differentAxe1;
    private IWeapon differentAxe2;
    private IWeapon differentAxe3;
    private Object differentAxe4;

    /**
     * This method creates all the objects that we are going to test with the constructorTest().
     * Using inheritance, we only have to create these objects with different attributes and test them all
     * in the upper method checkConstructor, which tests all the weapons.
     */
    @BeforeEach
    void setUp() {
        testAxe = new Axe(AXE_NAME, DAMAGE, SPEED);
        expectedAxe = new Axe(AXE_NAME, DAMAGE, SPEED);
        differentAxe1 = new Axe("Test", DAMAGE, SPEED);
        differentAxe2 = new Axe(AXE_NAME, 1, SPEED);
        differentAxe3 = new Axe(AXE_NAME, DAMAGE, 1);
        differentAxe4 = new Object();
    }

    /**
     * This method tests the construction method.
     * Because every Hashcode() and Equals() is different in every Class, we have to
     * test every branch and any difference between weapons. In this case, the Hashcode
     * and Equals compares every parameter of the Axe Class, thus we have to test
     * differences in each parameter (name, damage and weight/speed)
     */
    @Test
    void constructorTest() {
        checkConstruction(testAxe, expectedAxe,
                differentAxe1,
                differentAxe2,
                differentAxe3,
                differentAxe4);
    }
}
yen-igaw/reactnative_dfinery_ecommerce_demo
src/screens/FavoriteScreen/components/FavoriteItem.js
import React, { useState, useEffect, useRef } from "react"; import { View, Text, Image, StyleSheet, TouchableOpacity, ActivityIndicator, Animated, Alert, } from "react-native"; import Swipeable from "react-native-gesture-handler/Swipeable"; //Redux import { useDispatch } from "react-redux"; // Action import { addToCart, removeFavorite } from "../../../reducers"; //Color import Colors from "../../../utils/Colors"; //number format import NumberFormat from "react-number-format"; //Text import CustomText from "../../../components/UI/CustomText"; //PropTypes check import PropTypes from "prop-types"; export const renderRightAction = (text, color, action, x, progress) => { const trans = progress.interpolate({ inputRange: [0, 1], outputRange: [x, 0], }); return ( <Animated.View style={{ flex: 1, transform: [{ translateX: trans }] }}> <TouchableOpacity style={[styles.rightAction, { backgroundColor: color }]} onPress={action} > <Text style={styles.actionText}>{text}</Text> </TouchableOpacity> </Animated.View> ); }; export const FavoriteItem = ({ navigation, item }) => { const [isLoading, setIsLoading] = useState(true); const unmounted = useRef(false); useEffect(() => { return () => { unmounted.current = true; }; }, []); const dispatch = useDispatch(); const addToCartAct = async () => { try { await dispatch(addToCart(item)); if (!unmounted.current) { Alert.alert("Thêm thành công", "Sản phẩm đã được thêm vào giỏ hàng", [ { text: "OK", }, ]); } } catch (err) { throw err; } }; const removeFavoriteAct = () => { Alert.alert( "Bỏ yêu thích", "Bạn có muốn bỏ sản phẩm ra khỏi mục yêu thích?", [ { text: "Hủy", style: "cancel", }, { text: "Đồng ý", onPress: () => dispatch(removeFavorite(item._id)), }, ] ); }; const RightActions = (progress) => { return ( <View style={{ width: 170, flexDirection: "row" }}> {renderRightAction( "Thêm vào giỏ", "#ffab00", addToCartAct, 140, progress )} {renderRightAction( "Bỏ thích", Colors.red, removeFavoriteAct, 30, progress )} </View> ); }; return ( <View> <Swipeable friction={2} rightThreshold={20} renderRightActions={RightActions} > <View style={styles.itemContainer}> <TouchableOpacity onPress={() => navigation.navigate("Detail", { item: item })} style={{ marginLeft: 5, width: "30%", height: "100%", marginRight: 10, alignContent: "center", justifyContent: "center", }} > <Image style={{ height: 70, width: "100%", resizeMode: "contain", borderRadius: 10, }} source={{ uri: item.thumb }} onLoadStart={() => { setIsLoading(true); }} onLoadEnd={() => setIsLoading(false)} /> {isLoading && ( <View style={{ position: "absolute", width: "100%", height: "100%", alignItems: "center", justifyContent: "center", }} > <ActivityIndicator size='small' color={Colors.grey} /> </View> )} </TouchableOpacity> <View style={styles.info}> <CustomText style={styles.title}>{item.filename}</CustomText> <CustomText style={styles.subText}>{item.type}</CustomText> <View style={styles.rateContainer}> <NumberFormat value={item.price} displayType={"text"} thousandSeparator={true} suffix={" đ"} renderText={(formattedValue) => ( <View style={styles.priceContainer}> <CustomText style={styles.price}> {formattedValue} </CustomText> </View> )} /> </View> </View> </View> </Swipeable> </View> ); }; FavoriteItem.propTypes = { item: PropTypes.object.isRequired, navigation: PropTypes.object.isRequired, }; const styles = StyleSheet.create({ itemContainer: { height: 90, flexDirection: "row", backgroundColor: Colors.light_grey, marginTop: 5, borderRadius: 0, alignItems: "center", }, info: { height: "100%", flexDirection: 
"column", justifyContent: "flex-start", paddingVertical: 10, width: "75%", }, title: { fontSize: 15, }, subText: { fontSize: 13, color: Colors.grey, }, rateContainer: { flexDirection: "row", justifyContent: "space-between", width: "70%", }, rate: { flexDirection: "row", alignItems: "flex-end", paddingBottom: 5, }, score: { fontSize: 12, marginLeft: 5, color: Colors.grey, }, priceContainer: { flexDirection: "row", alignItems: "center", }, price: { fontSize: 13, color: Colors.red, }, action: { alignItems: "center", justifyContent: "center", paddingHorizontal: 5, }, rightAction: { alignItems: "center", justifyContent: "center", marginVertical: 6, flexDirection: "row", height: 90, }, actionText: { color: "white", fontSize: 11, backgroundColor: "transparent", padding: 5, }, });
epicmanmoo/post-from-OS
InstagramCode/post_to_instagram_obj.py
import base64 import datetime from email.mime.text import MIMEText import os import requests import selenium.common.exceptions from requests.structures import CaseInsensitiveDict from selenium import webdriver import smtplib import time from tinydb import TinyDB, Query from webdriver_manager.chrome import ChromeDriverManager class _OpenSeaTransactionObjectInstagram: def __init__(self, name_, image_url_, seller_, buyer_, eth_nft_price_, usd_price_, total_usd_cost_, the_date_, the_time_, insta_tags_, tx_hash_): self.insta_caption = None self.name = name_ self.image_url = image_url_ self.seller = seller_ self.buyer = buyer_ self.eth_nft_price = eth_nft_price_ self.usd_price = usd_price_ self.total_usd_cost = total_usd_cost_ self.the_date = the_date_ self.the_time = the_time_ self.insta_tags = insta_tags_ self.is_posted = False self.tx_hash = tx_hash_ def __eq__(self, other): return self.tx_hash == other.tx_hash def __hash__(self): return hash(('tx_hash', self.tx_hash)) def create_insta_caption(self): self.insta_caption = '{} has been purchased on {} at {} (UTC).\n\nSeller {} has sold their NFT to {} for ' \ 'the price of ${}!\n\nAt the time of purchase, the price of the NFT was {} ETH and ' \ 'the price of ETH was ${}.\n\n{}'.format(self.name, self.the_date, self.the_time, self.seller, self.buyer, self.total_usd_cost, self.eth_nft_price, self.usd_price, self.insta_tags) class _PostFromOpenSeaInstagram: def __init__(self, values_file, contract_address): instagram_values_file = values_file self.instagram_access_token_file = 'instagram_user_access_token_{}.txt'.format(contract_address) self.values = open(instagram_values_file, 'r') self.insta_tags = self.values.readline() self.collection_name = self.values.readline().strip() self.img_bb_key = self.values.readline().strip() self.page_id = self.values.readline().strip() self.os_api_key = self.values.readline().strip() self.values.close() self.contract_address = contract_address self.file_name = self.collection_name + '_instagram.jpeg' self.os_url = "https://api.opensea.io/api/v1/events" self.insta_id_url = 'https://graph.facebook.com/v10.0/{}?fields=instagram_business_account'. 
\ format(self.page_id) self.graph_api_url = 'https://graph.facebook.com/v10.0/' self.image_link = None self.response = None self.os_obj_to_post = None self.limit = 5 self.daily_posts = 0 self.tomorrow = int(time.time()) + 86400 self.tx_queue = [] self.tx_db = TinyDB(self.collection_name + '_tx_hash_instagram_db.json') self.tx_query = Query() def get_recent_sales(self): try: querystring = {"asset_contract_address": self.contract_address, "event_type": "successful", "only_opensea": "false", "offset": "0", "limit": self.limit} headers = CaseInsensitiveDict() headers['Accept'] = 'application/json' headers['x-api-key'] = self.os_api_key self.response = requests.get(self.os_url, headers=headers, params=querystring, timeout=1.5) return self.response.status_code == 200 except Exception as e: print(e, flush=True) return False def parse_response_objects(self): if len(self.tx_queue) > 0: queue_has_objects = self.process_queue() if queue_has_objects: return True for i in range(0, self.limit): base = self.response.json()['asset_events'][i] asset = base['asset'] try: name = str(asset['name']) except TypeError: return False image_url = asset['image_original_url'] seller_address = str(base['seller']['address']) buyer_address = str(asset['owner']['address']) try: seller = str(base['seller']['user']['username']) if seller == 'None': seller = seller_address[0:8] except TypeError: seller = seller_address[0:8] try: buyer = str(asset['owner']['user']['username']) if buyer == 'None': buyer = buyer_address[0:8] except TypeError: buyer = buyer_address[0:8] if seller_address == buyer_address or seller == buyer: continue tx_hash = str(base['transaction']['transaction_hash']) tx_exists = False if len(self.tx_db.search(self.tx_query.tx == tx_hash)) == 0 else True if tx_exists: continue try: eth_nft_price = float('{0:.5f}'.format(int(base['total_price']) / 1e18)) usd_price = float(base['payment_token']['usd_price']) total_usd_cost = '{:.2f}'.format(round(eth_nft_price * usd_price, 2)) timestamp = str(base['transaction']['timestamp']).split('T') date = datetime.datetime.strptime(timestamp[0], '%Y-%m-%d') month = datetime.date(date.year, date.month, date.day).strftime('%B') except (ValueError, TypeError): continue year = str(date.year) day = str(date.day) the_date = month + ' ' + day + ', ' + year the_time = timestamp[1] transaction = _OpenSeaTransactionObjectInstagram(name, image_url, seller, buyer, eth_nft_price, usd_price, total_usd_cost, the_date, the_time, self.insta_tags, tx_hash) transaction.create_insta_caption() self.tx_queue.append(transaction) return self.process_queue() def process_queue(self): if len(self.tx_db) > 200: for first in self.tx_db: self.tx_db.remove(doc_ids=[first.doc_id]) break index = 0 self.tx_queue = list(set(self.tx_queue)) while index < len(self.tx_queue): cur_os_obj = self.tx_queue[index] tx_exists = False if len(self.tx_db.search(self.tx_query.tx == str(cur_os_obj.tx_hash))) == 0 else True if cur_os_obj.is_posted or tx_exists: self.tx_queue.pop(index) else: index += 1 if len(self.tx_queue) == 0: return False self.os_obj_to_post = self.tx_queue[-1] return True def download_image(self): if self.daily_posts == 25: if self.tomorrow - int(time.time()) <= 0: self.daily_posts = 0 self.tomorrow = int(time.time()) + 86400 else: return -1 img = open(self.file_name, "wb") try: img_response = requests.get(self.os_obj_to_post.image_url, stream=True, timeout=3) img.write(img_response.content) img.close() return True except Exception as e: img.close() print(e, flush=True) return False def 
post_to_image_bb(self): with open(self.file_name, "rb") as file: url = "https://api.imgbb.com/1/upload?expiration=60" payload = { "key": self.img_bb_key, "image": base64.b64encode(file.read()), } res = requests.post(url, payload, timeout=2) image_url = str(res.json()['data']['url']) image_url = image_url[:len(image_url) - 3] self.image_link = image_url + 'jpg' def post_to_instagram(self): try: user_access_token_file = open(self.instagram_access_token_file, 'r') user_access_token = user_access_token_file.readline() user_access_token_file.close() querystring = {"access_token": user_access_token} headers = {"Accept": "application/json"} response = requests.get(self.insta_id_url, headers=headers, params=querystring, timeout=2) insta_id = response.json()['instagram_business_account']['id'] pre_upload_url = self.graph_api_url + '{}/media'.format(insta_id) pre_upload = {'image_url': self.image_link, 'caption': self.os_obj_to_post.insta_caption, 'access_token': user_access_token} pre_upload_request = requests.post(pre_upload_url, data=pre_upload, timeout=10) pre_upload_result = pre_upload_request.json() if 'id' in pre_upload_result: creation_id = pre_upload_result['id'] publish_url = self.graph_api_url + '{}/media_publish'.format(insta_id) publish = { 'creation_id': creation_id, 'access_token': user_access_token } requests.post(publish_url, data=publish, timeout=10) self.os_obj_to_post.is_posted = True self.tx_db.insert({'tx': self.os_obj_to_post.tx_hash}) self.daily_posts += 1 return True else: print(pre_upload_result, flush=True) return False except Exception as e: print(e, flush=True) return False class ManageFlowObj: def __init__(self, instagram_values_file, instagram_generate_long_user_token_credentials_file): self.instagram_values_file = instagram_values_file self.instagram_gen_token_file = instagram_generate_long_user_token_credentials_file contract_address = self.validate_params() self.begin_time = int(time.time()) self.gen_long_lived_token_class = GenerateLongLivedToken(self.instagram_gen_token_file, contract_address) # first_time_generated = self.gen_long_lived_token_class.generate() # if first_time_generated: # print('Generated token for first time!', flush=True) self.__base_obj = _PostFromOpenSeaInstagram(self.instagram_values_file, contract_address) self.begin() def validate_params(self): print('Beginning validation of Instagram Values File...') with open(self.instagram_values_file) as values_file: if len(values_file.readlines()) != 5: raise Exception('The Instagram Values file must be formatted correctly.') test_instagram_values = open(self.instagram_values_file, 'r') hashtags_test = test_instagram_values.readline().strip() hashtags = 0 words_in_hash_tag = hashtags_test.split() if hashtags_test != 'None': if len(hashtags_test) == 0 or hashtags_test.split() == 0: test_instagram_values.close() raise Exception('Hashtags field is empty.') if len(hashtags_test) >= 1500: test_instagram_values.close() raise Exception('Too many characters in hashtags.') if len(words_in_hash_tag) > 25: test_instagram_values.close() raise Exception('Too many hashtags.') for word in words_in_hash_tag: if word[0] == '#': hashtags += 1 if hashtags != len(words_in_hash_tag): test_instagram_values.close() raise Exception('All words must be preceded by a hashtag (#).') print('Hashtags validated...') collection_name_test = test_instagram_values.readline().strip() test_collection_name_url = 'https://api.opensea.io/api/v1/collection/{}'.format(collection_name_test) test_response = requests.get(test_collection_name_url, 
timeout=1.5) if test_response.status_code == 200: collection_json = test_response.json()['collection'] primary_asset_contracts_json = collection_json['primary_asset_contracts'][0] # got the contract address contract_address = primary_asset_contracts_json['address'] else: test_instagram_values.close() raise Exception('The provided collection name does not exist.') print('Collection validated...') test_img_bb_key = test_instagram_values.readline().strip() test_img_bb_url = "https://api.imgbb.com/1/upload?expiration=60" payload = { "key": test_img_bb_key, "image": 'https://sienaconstruction.com/wp-content/uploads/2017/05/test-image.jpg', # just some random pic } test_upload_req = requests.post(test_img_bb_url, payload, timeout=2) if test_upload_req.status_code != 200: test_instagram_values.close() raise Exception('Invalid img.bb key provided.') print('IMG BB key validated...') test_page_id = test_instagram_values.readline().strip() print('Page ID is:', test_page_id) # test_insta_id_url = 'https://graph.facebook.com/v10.0/{}?fields=instagram_business_account'. \ # format(test_page_id) # test_page_req = requests.get(test_insta_id_url, timeout=2) # fake_status_code = int(test_page_req.json()['error']['code']) # if fake_status_code != 200: # test_instagram_values.close() # raise Exception('Invalid page ID for Facebook supplied') # print('Facebook page ID validated...') test_os_key = test_instagram_values.readline().strip() if test_os_key != 'None': test_os_key_url = "https://api.opensea.io/api/v1/events?only_opensea=false&offset=0&limit=1" test_os_headers = CaseInsensitiveDict() test_os_headers['Accept'] = 'application/json' test_os_headers['x-api-key'] = test_os_key test_os_response = requests.get(test_os_key_url, headers=test_os_headers, timeout=2) if test_os_response.status_code != 200: test_instagram_values.close() raise Exception('Invalid OpenSea API key supplied.') print('OpenSea Key validated...') else: print('No OpenSea API Key supplied...') print('Validation of Instagram Values .txt complete. 
No errors found...') return contract_address def run_methods(self, date_time_now): self.check_os_api_status(date_time_now) def check_os_api_status(self, date_time_now): os_api_working = self.__base_obj.get_recent_sales() if os_api_working: self.check_if_new_post_exists(date_time_now) else: print('OS API is not working at roughly', date_time_now, flush=True) time.sleep(300) def check_if_new_post_exists(self, date_time_now): new_post_exists = self.__base_obj.parse_response_objects() if new_post_exists: self.try_to_download_image(date_time_now) else: print('No new post at roughly', date_time_now, flush=True) time.sleep(60) def try_to_download_image(self, date_time_now): image_downloaded = self.__base_obj.download_image() if image_downloaded == -1: print('Daily limit reached for posts.') time.sleep(30) elif image_downloaded: self.post_to_image_bb(date_time_now) else: print('Downloading image error at roughly', date_time_now, flush=True) time.sleep(60) def post_to_image_bb(self, date_time_now): self.__base_obj.post_to_image_bb() self.try_to_post_to_instagram(date_time_now) def try_to_post_to_instagram(self, date_time_now): posted_to_instagram = self.__base_obj.post_to_instagram() if posted_to_instagram: print('Posted to Instagram at roughly', date_time_now, flush=True) time.sleep(60) else: print('Post to Instagram error at roughly', date_time_now, flush=True) time.sleep(120) def begin(self): while True: date_time_now = datetime.datetime.fromtimestamp(time.time()).strftime('%m/%d/%Y %H:%M:%S') self.run_methods(date_time_now) if int(time.time()) - self.begin_time >= 3600 * 24 * 55: self.gen_long_lived_token_class.send_email_to_manually_change_user_token() # time_now = int(time.time()) # time_elapsed_since_token_generated = None # if self.gen_long_lived_token_class.previous_time is not None: # time_elapsed_since_token_generated = time_now - self.gen_long_lived_token_class.previous_time # elif self.gen_long_lived_token_class.generated_time is not None: # time_elapsed_since_token_generated = time_now - self.gen_long_lived_token_class.generated_time # if time_elapsed_since_token_generated >= 3600 * 24 * 50: # if self.gen_long_lived_token_class.previous_time is None: # self.gen_long_lived_token_class.previous_time = None # generated = self.gen_long_lived_token_class.generate() # if generated: # print('Generated new long lived user access token at roughly', date_time_now, flush=True) # else: # print('Generating token failed. 
Email sent.', date_time_now, flush=True) class GenerateLongLivedToken: def __init__(self, token_file, contract_address): self.driver = None self.generated_time = None self.graph_explorer_url_redirect = 'https://www.facebook.com/login/?next=https%3A%2F%2Fdevelopers' \ '.facebook.com%2Ftools%2Fexplorer%2F' self.api_fb_exchange_token_url = 'https://graph.facebook.com/oauth/access_token?grant_type=fb_exchange_token' self.graph_explorer_url = 'https://developers.facebook.com/tools/explorer/' self.email_field_xpath = '//*[@id="email"]' self.pwd_field_xpath = '//*[@id="pass"]' self.login_btn_xpath = '//*[@id="loginbutton"]' self.gen_btn_xpath = '//*[@id="facebook"]/body/div[1]/div[5]/div[2]/div/div[2]/span/div/div[2]/div/div[' \ '5]/div[5]/div/div/div/div/div/div[2]/div/button ' self.continue_btn_xpath = '//*[@id="platformDialogForm"]/div/div/div/div/div/div[3]/div[1]/div[1]/div[2]' self.copy_btn_xpath = '/html/body/div[1]/div[5]/div[2]/div/div[2]/span/div/div[2]/div/div[5]/div[' \ '5]/div/div/div/div/div/div[2]/div/div/div[1]/label/input' with open(token_file) as tokens: if 7 > len(tokens.readlines()) > 8: raise Exception('The Instagram Generate User Token Values file must be formatted correctly.') tokens = open(token_file, 'r') self.client_id = tokens.readline().strip() self.client_secret = tokens.readline().strip() self.facebook_email = tokens.readline().strip() self.facebook_password = <PASSWORD>().<PASSWORD>() self.gmail_email = tokens.readline().strip() self.gmail_password = <PASSWORD>() self.gmail_to_email = tokens.readline().strip() self.access_token = tokens.readline().strip() tokens.close() self.first_time = True self.previous_time = None self.user_access_token_file = 'instagram_user_access_token_{}.txt'.format(contract_address) with open(self.user_access_token_file, 'w') as tk_file: if self.access_token != '': tk_file.write(self.access_token) # if os.path.exists(self.user_access_token_file): # get_time_from_token_file = open(self.user_access_token_file, 'r') # get_time_from_token_file.readline().strip() # previous_generated_time = int(get_time_from_token_file.readline().strip()) # difference = int(time.time()) - previous_generated_time # if difference < 3600 * 24 * 50: # self.previous_time = previous_generated_time def generate(self): if self.previous_time is not None: print('Using already existing token', flush=True) return False try: short_lived_access_token = self.generate_short_lived_user_access_token() new_token = self.get_long_lived_user_access_token(short_lived_access_token) self.replace_old_token_with_new(new_token) self.generated_time = int(time.time()) if self.first_time: self.first_time = False return True except Exception as e: # if ANY sort of error happens, we must manually reset if self.first_time: # script MUST always work for the first time. of course, it may fail 50+ days from now # if the website is changed, but then it is handled accordingly by sending an email and further # inspection can take place to fix the script. 
raise Exception('Provided Instagram Generate User Token file is formatted incorrectly.') print(e, flush=True) self.driver.quit() self.send_email_to_manually_change_user_token() self.generated_time = int(time.time()) + 3600 * 24 * 2 # allow 2 days for manual reset return False def generate_short_lived_user_access_token(self): options = webdriver.ChromeOptions() options.add_argument('--no-sandbox') options.add_argument("--kiosk") options.headless = True options.add_argument('--disable-dev-shm-usage') self.driver = webdriver.Chrome(ChromeDriverManager().install(), options=options) self.driver.get(self.graph_explorer_url_redirect) email_field = self.driver.find_element_by_xpath(self.email_field_xpath) email_field.send_keys(self.facebook_email) password_field = self.driver.find_element_by_xpath(self.pwd_field_xpath) password_field.send_keys(<PASSWORD>.facebook_password) login_button = self.driver.find_element_by_xpath(self.login_btn_xpath) login_button.click() time.sleep(3) self.driver.get(self.graph_explorer_url) gen_short_lived_access_token_button = self.driver.find_element_by_xpath(self.gen_btn_xpath) gen_short_lived_access_token_button.click() window_before = self.driver.window_handles[0] window_after = self.driver.window_handles[1] self.driver.switch_to.window(window_after) self.driver.maximize_window() continue_button = self.driver.find_element_by_xpath(self.continue_btn_xpath) continue_button.click() self.driver.implicitly_wait(3) close_again_flag = True try: short_lived_access_token = self.driver.find_element_by_xpath(self.copy_btn_xpath).get_attribute('value') except selenium.common.exceptions.NoSuchWindowException: self.driver.switch_to.window(window_before) short_lived_access_token = self.driver.find_element_by_xpath(self.copy_btn_xpath).get_attribute('value') close_again_flag = False self.driver.close() if close_again_flag: self.driver.switch_to.window(window_before) self.driver.close() self.driver.quit() return short_lived_access_token def get_long_lived_user_access_token(self, short_lived_access_token): querystring = {"client_id": self.client_id, "client_secret": self.client_secret, "fb_exchange_token": short_lived_access_token} headers = {"Accept": "application/json"} response = requests.get(self.api_fb_exchange_token_url, headers=headers, params=querystring, timeout=2) long_lived_access_token = response.json()['access_token'] return long_lived_access_token def replace_old_token_with_new(self, token): if os.path.exists(self.user_access_token_file): os.remove(self.user_access_token_file) token_file = open(self.user_access_token_file, 'w') token_file.write(token + '\n') token_file.write(str(int(time.time()))) token_file.close() def send_email_to_manually_change_user_token(self): smtp_server = "smtp.gmail.com" smtp_port = 587 smtp_username = self.gmail_email smtp_password = self.gmail_password email_to = [self.gmail_to_email] email_from = self.gmail_email email_subject = "Refresh Exchange Token" email_space = ", " data = 'To manually refresh the exchange token, login to the Facebook Graph API Explorer (' \ 'https://developers.facebook.com/tools/explorer/) with your credentials, EMAIL: {}, PASSWORD: {}. ' \ 'Then, click the \'Generate Access Token\' button and follow the steps. Once you are redirected back ' \ 'to the page, simply click the Copy to clipboard button and open ' \ '\'instagram_manually_reset_long_user_token.py\'. Paste the copied token into the ' \ '\'short_lived_access_token\' field and run the program. 
Once the program outputs the long lived user ' \ 'token, copy and paste that token into the file where the token is kept. In your case, the file is ' \ 'called {}. Because automatic execution failed, ensure that the selenium code is properly working.'.\ format(self.facebook_email, self.facebook_password, self.user_access_token_file) msg = MIMEText(data) msg['Subject'] = email_subject msg['To'] = email_space.join(email_to) msg['From'] = email_from mail = smtplib.SMTP(smtp_server, smtp_port) mail.starttls() mail.login(smtp_username, smtp_password) mail.sendmail(email_from, email_to, msg.as_string()) mail.quit()
paulolima18/ProgrammingMesh
C/OSystems/exame/g3/g3.c
#include <unistd.h> /* system calls: essential defs and decls */

/* The list forms (execl, execlp) take a variadic argument list that must be
 * terminated by a (char *) NULL sentinel; man-page synopses write this as
 * ", ..., NULL", which is not valid C by itself. */
int execl(const char *path, const char *arg0, ... /* , (char *) NULL */);
int execlp(const char *file, const char *arg0, ... /* , (char *) NULL */);
int execv(const char *path, char *const argv[]);
int execvp(const char *file, char *const argv[]);
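/* Illustrative usage sketch (hypothetical, not part of the original notes):
 * the exec* calls above replace the current process image, so they only
 * return when the exec itself fails. execlp searches PATH for the program. */
#include <stdio.h>

int main(void)
{
    execlp("ls", "ls", "-l", (char *) NULL);
    perror("execlp");   /* reached only if the exec failed */
    return 1;
}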
ausmarton/jruby
truffle/src/main/java/org/jruby/truffle/nodes/core/ArrayNodes.java
<reponame>ausmarton/jruby /* * Copyright (c) 2013, 2015 Oracle and/or its affiliates. All rights reserved. This * code is released under a tri EPL/GPL/LGPL license. You can use it, * redistribute it and/or modify it under the terms of the: * * Eclipse Public License version 1.0 * GNU General Public License version 2 * GNU Lesser General Public License version 2.1 */ package org.jruby.truffle.nodes.core; import com.oracle.truffle.api.CallTarget; import com.oracle.truffle.api.CompilerDirectives; import com.oracle.truffle.api.Truffle; import com.oracle.truffle.api.dsl.*; import com.oracle.truffle.api.frame.FrameDescriptor; import com.oracle.truffle.api.frame.FrameSlot; import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.ExplodeLoop; import com.oracle.truffle.api.nodes.UnexpectedResultException; import com.oracle.truffle.api.source.SourceSection; import com.oracle.truffle.api.utilities.BranchProfile; import org.jruby.RubyObject; import org.jruby.runtime.Visibility; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.truffle.nodes.CoreSourceSection; import org.jruby.truffle.nodes.RubyNode; import org.jruby.truffle.nodes.RubyRootNode; import org.jruby.truffle.nodes.array.*; import org.jruby.truffle.nodes.coerce.ToIntNode; import org.jruby.truffle.nodes.coerce.ToAryNodeFactory; import org.jruby.truffle.nodes.coerce.ToIntNodeFactory; import org.jruby.truffle.nodes.dispatch.*; import org.jruby.truffle.nodes.methods.arguments.MissingArgumentBehaviour; import org.jruby.truffle.nodes.methods.arguments.ReadPreArgumentNode; import org.jruby.truffle.nodes.methods.locals.ReadLevelVariableNodeFactory; import org.jruby.truffle.nodes.objects.IsFrozenNode; import org.jruby.truffle.nodes.objects.IsFrozenNodeFactory; import org.jruby.truffle.nodes.yield.YieldDispatchHeadNode; import org.jruby.truffle.runtime.*; import org.jruby.truffle.runtime.control.BreakException; import org.jruby.truffle.runtime.control.NextException; import org.jruby.truffle.runtime.control.RaiseException; import org.jruby.truffle.runtime.control.RedoException; import org.jruby.truffle.runtime.core.*; import org.jruby.truffle.runtime.methods.Arity; import org.jruby.truffle.runtime.methods.InternalMethod; import org.jruby.truffle.runtime.methods.SharedMethodInfo; import org.jruby.truffle.runtime.util.ArrayUtils; import org.jruby.util.ByteList; import org.jruby.util.Memo; import java.util.Arrays; import java.util.Comparator; @CoreClass(name = "Array") public abstract class ArrayNodes { @CoreMethod(names = "+", required = 1) @NodeChildren({ @NodeChild(value = "a"), @NodeChild(value = "b") }) @ImportGuards(ArrayGuards.class) public abstract static class AddNode extends RubyNode { public AddNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public AddNode(AddNode prev) { super(prev); } @CreateCast("b") public RubyNode coerceOtherToAry(RubyNode other) { return ToAryNodeFactory.create(getContext(), getSourceSection(), other); } @Specialization(guards = {"isNull", "isOtherNull"}) public RubyArray addNull(RubyArray a, RubyArray b) { return new RubyArray(getContext().getCoreLibrary().getArrayClass(), null, 0); } @Specialization(guards = {"isObject", "isOtherNull"}) public RubyArray addObjectNull(RubyArray a, RubyArray b) { return new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOf((Object[]) a.getStore(), a.getSize()), a.getSize()); } @Specialization(guards = "areBothIntegerFixnum") public RubyArray addBothIntegerFixnum(RubyArray a, RubyArray 
b) { final int combinedSize = a.getSize() + b.getSize(); final int[] combined = new int[combinedSize]; System.arraycopy(a.getStore(), 0, combined, 0, a.getSize()); System.arraycopy(b.getStore(), 0, combined, a.getSize(), b.getSize()); return new RubyArray(getContext().getCoreLibrary().getArrayClass(), combined, combinedSize); } @Specialization(guards = "areBothLongFixnum") public RubyArray addBothLongFixnum(RubyArray a, RubyArray b) { final int combinedSize = a.getSize() + b.getSize(); final long[] combined = new long[combinedSize]; System.arraycopy(a.getStore(), 0, combined, 0, a.getSize()); System.arraycopy(b.getStore(), 0, combined, a.getSize(), b.getSize()); return new RubyArray(getContext().getCoreLibrary().getArrayClass(), combined, combinedSize); } @Specialization(guards = "areBothFloat") public RubyArray addBothFloat(RubyArray a, RubyArray b) { final int combinedSize = a.getSize() + b.getSize(); final double[] combined = new double[combinedSize]; System.arraycopy(a.getStore(), 0, combined, 0, a.getSize()); System.arraycopy(b.getStore(), 0, combined, a.getSize(), b.getSize()); return new RubyArray(getContext().getCoreLibrary().getArrayClass(), combined, combinedSize); } @Specialization(guards = "areBothObject") public RubyArray addBothObject(RubyArray a, RubyArray b) { final int combinedSize = a.getSize() + b.getSize(); final Object[] combined = new Object[combinedSize]; System.arraycopy(a.getStore(), 0, combined, 0, a.getSize()); System.arraycopy(b.getStore(), 0, combined, a.getSize(), b.getSize()); return new RubyArray(getContext().getCoreLibrary().getArrayClass(), combined, combinedSize); } @Specialization(guards = {"isNull", "isOtherIntegerFixnum"}) public RubyArray addNullIntegerFixnum(RubyArray a, RubyArray b) { final int size = b.getSize(); return new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOf((int[]) b.getStore(), size), size); } @Specialization(guards = {"isNull", "isOtherLongFixnum"}) public RubyArray addNullLongFixnum(RubyArray a, RubyArray b) { final int size = b.getSize(); return new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOf((long[]) b.getStore(), size), size); } @Specialization(guards = {"isNull", "isOtherObject"}) public RubyArray addNullObject(RubyArray a, RubyArray b) { final int size = b.getSize(); return new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOf((Object[]) b.getStore(), size), size); } @Specialization(guards = {"!isObject", "isOtherObject"}) public RubyArray addOtherObject(RubyArray a, RubyArray b) { final int combinedSize = a.getSize() + b.getSize(); final Object[] combined = new Object[combinedSize]; System.arraycopy(ArrayUtils.box(a.getStore()), 0, combined, 0, a.getSize()); System.arraycopy(b.getStore(), 0, combined, a.getSize(), b.getSize()); return new RubyArray(getContext().getCoreLibrary().getArrayClass(), combined, combinedSize); } @Specialization(guards = {"isObject", "!isOtherObject"}) public RubyArray addObject(RubyArray a, RubyArray b) { final int combinedSize = a.getSize() + b.getSize(); final Object[] combined = new Object[combinedSize]; System.arraycopy(a.getStore(), 0, combined, 0, a.getSize()); System.arraycopy(ArrayUtils.box(b.getStore()), 0, combined, a.getSize(), b.getSize()); return new RubyArray(getContext().getCoreLibrary().getArrayClass(), combined, combinedSize); } @Specialization(guards = "isEmpty") public RubyArray addEmpty(RubyArray a, RubyArray b) { final int size = b.getSize(); return new RubyArray(getContext().getCoreLibrary().getArrayClass(), 
ArrayUtils.box(b.getStore()), size); } @Specialization(guards = "isOtherEmpty") public RubyArray addOtherEmpty(RubyArray a, RubyArray b) { final int size = a.getSize(); return new RubyArray(getContext().getCoreLibrary().getArrayClass(), ArrayUtils.box(a.getStore()), size); } } @CoreMethod(names = "*", required = 1, lowerFixnumParameters = 0, taintFromSelf = true) public abstract static class MulNode extends ArrayCoreMethodNode { @Child private KernelNodes.RespondToNode respondToToStrNode; @Child private ToIntNode toIntNode; public MulNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public MulNode(MulNode prev) { super(prev); respondToToStrNode = prev.respondToToStrNode; toIntNode = prev.toIntNode; } @Specialization(guards = "isNull") public RubyArray mulEmpty(RubyArray array, int count) { if (count < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative argument", this)); } return new RubyArray(array.getLogicalClass()); } @Specialization(guards = "isIntegerFixnum") public RubyArray mulIntegerFixnum(RubyArray array, int count) { if (count < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative argument", this)); } final int[] store = (int[]) array.getStore(); final int storeLength = store.length; final int newStoreLength = storeLength * count; final int[] newStore = new int[newStoreLength]; for (int n = 0; n < count; n++) { System.arraycopy(store, 0, newStore, storeLength * n, storeLength); } return new RubyArray(array.getLogicalClass(), array.getAllocationSite(), newStore, newStoreLength); } @Specialization(guards = "isLongFixnum") public RubyArray mulLongFixnum(RubyArray array, int count) { if (count < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative argument", this)); } final long[] store = (long[]) array.getStore(); final int storeLength = store.length; final int newStoreLength = storeLength * count; final long[] newStore = new long[newStoreLength]; for (int n = 0; n < count; n++) { System.arraycopy(store, 0, newStore, storeLength * n, storeLength); } return new RubyArray(array.getLogicalClass(), array.getAllocationSite(), newStore, newStoreLength); } @Specialization(guards = "isFloat") public RubyArray mulFloat(RubyArray array, int count) { if (count < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative argument", this)); } final double[] store = (double[]) array.getStore(); final int storeLength = store.length; final int newStoreLength = storeLength * count; final double[] newStore = new double[newStoreLength]; for (int n = 0; n < count; n++) { System.arraycopy(store, 0, newStore, storeLength * n, storeLength); } return new RubyArray(array.getLogicalClass(), array.getAllocationSite(), newStore, newStoreLength); } @Specialization(guards = "isObject") public RubyArray mulObject(RubyArray array, int count) { if (count < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative argument", this)); } final Object[] store = (Object[]) array.getStore(); final int storeLength = store.length; final int newStoreLength = storeLength * count; final Object[] newStore = new Object[newStoreLength]; for (int n = 0; n < count; n++) { System.arraycopy(store, 0, newStore, storeLength * n, 
storeLength); } return new RubyArray(array.getLogicalClass(), array.getAllocationSite(), newStore, newStoreLength); } @Specialization(guards = "isRubyString(arguments[1])") public Object mulObject(VirtualFrame frame, RubyArray array, RubyString string) { notDesignedForCompilation(); return ruby(frame, "join(sep)", "sep", string); } @Specialization(guards = {"!isRubyString(arguments[1])"}) public Object mulObjectCount(VirtualFrame frame, RubyArray array, Object object) { notDesignedForCompilation(); if (respondToToStrNode == null) { CompilerDirectives.transferToInterpreter(); respondToToStrNode = insert(KernelNodesFactory.RespondToNodeFactory.create(getContext(), getSourceSection(), new RubyNode[]{null, null, null})); } if (respondToToStrNode.doesRespondTo(frame, object, getContext().makeString("to_str"), false)) { return ruby(frame, "join(sep.to_str)", "sep", object); } else { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int count = toIntNode.executeIntegerFixnum(frame, object); if (count < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative argument", this)); } if (array.getStore() instanceof int[]) { return mulIntegerFixnum(array, count); } else if (array.getStore() instanceof long[]) { return mulLongFixnum(array, count); } else if (array.getStore() instanceof double[]) { return mulFloat(array, count); } else if (array.getStore() == null) { return mulEmpty(array, count); } else { return mulObject(array, count); } } } } @CoreMethod(names = { "[]", "slice" }, required = 1, optional = 1, lowerFixnumParameters = { 0, 1 }) public abstract static class IndexNode extends ArrayCoreMethodNode { @Child protected ArrayReadDenormalizedNode readNode; @Child protected ArrayReadSliceDenormalizedNode readSliceNode; @Child protected ArrayReadSliceNormalizedNode readNormalizedSliceNode; @Child protected CallDispatchHeadNode fallbackNode; public IndexNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public IndexNode(IndexNode prev) { super(prev); readNode = prev.readNode; readSliceNode = prev.readSliceNode; readNormalizedSliceNode = prev.readNormalizedSliceNode; fallbackNode = prev.fallbackNode; } @Specialization public Object index(VirtualFrame frame, RubyArray array, int index, UndefinedPlaceholder undefined) { if (readNode == null) { CompilerDirectives.transferToInterpreter(); readNode = insert(ArrayReadDenormalizedNodeFactory.create(getContext(), getSourceSection(), null, null)); } return readNode.executeRead(frame, array, index); } @Specialization public Object slice(VirtualFrame frame, RubyArray array, int start, int length) { if (length < 0) { return nil(); } if (readSliceNode == null) { CompilerDirectives.transferToInterpreter(); readSliceNode = insert(ArrayReadSliceDenormalizedNodeFactory.create(getContext(), getSourceSection(), null, null, null)); } return readSliceNode.executeReadSlice(frame, array, start, length); } @Specialization public Object slice(VirtualFrame frame, RubyArray array, RubyRange.IntegerFixnumRange range, UndefinedPlaceholder undefined) { final int normalizedIndex = array.normalizeIndex(range.getBegin()); if (normalizedIndex < 0 || normalizedIndex > array.getSize()) { return nil(); } else { final int end = array.normalizeIndex(range.getEnd()); final int exclusiveEnd = array.clampExclusiveIndex(range.doesExcludeEnd() ? 
end : end + 1); if (exclusiveEnd <= normalizedIndex) { return new RubyArray(array.getLogicalClass(), null, 0); } final int length = exclusiveEnd - normalizedIndex; if (readNormalizedSliceNode == null) { CompilerDirectives.transferToInterpreter(); readNormalizedSliceNode = insert(ArrayReadSliceNormalizedNodeFactory.create(getContext(), getSourceSection(), null, null, null)); } return readNormalizedSliceNode.executeReadSlice(frame, array, normalizedIndex, length); } } @Specialization(guards = {"!isInteger(arguments[1])", "!isIntegerFixnumRange(arguments[1])"}) public Object fallbackIndex(VirtualFrame frame, RubyArray array, Object a, UndefinedPlaceholder undefined) { return fallback(frame, array, RubyArray.fromObjects(getContext().getCoreLibrary().getArrayClass(), a)); } @Specialization(guards = {"!isIntegerFixnumRange(arguments[1])", "!isUndefinedPlaceholder(arguments[2])"}) public Object fallbackSlice(VirtualFrame frame, RubyArray array, Object a, Object b) { return fallback(frame, array, RubyArray.fromObjects(getContext().getCoreLibrary().getArrayClass(), a, b)); } public Object fallback(VirtualFrame frame, RubyArray array, RubyArray args) { if (fallbackNode == null) { CompilerDirectives.transferToInterpreter(); fallbackNode = insert(DispatchHeadNodeFactory.createMethodCall(getContext())); } InternalMethod method = RubyArguments.getMethod(frame.getArguments()); return fallbackNode.call(frame, array, "element_reference_fallback", null, getContext().makeString(method.getName()), args); } } @CoreMethod(names = "[]=", required = 2, optional = 1, lowerFixnumParameters = 0, raiseIfFrozenSelf = true) public abstract static class IndexSetNode extends ArrayCoreMethodNode { @Child private ArrayWriteDenormalizedNode writeNode; @Child protected ArrayReadSliceDenormalizedNode readSliceNode; @Child private PopNode popNode; @Child private ToIntNode toIntNode; private final BranchProfile tooSmallBranch = BranchProfile.create(); public IndexSetNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public IndexSetNode(IndexSetNode prev) { super(prev); writeNode = prev.writeNode; readSliceNode = prev.readSliceNode; popNode = prev.popNode; toIntNode = prev.toIntNode; } @Specialization(guards = {"!isInteger(arguments[1])", "!isIntegerFixnumRange(arguments[1])"}) public Object set(VirtualFrame frame, RubyArray array, Object indexObject, Object value, UndefinedPlaceholder unused) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int index = toIntNode.executeIntegerFixnum(frame, indexObject); return set(frame, array, index, value, unused); } @Specialization public Object set(VirtualFrame frame, RubyArray array, int index, Object value, UndefinedPlaceholder unused) { final int normalizedIndex = array.normalizeIndex(index); if (normalizedIndex < 0) { CompilerDirectives.transferToInterpreter(); String errMessage = "index " + index + " too small for array; minimum: " + Integer.toString(-array.getSize()); throw new RaiseException(getContext().getCoreLibrary().indexError(errMessage, this)); } if (writeNode == null) { CompilerDirectives.transferToInterpreter(); writeNode = insert(ArrayWriteDenormalizedNodeFactory.create(getContext(), getSourceSection(), null, null, null)); } return writeNode.executeWrite(frame, array, index, value); } @Specialization(guards = {"!isRubyArray(arguments[3])", "!isUndefinedPlaceholder(arguments[3])", "!isInteger(arguments[2])"}) public Object 
setObject(VirtualFrame frame, RubyArray array, int start, Object lengthObject, Object value) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } int length = toIntNode.executeIntegerFixnum(frame, lengthObject); return setObject(frame, array, start, length, value); } @Specialization(guards = {"!isRubyArray(arguments[3])", "!isUndefinedPlaceholder(arguments[3])", "!isInteger(arguments[1])"}) public Object setObject(VirtualFrame frame, RubyArray array, Object startObject, int length, Object value) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } int start = toIntNode.executeIntegerFixnum(frame, startObject); return setObject(frame, array, start, length, value); } @Specialization(guards = {"!isRubyArray(arguments[3])", "!isUndefinedPlaceholder(arguments[3])", "!isInteger(arguments[1])", "!isInteger(arguments[2])"}) public Object setObject(VirtualFrame frame, RubyArray array, Object startObject, Object lengthObject, Object value) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } int length = toIntNode.executeIntegerFixnum(frame, lengthObject); int start = toIntNode.executeIntegerFixnum(frame, startObject); return setObject(frame, array, start, length, value); } @Specialization(guards = {"!isRubyArray(arguments[3])", "!isUndefinedPlaceholder(arguments[3])"}) public Object setObject(VirtualFrame frame, RubyArray array, int start, int length, Object value) { notDesignedForCompilation(); if (length < 0) { CompilerDirectives.transferToInterpreter(); final String errMessage = "negative length (" + length + ")"; throw new RaiseException(getContext().getCoreLibrary().indexError(errMessage, this)); } final int normalizedIndex = array.normalizeIndex(start); if (normalizedIndex < 0) { CompilerDirectives.transferToInterpreter(); final String errMessage = "index " + start + " too small for array; minimum: " + Integer.toString(-array.getSize()); throw new RaiseException(getContext().getCoreLibrary().indexError(errMessage, this)); } final int begin = array.normalizeIndex(start); if (begin < array.getSize() && length == 1) { if (writeNode == null) { CompilerDirectives.transferToInterpreter(); writeNode = insert(ArrayWriteDenormalizedNodeFactory.create(getContext(), getSourceSection(), null, null, null)); } return writeNode.executeWrite(frame, array, begin, value); } else { if (array.getSize() > (begin + length)) { // there is a tail, else other values discarded if (readSliceNode == null) { CompilerDirectives.transferToInterpreter(); readSliceNode = insert(ArrayReadSliceDenormalizedNodeFactory.create(getContext(), getSourceSection(), null, null, null)); } RubyArray endValues = (RubyArray) readSliceNode.executeReadSlice(frame, array, (begin + length), (array.getSize() - begin - length)); if (writeNode == null) { CompilerDirectives.transferToInterpreter(); writeNode = insert(ArrayWriteDenormalizedNodeFactory.create(getContext(), getSourceSection(), null, null, null)); } writeNode.executeWrite(frame, array, begin, value); Object[] endValuesStore = ArrayUtils.box(endValues.getStore()); int i = begin + 1; for (Object obj : endValuesStore) { writeNode.executeWrite(frame, array, i, obj); i += 1; } } else { writeNode.executeWrite(frame, array, begin, value); } if (popNode == null) { 
CompilerDirectives.transferToInterpreter(); popNode = insert(ArrayNodesFactory.PopNodeFactory.create(getContext(), getSourceSection(), new RubyNode[]{null, null})); } int popLength = length - 1 < array.getSize() ? length - 1 : array.getSize() - 1; for (int i = 0; i < popLength; i++) { // TODO 3-15-2015 BF update when pop can pop multiple popNode.executePop(frame, array, UndefinedPlaceholder.INSTANCE); } return value; } } @Specialization(guards = {"!isInteger(arguments[1])"}) public Object setOtherArray(VirtualFrame frame, RubyArray array, Object startObject, int length, RubyArray value) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } int start = toIntNode.executeIntegerFixnum(frame, startObject); return setOtherArray(frame, array, start, length, value); } @Specialization(guards = {"!isInteger(arguments[2])"}) public Object setOtherArray(VirtualFrame frame, RubyArray array, int start, Object lengthObject, RubyArray value) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } int length = toIntNode.executeIntegerFixnum(frame, lengthObject); return setOtherArray(frame, array, start, length, value); } @Specialization(guards = {"!isInteger(arguments[1])", "!isInteger(arguments[2])"}) public Object setOtherArray(VirtualFrame frame, RubyArray array, Object startObject, Object lengthObject, RubyArray value) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } int start = toIntNode.executeIntegerFixnum(frame, startObject); int length = toIntNode.executeIntegerFixnum(frame, lengthObject); return setOtherArray(frame, array, start, length, value); } @Specialization public Object setOtherArray(VirtualFrame frame, RubyArray array, int start, int length, RubyArray value) { notDesignedForCompilation(); if (length < 0) { CompilerDirectives.transferToInterpreter(); final String errMessage = "negative length (" + length + ")"; throw new RaiseException(getContext().getCoreLibrary().indexError(errMessage, this)); } final int normalizedIndex = array.normalizeIndex(start); if (normalizedIndex < 0) { CompilerDirectives.transferToInterpreter(); String errMessage = "index " + start + " too small for array; minimum: " + Integer.toString(-array.getSize()); throw new RaiseException(getContext().getCoreLibrary().indexError(errMessage, this)); } final int begin = array.normalizeIndex(start); if (value.getSize() == 0) { final int exclusiveEnd = begin + length; Object[] store = ArrayUtils.box(array.getStore()); if (begin < 0) { tooSmallBranch.enter(); CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().indexTooSmallError("array", start, array.getSize(), this)); } else if (exclusiveEnd > array.getSize()) { throw new UnsupportedOperationException(); } // TODO: This is a moving overlapping memory, should we use sth else instead? 
System.arraycopy(store, exclusiveEnd, store, begin, array.getSize() - exclusiveEnd); array.setStore(store, array.getSize() - length); return value; } else { if (writeNode == null) { CompilerDirectives.transferToInterpreter(); writeNode = insert(ArrayWriteDenormalizedNodeFactory.create(getContext(), getSourceSection(), null, null, null)); } Object[] values = ArrayUtils.box(value.getStore()); if (value.getSize() == length || (begin + length + 1) > array.getSize()) { int i = begin; for (Object obj : values) { writeNode.executeWrite(frame, array, i, obj); i += 1; } } else { if (readSliceNode == null) { CompilerDirectives.transferToInterpreter(); readSliceNode = insert(ArrayReadSliceDenormalizedNodeFactory.create(getContext(), getSourceSection(), null, null, null)); } final int newLength = (length + begin) > array.getSize() ? begin + values.length : array.getSize() + values.length - length; final int popNum = newLength < array.getSize() ? array.getSize() - newLength : 0; if (popNum > 0) { if (popNode == null) { CompilerDirectives.transferToInterpreter(); popNode = insert(ArrayNodesFactory.PopNodeFactory.create(getContext(), getSourceSection(), new RubyNode[]{null, null})); } for (int i = 0; i < popNum; i++) { // TODO 3-28-2015 BF update to pop multiple popNode.executePop(frame, array, UndefinedPlaceholder.INSTANCE); } } final int readLen = newLength - values.length - begin; RubyArray endValues = null; if (readLen > 0) { endValues = (RubyArray) readSliceNode.executeReadSlice(frame, array, array.getSize() - readLen, readLen); } int i = begin; for (Object obj : values) { writeNode.executeWrite(frame, array, i, obj); i += 1; } if (readLen > 0) { final Object[] endValuesStore = ArrayUtils.box(endValues.getStore()); for (Object obj : endValuesStore) { writeNode.executeWrite(frame, array, i, obj); i += 1; } } } return value; } } @Specialization(guards = "!isRubyArray(arguments[2])") public Object setRange(VirtualFrame frame, RubyArray array, RubyRange.IntegerFixnumRange range, Object other, UndefinedPlaceholder unused) { final int normalizedStart = array.normalizeIndex(range.getBegin()); int normalizedEnd = range.doesExcludeEnd() ? array.normalizeIndex(range.getEnd()) - 1 : array.normalizeIndex(range.getEnd()); if (normalizedEnd < 0) { normalizedEnd = -1; } final int length = normalizedEnd - normalizedStart + 1; if (normalizedStart < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().rangeError(range, this)); } return setObject(frame, array, normalizedStart, length, other); } @Specialization(guards = "!areBothIntegerFixnum") public Object setRangeArray(VirtualFrame frame, RubyArray array, RubyRange.IntegerFixnumRange range, RubyArray other, UndefinedPlaceholder unused) { final int normalizedStart = array.normalizeIndex(range.getBegin()); if (normalizedStart < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().rangeError(range, this)); } int normalizedEnd = range.doesExcludeEnd() ? 
array.normalizeIndex(range.getEnd()) - 1 : array.normalizeIndex(range.getEnd());
            if (normalizedEnd < 0) {
                normalizedEnd = -1;
            }
            final int length = normalizedEnd - normalizedStart + 1;
            return setOtherArray(frame, array, normalizedStart, length, other);
        }

        @Specialization(guards = "areBothIntegerFixnum")
        public Object setIntegerFixnumRange(VirtualFrame frame, RubyArray array, RubyRange.IntegerFixnumRange range, RubyArray other, UndefinedPlaceholder unused) {
            if (range.doesExcludeEnd()) {
                CompilerDirectives.transferToInterpreter();
                return setRangeArray(frame, array, range, other, unused);
            } else {
                int normalizedBegin = array.normalizeIndex(range.getBegin());
                int normalizedEnd = array.normalizeIndex(range.getEnd());
                if (normalizedEnd < 0) {
                    normalizedEnd = -1;
                }
                if (normalizedBegin == 0 && normalizedEnd == array.getSize() - 1) {
                    array.setStore(Arrays.copyOf((int[]) other.getStore(), other.getSize()), other.getSize());
                } else {
                    CompilerDirectives.transferToInterpreter();
                    return setRangeArray(frame, array, range, other, unused);
                }
            }
            return other;
        }

    }

    @CoreMethod(names = "at", required = 1)
    @NodeChildren({ @NodeChild(value = "array"), @NodeChild(value = "index") })
    public abstract static class AtNode extends RubyNode {

        @Child private ArrayReadDenormalizedNode readNode;

        public AtNode(RubyContext context, SourceSection sourceSection) {
            super(context, sourceSection);
        }

        public AtNode(AtNode prev) {
            super(prev);
            readNode = prev.readNode;
        }

        @CreateCast("index")
        public RubyNode coerceOtherToInt(RubyNode index) {
            return ToIntNodeFactory.create(getContext(), getSourceSection(), index);
        }

        @Specialization
        public Object at(VirtualFrame frame, RubyArray array, int index) {
            if (readNode == null) {
                CompilerDirectives.transferToInterpreter();
                readNode = insert(ArrayReadDenormalizedNodeFactory.create(getContext(), getSourceSection(), null, null));
            }
            return readNode.executeRead(frame, array, index);
        }

    }

    @CoreMethod(names = "clear", raiseIfFrozenSelf = true)
    public abstract static class ClearNode extends ArrayCoreMethodNode {

        public ClearNode(RubyContext context, SourceSection sourceSection) {
            super(context, sourceSection);
        }

        public ClearNode(ClearNode prev) {
            super(prev);
        }

        @Specialization
        public RubyArray clear(RubyArray array) {
            array.setStore(array.getStore(), 0);
            return array;
        }

    }

    @CoreMethod(names = "compact")
    @ImportGuards(ArrayGuards.class)
    public abstract static class CompactNode extends ArrayCoreMethodNode {

        public CompactNode(RubyContext context, SourceSection sourceSection) {
            super(context, sourceSection);
        }

        public CompactNode(CompactNode prev) {
            super(prev);
        }

        @Specialization(guards = "isIntArray")
        public RubyArray compactInt(RubyArray array) {
            return new RubyArray(getContext().getCoreLibrary().getArrayClass(),
                    Arrays.copyOf((int[]) array.getStore(), array.getSize()), array.getSize());
        }

        @Specialization(guards = "isLongArray")
        public RubyArray compactLong(RubyArray array) {
            return new RubyArray(getContext().getCoreLibrary().getArrayClass(),
                    Arrays.copyOf((long[]) array.getStore(), array.getSize()), array.getSize());
        }

        @Specialization(guards = "isDoubleArray")
        public RubyArray compactDouble(RubyArray array) {
            return new RubyArray(getContext().getCoreLibrary().getArrayClass(),
                    Arrays.copyOf((double[]) array.getStore(), array.getSize()), array.getSize());
        }

        @Specialization(guards = "isObjectArray")
        public Object compactObjects(RubyArray array) {
            // TODO CS 9-Feb-15 by removing nil we could make this array suitable for a primitive array storage class
            final Object[] store = (Object[]) array.getStore();
            final
Object[] newStore = new Object[store.length]; final int size = array.getSize(); int m = 0; for (int n = 0; n < size; n++) { if (store[n] != nil()) { newStore[m] = store[n]; m++; } } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), newStore, m); } @Specialization(guards = "isNullArray") public Object compactNull(RubyArray array) { return new RubyArray(getContext().getCoreLibrary().getArrayClass(), null, 0); } } @CoreMethod(names = "compact!", raiseIfFrozenSelf = true) public abstract static class CompactBangNode extends ArrayCoreMethodNode { public CompactBangNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public CompactBangNode(CompactBangNode prev) { super(prev); } @Specialization(guards = "!isObject") public RubyNilClass compactNotObjects(RubyArray array) { return nil(); } @Specialization(guards = "isObject") public Object compactObjects(RubyArray array) { final Object[] store = (Object[]) array.getStore(); final int size = array.getSize(); int m = 0; for (int n = 0; n < size; n++) { if (store[n] != nil()) { store[m] = store[n]; m++; } } array.setStore(store, m); if (m == size) { return nil(); } else { return array; } } } @CoreMethod(names = "concat", required = 1, raiseIfFrozenSelf = true) @NodeChildren({ @NodeChild(value = "array"), @NodeChild(value = "other") }) @ImportGuards(ArrayGuards.class) public abstract static class ConcatNode extends RubyNode { public ConcatNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public ConcatNode(ConcatNode prev) { super(prev); } public abstract RubyArray executeConcat(RubyArray array, RubyArray other); @CreateCast("other") public RubyNode coerceOtherToAry(RubyNode other) { return ToAryNodeFactory.create(getContext(), getSourceSection(), other); } @Specialization(guards = "areBothNull") public RubyArray concatNull(RubyArray array, RubyArray other) { return array; } @Specialization(guards = "areBothIntegerFixnum") public RubyArray concatIntegerFixnum(RubyArray array, RubyArray other) { notDesignedForCompilation(); final int newSize = array.getSize() + other.getSize(); int[] store = (int[]) array.getStore(); if ( store.length < newSize) { store = Arrays.copyOf((int[]) array.getStore(), ArrayUtils.capacity(store.length, newSize)); } System.arraycopy(other.getStore(), 0, store, array.getSize(), other.getSize()); array.setStore(store, newSize); return array; } @Specialization(guards = "areBothLongFixnum") public RubyArray concatLongFixnum(RubyArray array, RubyArray other) { notDesignedForCompilation(); final int newSize = array.getSize() + other.getSize(); long[] store = (long[]) array.getStore(); if ( store.length < newSize) { store = Arrays.copyOf((long[]) array.getStore(), ArrayUtils.capacity(store.length, newSize)); } System.arraycopy(other.getStore(), 0, store, array.getSize(), other.getSize()); array.setStore(store, newSize); return array; } @Specialization(guards = "areBothFloat") public RubyArray concatDouble(RubyArray array, RubyArray other) { notDesignedForCompilation(); final int newSize = array.getSize() + other.getSize(); double[] store = (double[]) array.getStore(); if ( store.length < newSize) { store = Arrays.copyOf((double[]) array.getStore(), ArrayUtils.capacity(store.length, newSize)); } System.arraycopy(other.getStore(), 0, store, array.getSize(), other.getSize()); array.setStore(store, newSize); return array; } @Specialization(guards = "areBothObject") public RubyArray concatObject(RubyArray array, RubyArray other) { 
notDesignedForCompilation();
            final int size = array.getSize();
            final int newSize = size + other.getSize();
            Object[] store = (Object[]) array.getStore();
            if (newSize > store.length) {
                store = Arrays.copyOf(store, ArrayUtils.capacity(store.length, newSize));
            }
            System.arraycopy(other.getStore(), 0, store, size, other.getSize());
            array.setStore(store, newSize);
            return array;
        }

        @Specialization
        public RubyArray concat(RubyArray array, RubyArray other) {
            notDesignedForCompilation();
            final int newSize = array.getSize() + other.getSize();
            Object[] store;
            if (array.getStore() instanceof Object[]) {
                store = (Object[]) array.getStore();
                if (store.length < newSize) {
                    store = Arrays.copyOf(store, ArrayUtils.capacity(store.length, newSize));
                }
                ArrayUtils.copy(other.getStore(), store, array.getSize(), other.getSize());
            } else {
                store = new Object[newSize];
                ArrayUtils.copy(array.getStore(), store, 0, array.getSize());
                ArrayUtils.copy(other.getStore(), store, array.getSize(), other.getSize());
            }
            array.setStore(store, newSize);
            return array;
        }

    }

    @CoreMethod(names = "delete", required = 1)
    public abstract static class DeleteNode extends ArrayCoreMethodNode {

        @Child private KernelNodes.SameOrEqualNode equalNode;
        @Child private IsFrozenNode isFrozenNode;

        public DeleteNode(RubyContext context, SourceSection sourceSection) {
            super(context, sourceSection);
            equalNode = KernelNodesFactory.SameOrEqualNodeFactory.create(context, sourceSection, new RubyNode[]{null, null});
        }

        public DeleteNode(DeleteNode prev) {
            super(prev);
            equalNode = prev.equalNode;
            isFrozenNode = prev.isFrozenNode;
        }

        @Specialization(guards = "isIntegerFixnum")
        public Object deleteIntegerFixnum(VirtualFrame frame, RubyArray array, Object value) {
            final int[] store = (int[]) array.getStore();

            Object found = nil();

            int i = 0;
            int n = 0;
            for (; n < array.getSize(); n++) {
                final Object stored = store[n];

                if (equalNode.executeSameOrEqual(frame, stored, value)) {
                    if (isFrozenNode == null) {
                        CompilerDirectives.transferToInterpreter();
                        isFrozenNode = insert(IsFrozenNodeFactory.create(getContext(), getSourceSection(), null));
                    }
                    if (isFrozenNode.executeIsFrozen(array)) {
                        CompilerDirectives.transferToInterpreter();
                        throw new RaiseException(
                                getContext().getCoreLibrary().frozenError(array.getLogicalClass().getName(), this));
                    }
                    found = store[n];
                    continue;
                }

                if (i != n) {
                    store[i] = store[n];
                }

                i++;
            }

            if (i != n) {
                array.setStore(store, i);
            }
            return found;
        }

        @Specialization(guards = "isObject")
        public Object deleteObject(VirtualFrame frame, RubyArray array, Object value) {
            final Object[] store = (Object[]) array.getStore();

            Object found = nil();

            int i = 0;
            int n = 0;
            for (; n < array.getSize(); n++) {
                final Object stored = store[n];

                if (equalNode.executeSameOrEqual(frame, stored, value)) {
                    if (isFrozenNode == null) {
                        CompilerDirectives.transferToInterpreter();
                        isFrozenNode = insert(IsFrozenNodeFactory.create(getContext(), getSourceSection(), null));
                    }
                    if (isFrozenNode.executeIsFrozen(array)) {
                        CompilerDirectives.transferToInterpreter();
                        throw new RaiseException(
                                getContext().getCoreLibrary().frozenError(array.getLogicalClass().getName(), this));
                    }
                    found = store[n];
                    continue;
                }

                if (i != n) {
                    store[i] = store[n];
                }

                i++;
            }

            if (i != n) {
                array.setStore(store, i);
            }
            return found;
        }

        @Specialization(guards = "isNullArray")
        public Object deleteNull(VirtualFrame frame, RubyArray array, Object value) {
            return nil();
        }

    }

    @CoreMethod(names = "delete_at", required = 1, raiseIfFrozenSelf = true)
    @NodeChildren({ @NodeChild(value = "array"), @NodeChild(value = "index") })
@ImportGuards(ArrayGuards.class) public abstract static class DeleteAtNode extends RubyNode { private final BranchProfile tooSmallBranch = BranchProfile.create(); private final BranchProfile beyondEndBranch = BranchProfile.create(); public DeleteAtNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public DeleteAtNode(DeleteAtNode prev) { super(prev); } @CreateCast("index") public RubyNode coerceOtherToInt(RubyNode index) { return ToIntNodeFactory.create(getContext(), getSourceSection(), index); } @Specialization(guards = "isIntegerFixnum", rewriteOn = UnexpectedResultException.class) public int deleteAtIntegerFixnumInBounds(RubyArray array, int index) throws UnexpectedResultException { final int normalizedIndex = array.normalizeIndex(index); if (normalizedIndex < 0) { throw new UnexpectedResultException(nil()); } else if (normalizedIndex >= array.getSize()) { throw new UnexpectedResultException(nil()); } else { final int[] store = (int[]) array.getStore(); final int value = store[normalizedIndex]; System.arraycopy(store, normalizedIndex + 1, store, normalizedIndex, array.getSize() - normalizedIndex - 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(contains = "deleteAtIntegerFixnumInBounds", guards = "isIntegerFixnum") public Object deleteAtIntegerFixnum(RubyArray array, int index) { notDesignedForCompilation(); int normalizedIndex = index; if (normalizedIndex < 0) { normalizedIndex = array.getSize() + index; } if (normalizedIndex < 0) { tooSmallBranch.enter(); return nil(); } else if (normalizedIndex >= array.getSize()) { beyondEndBranch.enter(); return nil(); } else { final int[] store = (int[]) array.getStore(); final int value = store[normalizedIndex]; System.arraycopy(store, normalizedIndex + 1, store, normalizedIndex, array.getSize() - normalizedIndex - 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(guards = "isLongFixnum", rewriteOn = UnexpectedResultException.class) public long deleteAtLongFixnumInBounds(RubyArray array, int index) throws UnexpectedResultException { final int normalizedIndex = array.normalizeIndex(index); if (normalizedIndex < 0) { throw new UnexpectedResultException(nil()); } else if (normalizedIndex >= array.getSize()) { throw new UnexpectedResultException(nil()); } else { final long[] store = (long[]) array.getStore(); final long value = store[normalizedIndex]; System.arraycopy(store, normalizedIndex + 1, store, normalizedIndex, array.getSize() - normalizedIndex - 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(contains = "deleteAtLongFixnumInBounds", guards = "isLongFixnum") public Object deleteAtLongFixnum(RubyArray array, int index) { notDesignedForCompilation(); int normalizedIndex = index; if (normalizedIndex < 0) { normalizedIndex = array.getSize() + index; } if (normalizedIndex < 0) { tooSmallBranch.enter(); return nil(); } else if (normalizedIndex >= array.getSize()) { beyondEndBranch.enter(); return nil(); } else { final long[] store = (long[]) array.getStore(); final long value = store[normalizedIndex]; System.arraycopy(store, normalizedIndex + 1, store, normalizedIndex, array.getSize() - normalizedIndex - 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(guards = "isFloat", rewriteOn = UnexpectedResultException.class) public double deleteAtFloatInBounds(RubyArray array, int index) throws UnexpectedResultException { final int normalizedIndex = array.normalizeIndex(index); if 
(normalizedIndex < 0) { throw new UnexpectedResultException(nil()); } else if (normalizedIndex >= array.getSize()) { throw new UnexpectedResultException(nil()); } else { final double[] store = (double[]) array.getStore(); final double value = store[normalizedIndex]; System.arraycopy(store, normalizedIndex + 1, store, normalizedIndex, array.getSize() - normalizedIndex - 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(contains = "deleteAtFloatInBounds", guards = "isFloat") public Object deleteAtFloat(RubyArray array, int index) { notDesignedForCompilation(); int normalizedIndex = index; if (normalizedIndex < 0) { normalizedIndex = array.getSize() + index; } if (normalizedIndex < 0) { tooSmallBranch.enter(); return nil(); } else if (normalizedIndex >= array.getSize()) { beyondEndBranch.enter(); return nil(); } else { final double[] store = (double[]) array.getStore(); final double value = store[normalizedIndex]; System.arraycopy(store, normalizedIndex + 1, store, normalizedIndex, array.getSize() - normalizedIndex - 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(guards = "isObject", rewriteOn = UnexpectedResultException.class) public Object deleteAtObjectInBounds(RubyArray array, int index) throws UnexpectedResultException { final int normalizedIndex = array.normalizeIndex(index); if (normalizedIndex < 0) { throw new UnexpectedResultException(nil()); } else if (normalizedIndex >= array.getSize()) { throw new UnexpectedResultException(nil()); } else { final Object[] store = (Object[]) array.getStore(); final Object value = store[normalizedIndex]; System.arraycopy(store, normalizedIndex + 1, store, normalizedIndex, array.getSize() - normalizedIndex - 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(contains = "deleteAtObjectInBounds", guards = "isObject") public Object deleteAtObject(RubyArray array, int index) { notDesignedForCompilation(); int normalizedIndex = index; if (normalizedIndex < 0) { normalizedIndex = array.getSize() + index; } if (normalizedIndex < 0) { tooSmallBranch.enter(); return nil(); } else if (normalizedIndex >= array.getSize()) { beyondEndBranch.enter(); return nil(); } else { final Object[] store = (Object[]) array.getStore(); final Object value = store[normalizedIndex]; System.arraycopy(store, normalizedIndex + 1, store, normalizedIndex, array.getSize() - normalizedIndex - 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(guards = "isNullOrEmpty") public Object deleteAtNullOrEmpty(RubyArray array, int index) { return nil(); } } @CoreMethod(names = "each", needsBlock = true) @ImportGuards(ArrayGuards.class) public abstract static class EachNode extends YieldingCoreMethodNode { @Child private CallDispatchHeadNode toEnumNode; private final BranchProfile breakProfile = BranchProfile.create(); private final BranchProfile nextProfile = BranchProfile.create(); private final BranchProfile redoProfile = BranchProfile.create(); public EachNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public EachNode(EachNode prev) { super(prev); toEnumNode = prev.toEnumNode; } @Specialization public Object eachEnumerator(VirtualFrame frame, RubyArray array, UndefinedPlaceholder block) { if (toEnumNode == null) { CompilerDirectives.transferToInterpreter(); toEnumNode = insert(DispatchHeadNodeFactory.createMethodCall(getContext())); } return toEnumNode.call(frame, array, "to_enum", null, 
getContext().getCoreLibrary().getEachSymbol()); } @Specialization(guards = "isNull") public Object eachNull(VirtualFrame frame, RubyArray array, RubyProc block) { return nil(); } @Specialization(guards = "isIntegerFixnum") public Object eachIntegerFixnum(VirtualFrame frame, RubyArray array, RubyProc block) { final int[] store = (int[]) array.getStore(); int count = 0; try { outer: for (int n = 0; n < array.getSize(); n++) { while (true) { if (CompilerDirectives.inInterpreter()) { count++; } try { yield(frame, block, store[n]); continue outer; } catch (BreakException e) { breakProfile.enter(); return e.getResult(); } catch (NextException e) { nextProfile.enter(); continue outer; } catch (RedoException e) { redoProfile.enter(); } } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return array; } @Specialization(guards = "isLongFixnum") public Object eachLongFixnum(VirtualFrame frame, RubyArray array, RubyProc block) { final long[] store = (long[]) array.getStore(); int count = 0; try { outer: for (int n = 0; n < array.getSize(); n++) { while (true) { if (CompilerDirectives.inInterpreter()) { count++; } try { yield(frame, block, store[n]); continue outer; } catch (BreakException e) { breakProfile.enter(); return e.getResult(); } catch (NextException e) { nextProfile.enter(); continue outer; } catch (RedoException e) { redoProfile.enter(); } } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return array; } @Specialization(guards = "isFloat") public Object eachFloat(VirtualFrame frame, RubyArray array, RubyProc block) { final double[] store = (double[]) array.getStore(); int count = 0; try { outer: for (int n = 0; n < array.getSize(); n++) { while (true) { if (CompilerDirectives.inInterpreter()) { count++; } try { yield(frame, block, store[n]); continue outer; } catch (BreakException e) { breakProfile.enter(); return e.getResult(); } catch (NextException e) { nextProfile.enter(); continue outer; } catch (RedoException e) { redoProfile.enter(); } } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return array; } @Specialization(guards = "isObject") public Object eachObject(VirtualFrame frame, RubyArray array, RubyProc block) { final Object[] store = (Object[]) array.getStore(); int count = 0; try { outer: for (int n = 0; n < array.getSize(); n++) { while (true) { if (CompilerDirectives.inInterpreter()) { count++; } try { yield(frame, block, store[n]); continue outer; } catch (BreakException e) { breakProfile.enter(); return e.getResult(); } catch (NextException e) { nextProfile.enter(); continue outer; } catch (RedoException e) { redoProfile.enter(); } } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return array; } } @CoreMethod(names = "each_with_index", needsBlock = true) @ImportGuards(ArrayGuards.class) public abstract static class EachWithIndexNode extends YieldingCoreMethodNode { private final BranchProfile breakProfile = BranchProfile.create(); private final BranchProfile nextProfile = BranchProfile.create(); private final BranchProfile redoProfile = BranchProfile.create(); public EachWithIndexNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public EachWithIndexNode(EachWithIndexNode prev) { super(prev); } @Specialization(guards = "isNull") public RubyArray eachWithEmpty(VirtualFrame frame, RubyArray array, RubyProc block) { return array; } 
@Specialization(guards = "isIntegerFixnum") public Object eachWithIndexInt(VirtualFrame frame, RubyArray array, RubyProc block) { final int[] store = (int[]) array.getStore(); int count = 0; try { outer: for (int n = 0; n < array.getSize(); n++) { while (true) { if (CompilerDirectives.inInterpreter()) { count++; } try { yield(frame, block, store[n], n); continue outer; } catch (BreakException e) { breakProfile.enter(); return e.getResult(); } catch (NextException e) { nextProfile.enter(); continue outer; } catch (RedoException e) { redoProfile.enter(); } } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return array; } @Specialization(guards = "isLongFixnum") public Object eachWithIndexLong(VirtualFrame frame, RubyArray array, RubyProc block) { final long[] store = (long[]) array.getStore(); int count = 0; try { outer: for (int n = 0; n < array.getSize(); n++) { while (true) { if (CompilerDirectives.inInterpreter()) { count++; } try { yield(frame, block, store[n], n); continue outer; } catch (BreakException e) { breakProfile.enter(); return e.getResult(); } catch (NextException e) { nextProfile.enter(); continue outer; } catch (RedoException e) { redoProfile.enter(); } } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return array; } @Specialization(guards = "isFloat") public Object eachWithIndexDouble(VirtualFrame frame, RubyArray array, RubyProc block) { final double[] store = (double[]) array.getStore(); int count = 0; try { outer: for (int n = 0; n < array.getSize(); n++) { while (true) { if (CompilerDirectives.inInterpreter()) { count++; } try { yield(frame, block, store[n], n); continue outer; } catch (BreakException e) { breakProfile.enter(); return e.getResult(); } catch (NextException e) { nextProfile.enter(); continue outer; } catch (RedoException e) { redoProfile.enter(); } } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return array; } @Specialization(guards = "isObject") public Object eachWithIndexObject(VirtualFrame frame, RubyArray array, RubyProc block) { final Object[] store = (Object[]) array.getStore(); int count = 0; try { outer: for (int n = 0; n < array.getSize(); n++) { while (true) { if (CompilerDirectives.inInterpreter()) { count++; } try { yield(frame, block, store[n], n); continue outer; } catch (BreakException e) { breakProfile.enter(); return e.getResult(); } catch (NextException e) { nextProfile.enter(); continue outer; } catch (RedoException e) { redoProfile.enter(); } } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return array; } @Specialization public Object eachWithIndexObject(VirtualFrame frame, RubyArray array, UndefinedPlaceholder block) { return ruby(frame, "to_enum(:each_with_index)"); } } @CoreMethod(names = "include?", required = 1) public abstract static class IncludeNode extends ArrayCoreMethodNode { @Child private KernelNodes.SameOrEqualNode equalNode; public IncludeNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); equalNode = KernelNodesFactory.SameOrEqualNodeFactory.create(context, sourceSection, new RubyNode[]{null,null}); } public IncludeNode(IncludeNode prev) { super(prev); equalNode = prev.equalNode; } @Specialization(guards = "isNull") public boolean includeNull(VirtualFrame frame, RubyArray array, Object value) { return false; } @Specialization(guards = "isIntegerFixnum") public boolean 
includeIntegerFixnum(VirtualFrame frame, RubyArray array, Object value) { final int[] store = (int[]) array.getStore(); for (int n = 0; n < array.getSize(); n++) { final Object stored = store[n]; notDesignedForCompilation(); if (equalNode.executeSameOrEqual(frame, stored, value)) { return true; } } return false; } @Specialization(guards = "isLongFixnum") public boolean includeLongFixnum(VirtualFrame frame, RubyArray array, Object value) { final long[] store = (long[]) array.getStore(); for (int n = 0; n < array.getSize(); n++) { final Object stored = store[n]; notDesignedForCompilation(); if (equalNode.executeSameOrEqual(frame, stored, value)) { return true; } } return false; } @Specialization(guards = "isFloat") public boolean includeFloat(VirtualFrame frame, RubyArray array, Object value) { final double[] store = (double[]) array.getStore(); for (int n = 0; n < array.getSize(); n++) { final Object stored = store[n]; notDesignedForCompilation(); if (equalNode.executeSameOrEqual(frame, stored, value)) { return true; } } return false; } @Specialization(guards = "isObject") public boolean includeObject(VirtualFrame frame, RubyArray array, Object value) { final Object[] store = (Object[]) array.getStore(); for (int n = 0; n < array.getSize(); n++) { final Object stored = store[n]; if (equalNode.executeSameOrEqual(frame, stored, value)) { return true; } } return false; } } @CoreMethod(names = "initialize", needsBlock = true, optional = 2, raiseIfFrozenSelf = true) @ImportGuards(ArrayGuards.class) public abstract static class InitializeNode extends YieldingCoreMethodNode { @Child private ToIntNode toIntNode; @Child private CallDispatchHeadNode toAryNode; @Child private KernelNodes.RespondToNode respondToToAryNode; @Child private ArrayBuilderNode arrayBuilder; private final BranchProfile breakProfile = BranchProfile.create(); public InitializeNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); arrayBuilder = new ArrayBuilderNode.UninitializedArrayBuilderNode(context); } public InitializeNode(InitializeNode prev) { super(prev); arrayBuilder = prev.arrayBuilder; toIntNode = prev.toIntNode; respondToToAryNode = prev.respondToToAryNode; } @Specialization(guards = {"!isInteger(arguments[1])", "!isLong(arguments[1])", "!isUndefinedPlaceholder(arguments[1])", "!isRubyArray(arguments[1])"}) public RubyArray initialize(VirtualFrame frame, RubyArray array, Object object, UndefinedPlaceholder defaultValue, UndefinedPlaceholder block) { RubyArray copy = null; if (respondToToAryNode == null) { CompilerDirectives.transferToInterpreter(); respondToToAryNode = insert(KernelNodesFactory.RespondToNodeFactory.create(getContext(), getSourceSection(), new RubyNode[]{null, null, null})); } if (respondToToAryNode.doesRespondTo(frame, object, getContext().makeString("to_ary"), false)) { if (toAryNode == null) { CompilerDirectives.transferToInterpreter(); toAryNode = insert(DispatchHeadNodeFactory.createMethodCall(getContext(), false)); } Object toAryResult = toAryNode.call(frame, object, "to_ary", null); if (toAryResult instanceof RubyArray) { copy = (RubyArray) toAryResult; } } if (copy != null) { return initialize(array, copy, UndefinedPlaceholder.INSTANCE, UndefinedPlaceholder.INSTANCE); } else { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } int size = toIntNode.executeIntegerFixnum(frame, object); if (size < 0) { return initializeNegative(array, size, 
UndefinedPlaceholder.INSTANCE, UndefinedPlaceholder.INSTANCE); } else { return initialize(array, size, UndefinedPlaceholder.INSTANCE, UndefinedPlaceholder.INSTANCE); } } } @Specialization public RubyArray initialize(RubyArray array, UndefinedPlaceholder size, UndefinedPlaceholder defaultValue, UndefinedPlaceholder block) { return initialize(array, 0, nil(), block); } @Specialization public RubyArray initialize(RubyArray array, UndefinedPlaceholder size, UndefinedPlaceholder defaultValue, RubyProc block) { return initialize(array, 0, nil(), UndefinedPlaceholder.INSTANCE); } @Specialization(guards = "!isNegative") public RubyArray initialize(RubyArray array, int size, UndefinedPlaceholder defaultValue, UndefinedPlaceholder block) { return initialize(array, size, nil(), block); } @Specialization(guards = "isNegative") public RubyArray initializeNegative(RubyArray array, int size, UndefinedPlaceholder defaultValue, UndefinedPlaceholder block) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } @Specialization(guards = "!isNegative") public RubyArray initialize(RubyArray array, long size, UndefinedPlaceholder defaultValue, UndefinedPlaceholder block) { if (size > Integer.MAX_VALUE) { throw new RaiseException(getContext().getCoreLibrary().argumentError("array size too big", this)); } return initialize(array, (int) size, nil(), block); } @Specialization(guards = "isNegative") public RubyArray initializeNegative(RubyArray array, long size, UndefinedPlaceholder defaultValue, UndefinedPlaceholder block) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } @Specialization(guards = "!isNegative") public RubyArray initialize(RubyArray array, int size, int defaultValue, UndefinedPlaceholder block) { final int[] store = new int[size]; Arrays.fill(store, defaultValue); array.setStore(store, size); return array; } @Specialization(guards = "isNegative") public RubyArray initializeNegative(RubyArray array, int size, int defaultValue, UndefinedPlaceholder block) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } @Specialization(guards = "!isNegative") public RubyArray initialize(RubyArray array, int size, long defaultValue, UndefinedPlaceholder block) { final long[] store = new long[size]; Arrays.fill(store, defaultValue); array.setStore(store, size); return array; } @Specialization(guards = "isNegative") public RubyArray initializeNegative(RubyArray array, int size, long defaultValue, UndefinedPlaceholder block) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } @Specialization(guards = "!isNegative") public RubyArray initialize(RubyArray array, int size, double defaultValue, UndefinedPlaceholder block) { final double[] store = new double[size]; Arrays.fill(store, defaultValue); array.setStore(store, size); return array; } @Specialization(guards = "isNegative") public RubyArray initializeNegative(RubyArray array, int size, double defaultValue, UndefinedPlaceholder block) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } @Specialization(guards = {"!isUndefinedPlaceholder(arguments[2])", "!isNegative"}) public RubyArray 
initialize(RubyArray array, int size, Object defaultValue, UndefinedPlaceholder block) { final Object[] store = new Object[size]; Arrays.fill(store, defaultValue); array.setStore(store, size); return array; } @Specialization(guards = {"!isUndefinedPlaceholder(arguments[2])", "isNegative"}) public RubyArray initializeNegative(RubyArray array, int size, Object defaultValue, UndefinedPlaceholder block) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } @Specialization(guards = {"!isInteger(arguments[1])", "!isUndefinedPlaceholder(arguments[2])"}) public RubyArray initialize(VirtualFrame frame, RubyArray array, Object sizeObject, Object defaultValue, UndefinedPlaceholder block) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } int size = toIntNode.executeIntegerFixnum(frame, sizeObject); if (size < 0) { return initializeNegative(array, size, defaultValue, UndefinedPlaceholder.INSTANCE); } else { return initialize(array, size, defaultValue, UndefinedPlaceholder.INSTANCE); } } @Specialization(guards = {"!isUndefinedPlaceholder(arguments[2])", "!isNegative"}) public Object initialize(VirtualFrame frame, RubyArray array, int size, Object defaultValue, RubyProc block) { return initialize(frame, array, size, UndefinedPlaceholder.INSTANCE, block); } @Specialization(guards = {"!isUndefinedPlaceholder(arguments[2])", "isNegative"}) public Object initializeNegative(VirtualFrame frame, RubyArray array, int size, Object defaultValue, RubyProc block) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } @Specialization(guards = "!isNegative") public Object initialize(VirtualFrame frame, RubyArray array, int size, UndefinedPlaceholder defaultValue, RubyProc block) { Object store = arrayBuilder.start(); int count = 0; try { for (int n = 0; n < size; n++) { if (CompilerDirectives.inInterpreter()) { count++; } try { arrayBuilder.ensure(store, n + 1); store = arrayBuilder.append(store, n, yield(frame, block, n)); } catch (BreakException e) { breakProfile.enter(); array.setStore(arrayBuilder.finish(store, n), n); return e.getResult(); } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } array.setStore(arrayBuilder.finish(store, size), size); return array; } @Specialization(guards = "isNegative") public Object initializeNegative(VirtualFrame frame, RubyArray array, int size, UndefinedPlaceholder defaultValue, RubyProc block) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } @Specialization public RubyArray initialize(RubyArray array, RubyArray copy, UndefinedPlaceholder defaultValue, UndefinedPlaceholder block) { notDesignedForCompilation(); array.setStore(copy.slowToArray(), copy.getSize()); return array; } @Specialization public RubyArray initialize(RubyArray array, RubyArray copy, UndefinedPlaceholder defaultValue, RubyProc block) { notDesignedForCompilation(); array.setStore(copy.slowToArray(), copy.getSize()); return array; } } @CoreMethod(names = "initialize_copy", visibility = Visibility.PRIVATE, required = 1, raiseIfFrozenSelf = true) @NodeChildren({ @NodeChild(value = "self"), @NodeChild(value = "from") }) @ImportGuards(ArrayGuards.class) public abstract static class 
InitializeCopyNode extends RubyNode { // TODO(cs): what about allocationSite ? public InitializeCopyNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public InitializeCopyNode(InitializeCopyNode prev) { super(prev); } @CreateCast("from") public RubyNode coerceOtherToAry(RubyNode other) { return ToAryNodeFactory.create(getContext(), getSourceSection(), other); } @Specialization(guards = "isOtherNull") public RubyArray initializeCopyNull(RubyArray self, RubyArray from) { if (self == from) { return self; } self.setStore(null, 0); return self; } @Specialization(guards = "isOtherIntegerFixnum") public RubyArray initializeCopyIntegerFixnum(RubyArray self, RubyArray from) { if (self == from) { return self; } self.setStore(Arrays.copyOf((int[]) from.getStore(), from.getSize()), from.getSize()); return self; } @Specialization(guards = "isOtherLongFixnum") public RubyArray initializeCopyLongFixnum(RubyArray self, RubyArray from) { if (self == from) { return self; } self.setStore(Arrays.copyOf((long[]) from.getStore(), from.getSize()), from.getSize()); return self; } @Specialization(guards = "isOtherFloat") public RubyArray initializeCopyFloat(RubyArray self, RubyArray from) { if (self == from) { return self; } self.setStore(Arrays.copyOf((double[]) from.getStore(), from.getSize()), from.getSize()); return self; } @Specialization(guards = "isOtherObject") public RubyArray initializeCopyObject(RubyArray self, RubyArray from) { if (self == from) { return self; } self.setStore(Arrays.copyOf((Object[]) from.getStore(), from.getSize()), from.getSize()); return self; } } @CoreMethod(names = {"inject", "reduce"}, needsBlock = true, optional = 1) @ImportGuards(ArrayGuards.class) public abstract static class InjectNode extends YieldingCoreMethodNode { @Child private CallDispatchHeadNode dispatch; public InjectNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); dispatch = DispatchHeadNodeFactory.createMethodCall(context, MissingBehavior.CALL_METHOD_MISSING); } public InjectNode(InjectNode prev) { super(prev); dispatch = prev.dispatch; } @Specialization(guards = "isIntegerFixnum") public Object injectIntegerFixnum(VirtualFrame frame, RubyArray array, Object initial, RubyProc block) { int count = 0; final int[] store = (int[]) array.getStore(); Object accumulator = initial; try { for (int n = 0; n < array.getSize(); n++) { if (CompilerDirectives.inInterpreter()) { count++; } accumulator = yield(frame, block, accumulator, store[n]); } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return accumulator; } @Specialization(guards = "isLongFixnum") public Object injectLongFixnum(VirtualFrame frame, RubyArray array, Object initial, RubyProc block) { int count = 0; final long[] store = (long[]) array.getStore(); Object accumulator = initial; try { for (int n = 0; n < array.getSize(); n++) { if (CompilerDirectives.inInterpreter()) { count++; } accumulator = yield(frame, block, accumulator, store[n]); } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return accumulator; } @Specialization(guards = "isFloat") public Object injectFloat(VirtualFrame frame, RubyArray array, Object initial, RubyProc block) { int count = 0; final double[] store = (double[]) array.getStore(); Object accumulator = initial; try { for (int n = 0; n < array.getSize(); n++) { if (CompilerDirectives.inInterpreter()) { count++; } accumulator = yield(frame, block, accumulator, 
store[n]); } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return accumulator; } @Specialization(guards = "isObject") public Object injectObject(VirtualFrame frame, RubyArray array, Object initial, RubyProc block) { int count = 0; final Object[] store = (Object[]) array.getStore(); Object accumulator = initial; try { for (int n = 0; n < array.getSize(); n++) { if (CompilerDirectives.inInterpreter()) { count++; } accumulator = yield(frame, block, accumulator, store[n]); } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return accumulator; } @Specialization public Object inject(VirtualFrame frame, RubyArray array, RubySymbol symbol, UndefinedPlaceholder unused) { notDesignedForCompilation(); final Object[] store = array.slowToArray(); if (store.length < 2) { throw new UnsupportedOperationException(); } Object accumulator = dispatch.call(frame, store[0], symbol, null, store[1]); for (int n = 2; n < array.getSize(); n++) { accumulator = dispatch.call(frame, accumulator, symbol, null, store[n]); } return accumulator; } } @CoreMethod(names = "insert", required = 1, raiseIfFrozenSelf = true, argumentsAsArray = true) public abstract static class InsertNode extends ArrayCoreMethodNode { @Child private ToIntNode toIntNode; private final BranchProfile tooSmallBranch = BranchProfile.create(); public InsertNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public InsertNode(InsertNode prev) { super(prev); this.toIntNode = prev.toIntNode; } @Specialization(guards = {"isNull", "isIntIndexAndOtherSingleObjectArg"}) public Object insertNull(RubyArray array, Object[] values) { notDesignedForCompilation(); final int index = (int) values[0]; if (index < 0) { CompilerDirectives.transferToInterpreter(); throw new UnsupportedOperationException(); } final Object value = (Object) values[1]; final Object[] store = new Object[index + 1]; Arrays.fill(store, nil()); store[index] = value; array.setStore(store, array.getSize() + 1); return array; } @Specialization(guards = "isArgsLengthTwo", rewriteOn = {ClassCastException.class, IndexOutOfBoundsException.class}) public Object insert(RubyArray array, Object[] values) { final int index = (int) values[0]; final int value = (int) values[1]; final int[] store = (int[]) array.getStore(); System.arraycopy(store, index, store, index + 1, array.getSize() - index); store[index] = value; array.setStore(store, array.getSize() + 1); return array; } @Specialization(contains = {"insert", "insertNull"}) public Object insertBoxed(VirtualFrame frame, RubyArray array, Object[] values) { notDesignedForCompilation(); if (values.length == 1) { return array; } int index; if (values[0] instanceof Integer) { index = (int) values[0]; } else { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } index = toIntNode.executeIntegerFixnum(frame, values[0]); } final int valuesLength = values.length - 1; final int normalizedIndex = index < 0 ? 
array.normalizeIndex(index) + 1 : array.normalizeIndex(index); if (normalizedIndex < 0) { CompilerDirectives.transferToInterpreter(); String errMessage = "index " + index + " too small for array; minimum: " + Integer.toString(-array.getSize()); throw new RaiseException(getContext().getCoreLibrary().indexError(errMessage, this)); } Object[] store = ArrayUtils.box(array.getStore()); final int newSize = normalizedIndex < array.getSize() ? array.getSize() + valuesLength : normalizedIndex + valuesLength; store = Arrays.copyOf(store, newSize); if (normalizedIndex >= array.getSize()) { for (int i = array.getSize(); i < normalizedIndex; i++) { store[i] = nil(); } } final int dest = normalizedIndex + valuesLength; final int len = array.getSize() - normalizedIndex; if (normalizedIndex < array.getSize()) { System.arraycopy(store, normalizedIndex, store, dest, len); } System.arraycopy(values, 1, store, normalizedIndex, valuesLength); array.setStore(store, newSize); return array; } } @CoreMethod(names = {"map", "collect"}, needsBlock = true, returnsEnumeratorIfNoBlock = true) @ImportGuards(ArrayGuards.class) public abstract static class MapNode extends YieldingCoreMethodNode { @Child private ArrayBuilderNode arrayBuilder; private final BranchProfile breakProfile = BranchProfile.create(); private final BranchProfile nextProfile = BranchProfile.create(); private final BranchProfile redoProfile = BranchProfile.create(); public MapNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); arrayBuilder = new ArrayBuilderNode.UninitializedArrayBuilderNode(context); } public MapNode(MapNode prev) { super(prev); arrayBuilder = prev.arrayBuilder; } @Specialization(guards = "isNull") public RubyArray mapNull(RubyArray array, RubyProc block) { return new RubyArray(getContext().getCoreLibrary().getArrayClass()); } @Specialization(guards = "isIntegerFixnum") public Object mapIntegerFixnum(VirtualFrame frame, RubyArray array, RubyProc block) { final int[] store = (int[]) array.getStore(); final int arraySize = array.getSize(); Object mappedStore = arrayBuilder.start(arraySize); int count = 0; try { outer: for (int n = 0; n < array.getSize(); n++) { while (true) { if (CompilerDirectives.inInterpreter()) { count++; } try { mappedStore = arrayBuilder.append(mappedStore, n, yield(frame, block, store[n])); continue outer; } catch (BreakException e) { breakProfile.enter(); return e.getResult(); } catch (NextException e) { nextProfile.enter(); continue outer; } catch (RedoException e) { redoProfile.enter(); } } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), arrayBuilder.finish(mappedStore, arraySize), arraySize); } @Specialization(guards = "isLongFixnum") public Object mapLongFixnum(VirtualFrame frame, RubyArray array, RubyProc block) { final long[] store = (long[]) array.getStore(); final int arraySize = array.getSize(); Object mappedStore = arrayBuilder.start(arraySize); int count = 0; try { outer: for (int n = 0; n < array.getSize(); n++) { while (true) { if (CompilerDirectives.inInterpreter()) { count++; } try { mappedStore = arrayBuilder.append(mappedStore, n, yield(frame, block, store[n])); continue outer; } catch (BreakException e) { breakProfile.enter(); return e.getResult(); } catch (NextException e) { nextProfile.enter(); continue outer; } catch (RedoException e) { redoProfile.enter(); } } } } finally { if (CompilerDirectives.inInterpreter()) { 
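// The interpreted-mode iteration count collected above is reported to the root node here;
// presumably the Truffle runtime uses it as loop-profiling feedback for compilation decisions
// (that use is an assumption; it is not documented in this file).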
getRootNode().reportLoopCount(count); } } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), arrayBuilder.finish(mappedStore, arraySize), arraySize); } @Specialization(guards = "isFloat") public Object mapFloat(VirtualFrame frame, RubyArray array, RubyProc block) { final double[] store = (double[]) array.getStore(); final int arraySize = array.getSize(); Object mappedStore = arrayBuilder.start(arraySize); int count = 0; try { outer: for (int n = 0; n < array.getSize(); n++) { while (true) { if (CompilerDirectives.inInterpreter()) { count++; } try { mappedStore = arrayBuilder.append(mappedStore, n, yield(frame, block, store[n])); continue outer; } catch (BreakException e) { breakProfile.enter(); return e.getResult(); } catch (NextException e) { nextProfile.enter(); continue outer; } catch (RedoException e) { redoProfile.enter(); } } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), arrayBuilder.finish(mappedStore, arraySize), arraySize); } @Specialization(guards = "isObject") public Object mapObject(VirtualFrame frame, RubyArray array, RubyProc block) { final Object[] store = (Object[]) array.getStore(); final int arraySize = array.getSize(); Object mappedStore = arrayBuilder.start(arraySize); int count = 0; try { outer: for (int n = 0; n < array.getSize(); n++) { while (true) { if (CompilerDirectives.inInterpreter()) { count++; } try { mappedStore = arrayBuilder.append(mappedStore, n, yield(frame, block, store[n])); continue outer; } catch (BreakException e) { breakProfile.enter(); return e.getResult(); } catch (NextException e) { nextProfile.enter(); continue outer; } catch (RedoException e) { redoProfile.enter(); } } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), arrayBuilder.finish(mappedStore, arraySize), arraySize); } } @CoreMethod(names = {"map!", "collect!"}, needsBlock = true, returnsEnumeratorIfNoBlock = true, raiseIfFrozenSelf = true) @ImportGuards(ArrayGuards.class) public abstract static class MapInPlaceNode extends YieldingCoreMethodNode { @Child private ArrayWriteDenormalizedNode writeNode; private final BranchProfile breakProfile = BranchProfile.create(); private final BranchProfile nextProfile = BranchProfile.create(); private final BranchProfile redoProfile = BranchProfile.create(); public MapInPlaceNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public MapInPlaceNode(MapInPlaceNode prev) { super(prev); writeNode = prev.writeNode; } @Specialization(guards = "isNull") public RubyArray mapInPlaceNull(RubyArray array, RubyProc block) { return array; } @Specialization(guards = "isIntegerFixnum") public Object mapInPlaceFixnumInteger(VirtualFrame frame, RubyArray array, RubyProc block) { if (writeNode == null) { CompilerDirectives.transferToInterpreter(); writeNode = insert(ArrayWriteDenormalizedNodeFactory.create(getContext(), getSourceSection(), null, null, null)); } final int[] store = (int[]) array.getStore(); int count = 0; try { outer: for (int n = 0; n < array.getSize(); n++) { while (true) { if (CompilerDirectives.inInterpreter()) { count++; } try { writeNode.executeWrite(frame, array, n, yield(frame, block, store[n])); continue outer; } catch (BreakException e) { breakProfile.enter(); return e.getResult(); } catch (NextException e) { nextProfile.enter(); continue outer; } catch 
(RedoException e) { redoProfile.enter(); } } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return array; } @Specialization(guards = "isObject") public Object mapInPlaceObject(VirtualFrame frame, RubyArray array, RubyProc block) { if (writeNode == null) { CompilerDirectives.transferToInterpreter(); writeNode = insert(ArrayWriteDenormalizedNodeFactory.create(getContext(), getSourceSection(), null, null, null)); } final Object[] store = (Object[]) array.getStore(); int count = 0; try { outer: for (int n = 0; n < array.getSize(); n++) { while (true) { if (CompilerDirectives.inInterpreter()) { count++; } try { writeNode.executeWrite(frame, array, n, yield(frame, block, store[n])); continue outer; } catch (BreakException e) { breakProfile.enter(); return e.getResult(); } catch (NextException e) { nextProfile.enter(); continue outer; } catch (RedoException e) { redoProfile.enter(); } } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return array; } } // TODO: move into Enumerable? @CoreMethod(names = "max") public abstract static class MaxNode extends ArrayCoreMethodNode { @Child private CallDispatchHeadNode eachNode; private final MaxBlock maxBlock; public MaxNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); eachNode = DispatchHeadNodeFactory.createMethodCall(context); maxBlock = context.getCoreLibrary().getArrayMaxBlock(); } public MaxNode(MaxNode prev) { super(prev); eachNode = prev.eachNode; maxBlock = prev.maxBlock; } @Specialization public Object max(VirtualFrame frame, RubyArray array) { // TODO: can we just write to the frame instead of having this indirect object? final Memo<Object> maximum = new Memo<>(); final VirtualFrame maximumClosureFrame = Truffle.getRuntime().createVirtualFrame(RubyArguments.pack(null, null, array, null, new Object[] {}), maxBlock.getFrameDescriptor()); maximumClosureFrame.setObject(maxBlock.getFrameSlot(), maximum); final RubyProc block = new RubyProc(getContext().getCoreLibrary().getProcClass(), RubyProc.Type.PROC, maxBlock.getSharedMethodInfo(), maxBlock.getCallTarget(), maxBlock.getCallTarget(), maxBlock.getCallTarget(), maximumClosureFrame.materialize(), null, array, null); eachNode.call(frame, array, "each", block); if (maximum.get() == null) { return nil(); } else { return maximum.get(); } } } public abstract static class MaxBlockNode extends CoreMethodNode { @Child private CallDispatchHeadNode compareNode; public MaxBlockNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); compareNode = DispatchHeadNodeFactory.createMethodCall(context); } public MaxBlockNode(MaxBlockNode prev) { super(prev); compareNode = prev.compareNode; } @Specialization public RubyNilClass max(VirtualFrame frame, Object maximumObject, Object value) { final Memo<Object> maximum = (Memo<Object>) maximumObject; // TODO(CS): cast final Object current = maximum.get(); if (current == null || (int) compareNode.call(frame, value, "<=>", null, current) < 0) { maximum.set(value); } return nil(); } } public static class MaxBlock { private final FrameDescriptor frameDescriptor; private final FrameSlot frameSlot; private final SharedMethodInfo sharedMethodInfo; private final CallTarget callTarget; public MaxBlock(RubyContext context) { final SourceSection sourceSection = new CoreSourceSection("Array", "max"); frameDescriptor = new FrameDescriptor(); frameSlot = frameDescriptor.addFrameSlot("maximum_memo"); sharedMethodInfo = 
new SharedMethodInfo(sourceSection, null, Arity.NO_ARGUMENTS, "max", false, null, false); callTarget = Truffle.getRuntime().createCallTarget(new RubyRootNode( context, sourceSection, null, sharedMethodInfo, ArrayNodesFactory.MaxBlockNodeFactory.create(context, sourceSection, new RubyNode[]{ ReadLevelVariableNodeFactory.create(context, sourceSection, frameSlot, 1), new ReadPreArgumentNode(context, sourceSection, 0, MissingArgumentBehaviour.RUNTIME_ERROR) }))); } public FrameDescriptor getFrameDescriptor() { return frameDescriptor; } public FrameSlot getFrameSlot() { return frameSlot; } public SharedMethodInfo getSharedMethodInfo() { return sharedMethodInfo; } public CallTarget getCallTarget() { return callTarget; } } @CoreMethod(names = "min") public abstract static class MinNode extends ArrayCoreMethodNode { @Child private CallDispatchHeadNode eachNode; private final MinBlock minBlock; public MinNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); eachNode = DispatchHeadNodeFactory.createMethodCall(context); minBlock = context.getCoreLibrary().getArrayMinBlock(); } public MinNode(MinNode prev) { super(prev); eachNode = prev.eachNode; minBlock = prev.minBlock; } @Specialization public Object min(VirtualFrame frame, RubyArray array) { // TODO: can we just write to the frame instead of having this indirect object? final Memo<Object> minimum = new Memo<>(); final VirtualFrame minimumClosureFrame = Truffle.getRuntime().createVirtualFrame(RubyArguments.pack(null, null, array, null, new Object[] {}), minBlock.getFrameDescriptor()); minimumClosureFrame.setObject(minBlock.getFrameSlot(), minimum); final RubyProc block = new RubyProc(getContext().getCoreLibrary().getProcClass(), RubyProc.Type.PROC, minBlock.getSharedMethodInfo(), minBlock.getCallTarget(), minBlock.getCallTarget(), minBlock.getCallTarget(), minimumClosureFrame.materialize(), null, array, null); eachNode.call(frame, array, "each", block); if (minimum.get() == null) { return nil(); } else { return minimum.get(); } } } public abstract static class MinBlockNode extends CoreMethodNode { @Child private CallDispatchHeadNode compareNode; public MinBlockNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); compareNode = DispatchHeadNodeFactory.createMethodCall(context); } public MinBlockNode(MinBlockNode prev) { super(prev); compareNode = prev.compareNode; } @Specialization public RubyNilClass min(VirtualFrame frame, Object minimumObject, Object value) { final Memo<Object> minimum = (Memo<Object>) minimumObject; // TODO(CS): cast final Object current = minimum.get(); if (current == null || (int) compareNode.call(frame, value, "<=>", null, current) < 0) { minimum.set(value); } return nil(); } } public static class MinBlock { private final FrameDescriptor frameDescriptor; private final FrameSlot frameSlot; private final SharedMethodInfo sharedMethodInfo; private final CallTarget callTarget; public MinBlock(RubyContext context) { final SourceSection sourceSection = new CoreSourceSection("Array", "min"); frameDescriptor = new FrameDescriptor(); frameSlot = frameDescriptor.addFrameSlot("minimum_memo"); sharedMethodInfo = new SharedMethodInfo(sourceSection, null, Arity.NO_ARGUMENTS, "min", false, null, false); callTarget = Truffle.getRuntime().createCallTarget(new RubyRootNode( context, sourceSection, null, sharedMethodInfo, ArrayNodesFactory.MinBlockNodeFactory.create(context, sourceSection, new RubyNode[]{ ReadLevelVariableNodeFactory.create(context, sourceSection, frameSlot, 
1), new ReadPreArgumentNode(context, sourceSection, 0, MissingArgumentBehaviour.RUNTIME_ERROR) }))); } public FrameDescriptor getFrameDescriptor() { return frameDescriptor; } public FrameSlot getFrameSlot() { return frameSlot; } public SharedMethodInfo getSharedMethodInfo() { return sharedMethodInfo; } public CallTarget getCallTarget() { return callTarget; } } @CoreMethod(names = "pack", required = 1) public abstract static class PackNode extends ArrayCoreMethodNode { @Child private CallDispatchHeadNode toStringNode; public PackNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public PackNode(PackNode prev) { super(prev); toStringNode = prev.toStringNode; } // TODO CS 3-Mar-15 to be honest these two specialisations are a bit sneaky - we'll get rid of them ASAP @Specialization(guards = {"arrayIsInts", "formatIsXN2000"}) public RubyString packXN2000(RubyArray array, RubyString format) { final int size = array.getSize(); final int[] store = (int[]) array.getStore(); final byte[] bytes = new byte[1 + size * 4]; // bytes[0] = 0 is implicit for (int n = 0; n < size; n++) { final int value = store[n]; final int byteOffset = 1 + n * 4; bytes[byteOffset + 3] = (byte) (value >>> 24); bytes[byteOffset + 2] = (byte) (value >>> 16); bytes[byteOffset + 1] = (byte) (value >>> 8); bytes[byteOffset + 0] = (byte) value; } // TODO CS 3-Mar-15 should be tainting here - but ideally have a pack node, and then taint on top of that return new RubyString(getContext().getCoreLibrary().getStringClass(), new ByteList(bytes)); } @Specialization(guards = {"arrayIsLongs", "formatIsLStar"}) public RubyString packLStar(RubyArray array, RubyString format) { final int size = array.getSize(); final long[] store = (long[]) array.getStore(); final byte[] bytes = new byte[size * 4]; for (int n = 0; n < size; n++) { final int value = (int) store[n]; // happy to truncate final int byteOffset = n * 4; // TODO CS 3-Mar-15 this should be native endian bytes[byteOffset + 3] = (byte) (value >>> 24); bytes[byteOffset + 2] = (byte) (value >>> 16); bytes[byteOffset + 1] = (byte) (value >>> 8); bytes[byteOffset + 0] = (byte) value; } // TODO CS 1-Mar-15 should be tainting here - but ideally have a pack node, and then taint on top of that return new RubyString(getContext().getCoreLibrary().getStringClass(), new ByteList(bytes)); } @CompilerDirectives.TruffleBoundary @Specialization public RubyString pack(VirtualFrame frame, RubyArray array, RubyString format) { notDesignedForCompilation(); final Object[] objects = array.slowToArray(); final IRubyObject[] jrubyObjects = new IRubyObject[objects.length]; for (int n = 0; n < objects.length; n++) { if (objects[n] instanceof RubyNilClass || objects[n] instanceof Integer || objects[n] instanceof Long || objects[n] instanceof RubyBignum || objects[n] instanceof Double || objects[n] instanceof RubyString) { jrubyObjects[n] = getContext().toJRuby(objects[n]); } else { if (toStringNode == null) { CompilerDirectives.transferToInterpreter(); toStringNode = insert(DispatchHeadNodeFactory.createMethodCall(getContext(), MissingBehavior.RETURN_MISSING)); } final Object result = toStringNode.call(frame, objects[n], "to_str", null); if (result == DispatchNode.MISSING) { throw new RaiseException(getContext().getCoreLibrary().typeErrorNoImplicitConversion(objects[n], "String", this)); } else if (result instanceof RubyString) { jrubyObjects[n] = getContext().toJRuby((RubyString) result); } else { throw new 
RaiseException(getContext().getCoreLibrary().typeErrorNoImplicitConversion(objects[n], "String", this)); } } } try { return getContext().toTruffle( org.jruby.util.Pack.pack( getContext().getRuntime().getCurrentContext(), getContext().getRuntime(), getContext().getRuntime().newArray(jrubyObjects), getContext().toJRuby(format))); } catch (org.jruby.exceptions.RaiseException e) { throw new RaiseException(getContext().toTruffle(e.getException(), this)); } } @Specialization(guards = "!isRubyString(arguments[1])") public RubyString pack(VirtualFrame frame, RubyArray array, Object format) { // TODO CS 1-Mar-15 sloppy until I can get @CreateCast to work if (toStringNode == null) { CompilerDirectives.transferToInterpreter(); toStringNode = insert(DispatchHeadNodeFactory.createMethodCall(getContext(), MissingBehavior.RETURN_MISSING)); } final Object result = toStringNode.call(frame, format, "to_str", null); if (result == DispatchNode.MISSING) { throw new RaiseException(getContext().getCoreLibrary().typeErrorNoImplicitConversion(format, "String", this)); } if (result instanceof RubyString) { return pack(frame, array, (RubyString) result); } throw new UnsupportedOperationException(); } protected boolean arrayIsInts(RubyArray array) { return array.getStore() instanceof int[]; } protected boolean arrayIsLongs(RubyArray array) { return array.getStore() instanceof long[]; } protected boolean formatIsLStar(RubyArray array, RubyString format) { final ByteList byteList = format.getByteList(); if (!byteList.getEncoding().isAsciiCompatible()) { return false; } if (byteList.length() != 2) { return false; } final byte[] bytes = byteList.unsafeBytes(); return bytes[0] == 'L' && bytes[1] == '*'; } protected boolean formatIsXN2000(RubyArray array, RubyString format) { final ByteList byteList = format.getByteList(); if (!byteList.getEncoding().isAsciiCompatible()) { return false; } if (byteList.length() != 6) { return false; } final byte[] bytes = byteList.unsafeBytes(); return bytes[0] == 'x' && bytes[1] == 'N' && bytes[2] == '2' && bytes[3] == '0' && bytes[4] == '0' && bytes[5] == '0'; } } @CoreMethod(names = "pop", raiseIfFrozenSelf = true, optional = 1) public abstract static class PopNode extends ArrayCoreMethodNode { @Child private ToIntNode toIntNode; public PopNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public PopNode(PopNode prev) { super(prev); this.toIntNode = prev.toIntNode; } public abstract Object executePop(VirtualFrame frame, RubyArray array, Object n); @Specialization(guards = "isNullOrEmpty") public Object popNil(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) { return nil(); } @Specialization(guards = "isIntegerFixnum", rewriteOn = UnexpectedResultException.class) public int popIntegerFixnumInBounds(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) throws UnexpectedResultException { if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final int[] store = ((int[]) array.getStore()); final int value = store[array.getSize() - 1]; array.setStore(store, array.getSize() - 1); return value; } } @Specialization(contains = "popIntegerFixnumInBounds", guards = "isIntegerFixnum") public Object popIntegerFixnum(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) { if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, 
array.getSize() == 0)) { return nil(); } else { final int[] store = ((int[]) array.getStore()); final int value = store[array.getSize() - 1]; array.setStore(store, array.getSize() - 1); return value; } } @Specialization(guards = "isLongFixnum", rewriteOn = UnexpectedResultException.class) public long popLongFixnumInBounds(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) throws UnexpectedResultException { if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final long[] store = ((long[]) array.getStore()); final long value = store[array.getSize() - 1]; array.setStore(store, array.getSize() - 1); return value; } } @Specialization(contains = "popLongFixnumInBounds", guards = "isLongFixnum") public Object popLongFixnum(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) { if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final long[] store = ((long[]) array.getStore()); final long value = store[array.getSize() - 1]; array.setStore(store, array.getSize() - 1); return value; } } @Specialization(guards = "isFloat", rewriteOn = UnexpectedResultException.class) public double popFloatInBounds(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) throws UnexpectedResultException { if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final double[] store = ((double[]) array.getStore()); final double value = store[array.getSize() - 1]; array.setStore(store, array.getSize() - 1); return value; } } @Specialization(contains = "popFloatInBounds", guards = "isFloat") public Object popFloat(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) { if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final double[] store = ((double[]) array.getStore()); final double value = store[array.getSize() - 1]; array.setStore(store, array.getSize() - 1); return value; } } @Specialization(guards = "isObject") public Object popObject(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) { if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final Object[] store = ((Object[]) array.getStore()); final Object value = store[array.getSize() - 1]; array.setStore(store, array.getSize() - 1); return value; } } @Specialization(guards = {"isNullOrEmpty","!isUndefinedPlaceholder(arguments[1])"}) public Object popNilWithNum(VirtualFrame frame, RubyArray array, Object object) { if (object instanceof Integer && ((Integer) object) < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } else { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int n = toIntNode.executeIntegerFixnum(frame, object); if (n < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), null, 0); 
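// The "...WithNum" specializations below implement Array#pop(n) for each storage kind.
// Standard Ruby semantics: [1, 2, 3].pop(2) returns [2, 3] and leaves the receiver as [1];
// a negative n raises ArgumentError, which is what the "negative array size" checks enforce.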
} @Specialization(guards = "isIntegerFixnum", rewriteOn = UnexpectedResultException.class) public RubyArray popIntegerFixnumInBoundsWithNum(VirtualFrame frame, RubyArray array, int num) throws UnexpectedResultException { if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final int numPop = array.getSize() < num ? array.getSize() : num; final int[] store = ((int[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, array.getSize() - numPop , array.getSize()), numPop); final int[] filler = new int[numPop]; System.arraycopy(filler, 0, store, array.getSize() - numPop, numPop); array.setStore(store, array.getSize() - numPop); return result; } } @Specialization(contains = "popIntegerFixnumInBoundsWithNum", guards = "isIntegerFixnum") public Object popIntegerFixnumWithNum(VirtualFrame frame, RubyArray array, int num) { if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numPop = array.getSize() < num ? array.getSize() : num; final int[] store = ((int[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, array.getSize() - numPop , array.getSize()), numPop); final int[] filler = new int[numPop]; System.arraycopy(filler, 0, store, array.getSize() - numPop, numPop); array.setStore(store, array.getSize() - numPop); return result; } } @Specialization(guards = "isLongFixnum", rewriteOn = UnexpectedResultException.class) public RubyArray popLongFixnumInBoundsWithNum(VirtualFrame frame, RubyArray array, int num) throws UnexpectedResultException { if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final int numPop = array.getSize() < num ? array.getSize() : num; final long[] store = ((long[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, array.getSize() - numPop , array.getSize()), numPop); final long[] filler = new long[numPop]; System.arraycopy(filler, 0, store, array.getSize() - numPop, numPop); array.setStore(store, array.getSize() - numPop); return result; } } @Specialization(contains = "popLongFixnumInBoundsWithNum", guards = "isLongFixnum") public Object popLongFixnumWithNum(VirtualFrame frame, RubyArray array, int num) { if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numPop = array.getSize() < num ? 
array.getSize() : num; final long[] store = ((long[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, array.getSize() - numPop , array.getSize()), numPop); final long[] filler = new long[numPop]; System.arraycopy(filler, 0, store, array.getSize() - numPop, numPop); array.setStore(store, array.getSize() - numPop); return result; } } @Specialization(guards = "isFloat", rewriteOn = UnexpectedResultException.class) public RubyArray popFloatInBoundsWithNum(VirtualFrame frame, RubyArray array, int num) throws UnexpectedResultException { if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final int numPop = array.getSize() < num ? array.getSize() : num; final double[] store = ((double[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, array.getSize() - numPop , array.getSize()), numPop); final double[] filler = new double[numPop]; System.arraycopy(filler, 0, store, array.getSize() - numPop, numPop); array.setStore(store, array.getSize() - numPop); return result;} } @Specialization(contains = "popFloatInBoundsWithNum", guards = "isFloat") public Object popFloatWithNum(VirtualFrame frame, RubyArray array, int num) { if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numPop = array.getSize() < num ? array.getSize() : num; final double[] store = ((double[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, array.getSize() - numPop , array.getSize()), numPop); final double[] filler = new double[numPop]; System.arraycopy(filler, 0, store, array.getSize() - numPop, numPop); array.setStore(store, array.getSize() - numPop); return result;} } @Specialization(guards = "isObject") public Object popObjectWithNum(VirtualFrame frame, RubyArray array, int num) { if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numPop = array.getSize() < num ? 
array.getSize() : num; final Object[] store = ((Object[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, array.getSize() - numPop , array.getSize()), numPop); final Object[] filler = new Object[numPop]; System.arraycopy(filler, 0, store, array.getSize() - numPop, numPop); array.setStore(store, array.getSize() - numPop); return result; } } @Specialization(guards = {"isIntegerFixnum","!isInteger(arguments[1])","!isUndefinedPlaceholder(arguments[1])"}, rewriteOn = UnexpectedResultException.class) public RubyArray popIntegerFixnumInBoundsWithNumObj(VirtualFrame frame, RubyArray array, Object object) throws UnexpectedResultException { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int num = toIntNode.executeIntegerFixnum(frame, object); if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final int numPop = array.getSize() < num ? array.getSize() : num; final int[] store = ((int[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, array.getSize() - numPop , array.getSize()), numPop); final int[] filler = new int[numPop]; System.arraycopy(filler, 0, store, array.getSize() - numPop, numPop); array.setStore(store, array.getSize() - numPop); return result; } } @Specialization(contains = "popIntegerFixnumInBoundsWithNumObj", guards = {"isIntegerFixnum","!isInteger(arguments[1])","!isUndefinedPlaceholder(arguments[1])"} ) public Object popIntegerFixnumWithNumObj(VirtualFrame frame, RubyArray array, Object object) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int num = toIntNode.executeIntegerFixnum(frame, object); if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numPop = array.getSize() < num ? 
array.getSize() : num; final int[] store = ((int[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, array.getSize() - numPop , array.getSize()), numPop); final int[] filler = new int[numPop]; System.arraycopy(filler, 0, store, array.getSize() - numPop, numPop); array.setStore(store, array.getSize() - numPop); return result; } } @Specialization(guards = {"isLongFixnum","!isInteger(arguments[1])","!isUndefinedPlaceholder(arguments[1])"} , rewriteOn = UnexpectedResultException.class) public RubyArray popLongFixnumInBoundsWithNumObj(VirtualFrame frame, RubyArray array, Object object) throws UnexpectedResultException { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int num = toIntNode.executeIntegerFixnum(frame, object); if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final int numPop = array.getSize() < num ? array.getSize() : num; final long[] store = ((long[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, array.getSize() - numPop , array.getSize()), numPop); final long[] filler = new long[numPop]; System.arraycopy(filler, 0, store, array.getSize() - numPop, numPop); array.setStore(store, array.getSize() - numPop); return result; } } @Specialization(contains = "popLongFixnumInBoundsWithNumObj", guards = {"isLongFixnum","!isInteger(arguments[1])","!isUndefinedPlaceholder(arguments[1])"}) public Object popLongFixnumWithNumObj(VirtualFrame frame, RubyArray array, Object object) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int num = toIntNode.executeIntegerFixnum(frame, object); if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numPop = array.getSize() < num ? 
array.getSize() : num; final long[] store = ((long[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, array.getSize() - numPop , array.getSize()), numPop); final long[] filler = new long[numPop]; System.arraycopy(filler, 0, store, array.getSize() - numPop, numPop); array.setStore(store, array.getSize() - numPop); return result; } } @Specialization(guards = {"isFloat","!isInteger(arguments[1])","!isUndefinedPlaceholder(arguments[1])"}, rewriteOn = UnexpectedResultException.class) public RubyArray popFloatInBoundsWithNumObj(VirtualFrame frame, RubyArray array, Object object) throws UnexpectedResultException { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int num = toIntNode.executeIntegerFixnum(frame, object); if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final int numPop = array.getSize() < num ? array.getSize() : num; final double[] store = ((double[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, array.getSize() - numPop , array.getSize()), numPop); final double[] filler = new double[numPop]; System.arraycopy(filler, 0, store, array.getSize() - numPop, numPop); array.setStore(store, array.getSize() - numPop); return result;} } @Specialization(contains = "popFloatInBoundsWithNumObj", guards = {"isFloat","!isInteger(arguments[1])","!isUndefinedPlaceholder(arguments[1])"}) public Object popFloatWithNumObj(VirtualFrame frame, RubyArray array, Object object) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int num = toIntNode.executeIntegerFixnum(frame, object); if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numPop = array.getSize() < num ? 
array.getSize() : num; final double[] store = ((double[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, array.getSize() - numPop , array.getSize()), numPop); final double[] filler = new double[numPop]; System.arraycopy(filler, 0, store, array.getSize() - numPop, numPop); array.setStore(store, array.getSize() - numPop); return result;} } @Specialization(guards = {"isObject","!isInteger(arguments[1])","!isUndefinedPlaceholder(arguments[1])"}) public Object popObjectWithNumObj(VirtualFrame frame, RubyArray array, Object object) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int num = toIntNode.executeIntegerFixnum(frame, object); if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numPop = array.getSize() < num ? array.getSize() : num; final Object[] store = ((Object[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, array.getSize() - numPop , array.getSize()), numPop); final Object[] filler = new Object[numPop]; System.arraycopy(filler, 0, store, array.getSize() - numPop, numPop); array.setStore(store, array.getSize() - numPop); return result; } } } @CoreMethod(names = {"push", "<<", "__append__"}, argumentsAsArray = true, raiseIfFrozenSelf = true) public abstract static class PushNode extends ArrayCoreMethodNode { private final BranchProfile extendBranch = BranchProfile.create(); public PushNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public PushNode(PushNode prev) { super(prev); } @Specialization(guards = {"isNull", "isSingleIntegerFixnum"}) public RubyArray pushEmptySingleIntegerFixnum(RubyArray array, Object... values) { array.setStore(new int[]{(int) values[0]}, 1); return array; } @Specialization(guards = {"isNull", "isSingleLongFixnum"}) public RubyArray pushEmptySingleIntegerLong(RubyArray array, Object... values) { array.setStore(new long[]{(long) values[0]}, 1); return array; } @Specialization(guards = "isNull") public RubyArray pushEmptyObjects(RubyArray array, Object... values) { array.setStore(values, values.length); return array; } @Specialization(guards = {"isIntegerFixnum", "isSingleIntegerFixnum"}) public RubyArray pushIntegerFixnumSingleIntegerFixnum(RubyArray array, Object... values) { final int oldSize = array.getSize(); final int newSize = oldSize + 1; int[] store = (int[]) array.getStore(); if (store.length < newSize) { extendBranch.enter(); store = Arrays.copyOf(store, ArrayUtils.capacity(store.length, newSize)); } store[oldSize] = (int) values[0]; array.setStore(store, newSize); return array; } @Specialization(guards = { "isIntegerFixnum", "!isSingleIntegerFixnum", "!isSingleLongFixnum" }) public RubyArray pushIntegerFixnum(RubyArray array, Object... 
values) { final int oldSize = array.getSize(); final int newSize = oldSize + values.length; int[] oldStore = (int[]) array.getStore(); final Object[] store; if (oldStore.length < newSize) { extendBranch.enter(); store = ArrayUtils.box(oldStore, ArrayUtils.capacity(oldStore.length, newSize) - oldStore.length); } else { store = ArrayUtils.box(oldStore); } for (int n = 0; n < values.length; n++) { store[oldSize + n] = values[n]; } array.setStore(store, newSize); return array; } @Specialization(guards = {"isLongFixnum", "isSingleIntegerFixnum"}) public RubyArray pushLongFixnumSingleIntegerFixnum(RubyArray array, Object... values) { final int oldSize = array.getSize(); final int newSize = oldSize + 1; long[] store = (long[]) array.getStore(); if (store.length < newSize) { extendBranch.enter(); store = Arrays.copyOf(store, ArrayUtils.capacity(store.length, newSize)); } store[oldSize] = (long) (int) values[0]; array.setStore(store, newSize); return array; } @Specialization(guards = {"isLongFixnum", "isSingleLongFixnum"}) public RubyArray pushLongFixnumSingleLongFixnum(RubyArray array, Object... values) { final int oldSize = array.getSize(); final int newSize = oldSize + 1; long[] store = (long[]) array.getStore(); if (store.length < newSize) { extendBranch.enter(); store = Arrays.copyOf(store, ArrayUtils.capacity(store.length, newSize)); } store[oldSize] = (long) values[0]; array.setStore(store, newSize); return array; } @Specialization(guards = "isLongFixnum") public RubyArray pushLongFixnum(RubyArray array, Object... values) { // TODO CS 5-Feb-15 hack to get things working with empty long[] store if (array.getSize() != 0) { throw new UnsupportedOperationException(); } array.setStore(values, values.length); return array; } @Specialization(guards = "isFloat") public RubyArray pushFloat(RubyArray array, Object... values) { // TODO CS 5-Feb-15 hack to get things working with empty double[] store if (array.getSize() != 0) { throw new UnsupportedOperationException(); } array.setStore(values, values.length); return array; } @Specialization(guards = "isObject") public RubyArray pushObject(RubyArray array, Object... values) { final int oldSize = array.getSize(); final int newSize = oldSize + values.length; Object[] store = (Object[]) array.getStore(); if (store.length < newSize) { extendBranch.enter(); store = Arrays.copyOf(store, ArrayUtils.capacity(store.length, newSize)); } for (int n = 0; n < values.length; n++) { store[oldSize + n] = values[n]; } array.setStore(store, newSize); return array; } protected boolean isSingleIntegerFixnum(RubyArray array, Object... values) { return values.length == 1 && values[0] instanceof Integer; } protected boolean isSingleLongFixnum(RubyArray array, Object... 
values) { return values.length == 1 && values[0] instanceof Long; } } // Not really a core method - used internally public abstract static class PushOneNode extends ArrayCoreMethodNode { private final BranchProfile extendBranch = BranchProfile.create(); public PushOneNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public PushOneNode(PushOneNode prev) { super(prev); } @Specialization(guards = "isNull") public RubyArray pushEmpty(RubyArray array, Object value) { array.setStore(new Object[]{value}, 1); return array; } @Specialization(guards = "isIntegerFixnum") public RubyArray pushIntegerFixnumIntegerFixnum(RubyArray array, int value) { final int oldSize = array.getSize(); final int newSize = oldSize + 1; int[] store = (int[]) array.getStore(); if (store.length < newSize) { extendBranch.enter(); array.setStore(store = Arrays.copyOf(store, ArrayUtils.capacity(store.length, newSize)), array.getSize()); } store[oldSize] = value; array.setStore(store, newSize); return array; } @Specialization(guards = { "isIntegerFixnum", "!isInteger(arguments[1])" }) public RubyArray pushIntegerFixnumObject(RubyArray array, Object value) { final int oldSize = array.getSize(); final int newSize = oldSize + 1; final int[] oldStore = (int[]) array.getStore(); final Object[] newStore; if (oldStore.length < newSize) { extendBranch.enter(); newStore = ArrayUtils.box(oldStore, ArrayUtils.capacity(oldStore.length, newSize) - oldStore.length); } else { newStore = ArrayUtils.box(oldStore); } newStore[oldSize] = value; array.setStore(newStore, newSize); return array; } @Specialization(guards = "isObject") public RubyArray pushObjectObject(RubyArray array, Object value) { final int oldSize = array.getSize(); final int newSize = oldSize + 1; Object[] store = (Object[]) array.getStore(); if (store.length < newSize) { extendBranch.enter(); array.setStore(store = Arrays.copyOf(store, ArrayUtils.capacity(store.length, newSize)), array.getSize()); } store[oldSize] = value; array.setStore(store, newSize); return array; } } @CoreMethod(names = "reject", needsBlock = true, returnsEnumeratorIfNoBlock = true) @ImportGuards(ArrayGuards.class) public abstract static class RejectNode extends YieldingCoreMethodNode { @Child private ArrayBuilderNode arrayBuilder; public RejectNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); arrayBuilder = new ArrayBuilderNode.UninitializedArrayBuilderNode(context); } public RejectNode(RejectNode prev) { super(prev); arrayBuilder = prev.arrayBuilder; } @Specialization(guards = "isNull") public Object selectNull(VirtualFrame frame, RubyArray array, RubyProc block) { return new RubyArray(getContext().getCoreLibrary().getArrayClass()); } @Specialization(guards = "isObject") public Object selectObject(VirtualFrame frame, RubyArray array, RubyProc block) { final Object[] store = (Object[]) array.getStore(); Object selectedStore = arrayBuilder.start(array.getSize()); int selectedSize = 0; int count = 0; try { for (int n = 0; n < array.getSize(); n++) { if (CompilerDirectives.inInterpreter()) { count++; } final Object value = store[n]; notDesignedForCompilation(); if (! 
yieldIsTruthy(frame, block, new Object[]{value})) { selectedStore = arrayBuilder.append(selectedStore, selectedSize, value); selectedSize++; } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), arrayBuilder.finish(selectedStore, selectedSize), selectedSize); } @Specialization(guards = "isIntegerFixnum") public Object selectFixnumInteger(VirtualFrame frame, RubyArray array, RubyProc block) { final int[] store = (int[]) array.getStore(); Object selectedStore = arrayBuilder.start(array.getSize()); int selectedSize = 0; int count = 0; try { for (int n = 0; n < array.getSize(); n++) { if (CompilerDirectives.inInterpreter()) { count++; } final Object value = store[n]; notDesignedForCompilation(); if (! yieldIsTruthy(frame, block, value)) { selectedStore = arrayBuilder.append(selectedStore, selectedSize, value); selectedSize++; } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), arrayBuilder.finish(selectedStore, selectedSize), selectedSize); } } @CoreMethod(names = "delete_if" , needsBlock = true, returnsEnumeratorIfNoBlock = true, raiseIfFrozenSelf = true) @ImportGuards(ArrayGuards.class) public abstract static class DeleteIfNode extends YieldingCoreMethodNode { public DeleteIfNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public DeleteIfNode(DeleteIfNode prev) { super(prev); } @Specialization(guards = "isNullArray") public Object rejectInPlaceNull(VirtualFrame frame, RubyArray array, RubyProc block) { return array; } @Specialization(guards = "isIntArray") public Object rejectInPlaceInt(VirtualFrame frame, RubyArray array, RubyProc block) { final int[] store = (int[]) array.getStore(); int i = 0; int n = 0; for (; n < array.getSize(); n++) { if (yieldIsTruthy(frame, block, store[n])) { continue; } if (i != n) { store[i] = store[n]; } i++; } if (i != n) { final int[] filler = new int[n - i]; System.arraycopy(filler, 0, store, i, n - i); array.setStore(store, i); } return array; } @Specialization(guards = "isLongArray") public Object rejectInPlaceLong(VirtualFrame frame, RubyArray array, RubyProc block) { final long[] store = (long[]) array.getStore(); int i = 0; int n = 0; for (; n < array.getSize(); n++) { if (yieldIsTruthy(frame, block, store[n])) { continue; } if (i != n) { store[i] = store[n]; } i++; } if (i != n) { final long[] filler = new long[n - i]; System.arraycopy(filler, 0, store, i, n - i); array.setStore(store, i); } return array; } @Specialization(guards = "isDoubleArray") public Object rejectInPlaceDouble(VirtualFrame frame, RubyArray array, RubyProc block) { final double[] store = (double[]) array.getStore(); int i = 0; int n = 0; for (; n < array.getSize(); n++) { if (yieldIsTruthy(frame, block, store[n])) { continue; } if (i != n) { store[i] = store[n]; } i++; } if (i != n) { final double[] filler = new double[n - i]; System.arraycopy(filler, 0, store, i, n - i); array.setStore(store, i); } return array; } @Specialization(guards = "isObjectArray") public Object rejectInPlaceObject(VirtualFrame frame, RubyArray array, RubyProc block) { final Object[] store = (Object[]) array.getStore(); int i = 0; int n = 0; for (; n < array.getSize(); n++) { if (yieldIsTruthy(frame, block, store[n])) { continue; } if (i != n) { store[i] = store[n]; } i++; } if (i != n) { final Object[] filler = new Object[n - i]; 
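// Overwriting the dropped tail with a zero/null filler array clears stale entries in the store
// before the size is reduced; in Ruby terms, delete_if removes every element for which the
// block is truthy, e.g. [1, 2, 3, 4].delete_if { |x| x.even? } #=> [1, 3].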
System.arraycopy(filler, 0, store, i, n - i); array.setStore(store, i); } return array; } } @CoreMethod(names = "reject!", needsBlock = true, returnsEnumeratorIfNoBlock = true, raiseIfFrozenSelf = true) @ImportGuards(ArrayGuards.class) public abstract static class RejectInPlaceNode extends YieldingCoreMethodNode { public RejectInPlaceNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public RejectInPlaceNode(RejectInPlaceNode prev) { super(prev); } @Specialization(guards = "isNullArray") public Object rejectInPlaceNull(VirtualFrame frame, RubyArray array, RubyProc block) { return nil(); } @Specialization(guards = "isIntArray") public Object rejectInPlaceInt(VirtualFrame frame, RubyArray array, RubyProc block) { final int[] store = (int[]) array.getStore(); int i = 0; int n = 0; for (; n < array.getSize(); n++) { if (yieldIsTruthy(frame, block, store[n])) { continue; } if (i != n) { store[i] = store[n]; } i++; } if (i != n) { final int[] filler = new int[n - i]; System.arraycopy(filler, 0, store, i, n - i); array.setStore(store, i); return array; } else { return nil(); } } @Specialization(guards = "isLongArray") public Object rejectInPlaceLong(VirtualFrame frame, RubyArray array, RubyProc block) { final long[] store = (long[]) array.getStore(); int i = 0; int n = 0; for (; n < array.getSize(); n++) { if (yieldIsTruthy(frame, block, store[n])) { continue; } if (i != n) { store[i] = store[n]; } i++; } if (i != n) { final long[] filler = new long[n - i]; System.arraycopy(filler, 0, store, i, n - i); array.setStore(store, i); return array; } else { return nil(); } } @Specialization(guards = "isDoubleArray") public Object rejectInPlaceDouble(VirtualFrame frame, RubyArray array, RubyProc block) { final double[] store = (double[]) array.getStore(); int i = 0; int n = 0; for (; n < array.getSize(); n++) { if (yieldIsTruthy(frame, block, store[n])) { continue; } if (i != n) { store[i] = store[n]; } i++; } if (i != n) { final double[] filler = new double[n - i]; System.arraycopy(filler, 0, store, i, n - i); array.setStore(store, i); return array; } else { return nil(); } } @Specialization(guards = "isObjectArray") public Object rejectInPlaceObject(VirtualFrame frame, RubyArray array, RubyProc block) { final Object[] store = (Object[]) array.getStore(); int i = 0; int n = 0; for (; n < array.getSize(); n++) { if (yieldIsTruthy(frame, block, store[n])) { continue; } if (i != n) { store[i] = store[n]; } i++; } if (i != n) { final Object[] filler = new Object[n - i]; System.arraycopy(filler, 0, store, i, n - i); array.setStore(store, i); return array; } else { return nil(); } } } @CoreMethod(names = "replace", required = 1, raiseIfFrozenSelf = true) @NodeChildren({ @NodeChild(value = "array"), @NodeChild(value = "other") }) @ImportGuards(ArrayGuards.class) public abstract static class ReplaceNode extends RubyNode { public ReplaceNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public ReplaceNode(ReplaceNode prev) { super(prev); } @CreateCast("other") public RubyNode coerceOtherToAry(RubyNode index) { return ToAryNodeFactory.create(getContext(), getSourceSection(), index); } @Specialization(guards = "isOtherNull") public RubyArray replace(RubyArray array, RubyArray other) { notDesignedForCompilation(); array.setStore(null, 0); return array; } @Specialization(guards = "isOtherIntegerFixnum") public RubyArray replaceIntegerFixnum(RubyArray array, RubyArray other) { notDesignedForCompilation(); 
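// Array#replace copies the other array's store (Arrays.copyOf) rather than aliasing it, so the
// receiver and the argument never share backing storage afterwards; in Ruby,
// a.replace([1, 2, 3]) rewrites a's contents in place and returns a.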
array.setStore(Arrays.copyOf((int[]) other.getStore(), other.getSize()), other.getSize()); return array; } @Specialization(guards = "isOtherLongFixnum") public RubyArray replaceLongFixnum(RubyArray array, RubyArray other) { notDesignedForCompilation(); array.setStore(Arrays.copyOf((long[]) other.getStore(), other.getSize()), other.getSize()); return array; } @Specialization(guards = "isOtherFloat") public RubyArray replaceFloat(RubyArray array, RubyArray other) { notDesignedForCompilation(); array.setStore(Arrays.copyOf((double[]) other.getStore(), other.getSize()), other.getSize()); return array; } @Specialization(guards = "isOtherObject") public RubyArray replaceObject(RubyArray array, RubyArray other) { notDesignedForCompilation(); array.setStore(Arrays.copyOf((Object[]) other.getStore(), other.getSize()), other.getSize()); return array; } } @CoreMethod(names = "select", needsBlock = true, returnsEnumeratorIfNoBlock = true) @ImportGuards(ArrayGuards.class) public abstract static class SelectNode extends YieldingCoreMethodNode { @Child private ArrayBuilderNode arrayBuilder; public SelectNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); arrayBuilder = new ArrayBuilderNode.UninitializedArrayBuilderNode(context); } public SelectNode(SelectNode prev) { super(prev); arrayBuilder = prev.arrayBuilder; } @Specialization(guards = "isNull") public Object selectNull(VirtualFrame frame, RubyArray array, RubyProc block) { return new RubyArray(getContext().getCoreLibrary().getArrayClass()); } @Specialization(guards = "isObject") public Object selectObject(VirtualFrame frame, RubyArray array, RubyProc block) { final Object[] store = (Object[]) array.getStore(); Object selectedStore = arrayBuilder.start(array.getSize()); int selectedSize = 0; int count = 0; try { for (int n = 0; n < array.getSize(); n++) { if (CompilerDirectives.inInterpreter()) { count++; } final Object value = store[n]; notDesignedForCompilation(); if (yieldIsTruthy(frame, block, new Object[]{value})) { selectedStore = arrayBuilder.append(selectedStore, selectedSize, value); selectedSize++; } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), arrayBuilder.finish(selectedStore, selectedSize), selectedSize); } @Specialization(guards = "isIntegerFixnum") public Object selectFixnumInteger(VirtualFrame frame, RubyArray array, RubyProc block) { final int[] store = (int[]) array.getStore(); Object selectedStore = arrayBuilder.start(array.getSize()); int selectedSize = 0; int count = 0; try { for (int n = 0; n < array.getSize(); n++) { if (CompilerDirectives.inInterpreter()) { count++; } final Object value = store[n]; notDesignedForCompilation(); if (yieldIsTruthy(frame, block, value)) { selectedStore = arrayBuilder.append(selectedStore, selectedSize, value); selectedSize++; } } } finally { if (CompilerDirectives.inInterpreter()) { getRootNode().reportLoopCount(count); } } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), arrayBuilder.finish(selectedStore, selectedSize), selectedSize); } } @CoreMethod(names = "shift", raiseIfFrozenSelf = true, optional = 1) public abstract static class ShiftNode extends ArrayCoreMethodNode { @Child private ToIntNode toIntNode; public ShiftNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public ShiftNode(ShiftNode prev) { super(prev); } public abstract Object executeShift(VirtualFrame frame, RubyArray 
array, Object n); @Specialization(guards = "isNullOrEmpty") public Object shiftNil(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) { return nil(); } @Specialization(guards = "isIntegerFixnum", rewriteOn = UnexpectedResultException.class) public int shiftIntegerFixnumInBounds(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) throws UnexpectedResultException { if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final int[] store = ((int[]) array.getStore()); final int value = store[0]; System.arraycopy(store, 1, store, 0, array.getSize() - 1); final int[] filler = new int[1]; System.arraycopy(filler, 0, store, array.getSize() - 1, 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(contains = "shiftIntegerFixnumInBounds", guards = "isIntegerFixnum") public Object shiftIntegerFixnum(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) { if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int[] store = ((int[]) array.getStore()); final int value = store[0]; System.arraycopy(store, 1, store, 0, array.getSize() - 1); final int[] filler = new int[1]; System.arraycopy(filler, 0, store, array.getSize() - 1, 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(guards = "isLongFixnum", rewriteOn = UnexpectedResultException.class) public long shiftLongFixnumInBounds(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) throws UnexpectedResultException { if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final long[] store = ((long[]) array.getStore()); final long value = store[0]; System.arraycopy(store, 1, store, 0, array.getSize() - 1); final long[] filler = new long[1]; System.arraycopy(filler, 0, store, array.getSize() - 1, 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(contains = "shiftLongFixnumInBounds", guards = "isLongFixnum") public Object shiftLongFixnum(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) { if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final long[] store = ((long[]) array.getStore()); final long value = store[0]; System.arraycopy(store, 1, store, 0, array.getSize() - 1); final long[] filler = new long[1]; System.arraycopy(filler, 0, store, array.getSize() - 1, 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(guards = "isFloat", rewriteOn = UnexpectedResultException.class) public double shiftFloatInBounds(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) throws UnexpectedResultException { if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final double[] store = ((double[]) array.getStore()); final double value = store[0]; System.arraycopy(store, 1, store, 0, array.getSize() - 1); final double[] filler = new double[1]; System.arraycopy(filler, 0, store, array.getSize() - 1, 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(contains = 
"shiftFloatInBounds", guards = "isFloat") public Object shiftFloat(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) { if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final double[] store = ((double[]) array.getStore()); final double value = store[0]; System.arraycopy(store, 1, store, 0, array.getSize() - 1); final double[] filler = new double[1]; System.arraycopy(filler, 0, store, array.getSize() - 1, 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(guards = "isObject") public Object shiftObject(VirtualFrame frame, RubyArray array, UndefinedPlaceholder undefinedPlaceholder) { if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final Object[] store = ((Object[]) array.getStore()); final Object value = store[0]; System.arraycopy(store, 1, store, 0, array.getSize() - 1); final Object[] filler = new Object[1]; System.arraycopy(filler, 0, store, array.getSize() - 1, 1); array.setStore(store, array.getSize() - 1); return value; } } @Specialization(guards = {"isNullOrEmpty","!isUndefinedPlaceholder(arguments[1])"}) public Object shiftNilWithNum(VirtualFrame frame, RubyArray array, Object object) { if (object instanceof Integer && ((Integer) object) < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } else { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int n = toIntNode.executeIntegerFixnum(frame, object); if (n < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), null, 0); } @Specialization(guards = "isIntegerFixnum", rewriteOn = UnexpectedResultException.class) public RubyArray popIntegerFixnumInBoundsWithNum(VirtualFrame frame, RubyArray array, int num) throws UnexpectedResultException { if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final int numShift = array.getSize() < num ? 
array.getSize() : num; final int[] store = ((int[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, 0 , numShift), numShift); final int[] filler = new int[numShift]; System.arraycopy(store, numShift, store, 0 , array.getSize() - numShift); System.arraycopy(filler, 0, store, array.getSize() - numShift, numShift); array.setStore(store, array.getSize() - numShift); return result; } } @Specialization(contains = "popIntegerFixnumInBoundsWithNum", guards = "isIntegerFixnum") public Object popIntegerFixnumWithNum(VirtualFrame frame, RubyArray array, int num) { if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numShift = array.getSize() < num ? array.getSize() : num; final int[] store = ((int[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, 0 , numShift), numShift); final int[] filler = new int[numShift]; System.arraycopy(store, numShift, store, 0 , array.getSize() - numShift); System.arraycopy(filler, 0, store, array.getSize() - numShift, numShift); array.setStore(store, array.getSize() - numShift); return result; } } @Specialization(guards = "isLongFixnum", rewriteOn = UnexpectedResultException.class) public RubyArray shiftLongFixnumInBoundsWithNum(VirtualFrame frame, RubyArray array, int num) throws UnexpectedResultException { if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final int numShift = array.getSize() < num ? array.getSize() : num; final long[] store = ((long[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, 0 , numShift), numShift); final long[] filler = new long[numShift]; System.arraycopy(store, numShift, store, 0 , array.getSize() - numShift); System.arraycopy(filler, 0, store, array.getSize() - numShift, numShift); array.setStore(store, array.getSize() - numShift); return result; } } @Specialization(contains = "shiftLongFixnumInBoundsWithNum", guards = "isLongFixnum") public Object shiftLongFixnumWithNum(VirtualFrame frame, RubyArray array, int num) { if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numShift = array.getSize() < num ? 
array.getSize() : num; final long[] store = ((long[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, 0 , numShift), numShift); final long[] filler = new long[numShift]; System.arraycopy(store, numShift, store, 0 , array.getSize() - numShift); System.arraycopy(filler, 0, store, array.getSize() - numShift, numShift); array.setStore(store, array.getSize() - numShift); return result; } } @Specialization(guards = "isFloat", rewriteOn = UnexpectedResultException.class) public RubyArray shiftFloatInBoundsWithNum(VirtualFrame frame, RubyArray array, int num) throws UnexpectedResultException { if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final int numShift = array.getSize() < num ? array.getSize() : num; final double[] store = ((double[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, 0, numShift), numShift); final double[] filler = new double[numShift]; System.arraycopy(store, numShift, store, 0, array.getSize() - numShift); System.arraycopy(filler, 0, store, array.getSize() - numShift, numShift); array.setStore(store, array.getSize() - numShift); return result; } } @Specialization(contains = "shiftFloatInBoundsWithNum", guards = "isFloat") public Object shiftFloatWithNum(VirtualFrame frame, RubyArray array, int num) { if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numShift = array.getSize() < num ? array.getSize() : num; final double[] store = ((double[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, 0, numShift), numShift); final double[] filler = new double[numShift]; System.arraycopy(store, numShift, store, 0, array.getSize() - numShift); System.arraycopy(filler, 0, store, array.getSize() - numShift, numShift); array.setStore(store, array.getSize() - numShift); return result; } } @Specialization(guards = "isObject") public Object shiftObjectWithNum(VirtualFrame frame, RubyArray array, int num) { if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numShift = array.getSize() < num ? 
array.getSize() : num; final Object[] store = ((Object[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, 0, numShift), numShift); final Object[] filler = new Object[numShift]; System.arraycopy(store, numShift, store, 0, array.getSize() - numShift); System.arraycopy(filler, 0, store, array.getSize() - numShift, numShift); array.setStore(store, array.getSize() - numShift); return result; } } @Specialization(guards = {"isIntegerFixnum","!isInteger(arguments[1])","!isUndefinedPlaceholder(arguments[1])"}, rewriteOn = UnexpectedResultException.class) public RubyArray shiftIntegerFixnumInBoundsWithNumObj(VirtualFrame frame, RubyArray array, Object object) throws UnexpectedResultException { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int num = toIntNode.executeIntegerFixnum(frame, object); if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final int numShift = array.getSize() < num ? array.getSize() : num; final int[] store = ((int[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, 0 , numShift), numShift); final int[] filler = new int[numShift]; System.arraycopy(store, numShift, store, 0 , array.getSize() - numShift); System.arraycopy(filler, 0, store, array.getSize() - numShift, numShift); array.setStore(store, array.getSize() - numShift); return result; } } @Specialization(contains = "shiftIntegerFixnumInBoundsWithNumObj", guards = {"isIntegerFixnum","!isInteger(arguments[1])","!isUndefinedPlaceholder(arguments[1])"} ) public Object shiftIntegerFixnumWithNumObj(VirtualFrame frame, RubyArray array, Object object) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int num = toIntNode.executeIntegerFixnum(frame, object); if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numShift = array.getSize() < num ? 
array.getSize() : num; final int[] store = ((int[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, 0 , numShift), numShift); final int[] filler = new int[numShift]; System.arraycopy(store, numShift, store, 0 , array.getSize() - numShift); System.arraycopy(filler, 0, store, array.getSize() - numShift, numShift); array.setStore(store, array.getSize() - numShift); return result; } } @Specialization(guards = {"isLongFixnum","!isInteger(arguments[1])","!isUndefinedPlaceholder(arguments[1])"} , rewriteOn = UnexpectedResultException.class) public RubyArray shiftLongFixnumInBoundsWithNumObj(VirtualFrame frame, RubyArray array, Object object) throws UnexpectedResultException { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int num = toIntNode.executeIntegerFixnum(frame, object); if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final int numShift = array.getSize() < num ? array.getSize() : num; final long[] store = ((long[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, 0 , numShift), numShift); final long[] filler = new long[numShift]; System.arraycopy(store, numShift, store, 0 , array.getSize() - numShift); System.arraycopy(filler, 0, store, array.getSize() - numShift, numShift); array.setStore(store, array.getSize() - numShift); return result; } } @Specialization(contains = "shiftLongFixnumInBoundsWithNumObj", guards = {"isLongFixnum","!isInteger(arguments[1])","!isUndefinedPlaceholder(arguments[1])"}) public Object shiftLongFixnumWithNumObj(VirtualFrame frame, RubyArray array, Object object) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int num = toIntNode.executeIntegerFixnum(frame, object); if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numShift = array.getSize() < num ? 
array.getSize() : num; final long[] store = ((long[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, 0 , numShift), numShift); final long[] filler = new long[numShift]; System.arraycopy(store, numShift, store, 0 , array.getSize() - numShift); System.arraycopy(filler, 0, store, array.getSize() - numShift, numShift); array.setStore(store, array.getSize() - numShift); return result; } } @Specialization(guards = {"isFloat","!isInteger(arguments[1])","!isUndefinedPlaceholder(arguments[1])"}, rewriteOn = UnexpectedResultException.class) public RubyArray shiftFloatInBoundsWithNumObj(VirtualFrame frame, RubyArray array, Object object) throws UnexpectedResultException { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int num = toIntNode.executeIntegerFixnum(frame, object); if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { throw new UnexpectedResultException(nil()); } else { final int numShift = array.getSize() < num ? array.getSize() : num; final double[] store = ((double[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, 0, array.getSize() - numShift), numShift); final double[] filler = new double[numShift]; System.arraycopy(store, numShift, store, 0, array.getSize() - numShift); System.arraycopy(filler, 0, store, array.getSize() - numShift, numShift); array.setStore(store, array.getSize() - numShift); return result; } } @Specialization(contains = "shiftFloatInBoundsWithNumObj", guards = {"isFloat","!isInteger(arguments[1])","!isUndefinedPlaceholder(arguments[1])"}) public Object shiftFloatWithNumObj(VirtualFrame frame, RubyArray array, Object object) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int num = toIntNode.executeIntegerFixnum(frame, object); if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numShift = array.getSize() < num ? 
array.getSize() : num; final double[] store = ((double[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, 0, array.getSize() - numShift), numShift); final double[] filler = new double[numShift]; System.arraycopy(store, numShift, store, 0, array.getSize() - numShift); System.arraycopy(filler, 0, store, array.getSize() - numShift, numShift); array.setStore(store, array.getSize() - numShift); return result; } } @Specialization(guards = {"isObject","!isInteger(arguments[1])","!isUndefinedPlaceholder(arguments[1])"}) public Object shiftObjectWithNumObj(VirtualFrame frame, RubyArray array, Object object) { if (toIntNode == null) { CompilerDirectives.transferToInterpreter(); toIntNode = insert(ToIntNodeFactory.create(getContext(), getSourceSection(), null)); } final int num = toIntNode.executeIntegerFixnum(frame, object); if (num < 0) { CompilerDirectives.transferToInterpreter(); throw new RaiseException(getContext().getCoreLibrary().argumentError("negative array size", this)); } if (CompilerDirectives.injectBranchProbability(CompilerDirectives.UNLIKELY_PROBABILITY, array.getSize() == 0)) { return nil(); } else { final int numShift = array.getSize() < num ? array.getSize() : num; final Object[] store = ((Object[]) array.getStore()); final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass(), Arrays.copyOfRange(store, 0, array.getSize() - numShift), numShift); final Object[] filler = new Object[numShift]; System.arraycopy(store, numShift, store, 0, array.getSize() - numShift); System.arraycopy(filler, 0, store, array.getSize() - numShift, numShift); array.setStore(store, array.getSize() - numShift); return result; } } } @CoreMethod(names = {"size", "length"}) public abstract static class SizeNode extends ArrayCoreMethodNode { public SizeNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public SizeNode(SizeNode prev) { super(prev); } @Specialization public int size(RubyArray array) { return array.getSize(); } } @CoreMethod(names = "sort", needsBlock = true) public abstract static class SortNode extends ArrayCoreMethodNode { @Child private CallDispatchHeadNode compareDispatchNode; @Child private YieldDispatchHeadNode yieldNode; public SortNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); compareDispatchNode = DispatchHeadNodeFactory.createMethodCall(context); yieldNode = new YieldDispatchHeadNode(context); } public SortNode(SortNode prev) { super(prev); compareDispatchNode = prev.compareDispatchNode; yieldNode = prev.yieldNode; } @Specialization(guards = "isNull") public RubyArray sortNull(RubyArray array, Object block) { return new RubyArray(getContext().getCoreLibrary().getArrayClass()); } @ExplodeLoop @Specialization(guards = {"isIntegerFixnum", "isSmall"}) public RubyArray sortVeryShortIntegerFixnum(VirtualFrame frame, RubyArray array, UndefinedPlaceholder block) { final int[] store = (int[]) array.getStore(); final int[] newStore = new int[store.length]; final int size = array.getSize(); // Selection sort - written very carefully to allow PE for (int i = 0; i < RubyArray.ARRAYS_SMALL; i++) { if (i < size) { for (int j = i + 1; j < RubyArray.ARRAYS_SMALL; j++) { if (j < size) { if (castSortValue(compareDispatchNode.call(frame, store[j], "<=>", null, store[i])) < 0) { final int temp = store[j]; store[j] = store[i]; store[i] = temp; } } } newStore[i] = store[i]; } } return new 
RubyArray(getContext().getCoreLibrary().getArrayClass(), newStore, size); } @ExplodeLoop @Specialization(guards = {"isLongFixnum", "isSmall"}) public RubyArray sortVeryShortLongFixnum(VirtualFrame frame, RubyArray array, UndefinedPlaceholder block) { final long[] store = (long[]) array.getStore(); final long[] newStore = new long[store.length]; final int size = array.getSize(); // Selection sort - written very carefully to allow PE for (int i = 0; i < RubyArray.ARRAYS_SMALL; i++) { if (i < size) { for (int j = i + 1; j < RubyArray.ARRAYS_SMALL; j++) { if (j < size) { if (castSortValue(compareDispatchNode.call(frame, store[j], "<=>", null, store[i])) < 0) { final long temp = store[j]; store[j] = store[i]; store[i] = temp; } } } newStore[i] = store[i]; } } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), newStore, size); } @Specialization(guards = {"isObject", "isSmall"}) public RubyArray sortVeryShortObject(VirtualFrame frame, RubyArray array, UndefinedPlaceholder block) { final Object[] oldStore = (Object[]) array.getStore(); final Object[] store = Arrays.copyOf(oldStore, oldStore.length); // Insertion sort final int size = array.getSize(); for (int i = 1; i < size; i++) { final Object x = store[i]; int j = i; // TODO(CS): node for this cast while (j > 0 && castSortValue(compareDispatchNode.call(frame, store[j - 1], "<=>", null, x)) > 0) { store[j] = store[j - 1]; j--; } store[j] = x; } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), store, size); } @Specialization public Object sortUsingRubinius(VirtualFrame frame, RubyArray array, RubyProc block) { return sortUsingRubinius(frame, array, (Object) block); } @Specialization(guards = {"!isNull", "!isSmall"}) public Object sortUsingRubinius(VirtualFrame frame, RubyArray array, Object block) { if (block == UndefinedPlaceholder.INSTANCE) { return ruby(frame, "sorted = dup; Rubinius.privately { sorted.isort!(0, right) }; sorted", "right", array.getSize()); } else { return ruby(frame, "sorted = dup; Rubinius.privately { sorted.isort_block!(0, right, block) }; sorted", "right", array.getSize(), "block", block); } } private int castSortValue(Object value) { if (value instanceof Integer) { return (int) value; } CompilerDirectives.transferToInterpreter(); // TODO CS 14-Mar-15 - what's the error message here? throw new RaiseException(getContext().getCoreLibrary().argumentError("expecting a Fixnum to sort", this)); } protected static boolean isSmall(RubyArray array) { return array.getSize() <= RubyArray.ARRAYS_SMALL; } } @CoreMethod(names = "unshift", argumentsAsArray = true, raiseIfFrozenSelf = true) public abstract static class UnshiftNode extends CoreMethodNode { public UnshiftNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public UnshiftNode(UnshiftNode prev) { super(prev); } @Specialization public RubyArray unshift(RubyArray array, Object... 
args) { notDesignedForCompilation(); array.slowUnshift(args); return array; } } @CoreMethod(names = "zip", required = 1, argumentsAsArray = true) public abstract static class ZipNode extends ArrayCoreMethodNode { public ZipNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); } public ZipNode(ZipNode prev) { super(prev); } @Specialization(guards = {"isObject", "isOtherSingleIntegerFixnumArray"}) public RubyArray zipObjectIntegerFixnum(RubyArray array, Object[] others) { final RubyArray other = (RubyArray) others[0]; final Object[] a = (Object[]) array.getStore(); final int[] b = (int[]) other.getStore(); final int bLength = other.getSize(); final int zippedLength = array.getSize(); final Object[] zipped = new Object[zippedLength]; final boolean areSameLength = bLength == zippedLength; if (areSameLength) { for (int n = 0; n < zippedLength; n++) { zipped[n] = new RubyArray(getContext().getCoreLibrary().getArrayClass(), new Object[]{a[n], b[n]}, 2); } } else { for (int n = 0; n < zippedLength; n++) { if (n < bLength) { zipped[n] = new RubyArray(getContext().getCoreLibrary().getArrayClass(), new Object[]{a[n], b[n]}, 2); } else { zipped[n] = new RubyArray(getContext().getCoreLibrary().getArrayClass(), new Object[]{a[n], nil()}, 2); } } } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), zipped, zippedLength); } @Specialization(guards = {"isObject", "isOtherSingleObjectArray"}) public RubyArray zipObjectObject(RubyArray array, Object[] others) { final RubyArray other = (RubyArray) others[0]; final Object[] a = (Object[]) array.getStore(); final Object[] b = (Object[]) other.getStore(); final int bLength = other.getSize(); final int zippedLength = array.getSize(); final Object[] zipped = new Object[zippedLength]; final boolean areSameLength = bLength == zippedLength; if (areSameLength) { for (int n = 0; n < zippedLength; n++) { zipped[n] = new RubyArray(getContext().getCoreLibrary().getArrayClass(), new Object[]{a[n], b[n]}, 2); } } else { for (int n = 0; n < zippedLength; n++) { if (n < bLength) { zipped[n] = new RubyArray(getContext().getCoreLibrary().getArrayClass(), new Object[]{a[n], b[n]}, 2); } else { zipped[n] = new RubyArray(getContext().getCoreLibrary().getArrayClass(), new Object[]{a[n], nil()}, 2); } } } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), zipped, zippedLength); } @Specialization(guards = {"!isOtherSingleObjectArray"}) public Object zipObjectObjectNotSingleObject(VirtualFrame frame, RubyArray array, Object[] others) { return zipRuby(frame, others); } @Specialization(guards = {"!isOtherSingleIntegerFixnumArray"}) public Object zipObjectObjectNotSingleInteger(VirtualFrame frame, RubyArray array, Object[] others) { return zipRuby(frame, others); } @Specialization(guards = {"!isObject"}) public Object zipObjectObjectNotObject(VirtualFrame frame, RubyArray array, Object[] others) { return zipRuby(frame, others); } private Object zipRuby(VirtualFrame frame, Object[] others) { RubyBasicObject proc = RubyArguments.getBlock(frame.getArguments()); if (proc == null) { proc = nil(); } return ruby(frame, "zip_internal(*others, &block)", "others", new RubyArray(getContext().getCoreLibrary().getArrayClass(), others, others.length), "block", proc); } } }
norbertosanchezdichi/TIL
MachineLearning/Regression/ModelSelection/random_forest_regression_model_selection.py
# Import libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

# Import dataset
dataset = pd.read_csv('Data.csv')
X = dataset.iloc[:, :-1].values
Y = dataset.iloc[:, -1].values
print(f"X = {X}")
print(f"Y = {Y}")
print()

# Convert Y to 2D Array for Feature Scaling
Y = Y.reshape(len(Y), 1)
print(f"Y as a 2D array = {Y}")
print()

# Split Dataset: Training Set and Test Set
from sklearn.model_selection import train_test_split
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size = 0.2, random_state = 0)
print(f"X_train = {X_train}")
print(f"X_test = {X_test}")
print(f"Y_train = {Y_train}")
print(f"Y_test = {Y_test}")
print()

# Random Forest Regression uses ensemble learning.
# A Random Forest Regression model has better predictability compared to a Decision Tree Regression model. However, it has less interpretability.
## 1. Pick random K data points from the Training Set.
## 2. Build the Decision Tree associated with these K points.
## 3. Choose the number N of trees to build and repeat #1 and #2.
## 4. For a new data point, make each one of your N trees predict the value for the point in question. The new predicted output is the average across all the predicted N values.
## To find the optimal number of Decision Trees, use k-Fold Cross Validation and Grid Search to apply the Parameter Tuning technique.
## To evaluate the random forest regressor, compute the "Mean of Squared Residuals" (the mean of the squared errors).
## Backward Elimination can't be applied to Random Forest models because there are no coefficients combined in a linear regression equation and therefore there are no p-values.

# Create and train the Random Forest Regression model
## Use 10 trees in the forest
from sklearn.ensemble import RandomForestRegressor
regressor = RandomForestRegressor(n_estimators = 10, random_state = 0)
regressor.fit(X_train, Y_train.ravel())

# Predict using Random Forest Regression
Y_predict = regressor.predict(X_test)

# Output Training and Test Set results
np.set_printoptions(precision = 2)
print(f"[Y_predict Y_test] = {np.concatenate((Y_predict.reshape(len(Y_predict), 1), Y_test.reshape(len(Y_test), 1)), axis = 1)}")
print()

# Evaluate Model Performance
from sklearn.metrics import r2_score
print(f"R2 Score = {r2_score(Y_test, Y_predict)}")
print(f"Adjusted R2 Score = {1 - (1 - r2_score(Y_test, Y_predict)) * ((len(X_test) - 1) / (len(X_test) - len(X_test[0]) - 1))}")
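# --- Parameter tuning sketch ---
# The notes above suggest k-Fold Cross Validation and Grid Search to pick the number
# of trees; this is a minimal scikit-learn sketch of that idea. The candidate values
# for n_estimators and the 10-fold setting are illustrative assumptions, not part of
# the model selection performed above.
from sklearn.model_selection import GridSearchCV
parameters = [{'n_estimators': [10, 50, 100, 200]}]
grid_search = GridSearchCV(estimator = RandomForestRegressor(random_state = 0),
                           param_grid = parameters,
                           scoring = 'r2',
                           cv = 10)
grid_search.fit(X_train, Y_train.ravel())
print(f"Best n_estimators = {grid_search.best_params_['n_estimators']}")
print(f"Best cross-validated R2 Score = {grid_search.best_score_}")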
jmiserez/onos
providers/netconf/device/src/test/java/org/onosproject/provider/netconf/device/impl/NetconfDeviceProviderTest.java
/* * Copyright 2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.provider.netconf.device.impl; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.replay; import static org.junit.Assert.assertFalse; import static org.onlab.util.Tools.delay; import static org.slf4j.LoggerFactory.getLogger; import java.io.IOException; import java.net.SocketTimeoutException; import java.net.URI; import java.net.URISyntaxException; import java.util.Collection; import java.util.Dictionary; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.easymock.EasyMock; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.onlab.packet.ChassisId; import org.onosproject.cfg.ComponentConfigService; import org.onosproject.net.Device; import org.onosproject.net.DeviceId; import org.onosproject.net.MastershipRole; import org.onosproject.net.device.DefaultDeviceDescription; import org.onosproject.net.device.DeviceDescription; import org.onosproject.net.device.DeviceProvider; import org.onosproject.net.device.DeviceProviderRegistry; import org.onosproject.net.device.DeviceProviderService; import org.onosproject.net.device.PortDescription; import org.onosproject.net.device.PortStatistics; import org.onosproject.net.provider.ProviderId; import org.osgi.service.component.ComponentContext; import org.slf4j.Logger; import com.tailf.jnc.JNCException; /** * Test Case to Validate Netconf Device Provider. 
* */ public class NetconfDeviceProviderTest { // private NetconfDevice device; TestDeviceCreator create; private final Logger log = getLogger(NetconfDeviceProviderTest.class); private Map<DeviceId, NetconfDevice> netconfDeviceMap = new ConcurrentHashMap<DeviceId, NetconfDevice>(); private DeviceProviderService providerService; private static final int EVENTINTERVAL = 5; private static final String SCHEME = "netconf"; private static final DeviceId DID1 = DeviceId .deviceId("of:0000000000000001"); private final NetconfDeviceProvider provider = new NetconfDeviceProvider(); private final TestDeviceRegistry registry = new TestDeviceRegistry(); private ComponentConfigService mockCfgService; @Before public void setUp() { mockCfgService = EasyMock.createMock(ComponentConfigService.class); provider.cfgService = mockCfgService; provider.providerRegistry = registry; } @SuppressWarnings("unchecked") private Dictionary<String, String> getDictionaryMock(ComponentContext componentContext) { Dictionary<String, String> dictionary = EasyMock .createMock(Dictionary.class); expect(dictionary.get("devConfigs")) .andReturn("cisco:cisco@10.18.11.14:22:active," + "sanjay:b33rb3lly@10.18.24.122:2022:inactive"); replay(dictionary); expect(componentContext.getProperties()).andReturn(dictionary); return dictionary; } @SuppressWarnings("unchecked") private Dictionary<String, String> getDictionaryMockWithoutValues(ComponentContext componentContext) { Dictionary<String, String> dictionary = EasyMock .createMock(Dictionary.class); expect(dictionary.get("devConfigs")).andReturn(""); replay(dictionary); expect(componentContext.getProperties()).andReturn(dictionary); return dictionary; } @SuppressWarnings("unchecked") private Dictionary<String, String> getDictionaryMockWithDeviceEntryNull(ComponentContext componentContext) { Dictionary<String, String> dictionary = EasyMock .createMock(Dictionary.class); expect(dictionary.get("devConfigs")).andReturn("null,null"); replay(dictionary); expect(componentContext.getProperties()).andReturn(dictionary); return dictionary; } @SuppressWarnings("unchecked") private Dictionary<String, String> getDictionaryMockDeviceEntryNumberFomatEx(ComponentContext componentContext) { Dictionary<String, String> dictionary = EasyMock .createMock(Dictionary.class); expect(dictionary.get("devConfigs")) .andReturn("cisco:cisco@10.18.11.14:cisco:active") .andThrow(new NumberFormatException()); replay(dictionary); expect(componentContext.getProperties()).andReturn(dictionary); return dictionary; } @SuppressWarnings("unchecked") private Dictionary<String, String> getDictionaryMockWithoutUsernameAndPassword(ComponentContext componentContext) { Dictionary<String, String> dictionary = EasyMock .createMock(Dictionary.class); expect(dictionary.get("devConfigs")) .andReturn("null:null@null:0:active"); replay(dictionary); expect(componentContext.getProperties()).andReturn(dictionary); return dictionary; } @SuppressWarnings("unchecked") private Dictionary<String, String> getDictionaryMockWithDifferentDeviceState(ComponentContext componentContext) { Dictionary<String, String> dictionary = EasyMock .createMock(Dictionary.class); expect(dictionary.get("devConfigs")) .andReturn("cisco:cisco@10.18.11.14:22:active,cisco:cisco@10.18.11.18:22:inactive," + "cisco:cisco@10.18.11.14:22:invalid,cisco:cisco@10.18.11.14:22:null"); replay(dictionary); expect(componentContext.getProperties()).andReturn(dictionary); return dictionary; } @SuppressWarnings("unchecked") private Dictionary<String, String> 
getDictionaryMockDeviceWithArrayOutOFBoundEx(ComponentContext componentContext) { Dictionary<String, String> dictionary = EasyMock .createMock(Dictionary.class); expect(dictionary.get("devConfigs")) .andReturn("@10.18.11.14:22:active") .andThrow(new ArrayIndexOutOfBoundsException()); replay(dictionary); expect(componentContext.getProperties()).andReturn(dictionary); return dictionary; } @SuppressWarnings("unchecked") private Dictionary<String, String> getDictionaryMockDeviceEntryForDeactivate(ComponentContext componentContext) { Dictionary<String, String> dictionary = EasyMock .createMock(Dictionary.class); expect(dictionary.get("devConfigs")) .andReturn("netconf:cisco@10.18.11.14:22:active") .andThrow(new ArrayIndexOutOfBoundsException()); replay(dictionary); expect(componentContext.getProperties()).andReturn(dictionary); return dictionary; } @Ignore("Test fails if the hard coded host actually exists.") @Test(expected = SocketTimeoutException.class) public void testSSHAuthentication() throws JNCException, IOException { NetconfDevice netconfDevice = new NetconfDevice("10.18.14.19", 22, "cisco", "cisco"); netconfDevice.setConnectTimeout(1000); TestDeviceCreator objForTestDev = new TestDeviceCreator(netconfDevice, true); objForTestDev.run(); } @After public void tearDown() { provider.providerRegistry = null; provider.cfgService = null; } @Test public void testActiveWithComponentContext() { ComponentContext componentContext = EasyMock .createMock(ComponentContext.class); getDictionaryMock(componentContext); replay(componentContext); provider.activate(componentContext); } // To check if deviceCfgValue is empty or null @Test public void testActiveWithcomponentContextIsNull() { ComponentContext componentContext = EasyMock .createMock(ComponentContext.class); getDictionaryMockWithoutValues(componentContext); replay(componentContext); provider.activate(componentContext); } // To check deviceEntry and device is null @Test public void testActiveWithDeviceEntryIsNull() { ComponentContext componentContext = EasyMock .createMock(ComponentContext.class); getDictionaryMockWithDeviceEntryNull(componentContext); replay(componentContext); provider.activate(componentContext); } @Test public void testActiveWithDeviceEntryWithoutUsernameAndPassword() { ComponentContext componentContext = EasyMock .createMock(ComponentContext.class); getDictionaryMockWithoutUsernameAndPassword(componentContext); replay(componentContext); provider.activate(componentContext); } @Test public void testActiveWithDeviceEntryWithNumberFomatEx() { ComponentContext componentContext = EasyMock .createMock(ComponentContext.class); getDictionaryMockDeviceEntryNumberFomatEx(componentContext); replay(componentContext); provider.activate(componentContext); } @Test public void testActiveWithDeviceEntryWithDifferentDeviceState() { ComponentContext componentContext = EasyMock .createMock(ComponentContext.class); getDictionaryMockWithDifferentDeviceState(componentContext); replay(componentContext); provider.activate(componentContext); } @Test public void testActiveWithDeviceEntryWithArrayOutOFBoundEx() { ComponentContext componentContext = EasyMock .createMock(ComponentContext.class); getDictionaryMockDeviceWithArrayOutOFBoundEx(componentContext); replay(componentContext); provider.activate(componentContext); } @Test public void isReachableWithInvalidDeviceId() { assertFalse("Initially the Device ID Should not be reachable", provider.isReachable(DID1)); NetconfDevice device = new NetconfDevice("", 0, "", ""); provider.netconfDeviceMap.put(DID1, 
device); assertFalse("Particular Device ID cannot be Reachable", provider.isReachable(DID1)); } @Test public void testDeactivate() { ComponentContext componentContext = EasyMock .createMock(ComponentContext.class); getDictionaryMockDeviceEntryForDeactivate(componentContext); replay(componentContext); testActiveWithComponentContext(); provider.deactivate(componentContext); } private class TestDeviceCreator { private NetconfDevice device; private boolean createFlag; public TestDeviceCreator(NetconfDevice device, boolean createFlag) { this.device = device; this.createFlag = createFlag; } public void run() throws JNCException, IOException { if (createFlag) { log.info("Trying to create Device Info on ONOS core"); advertiseDevices(); } else { log.info("Trying to remove Device Info on ONOS core"); removeDevices(); } } /** * For each Netconf Device, remove the entry from the device store. * @throws URISyntaxException */ private void removeDevices() { if (device == null) { log.warn("The Request Netconf Device is null, cannot proceed further"); return; } try { DeviceId did = getDeviceId(); if (!netconfDeviceMap.containsKey(did)) { log.error("BAD Request: 'Currently device is not discovered, " + "so cannot remove/disconnect the device: " + device.deviceInfo() + "'"); return; } providerService.deviceDisconnected(did); device.disconnect(); netconfDeviceMap.remove(did); delay(EVENTINTERVAL); } catch (URISyntaxException uriSyntaxExcpetion) { log.error("Syntax Error while creating URI for the device: " + device.deviceInfo() + " couldn't remove the device from the store", uriSyntaxExcpetion); } } /** * Initialize Netconf Device object, and notify core saying device * connected. */ private void advertiseDevices() throws JNCException, IOException, SocketTimeoutException { try { if (device == null) { log.warn("The Request Netconf Device is null, cannot proceed further"); return; } device.init(); DeviceId did = getDeviceId(); ChassisId cid = new ChassisId(); DeviceDescription desc = new DefaultDeviceDescription( did.uri(), Device.Type.OTHER, "", "", "", "", cid); log.info("Persisting Device" + did.uri().toString()); netconfDeviceMap.put(did, device); providerService.deviceConnected(did, desc); log.info("Done with Device Info Creation on ONOS core. 
Device Info: " + device.deviceInfo() + " " + did.uri().toString()); delay(EVENTINTERVAL); } catch (URISyntaxException e) { log.error("Syntax Error while creating URI for the device: " + device.deviceInfo() + " couldn't persist the device onto the store", e); } catch (JNCException e) { throw e; } catch (SocketTimeoutException e) { throw e; } catch (IOException e) { throw e; } catch (Exception e) { log.error("Error while initializing session for the device: " + device.deviceInfo(), e); } } private DeviceId getDeviceId() throws URISyntaxException { String additionalSSP = new StringBuilder(device.getUsername()) .append("@").append(device.getSshHost()).append(":") .append(device.getSshPort()).toString(); DeviceId did = DeviceId.deviceId(new URI(SCHEME, additionalSSP, null)); return did; } } private class TestDeviceRegistry implements DeviceProviderRegistry { @Override public DeviceProviderService register(DeviceProvider provider) { return new TestProviderService(); } @Override public void unregister(DeviceProvider provider) { } @Override public Set<ProviderId> getProviders() { return null; } private class TestProviderService implements DeviceProviderService { @Override public DeviceProvider provider() { return null; } @Override public void deviceConnected(DeviceId deviceId, DeviceDescription deviceDescription) { } @Override public void deviceDisconnected(DeviceId deviceId) { } @Override public void updatePorts(DeviceId deviceId, List<PortDescription> portDescriptions) { } @Override public void portStatusChanged(DeviceId deviceId, PortDescription portDescription) { } @Override public void receivedRoleReply(DeviceId deviceId, MastershipRole requested, MastershipRole response) { } @Override public void updatePortStatistics(DeviceId deviceId, Collection<PortStatistics> portStatistics) { } } } }
Newsqueak/mixianyongche
server/codes/NewbierCar/doCloud/main/www.js
#!/usr/bin/env node
/**********
 *
 * should start module config first
 */
var fs = require('fs');
var express = require('express');
var passport = require('passport');
var config = require("../config/config");
var common = require("../common");

var app = express();
var port = process.env.PORT || 9001;

// Bootstrap models like classLoader
["action", "service", "dao"].forEach(function (subdir) {
    fs.readdirSync(__dirname + '/../app/' + subdir).forEach(function (file) {
        if (~file.indexOf('.js')) require(__dirname + '/../app/' + subdir + '/' + file);
    });
});

// The project's individual business modules are assembled below

// Bootstrap passport config
//require('./config/passport')(passport, config);

// Bootstrap application underlying settings
require('../config/express')(app, passport);

// Bootstrap routes
require('../config/routes')(app, passport, common.ErrCode);

// Starting server and connecting database after all preparations
var server = null;
common.DBObject.queryAsync("select 1+2 as aaa").then(function (query) {
    if (query[0][0].aaa === '3') {
        server = app.listen(port, function () {
            process.on('uncaughtException', function (err) {
                // Log the error
                console.log(err);
                // Log the error's stack trace to help debugging
                console.log(err.stack);
            });
            console.log('Express app started on port ' + port);
        });

        /**
         * Expose
         */
        // This file is the top level of the system, so note it's not to be injected down-stairs
        process.httpServer = server;
        process.app = app;
    }
}).catch(function (err) {
    console.log("sql error: \n" + err);
    common.DBObject.end(function (error) {
        console.log("ERROR ENDING: " + error);
        console.log("POOL ENDED");
    });
});
damien-monni/simple-planning-poker
src/components/ResetSessionButton.js
import React from 'react';
import Button from '@material-ui/core/Button';
import { useTranslation } from 'react-i18next';
import { makeStyles } from '@material-ui/styles';

const useStyles = makeStyles({
  noBorderRadius: {
    borderRadius: 0,
  },
});

export default (props) => {
  const { noBorderRadius, onClick } = props;
  const { t } = useTranslation();
  const classes = useStyles();

  return (
    <Button
      variant="contained"
      color="secondary"
      fullWidth
      size="large"
      className={noBorderRadius && classes.noBorderRadius}
      onClick={onClick}
    >
      {t('ResetSessionButton.text')}
    </Button>
  );
};
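// Usage sketch (the import name and the reset handler below are illustrative
// assumptions, not taken from the repository):
// import ResetSessionButton from './components/ResetSessionButton';
// <ResetSessionButton noBorderRadius onClick={handleReset} />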
proglang/dts-generate-results
results/4_extract-code/code/css-modules-require-hook/css-modules-require-hook_104.js
const hook = require('css-modules-require-hook');
const lessParser = require('postcss-less').parse;

hook({
  extensions: '.less',
  processorOpts: {parser: lessParser},
});
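// Usage sketch: once the hook is registered, requiring a matching file returns its
// exported class-name tokens. The file name and class below are illustrative assumptions.
// const styles = require('./button.less');
// console.log(styles.primary); // scoped class name generated by the hook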
vitahlin/kennen
javar/jdk8-analysis/src/com/sun/corba/se/spi/monitoring/MonitoringConstants.java
/* * Copyright (c) 2003, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package com.sun.corba.se.spi.monitoring; public interface MonitoringConstants { public static final String DEFAULT_MONITORING_ROOT = "orb"; public static final String DEFAULT_MONITORING_ROOT_DESCRIPTION = "ORB Management and Monitoring Root"; // // Connection Monitoring // public static final String CONNECTION_MONITORING_ROOT = "Connections"; public static final String CONNECTION_MONITORING_ROOT_DESCRIPTION = "Statistics on inbound/outbound connections"; public static final String INBOUND_CONNECTION_MONITORING_ROOT = "Inbound"; public static final String INBOUND_CONNECTION_MONITORING_ROOT_DESCRIPTION= "Statistics on inbound connections"; public static final String OUTBOUND_CONNECTION_MONITORING_ROOT = "Outbound"; public static final String OUTBOUND_CONNECTION_MONITORING_ROOT_DESCRIPTION= "Statistics on outbound connections"; public static final String CONNECTION_MONITORING_DESCRIPTION = "Connection statistics"; public static final String CONNECTION_TOTAL_NUMBER_OF_CONNECTIONS = "NumberOfConnections"; public static final String CONNECTION_TOTAL_NUMBER_OF_CONNECTIONS_DESCRIPTION = "The total number of connections"; public static final String CONNECTION_NUMBER_OF_IDLE_CONNECTIONS = "NumberOfIdleConnections"; public static final String CONNECTION_NUMBER_OF_IDLE_CONNECTIONS_DESCRIPTION = "The number of idle connections"; public static final String CONNECTION_NUMBER_OF_BUSY_CONNECTIONS = "NumberOfBusyConnections"; public static final String CONNECTION_NUMBER_OF_BUSY_CONNECTIONS_DESCRIPTION = "The number of busy connections"; // // ThreadPool and WorkQueue monitoring constants // public static final String THREADPOOL_MONITORING_ROOT = "threadpool"; public static final String THREADPOOL_MONITORING_ROOT_DESCRIPTION = "Monitoring for all ThreadPool instances"; public static final String THREADPOOL_MONITORING_DESCRIPTION = "Monitoring for a ThreadPool"; public static final String THREADPOOL_CURRENT_NUMBER_OF_THREADS = "currentNumberOfThreads"; public static final String THREADPOOL_CURRENT_NUMBER_OF_THREADS_DESCRIPTION = "Current number of total threads in the ThreadPool"; public static final String THREADPOOL_NUMBER_OF_AVAILABLE_THREADS = "numberOfAvailableThreads"; public static final String THREADPOOL_NUMBER_OF_AVAILABLE_THREADS_DESCRIPTION = "Number of available threads in the ThreadPool"; public static final String 
THREADPOOL_NUMBER_OF_BUSY_THREADS = "numberOfBusyThreads"; public static final String THREADPOOL_NUMBER_OF_BUSY_THREADS_DESCRIPTION = "Number of busy threads in the ThreadPool"; public static final String THREADPOOL_AVERAGE_WORK_COMPLETION_TIME = "averageWorkCompletionTime"; public static final String THREADPOOL_AVERAGE_WORK_COMPLETION_TIME_DESCRIPTION = "Average elapsed time taken to complete a work item by the ThreadPool"; public static final String THREADPOOL_CURRENT_PROCESSED_COUNT = "currentProcessedCount"; public static final String THREADPOOL_CURRENT_PROCESSED_COUNT_DESCRIPTION = "Number of Work items processed by the ThreadPool"; public static final String WORKQUEUE_MONITORING_DESCRIPTION = "Monitoring for a Work Queue"; public static final String WORKQUEUE_TOTAL_WORK_ITEMS_ADDED = "totalWorkItemsAdded"; public static final String WORKQUEUE_TOTAL_WORK_ITEMS_ADDED_DESCRIPTION = "Total number of Work items added to the Queue"; public static final String WORKQUEUE_WORK_ITEMS_IN_QUEUE = "workItemsInQueue"; public static final String WORKQUEUE_WORK_ITEMS_IN_QUEUE_DESCRIPTION = "Number of Work items in the Queue to be processed"; public static final String WORKQUEUE_AVERAGE_TIME_IN_QUEUE = "averageTimeInQueue"; public static final String WORKQUEUE_AVERAGE_TIME_IN_QUEUE_DESCRIPTION = "Average time a work item waits in the work queue"; } // End of file.
ladyian15/quickdic-dictionary.dictionary
jars/icu4j-52_1/main/classes/core/src/com/ibm/icu/util/TimeUnit.java
/* ************************************************************************** * Copyright (C) 2008-2013, Google, International Business Machines * Corporation and others. All Rights Reserved. ************************************************************************** */ package com.ibm.icu.util; /** * Measurement unit for time units. * @see TimeUnitAmount * @see TimeUnit * @author markdavis * @stable ICU 4.0 */ public class TimeUnit extends MeasureUnit { private static final long serialVersionUID = -2839973855554750484L; /** * Supports selected time duration units */ private final int index; // Total number of time units. Adjust as necessary. static final int TIME_UNIT_COUNT = 7; private static TimeUnit[] values = new TimeUnit[TIME_UNIT_COUNT]; /** * Constant value for supported time unit. * @stable ICU 4.0 */ public static TimeUnit SECOND = new TimeUnit("second", 6), MINUTE = new TimeUnit("minute", 5), HOUR = new TimeUnit("hour", 4), DAY = new TimeUnit("day", 3), WEEK = new TimeUnit("week", 2), MONTH = new TimeUnit("month", 1), YEAR = new TimeUnit("year", 0); // idx must be sequential and must order time units from largest to smallest. // e.g YEAR is 0; MONTH is 1; ...; SECOND is 6. private TimeUnit(String name, int idx) { super("duration", name); this.index = idx; values[idx] = this; // store in values array } /** * @return the available values * @stable ICU 4.0 */ public static TimeUnit[] values() { return values.clone(); } // Returns the index for this TimeUnit. Something between 0 inclusive and // number of time units exclusive. Smaller time units have larger indexes. int getIndex() { return index; } }
EBGToo/swift
unittests/runtime/Enum.cpp
//===--- Enum.cpp - Enum tests --------------------------------------------===// // // This source file is part of the Swift.org open source project // // Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors // Licensed under Apache License v2.0 with Runtime Library Exception // // See https://swift.org/LICENSE.txt for license information // See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors // //===----------------------------------------------------------------------===// #include "swift/Runtime/Metadata.h" #include "swift/Runtime/Enum.h" #include "gtest/gtest.h" using namespace swift; // Mock up a value witness table for Builtin.Int8 will 254 and 255 as extra // inhabitants. ExtraInhabitantsValueWitnessTable Int8WithExtraInhabitantValueWitness = { // ValueWitnessTable ValueWitnessTable{ #define STEAL_INT8_WITNESS(witness) VALUE_WITNESS_SYM(Bi8_).witness, FOR_ALL_FUNCTION_VALUE_WITNESSES(STEAL_INT8_WITNESS) #undef STEAL_INT8_WITNESS VALUE_WITNESS_SYM(Bi8_).size, VALUE_WITNESS_SYM(Bi8_).flags.withExtraInhabitants(true), VALUE_WITNESS_SYM(Bi8_).stride }, // extraInhabitantFlags ExtraInhabitantFlags().withNumExtraInhabitants(2), // storeExtraInhabitant [](OpaqueValue *dest, int index, const Metadata *self) { *reinterpret_cast<uint8_t*>(dest) = 254 + index; }, // getExtraInhabitantIndex [](const OpaqueValue *src, const Metadata *self) -> int { uint8_t byte = *reinterpret_cast<const uint8_t*>(src); if (byte >= 254) return byte - 254; return -1; } }; FullMetadata<OpaqueMetadata> XI_TMBi8_ = { {&Int8WithExtraInhabitantValueWitness}, {{MetadataKind::Opaque}} }; const OpaqueValue *asOpaque(const void *v) { return reinterpret_cast<const OpaqueValue*>(v); } OpaqueValue *asOpaque(void *v) { return reinterpret_cast<OpaqueValue*>(v); } int test_getEnumCaseSinglePayload(std::initializer_list<uint8_t> repr, const FullOpaqueMetadata &metadata, unsigned numEmptyCases) { return swift_getEnumCaseSinglePayload(asOpaque(repr.begin()), &metadata.base, numEmptyCases); } TEST(EnumTest, getEnumCaseSinglePayload) { // Test with no XI. ASSERT_EQ(-1, test_getEnumCaseSinglePayload({0, 0}, METADATA_SYM(Bi8_), 512)); ASSERT_EQ(-1, test_getEnumCaseSinglePayload({255, 0}, METADATA_SYM(Bi8_), 512)); ASSERT_EQ(0, test_getEnumCaseSinglePayload({0, 1}, METADATA_SYM(Bi8_), 512)); ASSERT_EQ(255, test_getEnumCaseSinglePayload({255, 1}, METADATA_SYM(Bi8_), 512)); ASSERT_EQ(511, test_getEnumCaseSinglePayload({255, 2}, METADATA_SYM(Bi8_), 512)); ASSERT_EQ(-1, test_getEnumCaseSinglePayload({0, 0, 0}, METADATA_SYM(Bi8_), 128*1024)); ASSERT_EQ(-1, test_getEnumCaseSinglePayload({255, 0, 0}, METADATA_SYM(Bi8_), 128*1024)); #if defined(__BIG_ENDIAN__) ASSERT_EQ(65535 - 255, test_getEnumCaseSinglePayload({0, 1, 0}, METADATA_SYM(Bi8_), 128*1024)); #else ASSERT_EQ(65535 - 255, test_getEnumCaseSinglePayload({0, 0, 1}, METADATA_SYM(Bi8_), 128*1024)); #endif // Test with XI. 
ASSERT_EQ(-1, test_getEnumCaseSinglePayload({0}, XI_TMBi8_, 2)); ASSERT_EQ(-1, test_getEnumCaseSinglePayload({253}, XI_TMBi8_, 2)); ASSERT_EQ(0, test_getEnumCaseSinglePayload({254}, XI_TMBi8_, 2)); ASSERT_EQ(1, test_getEnumCaseSinglePayload({255}, XI_TMBi8_, 2)); ASSERT_EQ(-1, test_getEnumCaseSinglePayload({0, 0}, XI_TMBi8_, 4)); ASSERT_EQ(-1, test_getEnumCaseSinglePayload({253, 0}, XI_TMBi8_, 4)); ASSERT_EQ(0, test_getEnumCaseSinglePayload({254, 0}, XI_TMBi8_, 4)); ASSERT_EQ(1, test_getEnumCaseSinglePayload({255, 0}, XI_TMBi8_, 4)); ASSERT_EQ(2, test_getEnumCaseSinglePayload({0, 1}, XI_TMBi8_, 4)); ASSERT_EQ(3, test_getEnumCaseSinglePayload({1, 1}, XI_TMBi8_, 4)); } bool test_storeEnumTagSinglePayload(std::initializer_list<uint8_t> after, std::initializer_list<uint8_t> before, const FullOpaqueMetadata &metadata, unsigned whichCase, unsigned numEmptyCases) { assert(after.size() == before.size()); std::vector<uint8_t> buf; buf.resize(before.size()); memcpy(buf.data(), before.begin(), before.size()); swift_storeEnumTagSinglePayload(asOpaque(buf.data()), &metadata.base, whichCase, numEmptyCases); return memcmp(buf.data(), after.begin(), after.size()) == 0; } TEST(EnumTest, storeEnumTagSinglePayload) { // Test with no XI. ASSERT_TRUE(test_storeEnumTagSinglePayload({219, 0}, {219, 123}, METADATA_SYM(Bi8_), -1, 512)); ASSERT_TRUE(test_storeEnumTagSinglePayload({0, 1}, {219, 123}, METADATA_SYM(Bi8_), 0, 512)); ASSERT_TRUE(test_storeEnumTagSinglePayload({255, 1}, {219, 123}, METADATA_SYM(Bi8_), 255, 512)); ASSERT_TRUE(test_storeEnumTagSinglePayload({255, 2}, {219, 123}, METADATA_SYM(Bi8_), 511, 512)); ASSERT_TRUE(test_storeEnumTagSinglePayload({219, 0, 0}, {219, 123, 77}, METADATA_SYM(Bi8_), -1, 128*1024)); #if defined(__BIG_ENDIAN__) ASSERT_TRUE(test_storeEnumTagSinglePayload({0, 0, 1}, {219, 123, 77}, METADATA_SYM(Bi8_), 0, 128*1024)); ASSERT_TRUE(test_storeEnumTagSinglePayload({255, 0, 1}, {219, 123, 77}, METADATA_SYM(Bi8_), 255, 128*1024)); ASSERT_TRUE(test_storeEnumTagSinglePayload({0, 0, 2}, {219, 123, 77}, METADATA_SYM(Bi8_), 256, 128*1024)); ASSERT_TRUE(test_storeEnumTagSinglePayload({255, 2, 0}, {219, 123, 77}, METADATA_SYM(Bi8_), 128*1024 - 1, 128*1024)); #else ASSERT_TRUE(test_storeEnumTagSinglePayload({0, 1, 0}, {219, 123, 77}, METADATA_SYM(Bi8_), 0, 128*1024)); ASSERT_TRUE(test_storeEnumTagSinglePayload({255, 1, 0}, {219, 123, 77}, METADATA_SYM(Bi8_), 255, 128*1024)); ASSERT_TRUE(test_storeEnumTagSinglePayload({0, 2, 0}, {219, 123, 77}, METADATA_SYM(Bi8_), 256, 128*1024)); ASSERT_TRUE(test_storeEnumTagSinglePayload({255, 0, 2}, {219, 123, 77}, METADATA_SYM(Bi8_), 128*1024 - 1, 128*1024)); #endif // Test with XI. ASSERT_TRUE(test_storeEnumTagSinglePayload({219}, {219}, XI_TMBi8_, -1, 2)); ASSERT_TRUE(test_storeEnumTagSinglePayload({254}, {219}, XI_TMBi8_, 0, 2)); ASSERT_TRUE(test_storeEnumTagSinglePayload({255}, {219}, XI_TMBi8_, 1, 2)); ASSERT_TRUE(test_storeEnumTagSinglePayload({219, 0}, {219, 123}, XI_TMBi8_, -1, 4)); ASSERT_TRUE(test_storeEnumTagSinglePayload({254, 0}, {219, 123}, XI_TMBi8_, 0, 4)); ASSERT_TRUE(test_storeEnumTagSinglePayload({255, 0}, {219, 123}, XI_TMBi8_, 1, 4)); ASSERT_TRUE(test_storeEnumTagSinglePayload({0, 1}, {219, 123}, XI_TMBi8_, 2, 4)); ASSERT_TRUE(test_storeEnumTagSinglePayload({1, 1}, {219, 123}, XI_TMBi8_, 3, 4)); }
gonzalogarahuetes/REST_API-with-Mongoose
src/models/publisher-model.js
const mongoose = require("mongoose"); const PublisherSchema = new mongoose.Schema({ name: { type: String, required: true, trim: true, }, creationDate: { type: Date, }, authors: [ { type: mongoose.SchemaTypes.ObjectId, ref: "author", }, ], books: [ { type: mongoose.SchemaTypes.ObjectId, ref: "book", }, ], }); const PublisherModel = mongoose.model("publisher", PublisherSchema); module.exports = PublisherModel;
dreamsxin/ultimatepp
uppdev/Layout/Layout.h
#ifndef _Layout_Layout_h #define _Layout_Layout_h #include <CtrlLib/CtrlLib.h> using namespace Upp; #define LAYOUTFILE <Layout/Layout.lay> #include <CtrlCore/lay.h> class Layout : public WithLayoutLayout<TopWindow> { public: typedef Layout CLASSNAME; Layout(); }; #endif
artkuli/openvino
src/plugins/intel_cpu/src/nodes/gather_tree.cpp
<reponame>artkuli/openvino // Copyright (C) 2018-2022 Intel Corporation // SPDX-License-Identifier: Apache-2.0 // #include <string> #include <vector> #include <cmath> #include <ngraph/op/gather_tree.hpp> #include "ie_parallel.hpp" #include "gather_tree.h" #include <utils/general_utils.h> using namespace InferenceEngine; namespace ov { namespace intel_cpu { namespace node { bool GatherTree::isSupportedOperation(const std::shared_ptr<const ngraph::Node>& op, std::string& errorMessage) noexcept { try { const auto gatherElementsOp = ngraph::as_type_ptr<const ngraph::op::v1::GatherTree>(op); if (!gatherElementsOp) { errorMessage = "Node is not an instance of the GatherTree operation from operation set v1."; return false; } } catch (...) { return false; } return true; } GatherTree::GatherTree(const std::shared_ptr<ngraph::Node>& op, const dnnl::engine& eng, WeightsSharing::Ptr &cache) : Node(op, eng, cache) { std::string errorMessage; if (!isSupportedOperation(op, errorMessage)) { IE_THROW(NotImplemented) << errorMessage; } errorPrefix = std::string("Node GatherTree with name '") + op->get_friendly_name() + "'"; if (inputShapes.size() != 4) IE_THROW() << errorPrefix << " has incorrect number of input edges."; if (outputShapes.size() != 1) IE_THROW() << errorPrefix << " has incorrect number of output edges."; if (getInputShapeAtPort(GATHER_TREE_STEP_IDX).getRank() != 3) IE_THROW() << errorPrefix << " step_idx vector should be 3 dimension"; if (getInputShapeAtPort(GATHER_TREE_PARENT_IDX).getRank() != 3) IE_THROW() << errorPrefix << " parent_idx vector should be 3 dimension"; if (getInputShapeAtPort(GATHER_TREE_MAX_SEQ_LEN).getRank() != 1) IE_THROW() << errorPrefix << " max_seq_len vector should be 1 dimension"; if (!is_scalar(op->get_input_partial_shape(GATHER_TREE_END_TOKEN))) IE_THROW() << errorPrefix << " end_token should be scalar"; } void GatherTree::initSupportedPrimitiveDescriptors() { if (!supportedPrimitiveDescriptors.empty()) return; precision = getOriginalInputPrecisionAtPort(GATHER_TREE_STEP_IDX); if (!one_of(precision, Precision::FP32, Precision::I32)) precision = Precision::FP32; if (getOriginalInputPrecisionAtPort(GATHER_TREE_PARENT_IDX) != precision || getOriginalInputPrecisionAtPort(GATHER_TREE_MAX_SEQ_LEN) != precision || getOriginalInputPrecisionAtPort(GATHER_TREE_END_TOKEN) != precision || getOriginalOutputPrecisionAtPort(0) != precision) { IE_THROW() << errorPrefix << " has incorrect input/output data precision. 
Must be the same."; } addSupportedPrimDesc({{LayoutType::ncsp, precision}, {LayoutType::ncsp, precision}, {LayoutType::ncsp, precision}, {LayoutType::ncsp, precision}}, {{LayoutType::ncsp, precision}}, impl_desc_type::ref_any); } void GatherTree::execute(dnnl::stream strm) { if (!execPtr) IE_THROW() << errorPrefix << " has not compiled executor."; if (precision == Precision::FP32) execPtr->exec<float>(getParentEdgeAt(GATHER_TREE_STEP_IDX)->getMemoryPtr(), getParentEdgeAt(GATHER_TREE_PARENT_IDX)->getMemoryPtr(), getParentEdgeAt(GATHER_TREE_MAX_SEQ_LEN)->getMemoryPtr(), getParentEdgeAt(GATHER_TREE_END_TOKEN)->getMemoryPtr(), getChildEdgeAt(0)->getMemoryPtr()); else execPtr->exec<int32_t>(getParentEdgeAt(GATHER_TREE_STEP_IDX)->getMemoryPtr(), getParentEdgeAt(GATHER_TREE_PARENT_IDX)->getMemoryPtr(), getParentEdgeAt(GATHER_TREE_MAX_SEQ_LEN)->getMemoryPtr(), getParentEdgeAt(GATHER_TREE_END_TOKEN)->getMemoryPtr(), getChildEdgeAt(0)->getMemoryPtr()); } void GatherTree::prepareParams() { const auto& stepIdxMemPtr = getParentEdgeAt(GATHER_TREE_STEP_IDX)->getMemoryPtr(); const auto& parentIdxMemPtr = getParentEdgeAt(GATHER_TREE_PARENT_IDX)->getMemoryPtr(); const auto& maxSeqLenMemPtr = getParentEdgeAt(GATHER_TREE_MAX_SEQ_LEN)->getMemoryPtr(); const auto& dstMemPtr = getChildEdgeAt(0)->getMemoryPtr(); if (!stepIdxMemPtr || !stepIdxMemPtr->isAllocated()) IE_THROW() << errorPrefix << " has not allocated input memory of 'step_ids'."; if (!parentIdxMemPtr || !parentIdxMemPtr->isAllocated()) IE_THROW() << errorPrefix << " has not allocated input memory of 'parent_ids'."; if (!maxSeqLenMemPtr || !maxSeqLenMemPtr->isAllocated()) IE_THROW() << errorPrefix << " has not allocated input memory of 'max_seq_len'."; if (!dstMemPtr || !dstMemPtr->isAllocated()) IE_THROW() << errorPrefix << " has not allocated output memory."; if (getSelectedPrimitiveDescriptor() == nullptr) IE_THROW() << errorPrefix << " has unidentified preferable primitive descriptor."; const VectorDims& stepIdxDims = stepIdxMemPtr->getStaticDims(); const VectorDims& parentIdxDims = parentIdxMemPtr->getStaticDims(); const VectorDims& maxSeqLenDims = maxSeqLenMemPtr->getStaticDims(); const VectorDims& dstDims = dstMemPtr->getStaticDims(); execPtr = std::make_shared<GatherTreeExecutor>(stepIdxDims, parentIdxDims, maxSeqLenDims, dstDims); } void GatherTree::executeDynamicImpl(dnnl::stream strm) { execute(strm); } GatherTree::GatherTreeExecutor::GatherTreeExecutor(const VectorDims& stepIdxDims, const VectorDims& parentIdxDims, const VectorDims& maxSeqLenDims, const VectorDims& dstDims) : maxTime{static_cast<int32_t>(stepIdxDims[0])} , batchSize{stepIdxDims[1]} , beamWidth{stepIdxDims[2]} , bbSize{batchSize * beamWidth} , parentIdxSize{std::accumulate(parentIdxDims.cbegin(), parentIdxDims.cend(), 1lu, std::multiplies<size_t>())} { if (maxTime != static_cast<int32_t>(parentIdxDims[0]) || maxTime != static_cast<int32_t>(dstDims[0]) || batchSize != parentIdxDims[1] || batchSize != dstDims[1] || batchSize != maxSeqLenDims[0] || beamWidth != parentIdxDims[2] || beamWidth != dstDims[2]) { std::string errorMsg = "Input/Output tensors dimensions mismatch"; IE_THROW() << errorMsg; } } template<typename DATA_T> void GatherTree::GatherTreeExecutor::exec(const MemoryPtr& stepIdxMemPtr, const MemoryPtr& parentIdxMemPtr, const MemoryPtr& maxSeqLenMemPtr, const MemoryPtr& endTokenMemPtr, MemoryPtr& dstMemPtr) { const auto *stepIdx = reinterpret_cast<DATA_T *>(stepIdxMemPtr->GetPtr()); const auto *parentIdx = reinterpret_cast<DATA_T *>(parentIdxMemPtr->GetPtr()); 
const auto *maxSeqLen = reinterpret_cast<DATA_T *>(maxSeqLenMemPtr->GetPtr()); const auto endToken = (reinterpret_cast<DATA_T *>(endTokenMemPtr->GetPtr()))[0]; auto *finalIdx = reinterpret_cast<DATA_T *>(dstMemPtr->GetPtr()); bool incorrectResult = false; parallel_for2d(batchSize, beamWidth, [&](size_t batch, size_t beam) { int32_t maxSequenceInBeam = std::min<int32_t>(maxTime, static_cast<int32_t>(maxSeqLen[batch])); if (maxSequenceInBeam > 0) { int32_t time, idx = (maxTime - 1) * bbSize + batch * beamWidth; for (time = (maxTime - 1); time >= maxSequenceInBeam; time--, idx -= bbSize) finalIdx[idx + beam] = endToken; for (int32_t parent = static_cast<int32_t>(beam); time >= 0; time--, idx -= bbSize) { if (parent < 0 || parent >= static_cast<int32_t>(beamWidth) || idx + parent >= parentIdxSize) { incorrectResult = true; break; } finalIdx[idx + beam] = stepIdx[idx + parent]; parent = static_cast<int32_t>(parentIdx[idx + parent]); } bool finished = false; auto *final = &finalIdx[batch * beamWidth + beam]; for (time = 0; time < maxSequenceInBeam; time++, final += bbSize) { if (finished) (*final) = endToken; else if ((*final) == endToken) finished = true; } } }); if (incorrectResult) { std::string errorMsg = "Wrong parent index, result is incorrect"; IE_THROW() << errorMsg; } } bool GatherTree::created() const { return getType() == Type::GatherTree; } } // namespace node } // namespace intel_cpu } // namespace ov
75986562/ToolsDemo
src/main/java/com/hfi/web01/mybatisPlus/entity/Product.java
package com.hfi.web01.mybatisPlus.entity; import java.math.BigDecimal; import com.baomidou.mybatisplus.annotation.IdType; import java.time.LocalDateTime; import com.baomidou.mybatisplus.annotation.TableId; import java.io.Serializable; import lombok.*; import lombok.experimental.Accessors; /** * <p> * * </p> * * @author NZH * @since 2019-04-20 */ @Data @EqualsAndHashCode(callSuper = false) @Accessors(chain = true) public class Product implements Serializable { private static final long serialVersionUID = 1L; @TableId(value = "p_no", type = IdType.AUTO) private String pNo; private String pName; private BigDecimal price; private Integer pAmount; private Integer version; private LocalDateTime createtime; private LocalDateTime updatetime; }
DemigodsRPG/DemigodsRPG
src/main/java/com/demigodsrpg/DGData.java
package com.demigodsrpg; import com.demigodsrpg.ability.AbilityRegistry; import com.demigodsrpg.deity.Deity; import com.demigodsrpg.family.Family; import com.demigodsrpg.registry.*; import com.demigodsrpg.registry.file.*; import com.demigodsrpg.registry.memory.BattleRegistry; import com.demigodsrpg.registry.mongo.*; import com.mongodb.client.MongoDatabase; import org.bukkit.plugin.Plugin; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.logging.Logger; public class DGData { public static Plugin PLUGIN; public static Logger CONSOLE; public static String SAVE_PATH; public static PlayerRegistry PLAYER_R; public static ShrineRegistry SHRINE_R; public static TributeRegistry TRIBUTE_R; public static SpawnRegistry SPAWN_R; public static ServerDataRegistry SERVER_R; public static ConcurrentMap<String, AreaRegistry> AREA_R = new ConcurrentHashMap<>(); public static AbilityRegistry ABILITY_R = new AbilityRegistry(PLAYER_R, SERVER_R, Setting.NO_COST_ASPECT_MODE); public static BattleRegistry BATTLE_R = new BattleRegistry(); public static final List<Pantheon> PANTHEONS = new ArrayList<>(); public static final List<Family> FAMILIES = new ArrayList<>(); public static final List<Deity> DEITIES = new ArrayList<>(); static { FAMILIES.add(Family.EXCOMMUNICATED); FAMILIES.add(Family.NEUTRAL); } static void enableMongo(MongoDatabase database) { PLAYER_R = new MPlayerRegistry(database); SHRINE_R = new MShrineRegistry(database); TRIBUTE_R = new MTributeRegistry(database); SPAWN_R = new MSpawnRegistry(database); SERVER_R = new MServerDataRegistry(database); } static void enableFile() { PLAYER_R = new FPlayerRegistry(); SHRINE_R = new FShrineRegistry(); TRIBUTE_R = new FTributeRegistry(); SPAWN_R = new FSpawnRegistry(); SERVER_R = new FServerDataRegistry(); } static void addPantheon(Pantheon pantheon) { PANTHEONS.add(pantheon); FAMILIES.addAll(Arrays.asList(pantheon.getFamilies())); DEITIES.addAll(Arrays.asList(pantheon.getDeities())); } public static Family getFamily(String name) { return FAMILIES.stream().filter(f -> f.getName().equals(name)).findAny().orElse(null); } public static Deity getDeity(String name) { return DEITIES.stream().filter(d -> d.getName().equals(name)).findAny().orElse(null); } }
lofunz/mieme
Reloaded/tags/MAME4droid.Reloaded.1.0.WIP/src/mame/drivers/gijoe.c
<reponame>lofunz/mieme #define JOE_DEBUG 0 #define JOE_DMADELAY attotime_add(ATTOTIME_IN_NSEC(42700), ATTOTIME_IN_NSEC(341300)) /*************************************************************************** <NAME> (c) 1992 Konami Change Log ---------- AT070403: tilemap.h,tilemap.c - added tilemap_get_transparency_data() for transparency cache manipulation video\konamiic.c - added preliminary K056832 tilemap<->linemap switching and tileline code drivers\gijoe.c - updated video settings, memory map and irq handler - added object blitter video\gijoe.c - completed K054157 to K056832 migration - added ground scroll emulation - fixed sprite and BG priority - improved shadows and layer alignment Known Issues ------------ - sprite gaps (K053247 zoom fraction rounding) - shadow masking (eg. the shadow of Baroness' aircraft should not project on the sky) ***************************************************************************/ #include "driver.h" #include "cpu/m68000/m68000.h" #include "video/konamiic.h" #include "cpu/z80/z80.h" #include "machine/eeprom.h" #include "sound/k054539.h" #include "konamipt.h" VIDEO_START( gijoe ); VIDEO_UPDATE( gijoe ); static UINT16 *gijoe_workram; static UINT16 cur_control2; static int init_eeprom_count; static emu_timer *dmadelay_timer; static const eeprom_interface eeprom_intf = { 7, /* address bits */ 8, /* data bits */ "011000", /* read command */ "011100", /* write command */ "0100100000000",/* erase command */ "0100000000000",/* lock command */ "0100110000000" /* unlock command */ }; static NVRAM_HANDLER( gijoe ) { if (read_or_write) eeprom_save(file); else { eeprom_init(machine, &eeprom_intf); if (file) { init_eeprom_count = 0; eeprom_load(file); } else init_eeprom_count = 2720; } } static READ16_HANDLER( control1_r ) { int res; /* bit 8 is EEPROM data */ /* bit 9 is EEPROM ready */ /* bit 11 is service button */ res = input_port_read(space->machine, "START"); if (init_eeprom_count) { init_eeprom_count--; res &= 0xf7ff; } return res; } static READ16_HANDLER( control2_r ) { return cur_control2; } static WRITE16_HANDLER( control2_w ) { if(ACCESSING_BITS_0_7) { /* bit 0 is data */ /* bit 1 is cs (active low) */ /* bit 2 is clock (active high) */ /* bit 3 (unknown: coin) */ /* bit 5 is enable irq 6 */ /* bit 7 (unknown: enable irq 5?) */ eeprom_write_bit(data & 0x01); eeprom_set_cs_line((data & 0x02) ? CLEAR_LINE : ASSERT_LINE); eeprom_set_clock_line((data & 0x04) ? ASSERT_LINE : CLEAR_LINE); cur_control2 = data; /* bit 6 = enable sprite ROM reading */ K053246_set_OBJCHA_line((data & 0x0040) ? 
ASSERT_LINE : CLEAR_LINE); } } static void gijoe_objdma(void) { UINT16 *src_head, *src_tail, *dst_head, *dst_tail; src_head = spriteram16; src_tail = spriteram16 + 255*8; K053247_export_config(&dst_head, 0, 0, 0, 0); dst_tail = dst_head + 255*8; for (; src_head<=src_tail; src_head+=8) { if (*src_head & 0x8000) { memcpy(dst_head, src_head, 0x10); dst_head += 8; } else { *dst_tail = 0; dst_tail -= 8; } } } static TIMER_CALLBACK( dmaend_callback ) { if (cur_control2 & 0x0020) cputag_set_input_line(machine, "maincpu", 6, HOLD_LINE); } static INTERRUPT_GEN( gijoe_interrupt ) { // global interrupt masking (*this game only) if (!K056832_is_IRQ_enabled(0)) return; if (K053246_is_IRQ_enabled()) { gijoe_objdma(); // 42.7us(clr) + 341.3us(xfer) delay at 6Mhz dotclock timer_adjust_oneshot(dmadelay_timer, JOE_DMADELAY, 0); } // trigger V-blank interrupt if (cur_control2 & 0x0080) cpu_set_input_line(device, 5, HOLD_LINE); } static WRITE16_HANDLER( sound_cmd_w ) { if(ACCESSING_BITS_0_7) { data &= 0xff; soundlatch_w(space, 0, data); } } static WRITE16_HANDLER( sound_irq_w ) { cputag_set_input_line(space->machine, "audiocpu", 0, HOLD_LINE); } static READ16_HANDLER( sound_status_r ) { return soundlatch2_r(space,0); } static void sound_nmi(const device_config *device) { cputag_set_input_line(device->machine, "audiocpu", INPUT_LINE_NMI, PULSE_LINE); } static MACHINE_START( gijoe ) { state_save_register_global(machine, cur_control2); dmadelay_timer = timer_alloc(machine, dmaend_callback, NULL); } static ADDRESS_MAP_START( gijoe_map, ADDRESS_SPACE_PROGRAM, 16 ) AM_RANGE(0x000000, 0x0fffff) AM_ROM AM_RANGE(0x100000, 0x100fff) AM_RAM AM_BASE(&spriteram16) // Sprites AM_RANGE(0x110000, 0x110007) AM_WRITE(K053246_word_w) AM_RANGE(0x120000, 0x121fff) AM_READWRITE(K056832_ram_word_r, K056832_ram_word_w) // Graphic planes AM_RANGE(0x122000, 0x123fff) AM_READWRITE(K056832_ram_word_r, K056832_ram_word_w) // Graphic planes mirror read AM_RANGE(0x130000, 0x131fff) AM_READ(K056832_rom_word_r) // Passthrough to tile roms AM_RANGE(0x160000, 0x160007) AM_WRITE(K056832_b_word_w) // VSCCS (board dependent) AM_RANGE(0x170000, 0x170001) AM_WRITENOP // Watchdog AM_RANGE(0x180000, 0x18ffff) AM_RAM AM_BASE(&gijoe_workram) // Main RAM. Spec. 
180000-1803ff, 180400-187fff AM_RANGE(0x190000, 0x190fff) AM_RAM_WRITE(paletteram16_xBBBBBGGGGGRRRRR_word_w) AM_BASE(&paletteram16) AM_RANGE(0x1a0000, 0x1a001f) AM_WRITE(K053251_lsb_w) AM_RANGE(0x1b0000, 0x1b003f) AM_WRITE(K056832_word_w) AM_RANGE(0x1c000c, 0x1c000d) AM_WRITE(sound_cmd_w) AM_RANGE(0x1c0014, 0x1c0015) AM_READ(sound_status_r) AM_RANGE(0x1c0000, 0x1c001f) AM_RAM AM_RANGE(0x1d0000, 0x1d0001) AM_WRITE(sound_irq_w) AM_RANGE(0x1e0000, 0x1e0001) AM_READ_PORT("P1_P2") AM_RANGE(0x1e0002, 0x1e0003) AM_READ_PORT("P3_P4") AM_RANGE(0x1e4000, 0x1e4001) AM_READ_PORT("SYSTEM") AM_RANGE(0x1e4002, 0x1e4003) AM_READ(control1_r) AM_RANGE(0x1e8000, 0x1e8001) AM_READWRITE(control2_r, control2_w) AM_RANGE(0x1f0000, 0x1f0001) AM_READ(K053246_word_r) #if JOE_DEBUG AM_RANGE(0x110000, 0x110007) AM_READ(K053246_reg_word_r) AM_RANGE(0x160000, 0x160007) AM_READ(K056832_b_word_r) AM_RANGE(0x1a0000, 0x1a001f) AM_READ(K053251_lsb_r) AM_RANGE(0x1b0000, 0x1b003f) AM_READ(K056832_word_r) #endif ADDRESS_MAP_END static ADDRESS_MAP_START( sound_map, ADDRESS_SPACE_PROGRAM, 8 ) AM_RANGE(0x0000, 0xebff) AM_ROM AM_RANGE(0xf000, 0xf7ff) AM_RAM AM_RANGE(0xf800, 0xfa2f) AM_DEVREADWRITE("konami", k054539_r, k054539_w) AM_RANGE(0xfc00, 0xfc00) AM_WRITE(soundlatch2_w) AM_RANGE(0xfc02, 0xfc02) AM_READ(soundlatch_r) ADDRESS_MAP_END static INPUT_PORTS_START( gijoe ) PORT_START("START") PORT_BIT( 0x0001, IP_ACTIVE_LOW, IPT_START1 ) PORT_BIT( 0x0002, IP_ACTIVE_LOW, IPT_START2 ) PORT_BIT( 0x0004, IP_ACTIVE_LOW, IPT_START3 ) PORT_BIT( 0x0008, IP_ACTIVE_LOW, IPT_START4 ) PORT_BIT( 0x0100, IP_ACTIVE_HIGH, IPT_SPECIAL ) PORT_CUSTOM(eeprom_bit_r, NULL) // EEPROM data PORT_BIT( 0x0200, IP_ACTIVE_LOW, IPT_SPECIAL ) // EEPROM ready (always 1) PORT_SERVICE_NO_TOGGLE( 0x0800, IP_ACTIVE_LOW ) PORT_START("SYSTEM") PORT_BIT( 0x0001, IP_ACTIVE_LOW, IPT_COIN1 ) PORT_BIT( 0x0002, IP_ACTIVE_LOW, IPT_COIN2 ) PORT_BIT( 0x0004, IP_ACTIVE_LOW, IPT_COIN3 ) PORT_BIT( 0x0008, IP_ACTIVE_LOW, IPT_COIN4 ) PORT_BIT( 0x0100, IP_ACTIVE_LOW, IPT_SERVICE1 ) PORT_BIT( 0x0200, IP_ACTIVE_LOW, IPT_SERVICE2 ) PORT_BIT( 0x0400, IP_ACTIVE_LOW, IPT_SERVICE3 ) PORT_BIT( 0x0800, IP_ACTIVE_LOW, IPT_SERVICE4 ) PORT_START("P1_P2") KONAMI16_LSB_40(1, IPT_BUTTON3 ) PORT_DIPNAME( 0x0080, 0x0000, "Sound" ) PORT_DIPLOCATION("SW1:1") PORT_DIPSETTING( 0x0080, DEF_STR( Mono ) ) PORT_DIPSETTING( 0x0000, DEF_STR( Stereo ) ) KONAMI16_MSB_40(2, IPT_BUTTON3 ) PORT_DIPNAME( 0x8000, 0x8000, "Coin mechanism" ) PORT_DIPLOCATION("SW1:2") PORT_DIPSETTING( 0x8000, "Common" ) PORT_DIPSETTING( 0x0000, "Independant" ) PORT_START("P3_P4") KONAMI16_LSB_40(3, IPT_BUTTON3 ) PORT_DIPNAME( 0x0080, 0x0080, DEF_STR( Players ) ) PORT_DIPLOCATION("SW1:3") PORT_DIPSETTING( 0x0080, "2" ) PORT_DIPSETTING( 0x0000, "4" ) KONAMI16_MSB_40(4, IPT_BUTTON3 ) PORT_DIPUNUSED_DIPLOC( 0x8000, 0x8000, "SW1:4" ) /* Listed as "Unused" */ INPUT_PORTS_END static const k054539_interface k054539_config = { NULL, NULL, sound_nmi }; static MACHINE_DRIVER_START( gijoe ) /* basic machine hardware */ MDRV_CPU_ADD("maincpu", M68000, 16000000) /* Confirmed */ MDRV_CPU_PROGRAM_MAP(gijoe_map) MDRV_CPU_VBLANK_INT("screen", gijoe_interrupt) MDRV_CPU_ADD("audiocpu", Z80, 8000000) /* Amuse & confirmed. 
z80e */ MDRV_CPU_PROGRAM_MAP(sound_map) MDRV_MACHINE_START(gijoe) MDRV_NVRAM_HANDLER(gijoe) /* video hardware */ MDRV_VIDEO_ATTRIBUTES(VIDEO_HAS_SHADOWS | VIDEO_UPDATE_BEFORE_VBLANK) MDRV_SCREEN_ADD("screen", RASTER) MDRV_SCREEN_REFRESH_RATE(60) MDRV_SCREEN_VBLANK_TIME(ATTOSECONDS_IN_USEC(0)) MDRV_SCREEN_FORMAT(BITMAP_FORMAT_INDEXED16) MDRV_SCREEN_SIZE(64*8, 32*8) MDRV_SCREEN_VISIBLE_AREA(24, 24+288-1, 16, 16+224-1) MDRV_PALETTE_LENGTH(2048) MDRV_VIDEO_START(gijoe) MDRV_VIDEO_UPDATE(gijoe) /* sound hardware */ MDRV_SPEAKER_STANDARD_STEREO("lspeaker", "rspeaker") MDRV_SOUND_ADD("konami", K054539, 48000) MDRV_SOUND_CONFIG(k054539_config) MDRV_SOUND_ROUTE(0, "lspeaker", 1.0) MDRV_SOUND_ROUTE(1, "rspeaker", 1.0) MACHINE_DRIVER_END ROM_START( gijoe ) ROM_REGION( 0x100000, "maincpu", 0 ) ROM_LOAD16_BYTE( "069eab03.rom", 0x000000, 0x40000, CRC(dd2d533f) SHA1(6fc9f7a8fc89155ef2b9ee43fe5e456d9b574f8c) ) ROM_LOAD16_BYTE( "069eab02.rom", 0x000001, 0x40000, CRC(6bb11c87) SHA1(86581d24f73f2e837f1d4fc5f1f2188f610c50b6) ) ROM_LOAD16_BYTE( "069a12", 0x080000, 0x40000, CRC(75a7585c) SHA1(443d6dee99edbe81ab1b7289e6cad403fe01cc0d) ) ROM_LOAD16_BYTE( "069a11", 0x080001, 0x40000, CRC(3153e788) SHA1(fde4543eac707ef24b431e64011cf0f923d4d3ac) ) ROM_REGION( 0x010000, "audiocpu", 0 ) ROM_LOAD( "069a01", 0x000000, 0x010000, CRC(74172b99) SHA1(f5e0e0d43317454fdacd3df7cd3035fcae4aef68) ) ROM_REGION( 0x200000, "gfx1", 0 ) ROM_LOAD( "069a10", 0x000000, 0x100000, CRC(4c6743ee) SHA1(fa94fbfb55955fdb40705e79b49103676961d919) ) ROM_LOAD( "069a09", 0x100000, 0x100000, CRC(e6e36b05) SHA1(fecad503f2c285b2b0312e888c06dd6e87f95a07) ) ROM_REGION( 0x400000, "gfx2", 0 ) ROM_LOAD( "069a08", 0x000000, 0x100000, CRC(325477d4) SHA1(140c57b0ac9e5cf702d788f416408a5eeb5d6d3c) ) ROM_LOAD( "069a05", 0x100000, 0x100000, CRC(c4ab07ed) SHA1(dc806eff00937d9465b1726fae8fdc3022464a28) ) ROM_LOAD( "069a07", 0x200000, 0x100000, CRC(ccaa3971) SHA1(16989cbbd65fe1b41c4a85fea02ba1e9880818a9) ) ROM_LOAD( "069a06", 0x300000, 0x100000, CRC(63eba8e1) SHA1(aa318d356c2580765452106ea0d2228273a90523) ) ROM_REGION( 0x200000, "konami", 0 ) ROM_LOAD( "069a04", 0x000000, 0x200000, CRC(11d6dcd6) SHA1(04cbff9f61cd8641db538db809ddf20da29fd5ac) ) ROM_END ROM_START( gijoeu ) ROM_REGION( 0x100000, "maincpu", 0 ) ROM_LOAD16_BYTE("069uab03", 0x000000, 0x40000, CRC(25ff77d2) SHA1(bea2ae975718806698fd35ef1217bd842b2b69ec) ) ROM_LOAD16_BYTE("069uab02", 0x000001, 0x40000, CRC(31cced1c) SHA1(3df1def671966b3c3d8117ac1b68adeeef9d98c0) ) ROM_LOAD16_BYTE("069a12", 0x080000, 0x40000, CRC(75a7585c) SHA1(443d6dee99edbe81ab1b7289e6cad403fe01cc0d) ) ROM_LOAD16_BYTE("069a11", 0x080001, 0x40000, CRC(3153e788) SHA1(fde4543eac707ef24b431e64011cf0f923d4d3ac) ) ROM_REGION( 0x010000, "audiocpu", 0 ) ROM_LOAD( "069a01", 0x000000, 0x010000, CRC(74172b99) SHA1(f5e0e0d43317454fdacd3df7cd3035fcae4aef68) ) ROM_REGION( 0x200000, "gfx1", 0 ) ROM_LOAD( "069a10", 0x000000, 0x100000, CRC(4c6743ee) SHA1(fa94fbfb55955fdb40705e79b49103676961d919) ) ROM_LOAD( "069a09", 0x100000, 0x100000, CRC(e6e36b05) SHA1(fecad503f2c285b2b0312e888c06dd6e87f95a07) ) ROM_REGION( 0x400000, "gfx2", 0 ) ROM_LOAD( "069a08", 0x000000, 0x100000, CRC(325477d4) SHA1(140c57b0ac9e5cf702d788f416408a5eeb5d6d3c) ) ROM_LOAD( "069a05", 0x100000, 0x100000, CRC(c4ab07ed) SHA1(dc806eff00937d9465b1726fae8fdc3022464a28) ) ROM_LOAD( "069a07", 0x200000, 0x100000, CRC(ccaa3971) SHA1(16989cbbd65fe1b41c4a85fea02ba1e9880818a9) ) ROM_LOAD( "069a06", 0x300000, 0x100000, CRC(63eba8e1) SHA1(aa318d356c2580765452106ea0d2228273a90523) ) ROM_REGION( 
0x200000, "konami", 0 ) ROM_LOAD( "069a04", 0x000000, 0x200000, CRC(11d6dcd6) SHA1(04cbff9f61cd8641db538db809ddf20da29fd5ac) ) ROM_END ROM_START( gijoej ) ROM_REGION( 0x100000, "maincpu", 0 ) ROM_LOAD16_BYTE("069jaa03", 0x000000, 0x40000, CRC(4b398901) SHA1(98fcc6ae9cc69c67d82eb1a7ab0bb71e61aee623) ) ROM_LOAD16_BYTE("069jaa02", 0x000001, 0x40000, CRC(8bb22392) SHA1(9f066ce2b529f7dad6f80a91fff266c478d56414) ) ROM_LOAD16_BYTE("069a12", 0x080000, 0x40000, CRC(75a7585c) SHA1(443d6dee99edbe81ab1b7289e6cad403fe01cc0d) ) ROM_LOAD16_BYTE("069a11", 0x080001, 0x40000, CRC(3153e788) SHA1(fde4543eac707ef24b431e64011cf0f923d4d3ac) ) ROM_REGION( 0x010000, "audiocpu", 0 ) ROM_LOAD( "069a01", 0x000000, 0x010000, CRC(74172b99) SHA1(f5e0e0d43317454fdacd3df7cd3035fcae4aef68) ) ROM_REGION( 0x200000, "gfx1", 0 ) ROM_LOAD( "069a10", 0x000000, 0x100000, CRC(4c6743ee) SHA1(fa94fbfb55955fdb40705e79b49103676961d919) ) ROM_LOAD( "069a09", 0x100000, 0x100000, CRC(e6e36b05) SHA1(fecad503f2c285b2b0312e888c06dd6e87f95a07) ) ROM_REGION( 0x400000, "gfx2", 0 ) ROM_LOAD( "069a08", 0x000000, 0x100000, CRC(325477d4) SHA1(140c57b0ac9e5cf702d788f416408a5eeb5d6d3c) ) ROM_LOAD( "069a05", 0x100000, 0x100000, CRC(c4ab07ed) SHA1(dc806eff00937d9465b1726fae8fdc3022464a28) ) ROM_LOAD( "069a07", 0x200000, 0x100000, CRC(ccaa3971) SHA1(16989cbbd65fe1b41c4a85fea02ba1e9880818a9) ) ROM_LOAD( "069a06", 0x300000, 0x100000, CRC(63eba8e1) SHA1(aa318d356c2580765452106ea0d2228273a90523) ) ROM_REGION( 0x200000, "konami", 0 ) ROM_LOAD( "069a04", 0x000000, 0x200000, CRC(11d6dcd6) SHA1(04cbff9f61cd8641db538db809ddf20da29fd5ac) ) ROM_END static DRIVER_INIT( gijoe ) { konami_rom_deinterleave_2(machine, "gfx1"); konami_rom_deinterleave_4(machine, "gfx2"); } GAME( 1992, gijoe, 0, gijoe, gijoe, gijoe, ROT0, "Konami", "GI Joe (World)", 0) GAME( 1992, gijoeu, gijoe, gijoe, gijoe, gijoe, ROT0, "Konami", "GI Joe (US)", 0) GAME( 1992, gijoej, gijoe, gijoe, gijoe, gijoe, ROT0, "Konami", "GI Joe (Japan)", 0)
yosshor/limestone
optimization/src/main/java/org/tglanz/limestone/optimization/LimeRelOptCostFactory.java
package org.tglanz.limestone.optimization; import org.apache.calcite.plan.RelOptCost; import org.apache.calcite.plan.RelOptCostFactory; import org.apache.commons.lang.NotImplementedException; public class LimeRelOptCostFactory implements RelOptCostFactory { /** cost with zero values **/ public static final LimeRelOptCost ZeroCost = new LimeRelOptCost(0, 0, 0); /** cost with infinite values **/ public static final RelOptCost InfiniteCost = ZeroCost.apply(a -> Double.POSITIVE_INFINITY); /** cost with tiny values **/ public static final RelOptCost TinyCost = ZeroCost.apply(a -> 1E-5); /** cost with huge values **/ public static final RelOptCost HugeCost = ZeroCost.apply(a -> 1E20); @Override public RelOptCost makeCost(double rowCount, double cpu, double io) { return new LimeRelOptCost(rowCount, cpu, io); } @Override public RelOptCost makeHugeCost() { return HugeCost; } @Override public RelOptCost makeInfiniteCost() { return InfiniteCost; } @Override public RelOptCost makeTinyCost() { return TinyCost; } @Override public RelOptCost makeZeroCost() { return ZeroCost; } }
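A caller-side sketch, not from the repository, showing how the factory above plugs into Calcite's RelOptCostFactory contract; it assumes the LimeRelOptCost class referenced by the factory is on the classpath, and the class name CostFactoryDemo is invented for the example.

import org.apache.calcite.plan.RelOptCost;
import org.apache.calcite.plan.RelOptCostFactory;
import org.tglanz.limestone.optimization.LimeRelOptCostFactory;

public class CostFactoryDemo {
    public static void main(String[] args) {
        RelOptCostFactory factory = new LimeRelOptCostFactory();
        RelOptCost scanCost = factory.makeCost(1000.0, 50.0, 10.0); // rowCount, cpu, io
        RelOptCost lowerBound = factory.makeTinyCost();  // near-zero cost used as a floor
        RelOptCost upperBound = factory.makeHugeCost();  // large cost used for pruning
        System.out.println(scanCost + " / " + lowerBound + " / " + upperBound);
    }
}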
SynthSys/BioDare2-BACK
src/test/java/ed/biodare2/backend/features/tsdata/datahandling/TSDataHandlerSpringTest.java
<filename>src/test/java/ed/biodare2/backend/features/tsdata/datahandling/TSDataHandlerSpringTest.java /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package ed.biodare2.backend.features.tsdata.datahandling; import ed.biodare2.backend.repo.dao.ExperimentsStorage; import ed.biodare2.backend.MapperConfiguration; import ed.biodare2.backend.repo.isa_dom.dataimport.DataBundle; import ed.biodare2.backend.repo.isa_dom.dataimport.DataTrace; import ed.biodare2.backend.repo.system_dom.AssayPack; import ed.robust.dom.data.DetrendingType; import ed.robust.dom.data.TimeSeries; import java.nio.file.Files; import java.nio.file.Path; import java.util.List; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.context.SpringBootTest.WebEnvironment; import org.springframework.test.context.junit4.SpringRunner; import static org.junit.Assert.*; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import static org.mockito.Mockito.*; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.autoconfigure.data.jpa.JpaRepositoriesAutoConfiguration; import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.cache.CacheManager; import org.springframework.cache.annotation.EnableCaching; import org.springframework.context.annotation.Import; import org.springframework.core.env.Environment; /** * * @author Zielu */ @RunWith(SpringRunner.class) @SpringBootTest(webEnvironment=WebEnvironment.NONE) public class TSDataHandlerSpringTest { @EnableCaching @SpringBootApplication @Import(MapperConfiguration.class) @EnableAutoConfiguration(exclude={DataSourceAutoConfiguration.class,JpaRepositoriesAutoConfiguration.class,HibernateJpaAutoConfiguration.class}) public static class Config { } final String cacheName = "TSData"; @Rule public TemporaryFolder testFolder = new TemporaryFolder(); Path bdStorageDir; AssayPack exp; @Autowired TSDataHandler handler; @MockBean ExperimentsStorage expStorage; @Autowired Environment env; @Autowired CacheManager cacheManager; @Before public void setup() throws Exception { bdStorageDir = testFolder.newFolder().toPath(); Files.createDirectories(bdStorageDir.resolve("1")); Files.createDirectories(bdStorageDir.resolve("2")); when(expStorage.getExperimentDir(eq(1L))).thenReturn(bdStorageDir.resolve("1")); when(expStorage.getExperimentDir(eq(2L))).thenReturn(bdStorageDir.resolve("2")); when(expStorage.getExperimentsDir()).thenReturn(bdStorageDir); exp = mock(AssayPack.class); when(exp.getId()).thenReturn(1L); } DataBundle makeBoundle() { DataBundle dataBoundle = new DataBundle(); DataTrace trace = new DataTrace(); trace.traceRef = "A"; trace.traceNr = 1; trace.dataId = trace.traceNr; trace.rawDataId = trace.dataId; TimeSeries serie = new TimeSeries(); serie.add(1,1); serie.add(2,2); serie.add(3,3); trace.trace = serie; dataBoundle.data.add(trace); trace = new DataTrace(); trace.traceRef = "B"; trace.traceNr = 2; trace.dataId = trace.traceNr; trace.rawDataId = trace.dataId; 
serie = new TimeSeries(); serie.add(1.1,2); serie.add(1.2,2); serie.add(2.2,3); serie.add(3.0,4); serie.add(3.3,4); trace.trace = serie; dataBoundle.data.add(trace); return dataBoundle; } @Test public void wiringWorks() { assertNotNull(handler); assertTrue(env.getProperty("spring.cache.cache-names","").contains(cacheName)); } @Test public void caffeineIsUsed() { assertTrue(env.getProperty("spring.cache.cache-names","").contains(cacheName)); assertNotNull(cacheManager); assertNotNull(cacheManager.getCache(cacheName)); assertTrue(com.github.benmanes.caffeine.cache.Cache.class.isInstance(cacheManager.getCache(cacheName).getNativeCache())); //System.out.println(.getNativeCache().getClass().getName()); } @Test public void cachingWorks() throws Exception { DataBundle db1 = makeBoundle(); handler.handleNewData(exp, db1); DetrendingType detrending = DetrendingType.LIN_DTR; List<DataTrace> set1 = handler.getDataSet(exp, detrending).get(); List<DataTrace> set2 = handler.getDataSet(exp, detrending).get(); assertSame(set1,set2); set2 = handler.getDataSet(exp, DetrendingType.POLY_DTR).get(); assertNotSame(set1,set2); } @Test public void binnedCachingWorks() throws Exception { DataBundle db1 = makeBoundle(); handler.handleNewData(exp, db1); DetrendingType detrending = DetrendingType.LIN_DTR; List<DataTrace> set1 = handler.getHourlyDataSet(exp, detrending).get(); List<DataTrace> set2 = handler.getHourlyDataSet(exp, detrending).get(); assertSame(set1,set2); set2 = handler.getHourlyDataSet(exp, DetrendingType.POLY_DTR).get(); assertNotSame(set1,set2); } @Test public void binnedCachingDoesNotInterfereWorks() throws Exception { DataBundle db1 = makeBoundle(); handler.handleNewData(exp, db1); DetrendingType detrending = DetrendingType.LIN_DTR; List<DataTrace> set1 = handler.getDataSet(exp, detrending).get(); List<DataTrace> binset1 = handler.getHourlyDataSet(exp, detrending).get(); List<DataTrace> set2 = handler.getDataSet(exp, detrending).get(); List<DataTrace> binset2 = handler.getHourlyDataSet(exp, detrending).get(); assertSame(set1,set2); assertSame(binset1,binset2); assertNotSame(set1,binset1); assertNotEquals(set1,binset1); } @Test public void handleNewDataInvalidesDataAndBinnedCache() throws Exception { DataBundle db1 = makeBoundle(); handler.handleNewData(exp, db1); DetrendingType detrending = DetrendingType.LIN_DTR; List<DataTrace> set1 = handler.getDataSet(exp, detrending).get(); List<DataTrace> binset1 = handler.getHourlyDataSet(exp, detrending).get(); List<DataTrace> set2 = handler.getDataSet(exp, detrending).get(); List<DataTrace> binset2 = handler.getHourlyDataSet(exp, detrending).get(); assertSame(set1,set2); assertSame(binset1,binset2); handler.handleNewData(exp, db1); set2 = handler.getDataSet(exp, detrending).get(); binset2 = handler.getHourlyDataSet(exp, detrending).get(); assertNotSame(set1,set2); assertNotSame(binset1,binset2); } @Test public void handleNewDataInvalidesCache() throws Exception { DataBundle db1 = makeBoundle(); handler.handleNewData(exp, db1); AssayPack exp2 = mock(AssayPack.class); when(exp2.getId()).thenReturn(2L); DataBundle db2 = makeBoundle(); handler.handleNewData(exp2, db2); DetrendingType detrending = DetrendingType.LIN_DTR; List<DataTrace> set1 = handler.getDataSet(exp, detrending).get(); List<DataTrace> set2 = handler.getDataSet(exp, detrending).get(); assertSame(set1,set2); set2 = handler.getDataSet(exp2, detrending).get(); assertNotSame(set1,set2); handler.handleNewData(exp2, db2); set2 = handler.getDataSet(exp, detrending).get(); assertNotSame(set1,set2); set1 
= handler.getDataSet(exp, detrending).get(); assertSame(set1,set2); } }
bezout/LMA
examples/convert_camera_model/src/unified.cpp
#include <libv/geometry/camera_model.hpp> #include <libv/lma/lma.hpp> #include "modules.hpp" namespace { struct _Error: Error { bool operator()(const Vector5d &v, double (&res)[2]) const { Vector2d p; bool good = UnifiedCameraModel(v[0], v[1], v[2], v[3], v[4]).project(p3, p); if(config.verbose()) v3.add_line(p2.x(), p2.y(), p.x(), p.y()); res[0] = p.x() - p2.matrix().x(); res[1] = p.y() - p2.matrix().y(); return good; } }; typedef Solver<_Error> Solver; struct _Module: Module { Vector5d result; Solver solver; _Module() : solver(config.lambda(), config.iteration_count()) { result << config.focal(), config.center(), config.xi(); } void init(Error &e) { UnifiedCameraModel(config.focal().x(), config.focal().y(), config.center().x(), config.center().y(), config.xi()).raytrace(e.p2, e.p3); } void add(Error &e) { solver.add(static_cast<_Error &>(e),&result); } void run() { solver.solve(DENSE); cout << setprecision(20) << result.transpose() << endl; } void run_verbose() { solver.solve(DENSE,Callbacks()); cout << boost::format("focal: [%f, %f], center: [%f, %f], xi: %f") % result[0] % result[1] % result[2] % result[3] % result[4] << endl; } }; } Module *init_unified() { return new _Module; }
Dup4/TI1050
docs/trainings/2018-Multi-University-Trainings/Contest-3/solutions/c.cpp
#include <bits/stdc++.h> using namespace std; typedef long long ll; const int maxn = 1 << 10; const int MOD = 1e9 + 7; int t, n, m; ll dp[maxn]; ll ans[12]; inline int cal(int x) { int cnt = 0; while (x) { if (x & 1) cnt++; x >>= 1; } return cnt; } int main() { scanf("%d", &t); while (t--) { memset(ans, 0, sizeof ans); memset(dp, 0, sizeof dp); dp[0] = 1; scanf("%d %d", &n, &m); while (m--) { char op[10]; int u, v; scanf("%s %d %d", op, &u, &v); --u, --v; for (int s = 0; s < (1 << n); ++s) { if ((s & (1 << u)) || (s & (1 << v))) continue; int S = s | (1 << u); S |= (1 << v); if (op[0] == '+') { dp[S] = (dp[S] + dp[s]) % MOD; ans[cal(S)] = (ans[cal(S)] + dp[s]) % MOD; } else if (op[0] == '-') { dp[S] = (dp[S] - dp[s] + MOD) % MOD; ans[cal(S)] = (ans[cal(S)] - dp[s] + MOD) % MOD; } } for (int i = 2; i <= n; i += 2) { printf("%lld%c", ans[i], " \n"[i == n]); } } } return 0; }
tuwiendsg/RAHYMS
hcu/hcu-common/src/main/java/at/ac/tuwien/dsg/hcu/common/interfaces/ServiceManagerInterface.java
package at.ac.tuwien.dsg.hcu.common.interfaces; import java.util.Collection; import java.util.List; import java.util.Map; import at.ac.tuwien.dsg.hcu.common.exceptions.NotFoundException; import at.ac.tuwien.dsg.hcu.common.model.ComputingElement; import at.ac.tuwien.dsg.hcu.common.model.Connection; import at.ac.tuwien.dsg.hcu.common.model.Functionality; import at.ac.tuwien.dsg.hcu.common.model.Service; public interface ServiceManagerInterface { // instantiator public ServiceManagerInterface getInstance(); // manage element public ComputingElement createElement(); public ComputingElement registerElement(ComputingElement element); public void removeElement(ComputingElement element) throws NotFoundException; public ComputingElement retrieveElement(long id); public Collection<ComputingElement> retrieveElements(); // manage service public Service registerService(Service service); public void removeService(Service service) throws NotFoundException; public List<Service> retrieveServices(Functionality functionality); public Service getServiceById(int serviceId); public void setConfiguration(Map<String, Object> config); }
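A minimal caller-side sketch (not from the repository) of how the interface above might be exercised; the concrete ServiceManagerInterface implementation and the Functionality/Service instances are assumed to be supplied elsewhere in the framework, and the class name ServiceManagerUsageSketch is invented for the example.

import java.util.List;

import at.ac.tuwien.dsg.hcu.common.interfaces.ServiceManagerInterface;
import at.ac.tuwien.dsg.hcu.common.model.ComputingElement;
import at.ac.tuwien.dsg.hcu.common.model.Functionality;
import at.ac.tuwien.dsg.hcu.common.model.Service;

public class ServiceManagerUsageSketch {
    static void registerAndQuery(ServiceManagerInterface manager,
                                 Service service, Functionality functionality) {
        ComputingElement element = manager.createElement();          // element created by the manager
        manager.registerElement(element);                            // make it known to the manager
        manager.registerService(service);                            // publish the service
        List<Service> matches = manager.retrieveServices(functionality); // look up by functionality
        System.out.println(matches.size() + " service(s) provide the requested functionality");
    }
}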
Next-Gen-UI/Code-Dynamics
Leetcode/2001-3000/2154. Keep Multiplying Found Values by Two/2155.py
class Solution: def findFinalValue(self, nums: List[int], original: int) -> int: numsSet = set(nums) while original in numsSet: original *= 2 return original
HuttonICS/diversify-server
src/jhi/diversify/server/resource/TraitPlotDataResource.java
<gh_stars>0 package jhi.diversify.server.resource; import org.jooq.DSLContext; import org.jooq.tools.StringUtils; import org.restlet.data.Status; import org.restlet.data.*; import org.restlet.representation.FileRepresentation; import org.restlet.resource.*; import java.io.*; import java.nio.charset.StandardCharsets; import java.sql.*; import java.util.logging.Logger; import jhi.diversify.server.Database; import jhi.diversify.server.database.routines.ExportTraitDataPlot; import jhi.diversify.server.util.FileUtils; /** * @author <NAME> */ public class TraitPlotDataResource extends ServerResource { private String traitIds; @Override protected void doInit() throws ResourceException { super.doInit(); this.traitIds = getRequestAttributes().get("traitIds").toString(); if (!StringUtils.isEmpty(this.traitIds)) { // Replace everything that isn't a digit or a comma this.traitIds = this.traitIds.replaceAll("[^0-9,]", ""); } } @Get public FileRepresentation getFile() { if (StringUtils.isEmpty(traitIds)) throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST); FileRepresentation representation; File file = FileUtils.createTempFile("traits-plotdata", ".tsv"); try (Connection conn = Database.getConnection(); DSLContext context = Database.getContext(conn); PrintWriter bw = new PrintWriter(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8)))) { ExportTraitDataPlot procedure = new ExportTraitDataPlot(); procedure.setTraitids(traitIds); procedure.execute(context.configuration()); FileUtils.exportToFile(bw, procedure.getResults().get(0)); } catch (SQLException | IOException e) { Logger.getLogger("").info(e.getMessage()); e.printStackTrace(); throw new ResourceException(Status.SERVER_ERROR_INTERNAL); } Disposition disposition = new Disposition(Disposition.TYPE_ATTACHMENT); disposition.setFilename(file.getName()); representation = new FileRepresentation(file, MediaType.TEXT_PLAIN); representation.setSize(file.length()); representation.setDisposition(disposition); return representation; } }
tharindusathis/sourcecodes-of-CodeReadingTheOpenSourcePerspective
ace/tao/tao/Muxed_TMS.cpp
<reponame>tharindusathis/sourcecodes-of-CodeReadingTheOpenSourcePerspective // Muxed_TMS.cpp,v 1.10 2001/08/08 17:24:07 bala Exp #include "tao/Muxed_TMS.h" #include "tao/Reply_Dispatcher.h" #include "tao/GIOP_Message_Version.h" #include "tao/debug.h" #include "tao/Pluggable_Messaging_Utils.h" #include "Transport.h" ACE_RCSID(tao, Muxed_TMS, "Muxed_TMS.cpp,v 1.10 2001/08/08 17:24:07 bala Exp") TAO_Muxed_TMS::TAO_Muxed_TMS (TAO_Transport *transport) : TAO_Transport_Mux_Strategy (transport), request_id_generator_ (0), orb_core_ (transport->orb_core ()) { } TAO_Muxed_TMS::~TAO_Muxed_TMS (void) { } // Generate and return an unique request id for the current // invocation. CORBA::ULong TAO_Muxed_TMS::request_id (void) { // @@ What is a good error return value? ACE_GUARD_RETURN (TAO_SYNCH_MUTEX, ace_mon, this->lock_, 0); return this->request_id_generator_++; } // Bind the dispatcher with the request id. int TAO_Muxed_TMS::bind_dispatcher (CORBA::ULong request_id, TAO_Reply_Dispatcher *rd) { int result = this->dispatcher_table_.bind (request_id, rd); if (result != 0) { if (TAO_debug_level > 0) ACE_DEBUG ((LM_DEBUG, ACE_TEXT ("(%P | %t):TAO_Muxed_TMS::bind_dispatcher: ") ACE_TEXT ("bind dispatcher failed: result = %d\n"), result)); return -1; } return 0; } void TAO_Muxed_TMS::unbind_dispatcher (CORBA::ULong request_id) { ACE_GUARD (TAO_SYNCH_MUTEX, ace_mon, this->lock_); TAO_Reply_Dispatcher *rd = 0; (void) this->dispatcher_table_.unbind (request_id, rd); } int TAO_Muxed_TMS::dispatch_reply (TAO_Pluggable_Reply_Params &params) { // This message state should be the same as the one we have here, // which we gave to the Transport to read the message. Just a sanity // check here. // ACE_ASSERT (message_state == this->message_state_); int result = 0; TAO_Reply_Dispatcher *rd = 0; // Grab the reply dispatcher for this id. { ACE_GUARD_RETURN (TAO_SYNCH_MUTEX, ace_mon, this->lock_, -1); result = this->dispatcher_table_.unbind (params.request_id_, rd); //ACE_DEBUG ((LM_DEBUG, // "\n(%P|%t) TAO_Muxed_TMS::dispatch_reply: id = %d\n", // params.request_id_)); } if (result != 0) { if (TAO_debug_level > 0) ACE_DEBUG ((LM_DEBUG, ACE_TEXT ("(%P | %t):TAO_Muxed_TMS::dispatch_reply: ") ACE_TEXT ("unbind dispatcher failed: result = %d\n"), result)); // This return value means that the mux strategy was not able // to find a registered reply handler, either because the reply // was not our reply - just forget about it - or it was ours, but // the reply timed out - just forget about the reply. return 0; } // Dispatch the reply. // They return 1 on success, and -1 on failure. return rd->dispatch_reply (params); // No need for idling Transport, it would have got idle'd soon after // sending the request. } int TAO_Muxed_TMS::idle_after_send (void) { if (this->transport_ != 0) return this->transport_->make_idle (); return 0; } int TAO_Muxed_TMS::idle_after_reply (void) { // No op. 
return 0; } void TAO_Muxed_TMS::connection_closed (void) { ACE_GUARD (TAO_SYNCH_MUTEX, ace_mon, this->lock_); // @@ This should be done using a mutex, the table REQUEST_DISPATCHER_TABLE::ITERATOR end = this->dispatcher_table_.end (); for (REQUEST_DISPATCHER_TABLE::ITERATOR i = this->dispatcher_table_.begin (); i != end; ++i) { (*i).int_id_->connection_closed (); } this->dispatcher_table_.unbind_all (); } #if defined (ACE_HAS_EXPLICIT_TEMPLATE_INSTANTIATION) template class ACE_Hash_Map_Manager_Ex <CORBA::ULong, TAO_Reply_Dispatcher *, ACE_Hash <CORBA::ULong>, ACE_Equal_To <CORBA::ULong>, ACE_Null_Mutex>; template class ACE_Hash_Map_Entry<CORBA::ULong, TAO_Reply_Dispatcher *>; template class ACE_Hash_Map_Iterator_Base_Ex<CORBA::ULong, TAO_Reply_Dispatcher *, ACE_Hash<unsigned int>, ACE_Equal_To<unsigned int>, ACE_Null_Mutex>; template class ACE_Hash_Map_Iterator_Ex<CORBA::ULong, TAO_Reply_Dispatcher*, ACE_Hash<CORBA::ULong>, ACE_Equal_To<CORBA::ULong>, ACE_Null_Mutex>; template class ACE_Hash_Map_Reverse_Iterator_Ex<CORBA::ULong, TAO_Reply_Dispatcher*, ACE_Hash<CORBA::ULong>, ACE_Equal_To<CORBA::ULong>, ACE_Null_Mutex>; template class ACE_Equal_To <CORBA::ULong>; #elif defined (ACE_HAS_TEMPLATE_INSTANTIATION_PRAGMA) #pragma instantiate ACE_Hash_Map_Manager_Ex <CORBA::ULong, TAO_Reply_Dispatcher *, ACE_Hash <CORBA::ULong>, ACE_Equal_To <CORBA::ULong>, ACE_Null_Mutex> #pragma instantiate ACE_Hash_Map_Entry<CORBA::ULong, TAO_Reply_Dispatcher *> #pragma instantiate ACE_Hash_Map_Iterator_Base_Ex<unsigned int, TAO_Reply_Dispatcher *, ACE_Hash<unsigned int>, ACE_Equal_To<unsigned int>, ACE_Null_Mutex> #pragma instantiate ACE_Hash_Map_Iterator_Ex<CORBA::ULong, TAO_Reply_Dispatcher*, ACE_Hash<CORBA::ULong>, ACE_Equal_To<CORBA::ULong>, ACE_Null_Mutex> #pragma instantiate ACE_Hash_Map_Reverse_Iterator_Ex<CORBA::ULong, TAO_Reply_Dispatcher*, ACE_Hash<CORBA::ULong>, ACE_Equal_To<CORBA::ULong>, ACE_Null_Mutex> #pragma instantiate ACE_Equal_To <CORBA::ULong> #endif /* ACE_HAS_EXPLICIT_TEMPLATE_INSTANTIATION */
vicvans20/ruby-brightpearl
spec/ruby/brightpearl_spec.rb
# frozen_string_literal: true RSpec.describe Brightpearl do it "has a version number" do expect(Brightpearl::VERSION).not_to be nil end xit "does something useful" do expect(false).to eq(true) end context '.config' do it 'is an instance of Config' do expect(Brightpearl.config).to be_an_instance_of(Brightpearl::Config) end end end
bdcorps/airswap.js
src/index/redux/eventTrackingActions.js
// This file is generated code, edits will be overwritten const abi = require('../../abis/index.json') export const trackIndexOwnershipTransferred = ({ callback, previousOwner, newOwner, fromBlock, backFillBlockCount, } = {}) => ({ callback, abi, name: 'OwnershipTransferred', params: { previousOwner, newOwner }, fromBlock, backFillBlockCount, type: 'TRACK_EVENT', namespace: 'index', }) export const trackIndexSetLocator = ({ callback, identifier, locator, fromBlock, backFillBlockCount } = {}) => ({ callback, abi, name: 'SetLocator', params: { identifier, locator }, fromBlock, backFillBlockCount, type: 'TRACK_EVENT', namespace: 'index', }) export const trackIndexUnsetLocator = ({ callback, identifier, fromBlock, backFillBlockCount } = {}) => ({ callback, abi, name: 'UnsetLocator', params: { identifier }, fromBlock, backFillBlockCount, type: 'TRACK_EVENT', namespace: 'index', })
codrinbucur/activemq-artemis
artemis-server/src/test/java/org/apache/activemq/artemis/core/reload/ReloadManagerTest.java
<reponame>codrinbucur/activemq-artemis<gh_stars>100-1000 /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.reload; import java.io.File; import java.io.IOException; import java.net.URL; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.apache.activemq.artemis.core.server.reload.ReloadCallback; import org.apache.activemq.artemis.core.server.reload.ReloadManagerImpl; import org.apache.activemq.artemis.tests.util.ActiveMQTestBase; import org.apache.activemq.artemis.utils.ReusableLatch; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; public class ReloadManagerTest extends ActiveMQTestBase { private ScheduledExecutorService scheduledExecutorService; private ExecutorService executorService; private ReloadManagerImpl manager; @Before public void startScheduled() { scheduledExecutorService = new ScheduledThreadPoolExecutor(5); executorService = Executors.newSingleThreadExecutor(); manager = new ReloadManagerImpl(scheduledExecutorService, executorService, 100); } @After public void stopScheduled() { manager.stop(); scheduledExecutorService.shutdown(); executorService.shutdown(); scheduledExecutorService = null; } @Test public void testUpdate() throws Exception { File file = new File(getTemporaryDir(), "checkFile.tst"); internalTest(manager, file); } @Test public void testUpdateWithSpace() throws Exception { File spaceDir = new File(getTemporaryDir(), "./with %25space"); spaceDir.mkdirs(); File file = new File(spaceDir, "checkFile.tst"); internalTest(manager, file); } private void internalTest(ReloadManagerImpl manager, File file) throws IOException, InterruptedException { file.createNewFile(); final ReusableLatch latch = new ReusableLatch(1); manager.addCallback(file.toURL(), new ReloadCallback() { @Override public void reload(URL uri) { latch.countDown(); } }); Assert.assertFalse(latch.await(1, TimeUnit.SECONDS)); file.setLastModified(System.currentTimeMillis()); Assert.assertTrue(latch.await(1, TimeUnit.SECONDS)); } }