repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
jakubknejzlik/js-core-data
lib/Descriptors/MigrationDescription.js
// Generated by CoffeeScript 1.10.0 (hand-reformatted; behavior unchanged).
(function() {
  var EntityDescription = require('../Descriptors/EntityDescription');

  /**
   * Records the set of changes (entities, attributes, relationships and
   * migration scripts) needed to move a data model from `modelFrom` to
   * `modelTo`. Change markers: '+' = added, '-' = removed, any other
   * string = renamed to that string.
   */
  function MigrationDescription(modelFrom, modelTo) {
    this.modelFrom = modelFrom;
    this.modelTo = modelTo;
    this.entitiesChanges = [];
    this.attributesChanges = {};
    this.relationshipsChanges = {};
    this.scriptsBefore = [];
    this.scriptsAfter = [];
  }

  // Resolve either an EntityDescription instance or a plain string to a name.
  MigrationDescription.prototype._entityName = function(entity) {
    if (entity instanceof EntityDescription) {
      return entity.name;
    }
    return entity;
  };

  // Lazily create and return the per-entity change map inside `changes`.
  function bucketFor(changes, entityName) {
    changes[entityName] = changes[entityName] || {};
    return changes[entityName];
  }

  MigrationDescription.prototype.addEntity = function(name) {
    return this.entitiesChanges.push({ entity: this._entityName(name), change: '+' });
  };

  MigrationDescription.prototype.renameEntity = function(oldName, newName) {
    return this.entitiesChanges.push({
      entity: this._entityName(oldName),
      change: this._entityName(newName)
    });
  };

  MigrationDescription.prototype.removeEntity = function(name) {
    return this.entitiesChanges.push({ entity: this._entityName(name), change: '-' });
  };

  MigrationDescription.prototype.addAttribute = function(entity, name) {
    return bucketFor(this.attributesChanges, this._entityName(entity))[name] = '+';
  };

  MigrationDescription.prototype.renameAttribute = function(entity, name, newName) {
    return bucketFor(this.attributesChanges, this._entityName(entity))[name] = newName;
  };

  MigrationDescription.prototype.removeAttribute = function(entity, name) {
    return bucketFor(this.attributesChanges, this._entityName(entity))[name] = '-';
  };

  MigrationDescription.prototype.addRelationship = function(entity, name) {
    return bucketFor(this.relationshipsChanges, this._entityName(entity))[name] = '+';
  };

  MigrationDescription.prototype.renameRelationship = function(entity, name, newName) {
    return bucketFor(this.relationshipsChanges, this._entityName(entity))[name] = newName;
  };

  MigrationDescription.prototype.removeRelationship = function(entity, name) {
    return bucketFor(this.relationshipsChanges, this._entityName(entity))[name] = '-';
  };

  MigrationDescription.prototype.addScriptBefore = function(script, name) {
    return this.scriptsBefore.push({ script: script, name: name });
  };

  MigrationDescription.prototype.addScriptAfter = function(script, name) {
    return this.scriptsAfter.push({ script: script, name: name });
  };

  module.exports = MigrationDescription;
}).call(this);
fireairforce/leetCode-Record
leetcode/1095. 山脉数组中查找目标值.js
/** * @param {number} target * @param {MountainArray} mountainArr * @return {number} */ var findInMountainArray = function(target, mountainArr) { let left = 0 let right = mountainArr.length() - 1 // 寻找山峰 while (left < right) { // 取中位数,向下取整 const mid = (left + right) / 2 | 0 if (mountainArr.get(mid) >= mountainArr.get(mid + 1)) { right = mid } else { left = mid + 1 } } // 标记山峰所在的位置 const peak = left // 在山峰左边查找,即在升序序列中查找 const index = binarySearch(mountainArr, target, 0, peak, v => v) // 若存在,则直接返回下标 if (index !== -1) { return index } // 否则在山峰右边查找,即在降序序列中查找 return binarySearch(mountainArr, target, peak + 1, mountainArr.length() - 1, v => -v) // 二分法查找 // 其中 fn 是用来对升序还是降序的特殊处理 function binarySearch (list, target, l, r, fn) { target = fn(target) while (l <= r) { const mid = (l + r) / 2 | 0 const cur = fn(list.get(mid)) if (cur === target) { return mid } else if (cur < target) { l = mid + 1 } else { r = mid - 1 } } return -1 } };
ArthasZhang007/15418FinalProject
pin-3.22-98547-g7a303a835-gcc-linux/source/tools/ToolUnitTests/lpd.c
<reponame>ArthasZhang007/15418FinalProject<filename>pin-3.22-98547-g7a303a835-gcc-linux/source/tools/ToolUnitTests/lpd.c /* * Copyright (C) 2009-2021 Intel Corporation. * SPDX-License-Identifier: MIT */ #include <stdint.h> #include <stdio.h> typedef struct { uint64_t data[2] __attribute__((aligned(16))); } XMM_VALUE; XMM_VALUE in; XMM_VALUE out; int main() { in.data[0] = 0x1234567887654321; in.data[1] = 0x1234567887654321; asm("movapd %0,%%xmm0" ::"m"(in)); asm("movlpd %0,%%xmm0" ::"m"(in)); asm("movapd %%xmm0,%0" : "=m"(out)); if (memcmp(&in, &out, sizeof(in)) != 0) { printf("Error:\n"); printf("in:%llx %llx\n", in.data[0], in.data[1]); printf("out: %llx %llx\n", out.data[0], out.data[1]); return 1; } else { printf("Passed\n"); } return 0; }
Ewpratten/frc_971_mirror
y2018/control_loops/superstructure/intake/sensor_unwrap.h
#ifndef Y2018_CONTROL_LOOPS_SUPERSTRUCTURE_INTAKE_SENSOR_UNWRAP_H_ #define Y2018_CONTROL_LOOPS_SUPERSTRUCTURE_INTAKE_SENSOR_UNWRAP_H_ namespace y2018 { namespace control_loops { namespace superstructure { namespace intake { // UnwrapSensor takes in a sensor value from a sensor that loops in a certain // interval. ex(the sensor moves from 0 to 10 and back to 0 while moving the // same direction) By checking for big gaps in sensor readings it assumes you // have wrapped either back or forwards and handles accordingly. It returns the // overall sensor value. class UnwrapSensor { public: // The sensor_offset (+ or -) present the sensor value that is 'zero' // The sensor_range presents the absolute value of the sensor range from 0 to // sensor_range. This will be adjusted using the sensor_offset UnwrapSensor(double sensor_offset, double sensor_range); // Takes a wrapped sensor value and unwraps it to give you its total position. double Unwrap(double current_sensor_value); void Reset(); int sensor_wrapped() const { return wrap_count_; } private: const double sensor_offset_, sensor_range_; // The last value given from set_position, starts at offset double sensor_last_value_ = sensor_offset_; // Log if sensor is in wrapped state in either direction int wrap_count_ = 0; // function waits for first call with a value to set sensor_last_value_. Will // start to calculate the spring unwrap at the second function call. bool uninitialized_ = true; }; } // namespace intake } // namespace superstructure } // namespace control_loops } // namespace y2018 #endif // Y2018_CONTROL_LOOPS_SUPERSTRUCTURE_INTAKE_SENSOR_UNWRAP_H_
bipinu/consent-manager-web-ext
tests/unit/scripts/popup_scripts/utils/SiteParser.spec.js
<filename>tests/unit/scripts/popup_scripts/utils/SiteParser.spec.js import { Site } from '../../../../../app/scripts/entities'; import { SiteParser } from '../../../../../app/scripts/popup_scripts/utils/SiteParser'; describe('SiteParser', () => { let json; beforeEach(() => { json = { url: 'https://comply.org', cookies: [{ url: 'https://third.party.com' }], }; }); it('does not throw an exception for a valid json', () => { const subject = new SiteParser(); expect(() => subject.parse({ json })).not.toThrowError(); }); it('returns a Site instance', () => { const subject = new SiteParser(); const site = subject.parse({ json }); expect(site).toBeInstanceOf(Site); }); it('builds the Site successfully', () => { const subject = new SiteParser(); const site = subject.parse({ json }); expect(site.url).toEqual('https://comply.org'); expect(site.cookies).toHaveLength(1); expect(site.hasThirdPartyCookies()).toBe(true); }); it('throws an exception if the input is invalid', () => { const invalidJson = { cookies: [{ url: 'https://third.party.com' }] }; const subject = new SiteParser(); expect(() => subject.parse({ json: invalidJson })).toThrowError(); }); });
whwu123/zf
src/main/java/com/active4j/hr/hr/service/impl/OaHrTrainPlanServiceImpl.java
<reponame>whwu123/zf package com.active4j.hr.hr.service.impl; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import com.active4j.hr.hr.dao.OaHrTrainPlanDao; import com.active4j.hr.hr.entity.OaHrTrainPlanEntity; import com.active4j.hr.hr.service.OaHrTrainPlanService; import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; /** * * @title OaHrTrainPlanServiceImpl.java * @description 课程计划管理 * @time 2020年4月23日 上午10:15:01 * @author guyp * @version 1.0 */ @Service("oaHrTrainPlanService") @Transactional public class OaHrTrainPlanServiceImpl extends ServiceImpl<OaHrTrainPlanDao, OaHrTrainPlanEntity> implements OaHrTrainPlanService { }
josesilva1987/ms-students
node_modules/@nestjs/core/injector/modules-container.js
<filename>node_modules/@nestjs/core/injector/modules-container.js "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.ModulesContainer = void 0; const uuid_1 = require("uuid"); class ModulesContainer extends Map { constructor() { super(...arguments); this._applicationId = uuid_1.v4(); } get applicationId() { return this._applicationId; } } exports.ModulesContainer = ModulesContainer;
henrikingo/dsi
bin/tests/test_rules.py
"""Unit tests for the rules module. Run using nosetests.""" import os import unittest from dateutil import parser as date_parser from nose.tools import nottest import libanalysis.rules as rules import libanalysis.readers as readers import libanalysis.util as util from test_lib.fixture_files import FixtureFiles FIXTURE_FILES = FixtureFiles(os.path.join(os.path.dirname(__file__)), 'analysis') class TestResourceRules(unittest.TestCase): """Test class evaluates correctness of resource sanity check rules. """ def setUp(self): """Specifies the paths used to fetch JSON testing files. Additionally, sets up the common parameters for each operation being tested. """ # parameters used in test cases self.path_ftdc_3node_repl = FIXTURE_FILES.fixture_file_path( 'linux_3node_replSet_p1.ftdc.metrics') self.single_chunk_3node = self._first_chunk(self.path_ftdc_3node_repl) self.times_3node = self.single_chunk_3node[rules.FTDC_KEYS['time']] self.members_3node = ['0', '1', '2'] self.times_1node = [self.single_chunk_3node[rules.FTDC_KEYS['time']][0]] length = len(self.times_1node) self.single_chunk_1node = { key: self.single_chunk_3node[key][0:length] for key in self.single_chunk_3node } self.members_1node = ['0'] path_ftdc_standalone = FIXTURE_FILES.fixture_file_path('core_workloads_wt.ftdc.metrics') self.single_chunk_standalone = self._first_chunk(path_ftdc_standalone) self.times_standalone = self.single_chunk_standalone[rules.FTDC_KEYS['time']] self.path_3shard_directory = FIXTURE_FILES.fixture_file_path('test_replset_resource_rules') self.path_ftdc_repllag = FIXTURE_FILES.fixture_file_path('test_repllag') @staticmethod def _first_chunk(ftdc_filepath): """Short helper to only use the first chunk of a file """ for chunk in readers.read_ftdc(ftdc_filepath): return chunk def test_get_cache_max(self): """Test that we can get the configured cache size from a chunk """ cache_max_3node = 31122784256 observed = rules.get_configured_cache_size(self.single_chunk_3node) 
self.assertEqual(observed, cache_max_3node) def test_get_oplog_max(self): """Test that we can get the configured oplog size from a chunk """ oplog_max_3node = 161061273600 observed = rules.get_configured_oplog_size(self.single_chunk_3node) self.assertEqual(observed, oplog_max_3node) def test_get_repl_members(self): """Test that we can return a set of members from a 3-node replSet FTDC chunk """ observed = rules.get_repl_members(self.single_chunk_3node) expected = ['0', '1', '2'] self.assertEqual(observed, expected) def test_no_repl_members(self): """Test that we cannot return a set of members from a standalone FTDC chunk """ observed = rules.get_repl_members(self.single_chunk_standalone) expected = [] self.assertEqual(observed, expected) def test_below_cache_max_success(self): """Test expected success for case of current cache size below configured cache size """ cache_max_3node = 31122784256 observed = rules.below_configured_cache_size(self.single_chunk_3node, self.times_3node, cache_max_3node) expected = {} self.assertEqual(observed, expected) def test_below_cache_max_fail(self): """Test expected failure for case of current cache size being above configured cache size """ configured_cache_size = 100 observed = rules.below_configured_cache_size(self.single_chunk_3node, self.times_3node, configured_cache_size) expected = { 'times': self.times_3node, 'compared_values': [(32554, ), (32554, )], 'labels': ('current cache size (bytes)', ), 'additional': { 'WT configured cache size (bytes)': configured_cache_size } } self.assertEqual(observed, expected) def test_below_oplog_max_success(self): """Test expected success for case of current oplog size below configured oplog size """ oplog_max_3node = 161061273600 observed = rules.below_configured_oplog_size(self.single_chunk_3node, self.times_3node, oplog_max_3node) expected = {} self.assertEqual(observed, expected) def test_below_oplog_max_fail(self): """Test expected failure for case of current oplog size above configured 
oplog size """ configured_oplog_size = 10 observed = rules.below_configured_oplog_size(self.single_chunk_3node, self.times_3node, configured_oplog_size) expected = { 'times': self.times_3node, 'compared_values': [(86, ), (86, )], 'labels': ('current oplog size (MB)', ), 'additional': { 'WT configured max oplog size (MB)': configured_oplog_size, 'rule': 'current size <= (max size * 1.1)' } } self.assertEqual(observed, expected) def test_rule_not_applicable(self): """Test case where a rule does not apply to a variant """ configured_oplog_size = 0 observed = rules.below_configured_oplog_size(self.single_chunk_standalone, self.times_standalone, configured_oplog_size) expected = {} self.assertEqual(observed, expected) def test_max_connections_success(self): """Test expected success for current # connections below our specified upper bound """ max_thread_level = 64 observed = rules.max_connections(self.single_chunk_standalone, self.times_standalone, max_thread_level, []) expected = {} self.assertEqual(observed, expected) @nottest def helper_test_max_connections_equal(self, chunk, times, max_thread_level, repl_member_list, current_connections=42, expected=None): """Test expected success for current # connections below our specified upper bound: BF-8357 """ if expected is None: expected = {} key = rules.FTDC_KEYS['curr_connections'] length = len(chunk[key]) chunk[key] = [current_connections] * length observed = rules.max_connections(chunk, times, max_thread_level, repl_member_list) self.assertEqual(observed, expected) def test_max_connections_lte(self): """Test expected success for current # connections less than or equal to specified upper bound: BF-8357 """ max_thread_level = 8 self.helper_test_max_connections_equal(self.single_chunk_1node, self.times_1node, max_thread_level, self.members_1node, current_connections=41) self.helper_test_max_connections_equal(self.single_chunk_1node, self.times_1node, max_thread_level, self.members_1node) def 
test_max_connections_greater(self): """Test expected success for current # connections above our specified upper bound: BF-8357 """ max_thread_level = 8 self.helper_test_max_connections_equal( self.single_chunk_1node, self.times_1node, max_thread_level, self.members_1node, current_connections=43, expected={ 'times': self.times_1node, 'compared_values': [(43, )], 'labels': ('number of current connections', ), 'additional': { 'max thread level for this task': max_thread_level, 'connections between members? (4 * N)': 4, 'connections to MC and shell': 2, 'fudge_factor': 20, 'rule': '# connections <= (2 * max thread level + 2 + 4 + 20)' } }) # fudge factor behavior changes between 20 and 21 threads. lte 20 the fudge factor is 20, # above this value the fudge factor is 1.75 * max thread level. # PERF-1389 should fix this by identifying the tests (change stream and probably # snapshot_reads) with other source of connections (e.g. listener threads) and passing this # value to the max connections checks def test_max_20_connections_lte(self): """Test expected success for current # connections less than or equal to specified upper bound: BF-8357 """ max_thread_level = 20 self.helper_test_max_connections_equal(self.single_chunk_1node, self.times_1node, max_thread_level, self.members_1node, current_connections=66) self.helper_test_max_connections_equal(self.single_chunk_1node, self.times_1node, max_thread_level, self.members_1node) def test_max_20_connections_greater(self): """Test expected success for current # connections above our specified upper bound: BF-8357 """ max_thread_level = 20 self.helper_test_max_connections_equal( self.single_chunk_1node, self.times_1node, max_thread_level, self.members_1node, current_connections=67, expected={ 'times': self.times_1node, 'compared_values': [(67, )], 'labels': ('number of current connections', ), 'additional': { 'max thread level for this task': max_thread_level, 'connections between members? 
(4 * N)': 4, 'connections to MC and shell': 2, 'fudge_factor': 20, 'rule': '# connections <= (2 * max thread level + 2 + 4 + 20)' } }) def test_max_21_connections_lte(self): """Test expected success for current # connections less than or equal to specified upper bound: BF-8357 """ max_thread_level = 21 current_connections = 86 self.helper_test_max_connections_equal(self.single_chunk_1node, self.times_1node, max_thread_level, self.members_1node, current_connections=current_connections) self.helper_test_max_connections_equal(self.single_chunk_1node, self.times_1node, max_thread_level, self.members_1node) def test_max_21_connections_greater(self): """Test expected success for current # connections above our specified upper bound: BF-8357 """ max_thread_level = 21 current_connections = 87 fudge = 38 self.helper_test_max_connections_equal( self.single_chunk_1node, self.times_1node, max_thread_level, self.members_1node, current_connections=current_connections, expected={ 'times': self.times_1node, 'compared_values': [(current_connections, )], 'labels': ('number of current connections', ), 'additional': { 'max thread level for this task': max_thread_level, 'connections between members? 
(4 * N)': 4, 'connections to MC and shell': 2, 'fudge_factor': fudge, 'rule': '# connections <= (2 * max thread level + 2 + 4 + {})'.format(fudge) } }) # new test for a subsequent failure def test_max_241_connections_pass(self): """Test expected success for current # connections well above our specified upper bound """ max_thread_level = 60 current_connections = 242 self.helper_test_max_connections_equal(self.single_chunk_3node, self.times_3node, max_thread_level, self.members_3node, current_connections=current_connections) # this test will need to change if PERF-1389 is fixed def test_max_connections_fail(self): """Test expected failure for current # connections well above our specified upper bound """ max_thread_level = 60 current_connections = 243 fudge = 108 self.helper_test_max_connections_equal( self.single_chunk_3node, self.times_3node, max_thread_level, self.members_3node, current_connections=current_connections, expected={ 'times': self.times_3node, 'compared_values': [(current_connections, ), (current_connections, )], 'labels': ('number of current connections', ), 'additional': { 'max thread level for this task': max_thread_level, 'connections between members? 
(4 * N)': 12, 'connections to MC and shell': 2, 'fudge_factor': fudge, 'rule': '# connections <= (2 * max thread level + 2 + 12 + {})'.format(fudge) } }) def test_member_state_success(self): """Test expected success for members all in 'healthy' states """ observed = rules.repl_member_state(self.single_chunk_3node, self.times_3node, self.members_3node, None) # no test times expected = {} print(observed) self.assertEqual(observed, expected) def test_member_state_fail(self): """Test expected failure for member discovered in an 'unhealthy' state """ rules.FLAG_MEMBER_STATES[2] = 'SECONDARY' observed = rules.repl_member_state(self.single_chunk_3node, self.times_3node, self.members_3node, None) # no test times expected = { 'members': { '0': { 'times': self.times_3node, 'compared_values': [('SECONDARY', ), ('SECONDARY', )], 'labels': ('member 0 state', ) } } } self.assertEqual(observed, expected) del rules.FLAG_MEMBER_STATES[2] def test_pri_not_found(self): """Test expected primary member cannot be found """ primary = rules.find_primary(self.single_chunk_3node, self.members_3node) self.assertIsNone(primary) def test_pri_found(self): """Test expected primary member is found by chunk #4 (manually verified) """ chunks_until_primary = 3 for chunk in readers.read_ftdc(self.path_ftdc_3node_repl): primary = rules.find_primary(chunk, self.members_3node) if not chunks_until_primary: self.assertEqual(primary, '0') break else: self.assertIsNone(primary) chunks_until_primary -= 1 def test_ftdc_replica_lag_check_success(self): """Test expected success for repl set secondary member lag check """ path_ftdc = os.path.join(self.path_3shard_directory, 'metrics.3shard_p1_repl') perf_json = os.path.join(self.path_3shard_directory, 'perf.json') test_times = util.get_test_times(perf_json) observed = rules.ftdc_replica_lag_check(path_ftdc, test_times) expected = [] self.assertEqual(observed, expected) def test_ftdc_replica_lag_check_fail(self): """Test expected failure for repl set secondary 
member lag check The diagnostic.data file metrics.mongod.0 contains ftdc data from the primary on a 3 node replica set. In the data there are 4 distinct periods where replication lag will be above the threshold of 15 seconds, as you can see from the `expected` output object below. Note that unittest-files/test_repllag/failure_message.txt.ok contains the human readable failure message that corresponds to these replication lag failures. """ path_ftdc = os.path.join(self.path_ftdc_repllag, 'metrics.mongod.0') perf_json = os.path.join(self.path_ftdc_repllag, 'perf.json') test_times = util.get_test_times(perf_json) observed = rules.ftdc_replica_lag_check(path_ftdc, test_times) expected = [{ 'additional': { 'lag end threshold (s)': 2.0, 'lag start threshold (s)': 15.0, 'primary member': '0' }, 'members': { '1': { 'compared_values': [ (16.0, '2017-05-31 16:54:42Z', 129.0, '2017-05-31 16:54:42Z', 120.0), (17.0, '2017-05-31 16:59:23Z', 104.0, '2017-05-31 16:59:26Z', 99.0), (16.0, '2017-05-31 17:04:33Z', 117.0, '2017-05-31 17:04:34Z', 110.0), (16.0, '2017-05-31 17:09:13Z', 93.0, '2017-05-31 17:09:32Z', 12.0) ], 'labels': ('start value (s)', 'max time', 'max value (s)', 'end time', 'end value (s)'), 'report_all_values': True, 'times': [1496248949000, 1496249726000, 1496250019000, 1496250331000] }, '2': { 'compared_values': [ (16.0, '2017-05-31 16:54:03Z', 90.0, '2017-05-31 16:54:04Z', 82.0), (16.0, '2017-05-31 16:58:53Z', 76.0, '2017-05-31 16:59:00Z', 72.0), (16.0, '2017-05-31 17:03:53Z', 80.0, '2017-05-31 17:03:58Z', 77.0), (16.0, '2017-05-31 17:08:53Z', 70.0, '2017-05-31 17:08:54Z', 62.0) ], 'labels': ('start value (s)', 'max time', 'max value (s)', 'end time', 'end value (s)'), 'report_all_values': True, 'times': [1496248967000, 1496249735000, 1496250027000, 1496250339000] } } }] self.assertEqual(observed, expected) def test_lag_no_perf_file(self): """Test expected success when no test times are specified """ path_ftdc_3shard = os.path.join(self.path_3shard_directory, 
'metrics.3shard_p1_repl') observed = rules.ftdc_replica_lag_check(path_ftdc_3shard, None) expected = [] self.assertEqual(observed, expected) class TestFailureOutputFormatting(unittest.TestCase): """Test class checks resource sanity rules' error message formatting """ def test_fail_collection_info(self): """Test expected output in _failure_collection when failure is detected """ times = [1, 2, 3] compared_values = [(0, 1), (0, 3)] labels = ('label1', 'label2') additional = {'random_info': 1} observed = rules.failure_collection(times, compared_values, labels, additional) expected = { 'times': times, 'compared_values': compared_values, 'labels': labels, 'additional': additional } self.assertEqual(observed, expected) def test_fail_collection_empty(self): """Test expected output in _failure_collection when no failures detected """ labels = ('label1', 'label2') observed = rules.failure_collection([], [], labels) expected = {} self.assertEqual(observed, expected) class TestLogAnalysisRules(unittest.TestCase): """Test class evaluates correctness of mongod.log check rules """ def setUp(self): self.rules = { 'bad_log_types': ["F", "E"], 'bad_messages': [ 'starting an election', 'election succeeded', 'transition to primary', 'posix_fallocate failed' ] } def test_is_log_line_bad(self): """Test `_is_log_line_bad()`.""" bad_lines = [ "2016-07-14T01:00:04.000+0000 F err-type foo bar baz", "2016-07-14T01:00:04.000+0000 E err-type foo bar baz", "2016-07-14T01:00:04.000+0000 L err-type elecTIon suCCEeded", "2016-07-14T01:00:04.000+0000 D err-type transition TO PRIMARY", "2016-07-14T01:00:04.000+0000 I err-type PosIx_FallocaTE FailEd", # First logv2 message. The message has format param `{rsConfig_getElectionTimeoutPeriod}` # but the attr is `rsConfig_getElectionTimeoutPeriodMillis` (probably a typo). 
'{"t":{"$date":"2020-03-02T05:37:29.666+0000"},"s":"I", "c":"ELECTION","id":4615652,' '"ctx":"ReplCoord-2","msg":"Starting an election, since we\'ve seen no PRIMARY in the past ' '{rsConfig_getElectionTimeoutPeriod}","attr":{"rsConfig_getElectionTimeoutPeriodMillis":10000}}', # Second logv2 message. The `msg` itself is just "{}" but the params indicate failure # because it has "Starting an election" in the attr. '{"t":{"$date":"2020-03-02T06:32:06.307+0000"},"s":"I", "c":"ELECTION","id":0,' '"ctx":"ReplCoord-1","msg":"{}","attr":{"message":"Starting an election"}}', "{\"t\":{\"$date\":\"2016-07-14T01:00:04.000Z\"},\"s\":\"F\", \"c\":\"COMMAND\", \"ctx\":\"conn7\"," "\"msg\":\"foo bar baz\"}", "{\"t\":{\"$date\":\"2016-07-14T01:00:04.000Z\"},\"s\":\"E\", \"c\":\"COMMAND\", \"ctx\":\"conn7\"," "\"msg\":\"foo bar baz\"}", "{\"t\":{\"$date\":\"2016-07-14T01:00:04.000Z\"},\"s\":\"L\", \"c\":\"ELECTION\", \"ctx\":\"conn7\"," "\"msg\":\"elecTIon suCCEeded\"}", "{\"t\":{\"$date\":\"2016-07-14T01:00:04.000Z\"},\"s\":\"D\", \"c\":\"REPL\", \"ctx\":\"conn7\"," "\"msg\":\"transition TO PRIMARY\"}", "{\"t\":{\"$date\":\"2016-07-14T01:00:04.000Z\"},\"s\":\"I\", \"c\":\"STORAGE\", \"ctx\":\"conn7\"," "\"msg\":\"PosIx_FallocaTE FailEd\"}", "{\"t\":{\"$date\":\"2016-07-14T01:00:04.000Z\"},\"s\":\"D\", \"c\":\"REPL\", \"ctx\":\"conn7\"," "\"msg\":\"transition to {newState} from {memberState}\",\"attr\":{\"newState\":\"PRIMARY\"," "\"memberState\":\"SECONDARY\"}}" ] good_lines = [ "2016-07-14T01:00:04.000+0000 L err-type nothing bad here", "2016-07-14T01:00:04.000+0000 L err-type or here", "2016-07-14T01:00:04.000+0000 E err-type ttl query execution for index", # Example logv2 message. The `msg` itself is just "{}". 
'{"t":{"$date":"2020-03-02T06:32:06.307+0000"},"s":"I", "c":"ELECTION","id":0,' '"ctx":"ReplCoord-1","msg":"{}","attr":{"message":"VoteRequester(term 1 dry run) ' "received a yes vote from 10.2.0.200:27017; response message: { term: 1, voteGranted: true, " 'reason: \\"\\", ok: 1.0, $clusterTime: { clusterTime: Timestamp(1583130713, 4), signature: ' "{ hash: BinData(0, 0000000000000000000000000000000000000000), keyId: 0 } }, operationTime: " 'Timestamp(1583130713, 4) }"}}', "{\"t\":{\"$date\":\"2016-07-14T01:00:04.000Z\"},\"s\":\"L\", \"c\":\"COMMAND\", \"ctx\":\"conn7\"," "\"msg\":\"nothing bad here\"}", "{\"t\":{\"$date\":\"2016-07-14T01:00:04.000Z\"},\"s\":\"L\", \"c\":\"COMMAND\", \"ctx\":\"conn7\"," "\"msg\":\"or here\"}", "{\"t\":{\"$date\":\"2016-07-14T01:00:04.000Z\"},\"s\":\"E\", \"c\":\"COMMAND\", \"ctx\":\"conn7\"," "\"msg\":\"ttl query execution for index\"}" ] for line in bad_lines: self.assertTrue(rules.is_log_line_bad(line, self.rules)) for line in good_lines: self.assertFalse(rules.is_log_line_bad(line, self.rules)) def test_is_log_line_bad_task(self): """Test `_is_log_line_bad()` for specific tasks.""" sometimes_bad_line = "2016-07-14T01:00:04.000+0000 D err-type transition TO PRIMARY" always_bad_line = "2016-07-14T01:00:04.000+0000 I err-type PosIx_FallocaTE FailEd" self.assertTrue(rules.is_log_line_bad(sometimes_bad_line, self.rules)) self.assertTrue( rules.is_log_line_bad(always_bad_line, self.rules, task="industry_benchmarks")) self.assertFalse( rules.is_log_line_bad(sometimes_bad_line, self.rules, task="service_architecture_workloads")) self.assertTrue(rules.is_log_line_bad(always_bad_line, self.rules)) self.assertTrue( rules.is_log_line_bad(always_bad_line, self.rules, task="industry_benchmarks")) self.assertTrue( rules.is_log_line_bad(always_bad_line, self.rules, task="service_architecture_workloads")) def test_is_log_line_bad_time(self): """Test `_is_log_line_bad()` when test times are specified.""" test_times = [ 
            (date_parser.parse("2016-07-14T01:00:00.000+0000"),
             date_parser.parse("2016-07-14T01:10:00.000+0000")),
            (date_parser.parse("2016-07-14T03:00:00.000+0000"),
             date_parser.parse("2016-07-14T03:10:00.000+0000")),
            (date_parser.parse("2016-07-14T05:00:00.999+0000"),
             date_parser.parse("2016-07-14T05:10:00.000+0000")),
        ]
        # NOTE(review): this list is the tail of a test method defined above this
        # excerpt; `date_parser`, `rules` and `FIXTURE_FILES` come from earlier in
        # the file.
        # last 2 times are the same time as test start / end (i.e. the times are inclusive)
        bad_lines = [
            "2016-07-14T01:00:04.000+0000 F err-type message",
            "2016-07-14T01:09:00.000+0000 F err-type message",
            "2016-07-14T03:05:00.000+0000 F err-type message",
            "2016-07-14T05:00:00.999+0000 F err-type message",
            "2016-07-14T05:10:00.000+0000 F err-type message"
        ]
        # last 2 times are just before and after the test started
        bad_lines_to_ignore = [
            "2016-07-14T00:05:00.000+0000 F err-type message",
            "2016-07-14T02:00:00.000+0000 F err-type message",
            "2016-07-14T03:25:00.000+0000 F err-type message",
            "2016-07-14T05:00:00.998+0000 F err-type message",
            "2016-07-14T05:10:00.001+0000 F err-type message"
        ]
        # Lines stamped inside any (start, end) window must be flagged as bad...
        for line in bad_lines:
            self.assertTrue(rules.is_log_line_bad(line, self.rules, test_times))
        # ...and lines stamped outside every window must be ignored.
        for line in bad_lines_to_ignore:
            self.assertFalse(rules.is_log_line_bad(line, self.rules, test_times))


class TestDBCorrectnessRules(unittest.TestCase):
    """Test class evaluates correctness of DB correctness check rules.
    """

    def test_dbcorrect_success(self):
        """Test expected success in db correctness test log file parsing
        """
        log_dir = FIXTURE_FILES.fixture_file_path('core_workloads_reports')
        # One 'pass' entry is expected per JS correctness check found in the
        # fixture directory; ordering of the entries matters for assertEqual.
        expected_results = [{
            'status': 'pass',
            'start': 0,
            'log_raw': ('\nPassed db-hash-check.core_workloads_reports JS test.'),
            'test_file': 'db-hash-check.core_workloads_reports',
            'exit_code': 0
        }, {
            'status': 'pass',
            'start': 0,
            'log_raw': ('\nPassed validate-indexes-and-collections.core_workloads_reports JS test.'),
            'test_file': ('validate-indexes-and-collections.core_workloads_reports'),
            'exit_code': 0
        }]
        observed_results = rules.db_correctness_analysis(log_dir)
        self.assertEqual(expected_results, observed_results)

    def test_dbcorrect_fail(self):
        """Test expected failure in db correctness test log file parsing
        """
        log_dir = FIXTURE_FILES.fixture_file_path('test_db_correctness')
        # Raw log output reproduced verbatim from the fixture, including the
        # trailing exit code line ("1").
        raw_failure = ('\nFAILURE: (logfile `localhost--localhost`)\n'
                       '2016-08-03T15:04:55.395-0400 E QUERY [thread1] '
                       'Error: Collection validation failed :\n@(shell eval):1:20\n'
                       '@(shell eval):1:2\n\nFailed to run JS test on server [localhost], '
                       'host [localhost]\n1')
        expected_results = [{
            'status': 'fail',
            'start': 0,
            'log_raw': raw_failure,
            'test_file': 'validate-indexes-and-collections.test_db_correctness',
            'exit_code': 1
        }]
        observed_results = rules.db_correctness_analysis(log_dir)
        self.assertEqual(expected_results, observed_results)

    def test_dbcorrect_no_exit_code(self):
        """Test expected failure in db correctness test log file missing integer exit status
        """
        log_dir = FIXTURE_FILES.fixture_file_path('test_db_correctness_exit_fail')
        raw_failure = ('\nFAILURE: logfile `localhost--localhost` did not record a valid exit '
                       'code. Output:\n 2016-08-03T15:04:55.395-0400 E QUERY [thread1] '
                       'Error: Collection validation failed :\n@(shell eval):1:20\n'
                       '@(shell eval):1:2\n\nFailed to run JS test on server [localhost], '
                       'host [localhost]')
        # A missing/invalid exit status is normalized to a failing exit_code of 1.
        expected_results = [{
            'status': 'fail',
            'start': 0,
            'log_raw': raw_failure,
            'test_file': ('validate-indexes-and-collections.test_db_correctness_exit_fail'),
            'exit_code': 1
        }]
        observed_results = rules.db_correctness_analysis(log_dir)
        self.assertEqual(expected_results, observed_results)

    def test_no_jstests_run(self):
        """Test expected empty result when no db correctness checks are made
        """
        log_dir = FIXTURE_FILES.fixture_file_path('test_log_analysis')
        expected_results = []
        observed_results = rules.db_correctness_analysis(log_dir)
        self.assertEqual(expected_results, observed_results)


if __name__ == '__main__':
    unittest.main()
rohankumardubey/cayenne
cayenne-client/src/test/java/org/apache/cayenne/remote/ValueInjectorIT.java
/*****************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 ****************************************************************/
package org.apache.cayenne.remote;

import org.apache.cayenne.ObjectContext;
import org.apache.cayenne.access.DataContext;
import org.apache.cayenne.di.Inject;
import org.apache.cayenne.exp.Expression;
import org.apache.cayenne.exp.ExpressionFactory;
import org.apache.cayenne.map.ObjEntity;
import org.apache.cayenne.remote.service.LocalConnection;
import org.apache.cayenne.testdo.mt.ClientMtTable1Subclass1;
import org.apache.cayenne.testdo.mt.MtTable1Subclass1;
import org.apache.cayenne.unit.di.server.CayenneProjects;
import org.apache.cayenne.unit.di.server.UseServerRuntime;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

import java.util.Arrays;
import java.util.Collection;

import static org.junit.Assert.assertEquals;

/**
 * Verifies that values implied by an entity's declared qualifier are injected
 * into newly created objects, both in a server-side {@link DataContext} and in
 * a client (ROP) context. The whole suite is parameterized over every
 * {@link LocalConnection} serialization policy.
 *
 * NOTE(review): the assertEquals calls below pass (actual, expected) — the
 * reverse of the JUnit convention. Harmless, but failure messages will read
 * backwards.
 */
@UseServerRuntime(CayenneProjects.MULTI_TIER_PROJECT)
@RunWith(value=Parameterized.class)
public class ValueInjectorIT extends RemoteCayenneCase {

    @Inject
    protected DataContext serverContext;

    // One run of every test per ROP serialization policy.
    @Parameters
    public static Collection<Object[]> data() {
        return Arrays.asList(new Object[][]{
                {LocalConnection.HESSIAN_SERIALIZATION},
                {LocalConnection.JAVA_SERIALIZATION},
                {LocalConnection.NO_SERIALIZATION},
        });
    }

    public ValueInjectorIT(int serializationPolicy) {
        super.serializationPolicy = serializationPolicy;
    }

    /**
     * Server-side context: a new object must pick up the value implied by the
     * entity qualifier, including values added by AND-ing an extra expression.
     * The original qualifier is always restored so other tests are unaffected.
     */
    @Test
    public void testServer() {
        ObjEntity entity = serverContext.getEntityResolver().getObjEntity(MtTable1Subclass1.class);
        Expression qualifier = entity.getDeclaredQualifier();

        try {
            MtTable1Subclass1 ee = serverContext.newObject(MtTable1Subclass1.class);
            assertEquals(ee.getGlobalAttribute1(), "sub1");

            // check AND
            entity.setDeclaredQualifier(qualifier.andExp(ExpressionFactory.exp("serverAttribute1 = 'sa'")));
            ee = serverContext.newObject(MtTable1Subclass1.class);
            assertEquals(ee.getGlobalAttribute1(), "sub1");
            assertEquals(ee.getServerAttribute1(), "sa");
        } finally {
            // Restore the original qualifier regardless of assertion outcome.
            entity.setDeclaredQualifier(qualifier);
        }
    }

    /**
     * Same check against a client (ROP) context and the client-side entity.
     */
    @Test
    public void testClient() {
        ObjectContext context = createROPContext();
        ObjEntity entity = context.getEntityResolver().getObjEntity(ClientMtTable1Subclass1.class);
        Expression qualifier = entity.getDeclaredQualifier();

        try {
            ClientMtTable1Subclass1 ee = context.newObject(ClientMtTable1Subclass1.class);
            assertEquals(ee.getGlobalAttribute1(), "sub1");

            // check AND
            entity.setDeclaredQualifier(qualifier.andExp(ExpressionFactory.exp("serverAttribute1 = 'sa'")));
            ee = context.newObject(ClientMtTable1Subclass1.class);
            assertEquals(ee.getGlobalAttribute1(), "sub1");
            assertEquals(ee.getServerAttribute1(), "sa");
        } finally {
            entity.setDeclaredQualifier(qualifier);
        }
    }
}
impbk2002/bartworks
src/main/java/com/github/bartimaeusnek/crossmod/thaumcraft/tile/GT_WandBuffer.java
/*
 * Copyright (c) 2018-2020 bartimaeusnek
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package com.github.bartimaeusnek.crossmod.thaumcraft.tile;

import com.github.bartimaeusnek.crossmod.thaumcraft.util.ThaumcraftHandler;
import gregtech.api.interfaces.ITexture;
import gregtech.api.interfaces.tileentity.IGregTechTileEntity;
import gregtech.api.items.GT_MetaBase_Item;
import gregtech.api.metatileentity.implementations.GT_MetaTileEntity_BasicBatteryBuffer;
import gregtech.api.util.GT_ModHandler;
import ic2.api.item.ElectricItem;
import ic2.api.item.IElectricItem;
import net.minecraft.item.ItemStack;

/**
 * Battery-buffer variant whose slots accept Thaumcraft wands
 * (see {@link ThaumcraftHandler#isWand}) instead of ordinary batteries.
 */
public class GT_WandBuffer extends GT_MetaTileEntity_BasicBatteryBuffer {

    public GT_WandBuffer(int aID, String aName, String aNameRegional, int aTier, String aDescription, int aSlotCount) {
        super(aID, aName, aNameRegional, aTier, aDescription, aSlotCount);
    }

    public GT_WandBuffer(String aName, int aTier, String aDescription, ITexture[][][] aTextures, int aSlotCount) {
        super(aName, aTier, aDescription, aTextures, aSlotCount);
    }

    public GT_WandBuffer(String aName, int aTier, String[] aDescription, ITexture[][][] aTextures, int aSlotCount) {
        super(aName, aTier, aDescription, aTextures, aSlotCount);
    }

    /**
     * Server-side per-tick update: refreshes the charge/discharge flags and
     * recounts wand slots.
     *
     * mCharge is enabled when stored EU / 2 exceeds a third of capacity and
     * mDecharge when stored EU drops below a third of capacity — presumably a
     * hysteresis band inherited from the battery-buffer behavior (TODO confirm
     * against the superclass contract).
     */
    public void onPostTick(IGregTechTileEntity aBaseMetaTileEntity, long aTick) {
        if (aBaseMetaTileEntity.isServerSide()) {
            this.mCharge = aBaseMetaTileEntity.getStoredEU() / 2L > aBaseMetaTileEntity.getEUCapacity() / 3L;
            this.mDecharge = aBaseMetaTileEntity.getStoredEU() < aBaseMetaTileEntity.getEUCapacity() / 3L;
            this.mBatteryCount = 0;
            this.mChargeableCount = 0;
            // Every wand in the inventory counts as both a battery and a chargeable.
            for (ItemStack tStack : this.mInventory) {
                if (ThaumcraftHandler.isWand(tStack)) {
                    ++this.mBatteryCount;
                    ++this.mChargeableCount;
                }
            }
        }
    }

    // Only wands may be pulled from or pushed into this buffer's slots.
    public boolean allowPullStack(IGregTechTileEntity aBaseMetaTileEntity, int aIndex, byte aSide, ItemStack aStack) {
        return ThaumcraftHandler.isWand(aStack);
    }

    public boolean allowPutStack(IGregTechTileEntity aBaseMetaTileEntity, int aIndex, byte aSide, ItemStack aStack) {
        return ThaumcraftHandler.isWand(aStack);
    }

    // One wand per slot.
    public int getInventoryStackLimit() {
        return 1;
    }

    /**
     * Sums the machine's own stored EU/capacity with the charge/capacity of
     * every electric item in the inventory.
     *
     * Values above Long.MAX_VALUE / 2 (4611686018427387903L) are treated as
     * overflow and clamp the corresponding total to Long.MAX_VALUE.
     *
     * NOTE(review): this loop filters on GT_ModHandler.isElectricItem, while
     * the slots only admit wands — whether wands register as electric items is
     * not visible here; verify against ThaumcraftHandler/GT_ModHandler.
     *
     * @return {stored, capacity} as a two-element array.
     */
    public long[] getStoredEnergy() {
        boolean scaleOverflow = false;
        boolean storedOverflow = false;
        long tScale = this.getBaseMetaTileEntity().getEUCapacity();
        long tStored = this.getBaseMetaTileEntity().getStoredEU();
        long tStep;
        if (this.mInventory != null) {
            for (ItemStack aStack : this.mInventory) {
                if (GT_ModHandler.isElectricItem(aStack)) {
                    if (aStack.getItem() instanceof GT_MetaBase_Item) {
                        Long[] stats = ((GT_MetaBase_Item) aStack.getItem()).getElectricStats(aStack);
                        if (stats != null) {
                            if (stats[0] > 4611686018427387903L) {
                                scaleOverflow = true;
                            }
                            tScale += stats[0];
                            tStep = ((GT_MetaBase_Item) aStack.getItem()).getRealCharge(aStack);
                            if (tStep > 4611686018427387903L) {
                                storedOverflow = true;
                            }
                            tStored += tStep;
                        }
                    } else if (aStack.getItem() instanceof IElectricItem) {
                        tStored += (long) ElectricItem.manager.getCharge(aStack);
                        tScale += (long) ((IElectricItem) aStack.getItem()).getMaxCharge(aStack);
                    }
                }
            }
        }
        if (scaleOverflow) {
            tScale = 9223372036854775807L;
        }
        if (storedOverflow) {
            tStored = 9223372036854775807L;
        }
        return new long[]{tStored, tScale};
    }
}
nulogy/design-system
src/utils/deprecatedProp.js
/* eslint-disable no-console */

/**
 * Wraps a prop-type validator so that any non-nullish use of the deprecated
 * prop logs a console warning naming its replacement, then defers validation
 * to the wrapped prop type unchanged.
 *
 * @param {Function} propType - the original prop-type validator to delegate to.
 * @param {string} newPropName - the prop that supersedes the deprecated one.
 * @returns {Function} a drop-in prop-type validator.
 */
export const deprecatedProp = (propType, newPropName) => (props, propName, componentName, ...rest) => {
  if (props[propName] != null) {
    console.error(
      `NDS Warning: "${propName}" prop of "${componentName}" has been deprecated.\n Please use the "${newPropName}" prop instead. If you need assistance upgrading please message #design-system`
    );
  }
  return propType(props, propName, componentName, ...rest);
};
atlasapi/atlas
src/test/java/org/atlasapi/remotesite/ContentMergerTest.java
package org.atlasapi.remotesite;

import java.util.List;
import java.util.Set;

import org.atlasapi.media.entity.Alias;
import org.atlasapi.media.entity.Broadcast;
import org.atlasapi.media.entity.Encoding;
import org.atlasapi.media.entity.Episode;
import org.atlasapi.media.entity.Item;
import org.atlasapi.media.entity.Location;
import org.atlasapi.media.entity.ParentRef;
import org.atlasapi.media.entity.Publisher;
import org.atlasapi.media.entity.ReleaseDate;
import org.atlasapi.media.entity.Restriction;
import org.atlasapi.media.entity.Series;
import org.atlasapi.media.entity.TopicRef;
import org.atlasapi.media.entity.Version;
import org.atlasapi.remotesite.ContentMerger.AliasMergeStrategy;
import org.atlasapi.remotesite.ContentMerger.MergeStrategy;

import com.metabroadcast.common.intl.Countries;
import com.metabroadcast.common.time.DateTimeZones;

import com.google.common.base.Equivalence;
import com.google.common.base.Objects;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.runners.MockitoJUnitRunner;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Tests for {@link ContentMerger}: how versions, topics and aliases on a
 * "current" (stored) content item are combined with those on an "extracted"
 * (freshly ingested) item under each {@link MergeStrategy}, and how the
 * NITRO revoke strategy marks deleted versions/locations unavailable.
 *
 * ContentMerger's three constructor arguments are, in order, the version,
 * topic and alias strategies (as used consistently below).
 */
@RunWith(MockitoJUnitRunner.class)
public class ContentMergerTest {

    private static final Publisher PUBLISHER = Publisher.METABROADCAST;

    // MERGE on versions: broadcasts of same-URI versions are combined.
    @Test
    public void testVersionMerger() {
        ContentMerger contentMerger = new ContentMerger(MergeStrategy.MERGE, MergeStrategy.KEEP,
                MergeStrategy.REPLACE);
        Item current = new Item();
        Item extracted = new Item();

        Broadcast broadcast1 = new Broadcast("http://example.com/channel1",
                new DateTime(DateTimeZones.UTC), new DateTime(DateTimeZones.UTC).plusHours(1));
        Broadcast broadcast2 = new Broadcast("http://example.com/channel1",
                new DateTime(DateTimeZones.UTC).plusHours(4),
                new DateTime(DateTimeZones.UTC).plusHours(5));

        Version version1 = new Version();
        version1.setCanonicalUri("http://example.org/1");
        version1.setBroadcasts(ImmutableSet.of(broadcast1));
        current.setVersions(ImmutableSet.of(version1));

        Version version2 = new Version();
        version2.setCanonicalUri("http://example.org/1");
        version2.setBroadcasts(ImmutableSet.of(broadcast2));
        extracted.setVersions(ImmutableSet.of(version2));

        Item merged = contentMerger.merge(current, extracted);
        // Same canonical URI, so one version holding both broadcasts.
        assertEquals(2, Iterables.getOnlyElement(merged.getVersions()).getBroadcasts().size());
    }

    // REPLACE on versions: the extracted version set wins wholesale.
    @Test
    public void testVersionMergeReplaceStrategy() {
        ContentMerger contentMerger = new ContentMerger(MergeStrategy.REPLACE, MergeStrategy.KEEP,
                MergeStrategy.REPLACE);
        Series current = new Series();
        Series extracted = new Series();

        Version version1 = new Version();
        version1.setCanonicalUri("http://example.org/1");
        current.setVersions(ImmutableSet.of(version1));

        Restriction restriction = new Restriction();
        restriction.setRestricted(true);

        Version version2 = new Version();
        version2.setCanonicalUri("http://example.org/2");
        version2.setRestriction(restriction);
        extracted.setVersions(ImmutableSet.of(version2));

        Series merged = (Series)contentMerger.merge(current, extracted);
        Version mergedVersion = Iterables.getOnlyElement(merged.getVersions());
        assertEquals("http://example.org/2", mergedVersion.getCanonicalUri());
        assertTrue(mergedVersion.getRestriction().isRestricted());
    }

    /**
     * replaceTopicsBasedOn: topic refs that the supplied Equivalence (here:
     * same offset) considers equal are replaced by the extracted ref; refs with
     * no extracted counterpart are kept; new extracted refs are added.
     */
    @Test
    public void testTopicMergerOnSuppliedEquivalence() {
        final ContentMerger contentMerger = new ContentMerger(MergeStrategy.KEEP,
                MergeStrategy.replaceTopicsBasedOn(new Equivalence<TopicRef>() {
                    @Override
                    protected boolean doEquivalent(TopicRef a, TopicRef b) {
                        return Objects.equal(a.getOffset(), b.getOffset());
                    }

                    @Override
                    protected int doHash(TopicRef topicRef) {
                        return Objects.hashCode(topicRef.getOffset());
                    }
                }), MergeStrategy.REPLACE);

        // aN/bN pairs share offsets 45 and 450; c1/d2 are unmatched; n1/n2 have no offset.
        TopicRef a1 = new TopicRef(9000L, 0f, false, TopicRef.Relationship.ABOUT, 45);
        TopicRef a2 = new TopicRef(9001L, 0f, true, TopicRef.Relationship.TRANSCRIPTION, 45);
        TopicRef b1 = new TopicRef(9000L, 0f, false, TopicRef.Relationship.ABOUT, 450);
        TopicRef b2 = new TopicRef(9001L, 0f, true, TopicRef.Relationship.TRANSCRIPTION, 450);
        TopicRef c1 = new TopicRef(9000L, 0f, false, TopicRef.Relationship.ABOUT, 324324);
        TopicRef d2 = new TopicRef(9001L, 0f, true, TopicRef.Relationship.TRANSCRIPTION, 234098);
        TopicRef n1 = new TopicRef(201L, 0f, true, TopicRef.Relationship.ABOUT);
        TopicRef n2 = new TopicRef(9001L, 0f, true, TopicRef.Relationship.ABOUT);

        Item current = new Item();
        current.setTopicRefs(ImmutableList.of(a1, b1, c1, n1));
        Item extracted = new Item();
        extracted.setTopicRefs(ImmutableList.of(a2, b2, d2, n2));

        Item merged = contentMerger.merge(current, extracted);
        List<TopicRef> mergedRefs = merged.getTopicRefs();
        assertEquals(5, mergedRefs.size());
        assertTrue(mergedRefs.contains(a2));
        assertTrue(mergedRefs.contains(b2));
        assertTrue(mergedRefs.contains(c1));
        assertTrue(mergedRefs.contains(d2));
        assertTrue(mergedRefs.contains(n2));
    }

    // The merged object's runtime type follows the *extracted* side.
    @Test
    public void testItemItemMergingProducesItem() {
        ContentMerger contentMerger = new ContentMerger(MergeStrategy.MERGE, MergeStrategy.KEEP,
                MergeStrategy.REPLACE);
        Item current = createItem("old title", PUBLISHER);
        Item extracted = createItem("new title", PUBLISHER);

        Item merged = contentMerger.merge(current, extracted);
        assertTrue("Merged object should be of same type as extracted object",
                !(merged instanceof Episode));
        assertEquals("new title", merged.getTitle());
    }

    @Test
    public void testEpisodeItemMergingProducesItem() {
        ContentMerger contentMerger = new ContentMerger(MergeStrategy.MERGE, MergeStrategy.KEEP,
                MergeStrategy.REPLACE);
        Episode current = createEpisode("old title", PUBLISHER, 3);
        Item extracted = createItem("new title", PUBLISHER);

        Item merged = contentMerger.merge(current, extracted);
        assertTrue("Merged object should be of same type as extracted object",
                !(merged instanceof Episode));
        assertEquals("new title", merged.getTitle());
    }

    @Test
    public void testItemEpisodeMergingProducesEpisode() {
        ContentMerger contentMerger = new ContentMerger(MergeStrategy.MERGE, MergeStrategy.KEEP,
                MergeStrategy.REPLACE);
        String extractedTitle = "new title";
        Integer extractedEpisodeNum = 5;
        Item current = createItem("old title", PUBLISHER);
        Episode extracted = createEpisode(extractedTitle, PUBLISHER, extractedEpisodeNum);

        Episode merged = (Episode) contentMerger.merge(current, extracted);
        assertEquals(extractedTitle, merged.getTitle());
        assertEquals(extractedEpisodeNum, merged.getEpisodeNumber());
    }

    @Test
    public void testEpisodeEpisodeMergingProducesEpisode() {
        ContentMerger contentMerger = new ContentMerger(MergeStrategy.MERGE, MergeStrategy.KEEP,
                MergeStrategy.REPLACE);
        String extractedTitle = "new title";
        Integer extractedEpisodeNum = 5;
        Episode current = createEpisode("old title", PUBLISHER, 3);
        Episode extracted = createEpisode(extractedTitle, PUBLISHER, extractedEpisodeNum);

        Episode merged = (Episode) contentMerger.merge(current, extracted);
        assertEquals(extractedTitle, merged.getTitle());
        assertEquals(extractedEpisodeNum, merged.getEpisodeNumber());
    }

    @Test
    public void testEpisodeEpisodeMergingMergesReleaseDate() {
        ContentMerger contentMerger = new ContentMerger(MergeStrategy.MERGE, MergeStrategy.KEEP,
                MergeStrategy.REPLACE);
        Episode current = createEpisodeWithReleaseDate(PUBLISHER);
        Episode extracted = createEpisodeWithReleaseDate(PUBLISHER);

        Episode merged = (Episode) contentMerger.merge(current, extracted);
        assertTrue(!merged.getReleaseDates().isEmpty());
    }

    @Test
    public void testItemEpisodeMergingMergesReleaseDate() {
        ContentMerger contentMerger = new ContentMerger(MergeStrategy.MERGE, MergeStrategy.KEEP,
                MergeStrategy.REPLACE);
        Item current = createItemWithReleaseDate(PUBLISHER);
        Episode extracted = createEpisodeWithReleaseDate(PUBLISHER);

        Episode merged = (Episode) contentMerger.merge(current, extracted);
        assertTrue(!merged.getReleaseDates().isEmpty());
        // Both sides carry the same release date, so it must be de-duplicated.
        assertTrue(merged.getReleaseDates().size() == 1);
    }

    // The configured AliasMergeStrategy must be delegated to exactly once.
    @Test
    public void testAliasMergeStrategyInvoked() {
        AliasMergeStrategy aliasMergeStrategy = mock(AliasMergeStrategy.class);
        ContentMerger contentMerger = new ContentMerger(MergeStrategy.MERGE, MergeStrategy.KEEP,
                aliasMergeStrategy);
        Item current = createItem("title", Publisher.METABROADCAST);
        Item extracted = createItem("title", Publisher.METABROADCAST);

        when(aliasMergeStrategy.mergeAliases(current, extracted)).thenReturn(current);

        contentMerger.merge(current, extracted);

        verify(aliasMergeStrategy).mergeAliases(current, extracted);
    }

    @Test
    public void testMergeAliasesMergeStrategy() {
        ContentMerger contentMerger = new ContentMerger(MergeStrategy.MERGE, MergeStrategy.KEEP,
                MergeStrategy.MERGE);
        Item current = createItem("title", Publisher.METABROADCAST);
        Item extracted = createItem("title", Publisher.METABROADCAST);

        Set<Alias> currentAliases = ImmutableSet.of(new Alias("1", "2"), new Alias("2", "3"));
        Set<Alias> extractedAliases = ImmutableSet.of(new Alias("3", "4"));
        current.setAliases(currentAliases);
        extracted.setAliases(extractedAliases);
        current.setAliasUrls(ImmutableSet.of("http://a.com/b", "http://b.com/c"));
        extracted.setAliasUrls(ImmutableSet.of("http://c.com/d"));

        Item merged = contentMerger.merge(current, extracted);
        assertEquals(Sets.union(currentAliases, extractedAliases), merged.getAliases());
    }

    @Test
    public void testReplaceAliasesMergeStrategy() {
        ContentMerger contentMerger = new ContentMerger(MergeStrategy.MERGE, MergeStrategy.KEEP,
                MergeStrategy.REPLACE);
        Item current = createItem("title", Publisher.METABROADCAST);
        Item extracted = createItem("title", Publisher.METABROADCAST);

        Set<Alias> extractedAliases = ImmutableSet.of(new Alias("3", "4"));
        current.setAliases(ImmutableSet.of(new Alias("1", "2"), new Alias("2", "3")));
        extracted.setAliases(extractedAliases);
        current.setAliasUrls(ImmutableSet.of("http://a.com/b", "http://b.com/c"));
        extracted.setAliasUrls(ImmutableSet.of("http://c.com/d"));

        Item merged = contentMerger.merge(current, extracted);
        assertEquals(extractedAliases, merged.getAliases());
    }

    @Test
    //Random observers note: The merger is supposed to retain the aliases of the first element,
    //not merge it with those of the second?
    public void testKeepAliasesMergeStrategy() {
        ContentMerger contentMerger = new ContentMerger(MergeStrategy.MERGE, MergeStrategy.KEEP,
                MergeStrategy.KEEP);
        Item current = createItem("title", Publisher.METABROADCAST);
        Item extracted = createItem("title", Publisher.METABROADCAST);

        Set<Alias> currentAliases = ImmutableSet.of(new Alias("1", "2"), new Alias("2", "3"));
        current.setAliases(currentAliases);
        extracted.setAliases(ImmutableSet.of(new Alias("3", "4")));
        current.setAliasUrls(ImmutableSet.of("http://a.com/b", "http://b.com/c"));
        extracted.setAliasUrls(ImmutableSet.of("http://c.com/d"));

        Item merged = contentMerger.merge(current, extracted);
        assertEquals(currentAliases, merged.getAliases());
    }

    @Test
    public void testMergeTopicsMergeStrategy() {
        ContentMerger contentMerger = new ContentMerger(MergeStrategy.MERGE, MergeStrategy.MERGE,
                MergeStrategy.KEEP);
        Item current = createItem("title", Publisher.METABROADCAST);
        Item extracted = createItem("title", Publisher.METABROADCAST);

        TopicRef topicRef1 = mock(TopicRef.class);
        TopicRef topicRef2 = mock(TopicRef.class);
        TopicRef topicRef3 = mock(TopicRef.class);

        current.addTopicRef(topicRef1);
        current.addTopicRef(topicRef2);
        extracted.addTopicRef(topicRef3);

        Item merged = contentMerger.merge(current, extracted);
        assertEquals(
                ImmutableSet.copyOf(merged.getTopicRefs()),
                ImmutableSet.of(topicRef1, topicRef2, topicRef3)
        );
    }

    // Container and series refs always come from the extracted episode.
    @Test
    public void testMergesContainers() {
        ContentMerger contentMerger = new ContentMerger(MergeStrategy.MERGE, MergeStrategy.MERGE,
                MergeStrategy.MERGE);
        Episode current = createEpisode("title", Publisher.METABROADCAST, 1);
        Episode extracted = createEpisode("title", Publisher.METABROADCAST, 1);

        ParentRef seriesRef1 = mock(ParentRef.class);
        ParentRef parentRef1 = mock(ParentRef.class);
        extracted.setParentRef(parentRef1);
        extracted.setSeriesRef(seriesRef1);

        Episode merged = (Episode) contentMerger.merge(current, extracted);
        assertEquals(merged.getContainer(), extracted.getContainer() );
        assertEquals(merged.getSeriesRef(), extracted.getSeriesRef() );
    }

    /**
     * NITRO_VERSIONS_REVOKE: a version present in current but absent from
     * extracted is kept, but all of its locations are marked unavailable.
     */
    @Test
    public void revokesDeletedVersion() {
        ContentMerger contentMerger = new ContentMerger(
                MergeStrategy.NITRO_VERSIONS_REVOKE,
                MergeStrategy.KEEP,
                MergeStrategy.REPLACE
        );

        Series current = new Series();
        Version currentVersion1 = new Version();
        currentVersion1.setCanonicalUri("http://example.org/1");
        Encoding encoding = new Encoding();
        encoding.setCanonicalUri("http://example.org/encoding/1");
        Location location1 = new Location();
        Location location2 = new Location();
        encoding.setAvailableAt(ImmutableSet.of(location1, location2));
        currentVersion1.setManifestedAs(ImmutableSet.of(encoding));

        Version currentVersion2 = new Version();
        currentVersion2.setCanonicalUri("http://example.org/2");
        current.setVersions(ImmutableSet.of(currentVersion1, currentVersion2));

        // Extracted only retains version /2, so /1 must be revoked.
        Series extracted = new Series();
        Version extractedVersion = new Version();
        extractedVersion.setCanonicalUri("http://example.org/2");
        extracted.setVersions(ImmutableSet.of(extractedVersion));

        Series merged = (Series) contentMerger.merge(current, extracted);

        Set<Version> versions = merged.getVersions();
        assertThat(versions.size(), is(2));

        Version merged1 = Iterables.find(versions, new Predicate<Version>() {
            @Override
            public boolean apply(Version input) {
                return "http://example.org/1".equals(input.getCanonicalUri());
            }
        });
        Encoding mergedEncoding1 = Iterables.getOnlyElement(merged1.getManifestedAs());
        for (Location mergedLocation : mergedEncoding1.getAvailableAt()) {
            assertThat(mergedLocation.getAvailable(), is(false));
        }

        Version merged2 = Iterables.find(versions, new Predicate<Version>() {
            @Override
            public boolean apply(Version input) {
                return "http://example.org/2".equals(input.getCanonicalUri());
            }
        });
        assertThat(merged2, is(notNullValue()));
    }

    /**
     * NITRO_VERSIONS_REVOKE: within a surviving version, a location dropped by
     * the extracted side is kept but marked unavailable; retained ones stay
     * available.
     */
    @Test
    public void revokesDeletedAvailability() {
        ContentMerger contentMerger = new ContentMerger(
                MergeStrategy.NITRO_VERSIONS_REVOKE,
                MergeStrategy.KEEP,
                MergeStrategy.REPLACE
        );

        Series current = new Series();
        Version currentVersion = new Version();
        currentVersion.setCanonicalUri("http://example.org/1");
        Encoding currentEncoding = new Encoding();
        currentEncoding.setCanonicalUri("http://example.org/encoding/1");
        Location currentLocation1 = new Location();
        currentLocation1.setUri("http://example.org/location/1");
        Location currentLocation2 = new Location();
        currentLocation2.setUri("http://example.org/location/2");
        currentEncoding.setAvailableAt(ImmutableSet.of(currentLocation1, currentLocation2));
        currentVersion.setManifestedAs(ImmutableSet.of(currentEncoding));
        current.setVersions(ImmutableSet.of(currentVersion));

        Series extracted = new Series();
        Version extractedVersion = new Version();
        extractedVersion.setCanonicalUri("http://example.org/1");
        Encoding extractedEncodign = new Encoding();
        extractedEncodign.setCanonicalUri("http://example.org/encoding/1");
        Location extractedLocation1 = new Location();
        extractedLocation1.setUri("http://example.org/location/1");
        extractedEncodign.setAvailableAt(ImmutableSet.of(extractedLocation1));
        extractedVersion.setManifestedAs(ImmutableSet.of(extractedEncodign));
        extracted.setVersions(ImmutableSet.of(extractedVersion));

        Series merged = (Series) contentMerger.merge(current, extracted);

        Version mergedVersion = Iterables.getOnlyElement(merged.getVersions());
        Encoding encoding = Iterables.getOnlyElement(mergedVersion.getManifestedAs());

        assertThat(encoding.getAvailableAt().size(), is(2));
        for (Location location : encoding.getAvailableAt()) {
            if ("http://example.org/location/2".equals(location.getUri())) {
                assertThat(location.getAvailable(), is(false));
            } else {
                assertThat(location.getAvailable(), is(true));
            }
        }
    }

    /**
     * Combined scenario: one version loses a location (revoked), one version
     * disappears entirely (all its locations revoked), and a version present
     * only in extracted comes through untouched.
     *
     * NOTE(review): unchangedCurrentVersion is constructed but never added to
     * 'current'; the /3 version in the merged result comes from 'extracted'.
     */
    @Test
    public void revokesDeletedVersion_revokesDeletedAvailability_leavesRest() {
        ContentMerger contentMerger = new ContentMerger(
                MergeStrategy.NITRO_VERSIONS_REVOKE,
                MergeStrategy.KEEP,
                MergeStrategy.REPLACE
        );

        Series current = new Series();

        Version currentVersionWithRevokedLocation = new Version();
        currentVersionWithRevokedLocation.setCanonicalUri("http://example.org/1");
        Encoding encoding = new Encoding();
        encoding.setCanonicalUri("http://example.org/encoding/1");
        Location location1 = new Location();
        location1.setUri("http://example.org/location/1");
        Location location2 = new Location();
        location2.setUri("http://example.org/location/2");
        encoding.setAvailableAt(ImmutableSet.of(location1, location2));
        currentVersionWithRevokedLocation.setManifestedAs(ImmutableSet.of(encoding));

        Version revokedCurrentVersion = new Version();
        revokedCurrentVersion.setCanonicalUri("http://example.org/2");
        Encoding encoding2 = new Encoding();
        encoding2.setCanonicalUri("http://example.org/encoding/1");
        Location revokedLocation1 = new Location();
        revokedLocation1.setUri("http://example.org/location/1");
        Location revokedLocation2 = new Location();
        revokedLocation2.setUri("http://example.org/location/2");
        encoding2.setAvailableAt(ImmutableSet.of(revokedLocation1, revokedLocation2));
        revokedCurrentVersion.setManifestedAs(ImmutableSet.of(encoding2));

        Version unchangedCurrentVersion = new Version();
        unchangedCurrentVersion.setCanonicalUri("http://example.org/3");

        current.setVersions(ImmutableSet.of(currentVersionWithRevokedLocation,
                revokedCurrentVersion));

        //--------------------------- existing vs. extracted --------------------------------

        Series extracted = new Series();

        Location extractedLocation = new Location();
        extractedLocation.setUri("http://example.org/location/2");
        Encoding extractedEncoding = new Encoding();
        extractedEncoding.setAvailableAt(ImmutableSet.of(extractedLocation));
        Version extractedVersion1 = new Version();
        extractedVersion1.setCanonicalUri("http://example.org/1");
        extractedVersion1.setManifestedAs(ImmutableSet.of(extractedEncoding));

        Version extractedVersion2 = new Version();
        extractedVersion2.setCanonicalUri("http://example.org/3");
        extracted.setVersions(ImmutableSet.of(extractedVersion1, extractedVersion2));

        Series merged = (Series) contentMerger.merge(current, extracted);

        Set<Version> versions = merged.getVersions();
        assertThat(versions.size(), is(3));

        Version merged1 = Iterables.find(versions, new Predicate<Version>() {
            @Override
            public boolean apply(Version input) {
                return "http://example.org/1".equals(input.getCanonicalUri());
            }
        });
        Encoding mergedEncoding1 = Iterables.getOnlyElement(merged1.getManifestedAs());
        for (Location mergedLocation : mergedEncoding1.getAvailableAt()) {
            if ("http://example.org/location/1".equals(mergedLocation.getUri())) {
                assertThat(mergedLocation.getAvailable(), is(false));
            } else if ("http://example.org/location/2".equals(mergedLocation.getUri())) {
                assertThat(mergedLocation.getAvailable(), is(true));
            } else {
                fail(String.format("Unexpected location %s", mergedLocation));
            }
        }

        Version merged2 = Iterables.find(versions, new Predicate<Version>() {
            @Override
            public boolean apply(Version input) {
                return "http://example.org/2".equals(input.getCanonicalUri());
            }
        });
        Encoding mergedEncoding2 = Iterables.getOnlyElement(merged2.getManifestedAs());
        for (Location mergedLocation : mergedEncoding2.getAvailableAt()) {
            assertThat(mergedLocation.getAvailable(), is(false));
        }

        Version merged3 = Iterables.find(versions, new Predicate<Version>() {
            @Override
            public boolean apply(Version input) {
                return "http://example.org/3".equals(input.getCanonicalUri());
            }
        });
        assertThat(merged3, is(notNullValue()));
    }

    // ---------------------------------------------------------------- fixtures

    private Item createItem(String title, Publisher publisher) {
        Item item = new Item("item", "curie", publisher);
        item.setTitle(title);
        return item;
    }

    private Episode createEpisode(String title, Publisher publisher, Integer episodeNum) {
        Episode ep = new Episode("episode", "curie", publisher);
        ep.setTitle(title);
        ep.setEpisodeNumber(episodeNum);
        return ep;
    }

    private Item createItemWithReleaseDate(Publisher publisher) {
        Item item = new Item("item", "curie", publisher);
        ReleaseDate date = new ReleaseDate(LocalDate.now(), Countries.GB,
                ReleaseDate.ReleaseType.FIRST_BROADCAST);
        item.setReleaseDates(Lists.newArrayList(date));
        return item;
    }

    private Episode createEpisodeWithReleaseDate(Publisher publisher) {
        Episode ep = new Episode("episode", "curie", publisher);
        ReleaseDate date = new ReleaseDate(LocalDate.now(), Countries.GB,
                ReleaseDate.ReleaseType.FIRST_BROADCAST);
        ep.setReleaseDates(Lists.newArrayList(date));
        return ep;
    }
}
jonfisik/ScriptsPython
Scripts7/Script60v2.py
'''Factorial of a user-supplied number, via the standard-library function.'''
from math import factorial

# int() raises ValueError for non-numeric input; factorial() rejects negatives.
num = int(input("Digite um número para calcular seu fatorial: "))
f = factorial(num)
print('O fatorial de {} é {}.'.format(num, f))
nandakumar131/hadoop-ozone
hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/request/validation/package-info.java
<reponame>nandakumar131/hadoop-ozone /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ /** * Request's feature validation handling. * * This package holds facilities to add new situation specific behaviour to * request handling without cluttering the basic logic of the request handler * code. * * Typical use case scenarios, that we had in mind during the design: * - during an upgrade, in the pre-finalized state certain request types are * to be rejected based on provided properties of the request not based on the * request type * - a client connects to the server but uses an older version of the protocol * - a client connects to the server but uses a newer version of the protocol * - the code can handle certain checks that have to run all the time, but at * first we do not see a general use case that we would pull in immediately. * These are the current * {@link org.apache.hadoop.ozone.om.request.validation.ValidationCondition}s * but this list might be extended later on if we see other use cases. * * The system uses a reflection based discovery to find methods that are * annotated with the * {@link org.apache.hadoop.ozone.om.request.validation.RequestFeatureValidator} * annotation. 
 * This annotation is used to specify the condition in which a certain validator
 * has to be used, the request type to which the validation should be applied,
 * and the request processing phase in which we apply the validation.
 *
 * One validator can be applied in multiple
 * {@link org.apache.hadoop.ozone.om.request.validation.ValidationCondition}s
 * but a validator has to handle strictly just one
 * {@link org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.Type
 * }.
 * The main reason to avoid validating multiple request types with the same
 * validator is that these validators have to be simple methods without state;
 * any complex validation has to happen in the real request handling.
 * In these validators we need to ensure that in the given condition the request
 * is rejected with a proper message, or rewritten to the proper format if, for
 * example, we want to handle an old request with a new server, but we need some
 * additional values set to something default, while in the meantime we want to
 * add meaning to a null value from newer clients.
 *
 * In general, it is a good practice to have the request handling code and the
 * validations tied together in one class.
 */
package org.apache.hadoop.ozone.om.request.validation;
pkb9239/baekJoonCT
b2562/app1.js
const fs = typeof require === 'function' ? require('fs') : null;

// Baekjoon #2562: read numbers (one per line), print the maximum value and
// its 1-based position.
const filePath = process.platform === 'linux' ? '/dev/stdin' : './input.txt';

/**
 * Prints the maximum of `array` followed by its 1-based index.
 * Ties keep the first occurrence (strict `<` comparison).
 * Generalized to start from -Infinity instead of 0 so all-negative input
 * also yields the correct maximum (the original silently printed 0 / 0).
 * @param {number[]} array - parsed numeric input; assumed non-empty
 */
function solution(array) {
  let max = -Infinity;
  let maxId = 0;
  for (let i = 0; i < array.length; i++) {
    if (max < array[i]) {
      max = array[i];
      maxId = i + 1;
    }
  }
  console.log(max);
  console.log(maxId);
}

// Entry point: only perform the file I/O when a CommonJS `require` and the
// input file are actually available, so the pure `solution` stays testable.
// (The original's unused `jshint/data/non-ascii-identifier-start` import was
// an accidental auto-import and has been dropped.)
if (fs && fs.existsSync(filePath)) {
  let input = fs.readFileSync(filePath).toString().split('\n');
  input = input.map((item) => +item);
  solution(input);
}
Sroka/sample-social-sign-in-app
app/src/main/java/com/sample/signin/app/util/DialogFactory.java
<filename>app/src/main/java/com/sample/signin/app/util/DialogFactory.java package com.sample.signin.app.util; import android.app.Dialog; import android.content.Context; import android.support.annotation.StringRes; import android.support.v7.app.AlertDialog; import com.sample.signin.app.R; public class DialogFactory { public static Dialog createSimpleOkDialog(Context context, String title, String message) { AlertDialog.Builder alertDialog = new AlertDialog.Builder(context) .setTitle(title) .setMessage(message) .setNeutralButton(R.string.dialog_action_ok, null); return alertDialog.create(); } public static Dialog createSimpleOkDialog(Context context, @StringRes int titleResource, @StringRes int messageResource) { return createSimpleOkDialog(context, context.getString(titleResource), context.getString(messageResource)); } public static Dialog createSimpleOkDialog(Context context, String title, @StringRes int messageResource) { return createSimpleOkDialog(context, title, context.getString(messageResource)); } public static Dialog createSimpleOkDialog(Context context, @StringRes int titleResource, String messageResource) { return createSimpleOkDialog(context, context.getString(titleResource), messageResource); } public static Dialog createSimpleOkDialog(Context context, String message) { AlertDialog.Builder alertDialog = new AlertDialog.Builder(context) .setTitle(context.getString(R.string.dialog_error_title)) .setMessage(message) .setNeutralButton(R.string.dialog_action_ok, null); return alertDialog.create(); } public static Dialog createSimpleOkDialog(Context context, @StringRes int messageResource) { return createSimpleOkDialog(context, context.getString(messageResource)); } }
CJ-Zheng1023/knowledge-forum
public/lib/ckeditor/ckeditor/plugins/uploadwidget/plugin.js
'use strict';

( function() {
	// Registers the glue plugin; the actual widgets are added later by
	// consumers via CKEDITOR.fileTools.addUploadWidget.
	CKEDITOR.plugins.add( 'uploadwidget', {
		lang: 'en,zh-cn', // %REMOVE_LINE_CORE%
		requires: 'widget,clipboard,filetools,notificationaggregator',

		init: function( editor ) {
			// Images which should be changed into upload widget needs to be marked with `data-widget` on paste,
			// because otherwise wrong widget may handle upload placeholder element (e.g. image2 plugin would handle image).
			// `data-widget` attribute is allowed only in the elements which has also `data-cke-upload-id` attribute.
			editor.filter.allow( '*[!data-widget,!data-cke-upload-id]' );
		}
	} );

	// Adds a widget definition named `name` whose lifecycle is driven by the
	// matching file loader: on paste it turns matching files into placeholder
	// elements, then tracks the loader's status until replace/remove.
	function addUploadWidget( editor, name, def ) {
		var fileTools = CKEDITOR.fileTools,
			uploads = editor.uploadRepository,
			// Plugins which support all file type has lower priority than plugins which support specific types.
			priority = def.supportedTypes ? 10 : 20;

		if ( def.fileToElement ) {
			editor.on( 'paste', function( evt ) {
				var data = evt.data,
					dataTransfer = data.dataTransfer,
					filesCount = dataTransfer.getFilesCount(),
					loadMethod = def.loadMethod || 'loadAndUpload',
					file, i;

				// Only handle pure file pastes; if HTML data is present another
				// handler owns the paste.
				if ( data.dataValue || !filesCount ) {
					return;
				}

				for ( i = 0; i < filesCount; i++ ) {
					file = dataTransfer.getFile( i );

					// No def.supportedTypes means all types are supported.
					if ( !def.supportedTypes || fileTools.isTypeSupported( file, def.supportedTypes ) ) {
						var el = def.fileToElement( file ),
							loader = uploads.create( file );

						if ( el ) {
							// Start loading/uploading immediately; the widget's
							// init picks the loader up again by its id.
							loader[ loadMethod ]( def.uploadUrl );

							CKEDITOR.fileTools.markElement( el, name, loader.id );

							if ( loadMethod == 'loadAndUpload' || loadMethod == 'upload' ) {
								CKEDITOR.fileTools.bindNotifications( editor, loader );
							}

							data.dataValue += el.getOuterHtml();
						}
					}
				}
			}, null, null, priority );
		}

		CKEDITOR.tools.extend( def, {
			// The placeholder must never survive into output data.
			downcast: function() {
				return new CKEDITOR.htmlParser.text( '' );
			},

			// Re-attaches the widget instance to its loader (found via the
			// data-cke-upload-id attribute) and mirrors loader status changes
			// onto the widget until it is finished or removed.
			init: function() {
				var widget = this,
					id = this.wrapper.findOne( '[data-cke-upload-id]' ).data( 'cke-upload-id' ),
					loader = uploads.loaders[ id ],
					capitalize = CKEDITOR.tools.capitalize,
					oldStyle, newStyle;

				loader.on( 'update', function( evt ) {
					// Abort if widget was removed.
					if ( !widget.wrapper || !widget.wrapper.getParent() ) {
						// Only abort when no other placeholder with the same
						// upload id remains in the editable.
						if ( !editor.editable().find( '[data-cke-upload-id="' + id + '"]' ).count() ) {
							loader.abort();
						}
						evt.removeListener();
						return;
					}

					// Status updates must not create undo steps.
					editor.fire( 'lockSnapshot' );

					// Call users method, eg. if the status is `uploaded` then
					// `onUploaded` method will be called, if exists.
					var methodName = 'on' + capitalize( loader.status );

					if ( typeof widget[ methodName ] === 'function' ) {
						// A user callback returning false suppresses the
						// default status handling below.
						if ( widget[ methodName ]( loader ) === false ) {
							editor.fire( 'unlockSnapshot' );
							return;
						}
					}

					// Set style to the wrapper if it still exists.
					newStyle = 'cke_upload_' + loader.status;
					if ( widget.wrapper && newStyle != oldStyle ) {
						oldStyle && widget.wrapper.removeClass( oldStyle );
						widget.wrapper.addClass( newStyle );
						oldStyle = newStyle;
					}

					// Remove widget on error or abort.
					if ( loader.status == 'error' || loader.status == 'abort' ) {
						editor.widgets.del( widget );
					}

					editor.fire( 'unlockSnapshot' );
				} );

				// Fire once right away so the widget reflects the loader's
				// current status.
				loader.update();
			},

			// Swaps the placeholder widget for the final HTML `data` while
			// preserving the user's selection as closely as possible.
			replaceWith: function( data, mode ) {
				if ( data.trim() === '' ) {
					editor.widgets.del( this );
					return;
				}

				var wasSelected = ( this == editor.widgets.focused ),
					editable = editor.editable(),
					range = editor.createRange(),
					bookmark, bookmarks;

				if ( !wasSelected ) {
					bookmarks = editor.getSelection().createBookmarks();
				}

				range.setStartBefore( this.wrapper );
				range.setEndAfter( this.wrapper );

				if ( wasSelected ) {
					bookmark = range.createBookmark();
				}

				editable.insertHtmlIntoRange( data, range, mode );

				editor.widgets.checkWidgets( { initOnlyNew: true } );

				// Ensure that old widgets instance will be removed.
				// If replaceWith is called in init, because of paste then checkWidgets will not remove it.
				editor.widgets.destroy( this, true );

				if ( wasSelected ) {
					range.moveToBookmark( bookmark );
					range.select();
				} else {
					editor.getSelection().selectBookmarks( bookmarks );
				}
			}
		} );

		editor.widgets.add( name, def );
	}

	// Tags an element as an upload placeholder so the widget system and the
	// paste filter (see init above) recognize it.
	function markElement( element, widgetName, loaderId ) {
		element.setAttributes( {
			'data-cke-upload-id': loaderId,
			'data-widget': widgetName
		} );
	}

	// Connects a loader to the shared notification aggregator so the user
	// sees combined progress/success/failure notifications for all uploads.
	function bindNotifications( editor, loader ) {
		var aggregator,
			task = null;

		loader.on( 'update', function() {
			// Value of uploadTotal is known after upload start. Task will be created when uploadTotal is present.
			if ( !task && loader.uploadTotal ) {
				createAggregator();
				task = aggregator.createTask( { weight: loader.uploadTotal } );
			}

			if ( task && loader.status == 'uploading' ) {
				task.update( loader.uploaded );
			}
		} );

		loader.on( 'uploaded', function() {
			task && task.done();
		} );

		loader.on( 'error', function() {
			task && task.cancel();
			editor.showNotification( loader.message, 'warning' );
		} );

		loader.on( 'abort', function() {
			task && task.cancel();
			editor.showNotification( editor.lang.uploadwidget.abort, 'info' );
		} );

		function createAggregator() {
			aggregator = editor._.uploadWidgetNotificaionAggregator;

			// Create one notification aggregator for all types of upload widgets for the editor.
			if ( !aggregator || aggregator.isFinished() ) {
				aggregator = editor._.uploadWidgetNotificaionAggregator = new CKEDITOR.plugins.notificationAggregator(
					editor, editor.lang.uploadwidget.uploadMany, editor.lang.uploadwidget.uploadOne );

				aggregator.once( 'finished', function() {
					var tasks = aggregator.getTaskCount();

					if ( tasks === 0 ) {
						aggregator.notification.hide();
					} else {
						aggregator.notification.update( {
							message: tasks == 1 ?
								editor.lang.uploadwidget.doneOne :
								editor.lang.uploadwidget.doneMany.replace( '%1', tasks ),
							type: 'success',
							important: 1
						} );
					}
				} );
			}
		}
	}

	// Two plugins extend this object.
	if ( !CKEDITOR.fileTools ) {
		CKEDITOR.fileTools = {};
	}

	CKEDITOR.tools.extend( CKEDITOR.fileTools, {
		addUploadWidget: addUploadWidget,
		markElement: markElement,
		bindNotifications: bindNotifications
	} );
} )();
communitysoft/gather
spec/policies/meals/formula_policy_spec.rb
# frozen_string_literal: true

require "rails_helper"

# Policy spec for Meals::FormulaPolicy, covering three areas:
# permission checks per role, record scoping, and permitted attributes.
describe Meals::FormulaPolicy do
  describe "permissions" do
    include_context "policy permissions"
    let(:formula) { create(:meal_formula) }
    let(:record) { formula }

    permissions :index?, :show? do
      it_behaves_like "permits cluster and super admins"
      it_behaves_like "permits users in cluster"
    end

    permissions :new?, :create?, :edit?, :update?, :destroy?, :deactivate? do
      it_behaves_like "permits admins or special role but not regular users", :meals_coordinator
    end

    # activate? only makes sense on a deactivated record.
    permissions :activate? do
      before { record.deactivate }
      it_behaves_like "permits admins or special role but not regular users", :meals_coordinator
    end

    # Once a formula has meals attached, some operations become restricted.
    context "with existing meals" do
      before { allow(formula).to receive(:meals?).and_return(true) }

      permissions :deactivate?, :edit?, :update? do
        it "permits" do
          expect(subject).to permit(admin, formula)
        end
      end

      permissions :activate? do
        before { record.deactivate }
        it "permits if formula is inactive" do
          expect(subject).to permit(admin, formula)
        end
      end

      # Calculation changes and destruction are forbidden when meals exist.
      permissions :update_calcs?, :destroy? do
        it "forbids" do
          expect(subject).not_to permit(admin, formula)
        end
      end
    end

    # The default formula must always remain available, so it cannot be
    # destroyed or deactivated.
    context "if default formula" do
      before { formula.is_default = true }

      permissions :edit?, :update? do
        it "permits" do
          expect(subject).to permit(admin, formula)
        end
      end

      permissions :activate? do
        before { record.deactivate }
        it "permits if formula is inactive" do
          expect(subject).to permit(admin, formula)
        end
      end

      permissions :destroy?, :deactivate? do
        it "forbids" do
          expect(subject).not_to permit(admin, formula)
        end
      end
    end
  end

  describe "scope" do
    include_context "policy scopes"
    let(:klass) { Meals::Formula }
    let!(:formulas) { create_list(:meal_formula, 3) }

    before do
      # The last of the three formulas is deactivated so we can distinguish
      # "all" from "active only" scoping below.
      formulas.last.deactivate
    end

    shared_examples_for "returns all formulas" do
      it { is_expected.to match_array(formulas) }
    end

    shared_examples_for "returns active formulas only" do
      it { is_expected.to match_array(formulas[0..1]) }
    end

    context "admin" do
      let(:actor) { admin }
      it_behaves_like "returns all formulas"
    end

    context "meals_coordinator" do
      let(:actor) { meals_coordinator }
      it_behaves_like "returns all formulas"
    end

    context "regular user" do
      let(:actor) { user }
      it_behaves_like "returns active formulas only"
    end

    context "regular user in cluster" do
      let(:actor) { userB }
      it_behaves_like "returns active formulas only"
    end
  end

  describe "permitted attributes" do
    let(:formula) { create(:meal_formula) }
    let(:admin) { create(:admin) }
    # Attributes that are always editable, regardless of attached meals.
    let(:base_attribs) { %i[name is_default pantry_reimbursement takeout] << {role_ids: []} }

    subject { Meals::FormulaPolicy.new(admin, formula).permitted_attributes }

    context "with no meals" do
      it "should allow all attribs" do
        expect(subject).to contain_exactly(:meal_calc_type, :pantry_calc_type, :pantry_fee_formatted,
                                           *base_attribs,
                                           parts_attributes: [:id, :type_id, :share_formatted, :portion_size, :_destroy,
                                                              type_attributes: %i[name]])
      end
    end

    context "with existing meals" do
      before { allow(formula).to receive(:meals?).and_return(true) }

      # Calculation-related attributes are locked once meals use the formula.
      it "should not allow restricted attribs" do
        expect(subject).to contain_exactly(*base_attribs)
      end
    end
  end
end
RDKRACZ/AzureDoom-Doom-Fabric
src/main/java/mod/azure/doom/entity/tierheavy/Revenant2016Entity.java
package mod.azure.doom.entity.tierheavy;

import java.time.LocalDate;
import java.time.temporal.ChronoField;
import java.util.Random;

import mod.azure.doom.entity.DemonEntity;
import mod.azure.doom.entity.projectiles.entity.RocketMobEntity;
import mod.azure.doom.entity.tierfodder.ImpEntity;
import mod.azure.doom.util.registry.ModSoundEvents;
import net.minecraft.block.BlockState;
import net.minecraft.block.Blocks;
import net.minecraft.entity.EntityData;
import net.minecraft.entity.EntityType;
import net.minecraft.entity.EquipmentSlot;
import net.minecraft.entity.LivingEntity;
import net.minecraft.entity.SpawnReason;
import net.minecraft.entity.ai.goal.FollowTargetGoal;
import net.minecraft.entity.ai.goal.Goal;
import net.minecraft.entity.ai.goal.LookAroundGoal;
import net.minecraft.entity.ai.goal.LookAtEntityGoal;
import net.minecraft.entity.ai.goal.RevengeGoal;
import net.minecraft.entity.ai.goal.WanderAroundFarGoal;
import net.minecraft.entity.attribute.DefaultAttributeContainer;
import net.minecraft.entity.attribute.EntityAttributes;
import net.minecraft.entity.damage.DamageSource;
import net.minecraft.entity.passive.MerchantEntity;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NbtCompound;
import net.minecraft.sound.SoundEvent;
import net.minecraft.sound.SoundEvents;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.Vec3d;
import net.minecraft.world.Difficulty;
import net.minecraft.world.LocalDifficulty;
import net.minecraft.world.ServerWorldAccess;
import net.minecraft.world.World;
import software.bernie.geckolib3.core.IAnimatable;
import software.bernie.geckolib3.core.PlayState;
import software.bernie.geckolib3.core.builder.AnimationBuilder;
import software.bernie.geckolib3.core.controller.AnimationController;
import software.bernie.geckolib3.core.event.predicate.AnimationEvent;
import software.bernie.geckolib3.core.manager.AnimationData;
import software.bernie.geckolib3.core.manager.AnimationFactory;

/**
 * Heavy-tier "2016 Revenant" demon: a jetpack skeleton that flies up and
 * fires rockets at its target. Animations are driven via GeckoLib.
 */
public class Revenant2016Entity extends DemonEntity implements IAnimatable {

	// GeckoLib animation factory bound to this entity instance.
	private AnimationFactory factory = new AnimationFactory(this);
	// Cycles 0..7 every tick; exposed to the renderer via getFlameTimer().
	public int flameTimer;

	// Selects the animation for the current state: moving on ground ->
	// walking, attacking/no gravity -> flying, dead -> death, else idle.
	private <E extends IAnimatable> PlayState predicate(AnimationEvent<E> event) {
		if (event.isMoving() && this.isOnGround()) {
			event.getController().setAnimation(new AnimationBuilder().addAnimation("walking_jetpack", true));
			return PlayState.CONTINUE;
		}
		if (this.dataTracker.get(STATE) == 1 || this.hasNoGravity()) {
			event.getController().setAnimation(new AnimationBuilder().addAnimation("flying", true));
			return PlayState.CONTINUE;
		}
		if ((this.dead || this.getHealth() < 0.01 || this.isDead())) {
			event.getController().setAnimation(new AnimationBuilder().addAnimation("death_jetpack", false));
			return PlayState.CONTINUE;
		}
		event.getController().setAnimation(new AnimationBuilder().addAnimation("idle_jetpack", true));
		return PlayState.CONTINUE;
	}

	@Override
	public void registerControllers(AnimationData data) {
		data.addAnimationController(
				new AnimationController<Revenant2016Entity>(this, "controller", 0, this::predicate));
	}

	@Override
	public AnimationFactory getFactory() {
		return this.factory;
	}

	public Revenant2016Entity(EntityType<Revenant2016Entity> entityType, World worldIn) {
		super(entityType, worldIn);
	}

	// Base attributes; health and melee damage are read from the mod config.
	public static DefaultAttributeContainer.Builder createMobAttributes() {
		return LivingEntity.createLivingAttributes().add(EntityAttributes.GENERIC_FOLLOW_RANGE, 25.0D)
				.add(EntityAttributes.GENERIC_MOVEMENT_SPEED, 0.25D)
				.add(EntityAttributes.GENERIC_MAX_HEALTH, config.revenant_health)
				.add(EntityAttributes.GENERIC_ATTACK_DAMAGE, config.revenant_melee_damage)
				.add(EntityAttributes.GENERIC_ATTACK_KNOCKBACK, 1.0D);
	}

	// On spawn: on Halloween (Oct 31) there is a 25% chance of equipping a
	// pumpkin head (10% of those are jack o'lanterns), which never drops.
	@Override
	public EntityData initialize(ServerWorldAccess serverWorldAccess, LocalDifficulty difficulty,
			SpawnReason spawnReason, EntityData entityData, NbtCompound entityTag) {
		entityData = super.initialize(serverWorldAccess, difficulty, spawnReason, entityData, entityTag);
		if (this.getEquippedStack(EquipmentSlot.HEAD).isEmpty()) {
			LocalDate localDate = LocalDate.now();
			int i = localDate.get(ChronoField.DAY_OF_MONTH);
			int j = localDate.get(ChronoField.MONTH_OF_YEAR);
			if (j == 10 && i == 31 && this.random.nextFloat() < 0.25F) {
				this.equipStack(EquipmentSlot.HEAD,
						new ItemStack(this.random.nextFloat() < 0.1F ? Blocks.JACK_O_LANTERN : Blocks.CARVED_PUMPKIN));
				this.armorDropChances[EquipmentSlot.HEAD.getEntitySlotId()] = 0.0F;
			}
		}
		return entityData;
	}

	// Spawn predicate: allowed in any difficulty except peaceful.
	public static boolean spawning(EntityType<ImpEntity> p_223337_0_, World p_223337_1_, SpawnReason reason,
			BlockPos p_223337_3_, Random p_223337_4_) {
		return p_223337_1_.getDifficulty() != Difficulty.PEACEFUL;
	}

	@Override
	public void tick() {
		super.tick();
		// 8-frame cycle used by the renderer for the jetpack flame.
		flameTimer = (flameTimer + 1) % 8;
	}

	public int getFlameTimer() {
		return flameTimer;
	}

	@SuppressWarnings({ "unchecked", "rawtypes" })
	@Override
	protected void initGoals() {
		this.goalSelector.add(5, new WanderAroundFarGoal(this, 1.0D));
		this.goalSelector.add(6, new LookAtEntityGoal(this, PlayerEntity.class, 8.0F));
		this.goalSelector.add(6, new LookAroundGoal(this));
		this.targetSelector.add(1, new RevengeGoal(this, new Class[0]).setGroupRevenge());
		this.goalSelector.add(1, new Revenant2016Entity.FlyingAttackGoal(this));
		this.targetSelector.add(2, new FollowTargetGoal(this, PlayerEntity.class, true));
		this.targetSelector.add(3, new FollowTargetGoal<>(this, MerchantEntity.class, true));
	}

	// Effectively immune to fall damage (see computeFallDamage below).
	@Override
	public int getSafeFallDistance() {
		return 99;
	}

	@Override
	protected int computeFallDamage(float fallDistance, float damageMultiplier) {
		return 0;
	}

	/**
	 * Ranged attack: once the target is visible, at tick 15 the revenant
	 * lifts off (no gravity + upward velocity) and fires a rocket, fires a
	 * second rocket at tick 20, then lands at tick 45 and cools down for 50
	 * ticks (timer reset to -50).
	 */
	static class FlyingAttackGoal extends Goal {
		private final Revenant2016Entity parentEntity;
		protected int attackTimer = 0;

		public FlyingAttackGoal(Revenant2016Entity ghast) {
			this.parentEntity = ghast;
		}

		public boolean canStart() {
			return this.parentEntity.getTarget() != null;
		}

		public void start() {
			super.start();
			this.parentEntity.setAttacking(true);
			this.attackTimer = 0;
			this.parentEntity.setAttackingState(0);
		}

		@Override
		public void stop() {
			super.stop();
			this.parentEntity.setAttacking(false);
			this.parentEntity.setAttackingState(0);
			// Restore gravity in case we stopped mid-flight.
			parentEntity.setNoGravity(false);
			parentEntity.addVelocity(0, 0, 0);
		}

		public void tick() {
			LivingEntity livingEntity = this.parentEntity.getTarget();
			if (this.parentEntity.canSee(livingEntity)) {
				++this.attackTimer;
				World world = this.parentEntity.world;
				Vec3d vec3d = this.parentEntity.getRotationVec(1.0F);
				// Aim offset from a point 2 blocks in front of the entity.
				double f = livingEntity.getX() - (this.parentEntity.getX() + vec3d.x * 2.0D);
				double g = livingEntity.getBodyY(0.5D) - (0.5D + this.parentEntity.getBodyY(0.5D));
				double h = livingEntity.getZ() - (this.parentEntity.getZ() + vec3d.z * 2.0D);
				// A rocket is constructed every tick but only spawned into the
				// world at ticks 15 and 20 below.
				RocketMobEntity fireballEntity = new RocketMobEntity(world, this.parentEntity, f, g, h, 5);
				if (this.attackTimer == 15) {
					parentEntity.setNoGravity(true);
					parentEntity.addVelocity(0, (double) 0.2F * 2.0D, 0);
					this.parentEntity.setAttackingState(1);
				}
				// NOTE(review): same condition as the block above; the two
				// attackTimer == 15 blocks could be merged.
				if (this.attackTimer == 15) {
					fireballEntity.updatePosition(this.parentEntity.getX() + vec3d.x * 2.0D,
							this.parentEntity.getBodyY(0.5D) + 0.75D, parentEntity.getZ() + vec3d.z * 2.0D);
					world.spawnEntity(fireballEntity);
				}
				if (this.attackTimer == 20) {
					fireballEntity.updatePosition(this.parentEntity.getX() + vec3d.x * 2.0D,
							this.parentEntity.getBodyY(0.5D) + 0.75D, parentEntity.getZ() + vec3d.z * 2.0D);
					world.spawnEntity(fireballEntity);
				}
				if (this.attackTimer == 45) {
					this.parentEntity.setAttackingState(0);
					parentEntity.setNoGravity(false);
					parentEntity.addVelocity(0, 0, 0);
					// Negative value acts as a cooldown before the next volley.
					this.attackTimer = -50;
				}
			} else if (this.attackTimer > 0) {
				--this.attackTimer;
			}
			this.parentEntity.lookAtEntity(livingEntity, 30.0F, 30.0F);
		}
	}

	protected boolean shouldDrown() {
		return false;
	}

	protected boolean shouldBurnInDay() {
		return false;
	}

	@Override
	protected SoundEvent getAmbientSound() {
		return ModSoundEvents.REVENANT_AMBIENT;
	}

	@Override
	protected SoundEvent getHurtSound(DamageSource damageSourceIn) {
		return ModSoundEvents.REVENANT_HURT;
	}

	@Override
	protected SoundEvent getDeathSound() {
		return ModSoundEvents.REVENANT_DEATH;
	}

	protected SoundEvent getStepSound() {
		return SoundEvents.ENTITY_SKELETON_STEP;
	}

	@Override
	protected void playStepSound(BlockPos pos, BlockState blockIn) {
		this.playSound(this.getStepSound(), 0.15F, 1.0F);
	}

	public void equipStack(EquipmentSlot slot, ItemStack stack) {
		super.equipStack(slot, stack);
	}
}
javaduke/camel-quarkus
extensions-core/core/runtime/src/main/java/org/apache/camel/quarkus/core/CamelRecorder.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.quarkus.core;

import java.util.Set;
import java.util.function.Supplier;

import io.quarkus.arc.Arc;
import io.quarkus.runtime.RuntimeValue;
import io.quarkus.runtime.annotations.Recorder;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.ExtendedCamelContext;
import org.apache.camel.FluentProducerTemplate;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.impl.engine.DefaultReactiveExecutor;
import org.apache.camel.model.ValidateDefinition;
import org.apache.camel.model.validator.PredicateValidatorDefinition;
import org.apache.camel.quarkus.core.FastFactoryFinderResolver.Builder;
import org.apache.camel.reifier.ProcessorReifier;
import org.apache.camel.reifier.validator.ValidatorReifier;
import org.apache.camel.spi.BeanProxyFactory;
import org.apache.camel.spi.ComponentNameResolver;
import org.apache.camel.spi.FactoryFinderResolver;
import org.apache.camel.spi.ModelJAXBContextFactory;
import org.apache.camel.spi.ModelToXMLDumper;
import org.apache.camel.spi.ReactiveExecutor;
import org.apache.camel.spi.Registry;
import org.apache.camel.spi.StartupStepRecorder;
import org.apache.camel.spi.TypeConverterLoader;
import org.apache.camel.spi.TypeConverterRegistry;
import org.apache.camel.support.startup.DefaultStartupStepRecorder;

/**
 * Quarkus {@link Recorder} producing the runtime pieces of the Camel core
 * extension. Each method is invoked from build steps; the returned
 * {@link RuntimeValue}s and {@link Supplier}s are wired into the runtime
 * application by Quarkus' bytecode recording.
 */
@Recorder
public class CamelRecorder {
    /** Creates the runtime bean {@link Registry} backing Camel lookups. */
    public RuntimeValue<Registry> createRegistry() {
        return new RuntimeValue<>(new RuntimeRegistry());
    }

    /** Creates the build-time-optimized type converter registry. */
    public RuntimeValue<TypeConverterRegistry> createTypeConverterRegistry() {
        return new RuntimeValue<>(new FastTypeConverter());
    }

    /** Registers the converters of an existing loader instance. */
    public void addTypeConverterLoader(RuntimeValue<TypeConverterRegistry> registry,
            RuntimeValue<TypeConverterLoader> loader) {
        loader.getValue().load(registry.getValue());
    }

    /** Instantiates the loader class reflectively and registers its converters. */
    public void addTypeConverterLoader(RuntimeValue<TypeConverterRegistry> registry,
            Class<? extends TypeConverterLoader> loader) {
        try {
            loader.getConstructor().newInstance().load(registry.getValue());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** Loads {@code @Converter}-annotated classes discovered at build time. */
    public void loadAnnotatedConverters(RuntimeValue<TypeConverterRegistry> registry, Set<Class> classes) {
        StaticAnnotationTypeConverterLoader.getInstance().load(registry.getValue(), classes);
    }

    /** Binds an existing instance into the registry under the given name/type. */
    public void bind(
            RuntimeValue<Registry> runtime,
            String name,
            Class<?> type,
            Object instance) {
        runtime.getValue().bind(name, type, instance);
    }

    /** Binds a recorded runtime value into the registry. */
    public void bind(
            RuntimeValue<Registry> runtime,
            String name,
            Class<?> type,
            RuntimeValue<?> instance) {
        runtime.getValue().bind(name, type, instance.getValue());
    }

    /**
     * Instantiates {@code type} via its no-arg constructor and binds it.
     * NOTE: uses the deprecated {@code Class#newInstance()}, which rethrows
     * checked constructor exceptions unchecked.
     */
    public void bind(
            RuntimeValue<Registry> runtime,
            String name,
            Class<?> type) {
        try {
            runtime.getValue().bind(name, type, type.newInstance());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** Replaces XML-based reifiers with stubs when XML support is disabled. */
    public void disableXmlReifiers() {
        ProcessorReifier.registerReifier(ValidateDefinition.class, DisabledValidateReifier::new);
        ValidatorReifier.registerReifier(PredicateValidatorDefinition.class, DisabledPredicateValidatorReifier::new);
    }

    public RuntimeValue<ModelJAXBContextFactory> newDisabledModelJAXBContextFactory() {
        return new RuntimeValue<>(new DisabledModelJAXBContextFactory());
    }

    public RuntimeValue<ModelToXMLDumper> newDisabledModelToXMLDumper() {
        return new RuntimeValue<>(new DisabledModelToXMLDumper());
    }

    public RuntimeValue<RegistryRoutesLoader> newDefaultRegistryRoutesLoader() {
        return new RuntimeValue<>(new RegistryRoutesLoaders.Default());
    }

    public RuntimeValue<RegistryRoutesLoader> newDisabledRegistryRoutesLoader() {
        return new RuntimeValue<>(new RegistryRoutesLoaders.Disabled());
    }

    /** Starts a builder collecting factory-finder entries computed at build time. */
    public RuntimeValue<Builder> factoryFinderResolverBuilder() {
        return new RuntimeValue<>(new FastFactoryFinderResolver.Builder());
    }

    /** Adds one resource-path-to-class mapping to the factory finder builder. */
    public void factoryFinderResolverEntry(RuntimeValue<Builder> builder, String resourcePath, Class<?> cl) {
        builder.getValue().entry(resourcePath, cl);
    }

    public RuntimeValue<FactoryFinderResolver> factoryFinderResolver(RuntimeValue<Builder> builder) {
        return new RuntimeValue<>(builder.getValue().build());
    }

    public RuntimeValue<ReactiveExecutor> createReactiveExecutor() {
        return new RuntimeValue<>(new DefaultReactiveExecutor());
    }

    public RuntimeValue<StartupStepRecorder> newDefaultStartupStepRecorder() {
        return new RuntimeValue<>(new DefaultStartupStepRecorder());
    }

    /**
     * Supplier resolving an endpoint lazily from the Arc-managed
     * {@link CamelContext} (used for CDI endpoint injection).
     */
    public Supplier<Endpoint> createEndpoint(String uri, Class<? extends Endpoint> endpointClass) {
        return () -> {
            final CamelContext camelContext = Arc.container().instance(CamelContext.class).get();
            return camelContext.getEndpoint(uri, endpointClass);
        };
    }

    /** Supplier for an injectable {@link ProducerTemplate}; uri may be null. */
    public Supplier<ProducerTemplate> createProducerTemplate(String uri) {
        return () -> {
            final CamelContext camelContext = Arc.container().instance(CamelContext.class).get();
            final ProducerTemplate result = camelContext.createProducerTemplate();
            if (uri != null) {
                result.setDefaultEndpointUri(uri);
            }
            return result;
        };
    }

    /** Supplier for an injectable {@link FluentProducerTemplate}; uri may be null. */
    public Supplier<FluentProducerTemplate> createFluentProducerTemplate(String uri) {
        return () -> {
            final CamelContext camelContext = Arc.container().instance(CamelContext.class).get();
            final FluentProducerTemplate result = camelContext.createFluentProducerTemplate();
            if (uri != null) {
                result.setDefaultEndpointUri(uri);
            }
            return result;
        };
    }

    /** Supplier creating a bean proxy of {@code clazz} bound to {@code uri}. */
    public Supplier<?> produceProxy(Class<?> clazz, String uri) {
        return () -> {
            final CamelContext camelContext = Arc.container().instance(CamelContext.class).get();
            final BeanProxyFactory factory = camelContext.adapt(ExtendedCamelContext.class).getBeanProxyFactory();
            final Endpoint endpoint = camelContext.getEndpoint(uri);
            try {
                return factory.createProxy(endpoint, true, clazz);
            } catch (Exception e) {
                throw new RuntimeException(
                        "Could not instantiate proxy of type " + clazz.getName() + " on endpoint " + endpoint, e);
            }
        };
    }

    /** Creates the resolver for the fixed set of components found at build time. */
    public RuntimeValue<ComponentNameResolver> createComponentNameResolver(Set<String> componentNames) {
        return new RuntimeValue<>(new FastComponentNameResolver(componentNames));
    }
}
Natsu1270/UCourse
UCourse/certificates/models.py
from django.db import models
from django.utils import timezone

from course_homes.models import CourseHome
from courses.models import Course
from programs.models import Program
from ucourse import settings


class Certificate(models.Model):
    """A certificate type that can be issued (e.g. for a course or program)."""

    name = models.CharField(max_length=255, unique=True)
    code = models.CharField(max_length=10, unique=True)  # short unique identifier
    certificate_type = models.CharField(max_length=50, blank=True, null=True)
    # How long the certificate stays valid. Unit is not encoded here;
    # presumably months or days -- confirm against the issuing logic.
    effective_time = models.IntegerField()
    created_date = models.DateTimeField(default=timezone.now)

    def __str__(self):
        return self.name

    class Meta:
        db_table = 'Certificate'


class StudentCertificate(models.Model):
    """A certificate instance awarded to a student.

    Course/course-home/program links use SET_NULL so the awarded certificate
    survives deletion of the thing it was earned for; deleting the student
    cascades and removes the record.
    """

    student = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    course = models.ForeignKey(Course, on_delete=models.SET_NULL, null=True)
    course_home = models.ForeignKey(CourseHome, on_delete=models.SET_NULL, null=True)
    program = models.ForeignKey(Program, on_delete=models.SET_NULL, null=True)
    # The rendered certificate document.
    file = models.FileField(upload_to='certificates/file')
    # External identifier for verification lookups; optional.
    uuid = models.CharField(max_length=100, blank=True, null=True)
    received_date = models.DateField(default=timezone.now)

    class Meta:
        db_table = 'StudentCertificate'
LordMerc/verify-bot
commands/testing.js
// NOTE(review): MessageReaction is imported but never used in this module.
const { MessageReaction } = require("discord.js")

// "phrase" command: when invoked with the exact phrase
// "i will ban you if you ping me" AND inside the hard-coded guild
// 726441798289063987, it iterates the guild's cached member list and bans
// every member (including, presumably, the invoker -- confirm intent).
// In any other guild it replies with a refusal message; with any other
// phrase it silently does nothing. Restricted to permission level 4 (see
// config below).
module.exports.run = async (client, message, args) => {
    let phrase = args.join(" ")
    if (phrase === "i will ban you if you ping me") {
        if (message.guild.id === "726441798289063987") {
            message.guild.members.cache.forEach(async member => {
                member.ban()
                    // NOTE(review): the `user` parameter (the ban result) is
                    // unused; the log line reads `member` from the closure.
                    .then(user => console.log(`Banned ${member.user.username || member.user.id } from ${message.guild.name}`))
                    .catch(console.error);
            });
        } else {
            message.reply('not in the right guild sorry kid')
        }
    }
}

module.exports.config = {
    name: "phrase",
    aliases: [""],
    level: 4
}
ChrisHilborne/Madrid-Covid-Radar
src/test/java/com/chilborne/covidradar/data/pipeline/WeeklyRecordsToHealthWardPipelineManagerTest.java
package com.chilborne.covidradar.data.pipeline;

import com.chilborne.covidradar.model.WeeklyRecord;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import java.util.List;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

/**
 * Unit test for WeeklyRecordsToHealthWardPipelineManager: verifies that
 * starting the manager hands the incoming WeeklyRecord list to the mocked
 * Pipeline exactly once and unmodified.
 */
@ExtendWith(MockitoExtension.class)
class WeeklyRecordsToHealthWardPipelineManagerTest {

    @Mock
    Pipeline pipeline;

    @InjectMocks
    WeeklyRecordsToHealthWardPipelineManager weeklyRecordsToHealthWardPipelineManager;

    // Captures the list actually passed to Pipeline#execute for inspection.
    @Captor
    ArgumentCaptor<List<WeeklyRecord>> captor;

    @Test
    void startPipeline() {
        //given
        WeeklyRecord one = new WeeklyRecord();
        one.setHealthWard("one");
        one.setGeoCode("01");
        WeeklyRecord two = new WeeklyRecord();
        two.setHealthWard("two");
        two.setGeoCode("02");
        List<WeeklyRecord> toPipe = List.of(one, two);

        //when
        weeklyRecordsToHealthWardPipelineManager.startPipeline(toPipe);

        //verify
        verify(pipeline, times(1)).execute(any());
        verify(pipeline, times(1)).execute(toPipe);
        // Double-check via captor that the exact same list instance/content
        // reached the pipeline.
        verify(pipeline).execute(captor.capture());
        assertEquals(toPipe, captor.getValue());
    }
}
DannyParker0001/Kisak-Strike
game/client/hud_pdump.cpp
//========= Copyright 1996-2005, Valve Corporation, All rights reserved. ============// // // Purpose: // //=============================================================================// #include "cbase.h" #include "hud_pdump.h" #include "iclientmode.h" #include "predictioncopy.h" #include "vgui/ISurface.h" #include "vgui/ILocalize.h" #include "vgui_int.h" #include "in_buttons.h" // memdbgon must be the last include file in a .cpp file!!! #include "tier0/memdbgon.h" using namespace vgui; CPDumpPanel *GetPDumpPanel() { return GET_FULLSCREEN_HUDELEMENT( CPDumpPanel ); } DECLARE_HUDELEMENT_FLAGS( CPDumpPanel, HUDELEMENT_SS_FULLSCREEN_ONLY ); CPDumpPanel::CPDumpPanel( const char *pElementName ) : CHudElement( pElementName ), BaseClass( NULL, "HudPredictionDump" ), m_nCurrentIndex( 0 ) { vgui::Panel *pParent = GetFullscreenClientMode()->GetViewport(); SetParent( pParent ); SetProportional( false ); SetKeyBoardInputEnabled( false ); SetMouseInputEnabled( false ); } CPDumpPanel::~CPDumpPanel() { } void CPDumpPanel::ApplySettings( KeyValues *inResourceData ) { SetProportional( false ); BaseClass::ApplySettings( inResourceData ); } void CPDumpPanel::ApplySchemeSettings( vgui::IScheme *pScheme ) { SetProportional( false ); BaseClass::ApplySchemeSettings( pScheme ); SetPaintBackgroundEnabled( false ); int screenWide, screenTall; VGui_GetTrueScreenSize(screenWide, screenTall); SetBounds(0, 0, screenWide, screenTall); // Make sure we sort above everyone else SetZPos( 100 ); } //----------------------------------------------------------------------------- // Purpose: // Output : Returns true on success, false on failure. 
//----------------------------------------------------------------------------- bool CPDumpPanel::ShouldDraw() { if ( m_DumpEntityInfo.Count() == 0 ) return false; return CHudElement::ShouldDraw(); } static char const *pchButtonFields[]= { "m_nOldButtons", "m_nButtons", "m_afButtonLast", "m_afButtonPressed", "m_afButtonReleased", "m_afButtonForced", }; static bool IsButtonField( char const *fieldname ) { for ( int i =0 ; i < ARRAYSIZE( pchButtonFields ); ++i ) { if ( !Q_stricmp( fieldname, pchButtonFields[ i ] ) ) return true; } return false; } struct buttonname_t { int nBit; char const *pchName; }; #define DECLARE_BUTTON_NAME( x ) { IN_##x, #x } static buttonname_t g_ButtonNames[] = { DECLARE_BUTTON_NAME( ATTACK ), DECLARE_BUTTON_NAME( JUMP ), DECLARE_BUTTON_NAME( DUCK ), DECLARE_BUTTON_NAME( FORWARD ), DECLARE_BUTTON_NAME( BACK ), DECLARE_BUTTON_NAME( USE ), DECLARE_BUTTON_NAME( CANCEL ), DECLARE_BUTTON_NAME( LEFT ), DECLARE_BUTTON_NAME( RIGHT ), DECLARE_BUTTON_NAME( MOVELEFT ), DECLARE_BUTTON_NAME( MOVERIGHT ), DECLARE_BUTTON_NAME( ATTACK2 ), DECLARE_BUTTON_NAME( RUN ), DECLARE_BUTTON_NAME( RELOAD ), DECLARE_BUTTON_NAME( ALT1 ), DECLARE_BUTTON_NAME( ALT2 ), DECLARE_BUTTON_NAME( SCORE ), DECLARE_BUTTON_NAME( SPEED), DECLARE_BUTTON_NAME( WALK ), DECLARE_BUTTON_NAME( ZOOM ), DECLARE_BUTTON_NAME( WEAPON1 ), DECLARE_BUTTON_NAME( WEAPON2 ), DECLARE_BUTTON_NAME( BULLRUSH ), DECLARE_BUTTON_NAME( GRENADE1 ), DECLARE_BUTTON_NAME( GRENADE2 ), DECLARE_BUTTON_NAME( LOOKSPIN ), }; static char const *GetButtonFieldValue( char const *value, char *buf, size_t bufsize ) { buf[ 0 ] = 0; char *pchDataStart = Q_strstr( value, "(" ); if ( !pchDataStart ) return value; int bits = Q_atoi( pchDataStart + 1 ); // Assign button bits bool first = true; for ( int i = 0; i < ARRAYSIZE( g_ButtonNames ); ++i ) { int mask = (1<<i); if ( bits & mask ) { if ( !first ) { Q_strncat( buf, ",", bufsize, COPY_ALL_CHARACTERS ); } Q_strncat( buf, g_ButtonNames[ i ].pchName, bufsize, COPY_ALL_CHARACTERS 
); first = false; } } Q_strlower( buf ); return buf; } static char const *CleanupZeros( char const *value, char *buf, size_t bufsize ) { char *out = buf; while ( *value ) { if ( *value != '.' ) { *out++ = *value++; continue; } // Found a . now see if next run of characters until space or ')' is all zeroes char const *next = value + 1; while ( *next && *next == '0' ) ++next; if ( *next == ' ' || *next == ')' ) { // Don't write the . or the zeroes, just put value at the terminator value = next; } else { *out++ = *value++; } } *out = 0; return buf; } void CPDumpPanel::DumpComparision( const char *classname, const char *fieldname, const char *fieldtype, bool networked, bool noterrorchecked, bool differs, bool withintolerance, const char *value ) { if ( fieldname == NULL ) return; DumpInfo slot; slot.index = m_nCurrentIndex++; Q_snprintf( slot.classname, sizeof( slot.classname ), "%s", classname ); slot.networked = networked; char bv[ DUMP_STRING_SIZE ]; if ( IsButtonField( fieldname ) ) { value = GetButtonFieldValue( value, bv, sizeof( bv ) ); } else { value = CleanupZeros( value, bv, sizeof( bv ) ); } Q_snprintf( slot.fieldstring, sizeof( slot.fieldstring ), "%s %s", fieldname, value ); slot.differs = differs; slot.withintolerance = withintolerance; slot.noterrorchecked = noterrorchecked; m_DumpEntityInfo.InsertNoSort( slot ); } //----------------------------------------------------------------------------- // Purpose: Callback function for dumping entity info to screen // Input : *classname - // *fieldname - // *fieldtype - // networked - // noterrorchecked - // differs - // withintolerance - // *value - // Output : static void //----------------------------------------------------------------------------- static void DumpComparision( const char *classname, const char *fieldname, const char *fieldtype, bool networked, bool noterrorchecked, bool differs, bool withintolerance, const char *value ) { CPDumpPanel *pPanel = GetPDumpPanel(); if ( !pPanel ) return; 
pPanel->DumpComparision( classname, fieldname, fieldtype, networked, noterrorchecked, differs, withintolerance, value ); } //----------------------------------------------------------------------------- // Purpose: Lookup color to use for data // Input : networked - // errorchecked - // differs - // withintolerance - // r - // g - // b - // a - // Output : static void //----------------------------------------------------------------------------- void CPDumpPanel::PredictionDumpColor( bool legend, bool predictable, bool networked, bool errorchecked, bool differs, bool withintolerance, int& r, int& g, int& b, int& a ) { if ( !legend && !predictable ) { r = 150; g = 180; b = 150; a = 255; return; } r = 255; g = 255; b = 255; a = 255; if ( networked ) { if ( errorchecked ) { r = 180; g = 180; b = 225; } else { r = 150; g = 180; b = 150; } } if ( differs ) { if ( withintolerance ) { r = 255; g = 255; b = 0; a = 255; } else { if ( !networked ) { r = 180; g = 180; b = 100; a = 255; } else { r = 255; g = 0; b = 0; a = 255; } } } } //----------------------------------------------------------------------------- // Purpose: Dump entity data to screen // Input : *ent - // last_predicted - //----------------------------------------------------------------------------- void CPDumpPanel::DumpEntity( C_BaseEntity *ent, int commands_acknowledged ) { #ifdef NO_ENTITY_PREDICTION return; #else Assert( ent ); const byte *original_state_data = NULL; const byte *predicted_state_data = NULL; bool data_type_original = TD_OFFSET_PACKED; bool data_type_predicted = TD_OFFSET_PACKED; if ( ent->GetPredictable() ) { original_state_data = (const byte *)ent->GetOriginalNetworkDataObject(); predicted_state_data = (const byte *)ent->GetPredictedFrame( commands_acknowledged - 1 ); } else { // Compare against self so that we're just dumping data to screen original_state_data = ( const byte * )ent; data_type_original = TD_OFFSET_NORMAL; predicted_state_data = original_state_data; data_type_predicted = 
data_type_original; } Assert( original_state_data ); Assert( predicted_state_data ); Clear(); CPredictionCopy datacompare( PC_EVERYTHING, (byte *)original_state_data, data_type_original, predicted_state_data, data_type_predicted, CPredictionCopy::TRANSFERDATA_ERRORCHECK_DESCRIBE, ::DumpComparision ); // Don't spew debugging info m_nCurrentIndex = 0; datacompare.TransferData( "", ent->entindex(), ent->GetPredDescMap() ); m_hDumpEntity = ent; m_DumpEntityInfo.RedoSort(); #endif } void CPDumpPanel::Clear() { m_DumpEntityInfo.RemoveAll(); } void CPDumpPanel::Paint() { C_BaseEntity *ent = m_hDumpEntity; if ( !ent ) { Clear(); return; } bool bPredictable = ent->GetPredictable(); // Now output the strings int x[5]; x[0] = 20; int columnwidth = 375; int numcols = GetWide() / columnwidth; int i; numcols = clamp( numcols, 1, 5 ); for ( i = 0; i < numcols; i++ ) { if ( i == 0 ) { x[i] = 20; } else { x[i] = x[ i-1 ] + columnwidth - 20; } } int nFontTweak = -7; int c = m_DumpEntityInfo.Count(); int fonttall = vgui::surface()->GetFontTall( m_FontSmall ) + nFontTweak; int fonttallMedium = vgui::surface()->GetFontTall( m_FontMedium ) + nFontTweak; int fonttallBig = vgui::surface()->GetFontTall( m_FontBig ) + nFontTweak; char currentclass[ 128 ]; currentclass[ 0 ] = 0; int starty = 15; int y = starty; int col = 0; int r = 255; int g = 255; int b = 255; int a = 255; char classextra[ 32 ]; classextra[ 0 ] = 0; char classprefix[ 32 ]; Q_strncpy( classprefix, "class ", sizeof( classprefix ) ); const char *classname = ent->GetClassname(); if ( !classname[ 0 ] ) { classname = typeid( *ent ).name(); Q_strncpy( classextra, " (classmap missing)", sizeof( classextra ) ); classprefix[ 0 ] = 0; } char sz[ 512 ]; wchar_t szconverted[ 1024 ]; surface()->DrawSetTextFont( m_FontBig ); surface()->DrawSetTextColor( Color( 255, 255, 255, 255 ) ); surface()->DrawSetTextPos( x[ col ] - 10, y - fonttallBig - 2 ); Q_snprintf( sz, sizeof( sz ), "entity # %i: %s%s%s", ent->entindex(), classprefix, 
classname, classextra ); g_pVGuiLocalize->ConvertANSIToUnicode( sz, szconverted, sizeof(szconverted) ); surface()->DrawPrintText( szconverted, wcslen( szconverted ) ); for ( i = 0; i < c; i++ ) { DumpInfo *slot = &m_DumpEntityInfo[ i ]; if ( stricmp( slot->classname, currentclass ) ) { y += 2; surface()->DrawSetTextFont( m_FontMedium ); surface()->DrawSetTextColor( Color( 0, 255, 100, 255 ) ); surface()->DrawSetTextPos( x[ col ] - 10, y ); Q_snprintf( sz, sizeof( sz ), "%s", slot->classname ); g_pVGuiLocalize->ConvertANSIToUnicode( sz, szconverted, sizeof(szconverted) ); surface()->DrawPrintText( szconverted, wcslen( szconverted ) ); y += fonttallMedium; Q_strncpy( currentclass, slot->classname, sizeof( currentclass ) ); } PredictionDumpColor( false, bPredictable, slot->networked, !slot->noterrorchecked, slot->differs, slot->withintolerance, r, g, b, a ); surface()->DrawSetTextFont( m_FontSmall ); surface()->DrawSetTextColor( Color( r, g, b, a ) ); surface()->DrawSetTextPos( x[ col ], y ); Q_snprintf( sz, sizeof( sz ), "%s", slot->fieldstring ); g_pVGuiLocalize->ConvertANSIToUnicode( sz, szconverted, sizeof(szconverted) ); surface()->DrawPrintText( szconverted, wcslen( szconverted ) ); y += fonttall; if ( y >= GetTall() - fonttall - starty ) { y = starty; col++; if ( col >= numcols ) break; } } surface()->DrawSetTextFont( m_FontSmall ); // Figure how far over the legend needs to be. 
const char *pFirstAndLongestString = "Not networked, no differences"; g_pVGuiLocalize->ConvertANSIToUnicode( pFirstAndLongestString, szconverted, sizeof(szconverted) ); int textSizeWide, textSizeTall; surface()->GetTextSize( m_FontSmall, szconverted, textSizeWide, textSizeTall ); // Draw a legend now int xpos = ScreenWidth() - textSizeWide - 5; y = ScreenHeight() - 7 * fonttall - 80; // Not networked, no differences PredictionDumpColor( true, bPredictable, false, false, false, false, r, g, b, a ); surface()->DrawSetTextColor( Color( r, g, b, a ) ); surface()->DrawSetTextPos( xpos, y ); Q_strncpy( sz, pFirstAndLongestString, sizeof( sz ) ); g_pVGuiLocalize->ConvertANSIToUnicode( sz, szconverted, sizeof(szconverted) ); surface()->DrawPrintText( szconverted, wcslen( szconverted ) ); y += fonttall; // Networked, no error check PredictionDumpColor( true, bPredictable, true, false, false, false, r, g, b, a ); surface()->DrawSetTextColor( Color( r, g, b, a ) ); surface()->DrawSetTextPos( xpos, y ); Q_strncpy( sz, "Networked, not checked", sizeof( sz ) ); g_pVGuiLocalize->ConvertANSIToUnicode( sz, szconverted, sizeof(szconverted) ); surface()->DrawPrintText( szconverted, wcslen( szconverted ) ); y += fonttall; // Networked, with error check PredictionDumpColor( true, bPredictable, true, true, false, false, r, g, b, a ); surface()->DrawSetTextColor( Color( r, g, b, a ) ); surface()->DrawSetTextPos( xpos, y ); Q_strncpy( sz, "Networked, error checked", sizeof( sz ) ); g_pVGuiLocalize->ConvertANSIToUnicode( sz, szconverted, sizeof(szconverted) ); surface()->DrawPrintText( szconverted, wcslen( szconverted ) ); y += fonttall; // Differs, but within tolerance PredictionDumpColor( true, bPredictable, true, true, true, true, r, g, b, a ); surface()->DrawSetTextColor( Color( r, g, b, a ) ); surface()->DrawSetTextPos( xpos, y ); Q_strncpy( sz, "Differs, but within tolerance", sizeof( sz ) ); g_pVGuiLocalize->ConvertANSIToUnicode( sz, szconverted, sizeof(szconverted) ); 
surface()->DrawPrintText( szconverted, wcslen( szconverted ) ); y += fonttall; // Differs, not within tolerance, but not networked PredictionDumpColor( true, bPredictable, false, true, true, false, r, g, b, a ); surface()->DrawSetTextColor( Color( r, g, b, a ) ); surface()->DrawSetTextPos( xpos, y ); Q_strncpy( sz, "Differs, but not networked", sizeof( sz ) ); g_pVGuiLocalize->ConvertANSIToUnicode( sz, szconverted, sizeof(szconverted) ); surface()->DrawPrintText( szconverted, wcslen( szconverted ) ); y += fonttall; // Differs, networked, not within tolerance PredictionDumpColor( true, bPredictable, true, true, true, false, r, g, b, a ); surface()->DrawSetTextColor( Color( r, g, b, a ) ); surface()->DrawSetTextPos( xpos, y ); Q_strncpy( sz, "Differs, networked", sizeof( sz ) ); g_pVGuiLocalize->ConvertANSIToUnicode( sz, szconverted, sizeof(szconverted) ); surface()->DrawPrintText( szconverted, wcslen( szconverted ) ); y += fonttall; }
Orange-OpenSource/matos-profiles
matos-android/src/main/java/android/widget/ListPopupWindow.java
package android.widget; /* * #%L * Matos * $Id:$ * $HeadURL:$ * %% * Copyright (C) 2010 - 2014 Orange SA * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ @com.francetelecom.rd.stubs.annotation.ClassDone(0) public class ListPopupWindow { // Fields public static final int POSITION_PROMPT_ABOVE = 0; public static final int POSITION_PROMPT_BELOW = 1; public static final int MATCH_PARENT = -1; public static final int WRAP_CONTENT = -2; public static final int INPUT_METHOD_FROM_FOCUSABLE = 0; public static final int INPUT_METHOD_NEEDED = 1; public static final int INPUT_METHOD_NOT_NEEDED = 2; // Constructors @com.francetelecom.rd.stubs.annotation.CallBackRegister("onCreate") public ListPopupWindow(android.content.Context arg1){ } @com.francetelecom.rd.stubs.annotation.CallBackRegister("onCreate") public ListPopupWindow(android.content.Context arg1, android.util.AttributeSet arg2){ } @com.francetelecom.rd.stubs.annotation.CallBackRegister("onCreate") public ListPopupWindow(android.content.Context arg1, android.util.AttributeSet arg2, int arg3){ } @com.francetelecom.rd.stubs.annotation.CallBackRegister("onCreate") public ListPopupWindow(android.content.Context arg1, android.util.AttributeSet arg2, int arg3, int arg4){ } // Methods @com.francetelecom.rd.stubs.annotation.CallBack("onCreate") public boolean onKeyDown(int arg1, android.view.KeyEvent arg2){ return false; } @com.francetelecom.rd.stubs.annotation.CallBack("onCreate") public boolean onKeyUp(int arg1, 
android.view.KeyEvent arg2){ return false; } public void show(){ } public void setBackgroundDrawable(android.graphics.drawable.Drawable arg1){ } public int getWidth(){ return 0; } public int getHeight(){ return 0; } @com.francetelecom.rd.stubs.annotation.CallBack("onCreate") public boolean onKeyPreIme(int arg1, android.view.KeyEvent arg2){ return false; } public android.graphics.drawable.Drawable getBackground(){ return (android.graphics.drawable.Drawable) null; } public void setSelection(int arg1){ } public void setHeight(int arg1){ } public void setWidth(int arg1){ } public void setAdapter(ListAdapter arg1){ } public android.view.View getSelectedView(){ return (android.view.View) null; } public boolean performItemClick(int arg1){ return false; } public int getSelectedItemPosition(){ return 0; } public long getSelectedItemId(){ return 0l; } public void setOnItemClickListener(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onItemClick") AdapterView.OnItemClickListener arg1){ } public void setOnItemSelectedListener(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onItemSelected") AdapterView.OnItemSelectedListener arg1){ } public java.lang.Object getSelectedItem(){ return (java.lang.Object) null; } public void clearListSelection(){ } public boolean isDropDownAlwaysVisible(){ return false; } public void setDropDownAlwaysVisible(boolean arg1){ } public boolean isInputMethodNotNeeded(){ return false; } public void setForceIgnoreOutsideTouch(boolean arg1){ } public void dismiss(){ } public ListView getListView(){ return (ListView) null; } public boolean isShowing(){ return false; } public void setOnDismissListener(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onDismiss") PopupWindow.OnDismissListener arg1){ } public void setAnchorView(android.view.View arg1){ } public void setSoftInputMode(int arg1){ } public void setInputMethodMode(int arg1){ } public int getAnimationStyle(){ return 0; } public void setAnimationStyle(int arg1){ } public int 
getInputMethodMode(){ return 0; } public int getSoftInputMode(){ return 0; } public int getVerticalOffset(){ return 0; } public void setPromptPosition(int arg1){ } public void setListSelector(android.graphics.drawable.Drawable arg1){ } public void setVerticalOffset(int arg1){ } public void setHorizontalOffset(int arg1){ } public void setPromptView(android.view.View arg1){ } public int getHorizontalOffset(){ return 0; } public void postShow(){ } public android.view.View getAnchorView(){ return (android.view.View) null; } public void setModal(boolean arg1){ } public void setContentWidth(int arg1){ } public int getPromptPosition(){ return 0; } public boolean isModal(){ return false; } }
mehidi258/next-js-starter
src/components/brandsArchive/ColorsWidget/index.js
import React, { useState, useEffect } from 'react'; import Checkbox from '~/src/components/common/Checkbox'; import { AngleRight } from '~/src/components/icons'; import styles from './index.module.scss'; const ColorsWidget = ({ title, items }) => { let [toggleBtn, setToggleBtn] = useState(true); const handleToggleBtn = () => { setToggleBtn((toggleBtn = !toggleBtn)); }; const handleChange = (e) => { console.log(e.target.value); }; // const item = items.map((label) => { // <Checkbox label={label || ''} />; // }); return ( <div className={`${styles.filters__aside__widget} ${ toggleBtn ? styles.filters__aside__widget__expand : '' }`}> <div className={styles.filters__aside__widget__heading}> <h3>{title}</h3> <button className={`${styles.filters__toggle__btn} btn--no-style`} onClick={handleToggleBtn}> <AngleRight /> </button> </div> <form> <Checkbox label="In Stock" name="test1" value="1" onChange={handleChange} bgColor="#EFF4F1" /> <Checkbox label="Sold Out" name="test2" value="2" onChange={handleChange} bgColor="#46484D" /> <Checkbox label="Sold Out" name="test3" value="3" onChange={handleChange} bgColor="#FEBE15" /> <Checkbox label="Sold Out" name="test4" value="4" onChange={handleChange} bgColor="#55E4F8" /> <Checkbox label="Sold Out" name="test5" value="Five" onChange={handleChange} bgColor="#43B0FF" /> <Checkbox label="Sold Out" name="test6" value="6" onChange={handleChange} bgColor="#E289F0" /> <Checkbox label="Sold Out" name="test7" value="7" onChange={handleChange} bgColor="#F83C3C" /> <Checkbox label="Sold Out" name="test8" value="8" onChange={handleChange} bgColor="#FACD2D" /> <Checkbox label="Sold Out" name="test9" value="9" onChange={handleChange} bgColor="#3F66CC" /> <Checkbox label="Sold Out" name="test10" value="10" onChange={handleChange} bgColor="#AB6CFB" /> <Checkbox label="Sold Out" name="test11" value="11" onChange={handleChange} bgColor="#C1F552" /> <Checkbox label="Sold Out" name="test12" value="12" onChange={handleChange} bgColor="#EE5DBD" /> 
</form> </div> ); }; export default ColorsWidget;
NeoResearch/libbft
spec/go/src/replicated/machine_context.go
<filename>spec/go/src/replicated/machine_context.go package replicated import ( "errors" "github.com/NeoResearch/libbft/src/events" "github.com/NeoResearch/libbft/src/machine" "github.com/NeoResearch/libbft/src/single" ) type MachineContext interface { // get / set GetParams() single.Param GetMachine() machine.SingleTimerStateMachine GetEvents() []events.Event // methods AddEvent(event events.Event) RemoveEvent(index int) error } type MachineContextService struct { params single.Param singleTimerStateMachine machine.SingleTimerStateMachine events []events.Event } func NewMachineContext(params single.Param, singleTimerStateMachine machine.SingleTimerStateMachine, events []events.Event) MachineContext { return &MachineContextService{ params, singleTimerStateMachine, events, } } func (m *MachineContextService) GetParams() single.Param { return m.params } func (m *MachineContextService) GetMachine() machine.SingleTimerStateMachine { return m.singleTimerStateMachine } func (m *MachineContextService) GetEvents() []events.Event { return m.events } func (m *MachineContextService) AddEvent(event events.Event) { m.events = append(m.events, event) } func (m *MachineContextService) RemoveEvent(index int) error { if index < 0 || index > len(m.events) { return errors.New("invalid index") } m.events = append(m.events[:index], m.events[index+1:]...) return nil }
wspr-ncsu/acminer
src/org/sag/sootinit/AllAppsDexSootLoader.java
package org.sag.sootinit; import java.io.File; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import java.util.Set; import org.sag.common.io.FileHelpers; import org.sag.common.logging.ILogger; import org.sag.sootinit.SootInstanceWrapper.SootLoadKey; import com.google.common.base.Joiner; public class AllAppsDexSootLoader extends DexSootLoader { private static AllAppsDexSootLoader singleton; public static AllAppsDexSootLoader v(){ if(singleton == null) singleton = new AllAppsDexSootLoader(); return singleton; } public static void reset(){ singleton = null; } private AllAppsDexSootLoader(){ super(SootLoadKey.ALL_APP_DEX); } public final boolean load(Set<Path> allAppPaths, Path pathToFrameworkJimpleJar, Set<String> classesToLoad, int apiVersion, int javaVersion, ILogger logger){ if(allAppPaths == null || allAppPaths.isEmpty() || pathToFrameworkJimpleJar == null || classesToLoad == null || classesToLoad.isEmpty() || apiVersion < 0 || logger == null) throw new IllegalArgumentException("Error: The arguments cannot be null, all lists must contain elements, and the api must be valid."); List<String> ins = new ArrayList<>(allAppPaths.size()+1); StringBuilder sb = new StringBuilder(); for(Path p : allAppPaths) { if(p == null) throw new IllegalArgumentException("Error: The class path cannot contain null elements."); String path = FileHelpers.getNormAndAbsPath(p).toString(); sb.append(" ").append(path).append("\n"); ins.add(path); } String temp = FileHelpers.getNormAndAbsPath(pathToFrameworkJimpleJar).toString(); sb.append(" ").append(temp).append("\n"); ins.add(temp); String classpath = Joiner.on(File.pathSeparator).join(ins); logger.info("AllAppsDexSootLoader: Initilizing soot using the following apps, priv-apps, and framework jimple jar:\n{}",sb.toString()); boolean ret = load(classpath,ins,classesToLoad,true,apiVersion,javaVersion,logger); getSootInstanceWrapper().setSootInitValue(SootLoadKey.ALL_APP_DEX); logger.info("AllAppsDexSootLoader: 
Soot has been initilized successfully for the following apps, priv-apps, and framework jimple jar:\n{}", sb.toString()); return ret; } }
cdkd123/pure
src/com/mygame/pure/view/MyrogressBar.java
package com.mygame.pure.view; import android.content.Context; import android.graphics.Canvas; import android.util.AttributeSet; import android.view.LayoutInflater; import android.view.View; import android.widget.LinearLayout; import android.widget.RelativeLayout; import android.widget.TextView; import com.mygame.pure.R; public class MyrogressBar extends LinearLayout { private LinearLayout layout; private TextView progress_tv; private float mProgress; public MyrogressBar(Context context, AttributeSet attrs) { super(context, attrs); initView(context); } @Override public void draw(Canvas canvas) { // TODO Auto-generated method stub super.draw(canvas); progress_tv.setText((mProgress*100+"").replace(".0", "")); progress_tv.setPadding((int)(getWidth()*mProgress), 0, 0, 0); } @Override protected boolean drawChild(Canvas canvas, View child, long drawingTime) { // TODO Auto-generated method stub java.text.DecimalFormat df = new java.text.DecimalFormat("#0"); progress_tv.setText((df.format(mProgress*100)+"").replace(".0", "")); progress_tv.setPadding((int)(getWidth()*mProgress), 0, 0, 0); return super.drawChild(canvas, child, drawingTime); } private void initView(Context context) { LayoutInflater inflater = (LayoutInflater) context .getSystemService(Context.LAYOUT_INFLATER_SERVICE); layout = (LinearLayout) inflater.inflate(R.layout.mybar_layout, this); progress_tv = (TextView) layout.findViewById(R.id.progress_tv); } public void setProgress(float progress) { this.mProgress = progress; invalidate(); } }
carldea/Erdos
src/main/java/com/hendrix/erdos/types/UndirectedEdge.java
<filename>src/main/java/com/hendrix/erdos/types/UndirectedEdge.java package com.hendrix.erdos.types; /** * @author <NAME> */ public class UndirectedEdge extends Edge { public UndirectedEdge(IVertex v1, IVertex v2) { super(v1, v2, EDGE_DIRECTION.UNDIRECTED); } public UndirectedEdge(IVertex v1, IVertex v2, float weight) { super(v1, v2, EDGE_DIRECTION.UNDIRECTED, weight); } }
juseongkr/BOJ
leetcode/algorithm/221.cpp
<reponame>juseongkr/BOJ class Solution { public: int maximalSquare(vector<vector<char>>& matrix) { int n = matrix.size(); if (n == 0) return 0; int m = matrix[0].size(); vector<vector<int>> dp(n+1, vector<int>(m+1)); int ans = 0; for (int i=1; i<=n; ++i) { for (int j=1; j<=m; ++j) { if (matrix[i-1][j-1] == '1') dp[i][j] = min(dp[i-1][j-1], min(dp[i-1][j], dp[i][j-1])) + 1; ans = max(ans, dp[i][j]); } } return ans * ans; } };
ox-it/humfrey
humfrey/graphviz/views.py
import subprocess import rdflib from django.http import Http404, HttpResponse, HttpResponseBadRequest from django.template import RequestContext, loader from django.template.defaultfilters import slugify from django.shortcuts import render_to_response from django_conneg.http import HttpBadRequest from django_conneg.decorators import renderer from humfrey.results.views.standard import RDFView from humfrey.sparql.views.core import StoreView from humfrey.linkeddata.views import MappingView from humfrey.linkeddata.resource import Resource from humfrey.utils.namespaces import expand, NS class GraphVizView(RDFView, StoreView, MappingView): query = """ CONSTRUCT {{ {page_uri} foaf:topic ?entity . ?entity a ?type ; rdfs:label ?title . {relationPattern} }} WHERE {{ {{ SELECT DISTINCT ?entity WHERE {{ {selector} }} LIMIT 10000 }} ?entity a ?type . OPTIONAL {{ VALUES ?relation {{ {relations} }} {relationPattern} }} . OPTIONAL {{ ?entity rdfs:label|dc:title|skos:prefLabel|foaf:name ?title }} . NOT EXISTS {{ VALUES ?excludedType {{ {excludedTypes} }} ?entity a ?excludedType }} }} """ type_selector = """ ?orgType rdfs:subClassOf* {baseType} GRAPH {graph} {{ ?entity a ?orgType }} """ tree_selector = """ {subject} {relationAlternation}{{0,{depth}}} {object} """ def get(self, request, root=None, base_type=None, graph=None, relations=None, template='graphviz/graphviz', depth=4, max_depth=5, excluded_types=None, properties=None, inverted=None, minimal=None): make_uriref = lambda uri: expand(uri) if uri else None root = make_uriref(root or request.GET.get('root')) base_type = make_uriref(base_type or request.GET.get('base_type')) graph = make_uriref(graph or request.GET.get('graph')) relations = relations or [expand(relation) for relation in request.GET.getlist('relation')] inverted = inverted if (inverted is not None) else request.GET.get('inverted') == 'true' minimal = minimal if (minimal is not None) else request.GET.get('minimal') == 'true' if not relations: raise Http404 if 
inverted: relation_pattern = '?entity ?relation ?parent' else: relation_pattern = '?parent ?relation ?entity' if root and base_type: raise HttpBadRequest elif root: if not self.get_types(root): raise Http404 if inverted: subj, obj = '?entity', root.n3() else: subj, obj = root.n3(), '?entity' try: depth = min(int(request.GET.get('depth', depth)), max_depth) except (TypeError, ValueError): return HttpResponseBadRequest() selector = self.tree_selector.format(subject=subj, object=obj, depth=depth, relationAlternation='|'.join(r.n3() for r in relations)) elif base_type: selector = self.type_selector.format(graph=graph.n3() if graph else '?graph', baseType=base_type.n3()) excluded_types = excluded_types or [expand(t) for t in request.GET.getlist('exclude_type')] properties = properties or [expand(p) for p in request.GET.getlist('property')] page_uri = rdflib.URIRef(request.build_absolute_uri()) query = self.query.format(selector=selector, relations=' '.join(r.n3() for r in relations), excludedTypes=' '.join(t.n3() for t in excluded_types), relationPattern=relation_pattern, page_uri=page_uri.n3(), propertyPatterns='\n '.join('OPTIONAL { ?entity %s ?p%s } .' 
% (p.n3(), i) for i, p in enumerate(properties)), propertyTriples=''.join(';\n %s ?p%s' % (p.n3(), i) for i, p in enumerate(properties)) ) graph = self.endpoint.query(query) subjects = [Resource(s, graph, self.endpoint) for s in set(graph.objects(page_uri, NS['foaf'].topic))] subjects.sort(key=lambda s: s.label) subject = Resource(root, graph, self.endpoint) if root else None context = { 'graph': graph, 'queries': [graph.query], 'subjects': subjects, 'subject': subject, 'inverted': inverted, 'relations': relations, 'minimal': minimal, 'filename_base': slugify(subject.label if subject else 'graphviz')[:32] } for subject in subjects: if not inverted: subject.children = set(Resource(s, graph, self.endpoint) for relation in relations for s in graph.objects(subject._identifier, relation)) else: subject.children = set(Resource(s, graph, self.endpoint) for relation in relations for s in graph.subjects(relation, subject._identifier)) for child in subject.children: if (page_uri, NS['foaf'].topic, child._identifier) in graph: child.display = True return self.render(request, context, template) _DOT_LAYOUTS = "circo dot fdp neato nop nop1 nop2 osage patchwork sfdp twopi".split() _DOT_OUTPUTS = [ dict(format='bmp', mimetypes=('image/x-bmp','image/x-ms-bmp'), name='BMP', dot_output='bmp'), dict(format='xdot', mimetypes=('text/vnd.graphviz',), name='xDOT', dot_output='xdot', priority=0.9), dict(format='gv', mimetypes=('text/vnd.graphviz',), name='DOT (GraphViz)', dot_output='gv'), dict(format='jpeg', mimetypes=('image/jpeg',), name='JPEG', dot_output='jpeg'), dict(format='png', mimetypes=('image/png',), name='PNG', dot_output='png'), dict(format='ps', mimetypes=('application/postscript',), name='PostScript', dot_output='ps'), dict(format='pdf', mimetypes=('application/pdf',), name='PDF', dot_output='pdf'), dict(format='svg', mimetypes=('image/svg+xml',), name='SVG', dot_output='svg'), ] def _get_dot_renderer(output): def dot_renderer(self, request, context, template_name): layout 
= request.GET.get('layout') if layout not in self._DOT_LAYOUTS: layout = 'fdp' template = loader.get_template(template_name + '.gv') plain_gv = template.render(RequestContext(request, context)) dot = subprocess.Popen(['dot', '-K'+layout, '-T'+dot_output], stdin=subprocess.PIPE, stdout=subprocess.PIPE) dot_stdout, _ = dot.communicate(input=plain_gv.encode('utf-8')) response = HttpResponse(dot_stdout, mimetype=output['mimetypes'][0]) response['Content-Disposition'] = 'inline; filename="{0}.{1}"'.format(context['filename_base'], output['format']) return response dot_output = output.pop('dot_output') dot_renderer.__name__ = 'render_%s' % output['format'] return renderer(**output)(dot_renderer) for output in _DOT_OUTPUTS: locals()['render_%s' % output['format']] = _get_dot_renderer(output) del _get_dot_renderer, output @renderer(format="graphml", mimetypes=('application/x-graphml+xml',), name="GraphML") def render_graphml(self, request, context, template_name): response = render_to_response(template_name + '.graphml', context, context_instance=RequestContext(request), mimetype='application/x-graphml+xml') response['Content-Disposition'] = 'attachment; filename="{0}.graphml"'.format(context['filename_base']) return response
larryRishi/algorithm004-05
Week 3/id_105/LeetCode_455_105.java
package id_105;

import java.util.Arrays;

/**
 * LeetCode 455: Assign Cookies.
 *
 * Each child i has a greed factor g[i] (the minimum cookie size that
 * satisfies them) and each cookie j has a size s[j]. A cookie can be given
 * to at most one child, and a child receives at most one cookie. The goal
 * is to maximize the number of satisfied children.
 */
public class LeetCode_455_105 {

    /**
     * Greedy assignment: sort both arrays ascending, then sweep the cookies
     * once. The smallest still-unused cookie is offered to the least greedy
     * unsatisfied child; if it is too small for even that child, it is too
     * small for everyone remaining and is discarded.
     *
     * @param g greed factors of the children
     * @param s sizes of the cookies
     * @return the maximum number of children that can be satisfied
     */
    public int findContentChildren(int[] g, int[] s) {
        Arrays.sort(g);
        Arrays.sort(s);
        int satisfied = 0;
        int cookie = 0;
        int child = 0;
        while (cookie < s.length && child < g.length) {
            if (s[cookie] >= g[child]) {
                // This cookie satisfies the current child; consume both.
                satisfied++;
                child++;
            }
            // Whether or not it satisfied anyone, this cookie is spent/skipped.
            cookie++;
        }
        return satisfied;
    }
}
essepuntato/EarmarkDataStructure
src/it/essepuntato/earmark/core/test/FrancescoPoggiTestOne.java
package it.essepuntato.earmark.core.test;

import it.essepuntato.earmark.core.EARMARKChildNode;
import it.essepuntato.earmark.core.EARMARKDocument;
import it.essepuntato.earmark.core.EARMARKHierarchicalNode;
import it.essepuntato.earmark.core.Range;

import java.util.ArrayList;
import java.util.List;

/*
I have a few notes regarding the EARMARK API, I hope they are useful:

1a) getNamespaceURI() returns a String (implementing a method declared in the
    EARMARKNamedNode interface): "The namespace URI of the general identifier
    associated to this node, or null if it is unspecified."
    hasNamespace: returns a URI: "This method returns the namespace of this
    markup item."
    The javadoc should probably be revised: in practice one returns the URI of
    the namespace, the other the namespace string. It is also not uniform: for
    the ID there is only the hasID method returning the URI of the ID, and no
    method returning the string — the string-returning method should either be
    added to both or removed.

1b) EARMARKDocument.generalIdentifierIsUsedBy("") and
    EARMARKDocument.getMarkupItemByGeneralIdentifier(""): are these different
    methods or duplicates? The implementations differ, but the semantics look
    the same from the javadoc.

1c) EARMARKDocument.removeMarkupItem(mi) also removes the whole content of the
    markup item (all dominated elements, at any depth), even when other items
    overlap it. Is that intended?

1d) problem in cloneNode(): the root(s) of the graph are not attached to the
    document. For example:
        EARMARKDocument earDoc;            // load an EARMARK document
        EARMARKDocument earDocMainHierarchy;
        earDocMainHierarchy = (EARMARKDocument) earDoc.cloneNode(true);
        Collection roots  = earDoc.getChildElements();
        Collection roots2 = earDocMainHierarchy.getChildElements();
    Here roots = { the root element(s) }, while roots2 = {} is empty.

1d2) the root element of an EARMARKDocument has no parents:
    rootElement.getParentNodes() returns an empty set. Is that correct?
    Shouldn't it return the EARMARKDocument that contains it?

1e) in a cloned document the method hasChildNodes() does not behave correctly.
    For example:
        Collection children = node.getChildNodes();   // contains three elements
        boolean hasChildren = node.hasChildNodes();   // ...but this is false

1f) invoking createPointerRange(Docuverse docuverse, Integer begin, Integer end)
    [the overload without an explicit id parameter] can produce a range whose
    id already exists in the document (in particular a range with id "r1" that
    was already used by another range). This happens in a document obtained via
    cloneNode() of another document, so it may be caused by stale internal
    structures.
*/

/**
 * Regression test that exercises the API issues reported above: root-node
 * parentage (1d2), document cloning (1d), child-node consistency on clones
 * (1e), and pointer-range creation on clones (1f).
 */
public class FrancescoPoggiTestOne extends AbstractTest {

	// Document under test; replaced via useDocument().
	private EARMARKDocument document = null;

	public FrancescoPoggiTestOne(EARMARKDocument doc) {
		document = doc;
	}

	@Override
	public List<String> doTest() {
		List<String> result = new ArrayList<String>();

		// Issue 1d2: a root node should report the document as its parent.
		result.add("\n[i] Check parents of a root node");
		EARMARKChildNode child = document.getChildNodes().iterator().next();
		String msg1 = "Check if " + child + " has the document has father, test";
		if (child.getParentNodes().contains(document)) {
			result.add(passed(msg1));
		} else {
			result.add(failed(msg1, "The parents are: " + child.getParentNodes()));
		}

		// Issue 1d: deep-clone the document and compare child counts.
		result.add("\n[i] Clone the document");
		EARMARKDocument newDoc = (EARMARKDocument) document.cloneNode(true);

		result.add("\n[i] Compare children of the document with the cloned one");
		String msg2 = "Check if " + newDoc + " (cloned document) has the same number of" +
			" children of " + document + " (original document), test";
		if (document.getChildNodes().size() == newDoc.getChildNodes().size()) {
			result.add(passed(msg2));
		} else {
			result.add(failed(msg2, "The children are respectively: " +
				"\n\t- " + document + ": " + document.getChildNodes() +
				"\n\t- " + newDoc + ": " + newDoc.getChildNodes()));
		}

		// Issue 1e: getChildNodes() and hasChildNodes() must agree on a clone.
		result.add("\n[i] Methods for children in the cloned document");
		String msg3 = "Check if in " + newDoc + " (cloned document) the methods getChildNodes() and hasChildNodes()" +
			"return consistent answers, both for the document and for a random node of it, test";
		EARMARKHierarchicalNode childCloned = (EARMARKHierarchicalNode) newDoc.getChildNodes().iterator().next();
		if (newDoc.getChildNodes().isEmpty() == !newDoc.hasChildNodes() &&
				childCloned.getChildNodes().isEmpty() == !childCloned.hasChildNodes()) {
			result.add(passed(msg3));
		} else {
			result.add(failed(msg3, "The children are respectively: " +
				"\n\t- " + newDoc + ": " + newDoc.getChildNodes() +
				"\n\t- " + childCloned + ": " + childCloned.getChildNodes()));
		}

		// Issue 1f: pointer-range creation on a cloned document.
		result.add("\n[i] Create a new pointer range in the cloned document");
		String msg4 = "Check if in " + newDoc + " (cloned document) the methods for creating a new pointer range" +
			" return a range with an ID that was already present in the document, test";
		Range pRange = newDoc.createPointerRange(newDoc.getAllDocuverses().iterator().next(), null, null);
		if (pRange == null) {
			result.add(failed(msg4, "The range was not created."));
		} else {
			result.add(passed(msg4));
		}

		return result;
	}

	@Override
	public String getTestName() {
		return "<NAME> test 1";
	}

	@Override
	public boolean useDocument(EARMARKDocument document) {
		this.document = document;
		return true;
	}
}
SAS-Systems/imflux
src/main/java/sas/systems/imflux/participant/ParticipantDatabase.java
/*
 * Copyright 2015 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package sas.systems.imflux.participant;

import java.net.SocketAddress;
import java.util.Collection;
import java.util.Map;

import sas.systems.imflux.packet.DataPacket;
import sas.systems.imflux.packet.rtcp.SdesChunk;

/**
 * Interface <br/>
 * Implementations of this interface should store and manage the participants.
 *
 * @author <a href="http://bruno.biasedbit.com/"><NAME></a>
 * @author <a href="https://github.com/CodeLionX">CodeLionX</a>
 */
public interface ParticipantDatabase {

    /**
     * @return the identifier of this participant database
     */
    String getId();

    /**
     * Receivers are explicitly added participants.
     *
     * @return a {@link Collection} with all receivers
     */
    Collection<RtpParticipant> getReceivers();

    /**
     * This map contains all existing members.
     *
     * @return a {@link Map} with all members, keyed by SSRC
     */
    Map<Long, RtpParticipant> getMembers();

    /**
     * Performs the specified operation on all receivers.
     *
     * @param operation the operation to apply to each receiver
     */
    void doWithReceivers(ParticipantOperation operation);

    /**
     * Performs the specified operation on all members (existing participants).
     *
     * @param operation the operation to apply to each member
     */
    void doWithParticipants(ParticipantOperation operation);

    /**
     * Adds a participant as a receiver to the database.
     *
     * @param remoteParticipant the receiver
     * @return {@code true} if the participant was added, {@code false} otherwise
     */
    boolean addReceiver(RtpParticipant remoteParticipant);

    /**
     * Removes a participant from the receiver list.
     *
     * @param remoteParticipant participant to be removed
     * @return {@code true} if the participant was successfully removed, {@code false} otherwise
     */
    boolean removeReceiver(RtpParticipant remoteParticipant);

    /**
     * Looks up a member by its synchronization source identifier.
     *
     * @param ssrc the SSRC of the participant
     * @return the matching {@link RtpParticipant}, or {@code null} if none is known
     *         (behavior for a missing SSRC is implementation-defined — TODO confirm)
     */
    RtpParticipant getParticipant(long ssrc);

    /**
     * Creates a {@link RtpParticipant} if it isn't specified as a receiver, otherwise it is added
     * to the member-map.
     *
     * @param origin remote address the packet was received from
     * @param packet the received RTP data packet
     * @return reference to the {@link RtpParticipant}-object
     */
    RtpParticipant getOrCreateParticipantFromDataPacket(SocketAddress origin, DataPacket packet);

    /**
     * Creates a {@link RtpParticipant} if it isn't specified as a receiver, otherwise it is added
     * to the member-map.
     *
     * @param origin remote address the chunk was received from
     * @param chunk the received RTCP SDES chunk
     * @return reference to the {@link RtpParticipant}-object
     */
    RtpParticipant getOrCreateParticipantFromSdesChunk(SocketAddress origin, SdesChunk chunk);

    /**
     * @return the number of explicitly added receivers
     */
    int getReceiverCount();

    /**
     * @return the number of known members (existing participants)
     */
    int getParticipantCount();

    /**
     * Remove inactive and obsolete members.
     */
    void cleanup();
}
clobber/MAME-OS-X
mame/src/mame/includes/jack.h
/*************************************************************************

    <NAME>

*************************************************************************/

// Driver state for the MAME "jack" driver family (jack/joinem).
class jack_state : public driver_device
{
public:
	jack_state(const machine_config &mconfig, device_type type, const char *tag)
		: driver_device(mconfig, type, tag) { }

	/* memory pointers */
	UINT8 *    m_videoram;                 // tile code RAM for the background layer
	UINT8 *    m_colorram;                 // per-tile color/attribute RAM
	UINT8 *    m_spriteram;
//  UINT8 *    paletteram;    // currently this uses generic palette handling
	size_t     m_spriteram_size;

	/* video-related */
	tilemap_t  *m_bg_tilemap;              // background tilemap, built in VIDEO_START

	/* misc */
	int m_timer_rate;                      // NOTE(review): semantics not visible here — set elsewhere in the driver
	int m_joinem_snd_bit;                  // joinem sound latch bit (presumably; confirm in driver code)
	int m_question_address;                // question-ROM access state (quiz sets) — TODO confirm
	int m_question_rom;
	int m_remap_address[16];

	/* devices */
	cpu_device *m_audiocpu;                // sound CPU handle
};

/*----------- defined in video/jack.c -----------*/

// Memory-mapped video handlers and video bootstrap entry points.
WRITE8_HANDLER( jack_videoram_w );
WRITE8_HANDLER( jack_colorram_w );
WRITE8_HANDLER( jack_paletteram_w );
READ8_HANDLER( jack_flipscreen_r );
WRITE8_HANDLER( jack_flipscreen_w );

VIDEO_START( jack );
SCREEN_UPDATE( jack );

PALETTE_INIT( joinem );
VIDEO_START( joinem );
SCREEN_UPDATE( joinem );
kyle-piddington/MoonEngine
engine/src/GLWrapper/GLSampler.h
#pragma once #include <unordered_map> #include "OpenGL.h" using namespace std; namespace MoonEngine { class GLSampler { public: /** * Create a sampler object. */ GLSampler(); /** * Delete a sampler object. */ ~GLSampler(); GLuint getId(); void init(unordered_map<GLenum, GLint>, unordered_map<GLenum, GLfloat>); private: GLuint _samplerId; }; }
packet-tracker/onos-1.2.0-custom-build
web/api/src/main/java/org/onosproject/rest/resources/StatisticsWebResource.java
<gh_stars>0 /* * Copyright 2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.rest.resources; import java.util.Spliterator; import java.util.Spliterators; import java.util.stream.StreamSupport; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriBuilder; import javax.ws.rs.core.UriInfo; import org.onosproject.codec.JsonCodec; import org.onosproject.net.ConnectPoint; import org.onosproject.net.Device; import org.onosproject.net.DeviceId; import org.onosproject.net.Link; import org.onosproject.net.device.DeviceService; import org.onosproject.net.flow.FlowRuleService; import org.onosproject.net.flow.TableStatisticsEntry; import org.onosproject.net.link.LinkService; import org.onosproject.net.statistic.Load; import org.onosproject.net.statistic.StatisticService; import org.onosproject.rest.AbstractWebResource; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import static org.onosproject.net.DeviceId.deviceId; import static org.onosproject.net.PortNumber.portNumber; /** * Query flow statistics. 
*/ @Path("statistics") public class StatisticsWebResource extends AbstractWebResource { @Context UriInfo uriInfo; /** * Get load statistics for all links or for a specific link. * * @param deviceId (optional) device ID for a specific link * @param port (optional) port number for a specified link * @return JSON encoded array of Load objects */ @GET @Path("flows/link") @Produces(MediaType.APPLICATION_JSON) public Response getLoads(@QueryParam("device") String deviceId, @QueryParam("port") String port) { Iterable<Link> links; if (deviceId == null || port == null) { links = get(LinkService.class).getLinks(); } else { ConnectPoint connectPoint = new ConnectPoint(deviceId(deviceId), portNumber(port)); links = get(LinkService.class).getLinks(connectPoint); } ObjectNode result = mapper().createObjectNode(); ArrayNode loads = mapper().createArrayNode(); JsonCodec<Load> loadCodec = codec(Load.class); StatisticService statsService = getService(StatisticService.class); StreamSupport.stream(Spliterators.spliteratorUnknownSize( links.iterator(), Spliterator.ORDERED), false) .forEach(link -> { ObjectNode loadNode = loadCodec.encode(statsService.load(link), this); UriBuilder locationBuilder = uriInfo.getBaseUriBuilder() .path("links") .queryParam("device", link.src().deviceId().toString()) .queryParam("port", link.src().port().toString()); loadNode.put("link", locationBuilder.build().toString()); loads.add(loadNode); }); result.set("loads", loads); return ok(result).build(); } /** * Get table statistics for all tables of all devices.
* * @return JSON encoded array of table statistics */ @GET @Path("flows/tables") @Produces(MediaType.APPLICATION_JSON) public Response getTableStatistics() { final FlowRuleService service = get(FlowRuleService.class); final Iterable<Device> devices = get(DeviceService.class).getDevices(); final ObjectNode root = mapper().createObjectNode(); final ArrayNode rootArrayNode = root.putArray("device-table-statistics"); for (final Device device : devices) { final ObjectNode deviceStatsNode = mapper().createObjectNode(); deviceStatsNode.put("device", device.id().toString()); final ArrayNode statisticsNode = deviceStatsNode.putArray("table-statistics"); final Iterable<TableStatisticsEntry> tableStatsEntries = service.getFlowTableStatistics(device.id()); if (tableStatsEntries != null) { for (final TableStatisticsEntry entry : tableStatsEntries) { statisticsNode.add(codec(TableStatisticsEntry.class).encode(entry, this)); } } rootArrayNode.add(deviceStatsNode); } return ok(root).build(); } /** * Get table statistics for all tables of a specified device. * * @param deviceId device ID * @return JSON encoded array of table statistics */ @GET @Path("flows/tables/{deviceId}") @Produces(MediaType.APPLICATION_JSON) public Response getTableStatisticsByDeviceId(@PathParam("deviceId") String deviceId) { final FlowRuleService service = get(FlowRuleService.class); final Iterable<TableStatisticsEntry> tableStatisticsEntries = service.getFlowTableStatistics(DeviceId.deviceId(deviceId)); final ObjectNode root = mapper().createObjectNode(); final ArrayNode rootArrayNode = root.putArray("table-statistics"); final ObjectNode deviceStatsNode = mapper().createObjectNode(); deviceStatsNode.put("device", deviceId); final ArrayNode statisticsNode = deviceStatsNode.putArray("table-statistics"); for (final TableStatisticsEntry entry : tableStatisticsEntries) { statisticsNode.add(codec(TableStatisticsEntry.class).encode(entry, this)); } rootArrayNode.add(deviceStatsNode); return ok(root).build(); } }
Rose2073/RoseCppSource
Luogu/U93191.cpp
#include <cstdio>
#include <cmath>

// Ring-buffer size for memoized primality results.
const int CACHE_MAX = 2000;

// One memoized probe: the value, whether it is prime, and - for composites -
// the smallest divisor found ("sign"; -1 means the value is prime).
struct Pair {
    bool isPrime;
    long long value;
    long long sign;
} cache[CACHE_MAX];

int cacheCount = 0;  // total insertions; wraps around the ring via % CACHE_MAX

// Trial division up to sqrt(n). Returns the smallest divisor found,
// or -1 when n is prime. Tests 2, 3, then odd candidates from 5 upward
// (the inner i++ skips even candidates once 2 has been tested).
inline long long isPrime(const long long n) {
    const long long high = floor(sqrt(n));
    for (long long i = 2; i <= high; i++) {
        if (i >= 4) {
            i++;  // jump to the next odd candidate
        }
        if (n % i == 0) {
            return i;
        }
    }
    return -1;
}

// Memoized primality check: scan the cache first, otherwise compute and
// record the result in the ring buffer.
inline bool isPrimeCached(const long long n) {
    int local = cacheCount > CACHE_MAX ? CACHE_MAX : cacheCount;
    for (int i = 0; i < local; i++) {
        if (cache[i].value == n) {
            return cache[i].isPrime;
        }
    }
    long long sign = isPrime(n);
    cache[cacheCount % CACHE_MAX].isPrime = (sign == -1);
    cache[cacheCount % CACHE_MAX].value = n;
    cache[cacheCount % CACHE_MAX].sign = sign;
    cacheCount++;
    return sign == -1;
}

// Return the cached smallest divisor ("sign") of n.
inline long long getSign(const long long n) {
    int local = cacheCount > CACHE_MAX ? CACHE_MAX : cacheCount;
    for (int i = 0; i < local; i++) {
        if (cache[i].value == n) {
            return cache[i].sign;
        }
    }
    // FIX: the original flowed off the end of this non-void function when n
    // was not in the cache - undefined behavior. Recompute as a safe fallback
    // (callers always query right after isPrimeCached(n), so this path is
    // normally cold).
    return isPrime(n);
}

// Number of prime factors of n counted with multiplicity (Omega(n));
// f(1) == 0. Recurses by peeling off the smallest divisor each step.
inline int f(long long n) {
    if (n == 1) {
        return 0;
    }
    bool prime = isPrimeCached(n);
    if (prime) {
        return 1;
    } else {
        long long sign = getSign(n);
        return f(sign) + f(n / sign);
    }
}

// Reads a count n, then n integers, printing f(c) for each.
int main() {
    int n;
    long long c;
    scanf("%d", &n);
    for (int i = 0; i < n; i++) {
        scanf("%lld", &c);
        printf("%d\n", f(c));
    }
    return 0;
}
daedalus4096/PrimalMagick
src/main/java/com/verdantartifice/primalmagick/client/gui/grimoire/DisciplineIndexPage.java
package com.verdantartifice.primalmagick.client.gui.grimoire;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import javax.annotation.Nonnull;

import com.mojang.blaze3d.vertex.PoseStack;
import com.verdantartifice.primalmagick.client.gui.GrimoireScreen;
import com.verdantartifice.primalmagick.client.gui.widgets.grimoire.DisciplineButton;
import com.verdantartifice.primalmagick.common.research.ResearchDiscipline;

import net.minecraft.network.chat.Component;

/**
 * Grimoire page showing the list of available disciplines.
 *
 * @author Daedalus4096
 */
public class DisciplineIndexPage extends AbstractPage {
    // Disciplines listed on this page, in insertion order.
    protected List<ResearchDiscipline> contents = new ArrayList<>();
    // Whether this is the first page of the index (only it renders the title).
    protected boolean firstPage;

    public DisciplineIndexPage() {
        this(false);
    }

    public DisciplineIndexPage(boolean first) {
        this.firstPage = first;
    }

    /**
     * @return an unmodifiable view of the disciplines on this page
     */
    @Nonnull
    public List<ResearchDiscipline> getDisciplines() {
        return Collections.unmodifiableList(this.contents);
    }

    /**
     * Append a discipline to this page's contents.
     */
    public boolean addDiscipline(ResearchDiscipline discipline) {
        return this.contents.add(discipline);
    }

    public boolean isFirstPage() {
        return this.firstPage;
    }

    @Override
    protected String getTitleTranslationKey() {
        return "primalmagick.grimoire.index_header";
    }

    @Override
    public void render(PoseStack matrixStack, int side, int x, int y, int mouseX, int mouseY) {
        // Just render the title; buttons have already been added
        if (this.isFirstPage() && side == 0) {
            this.renderTitle(matrixStack, side, x, y, mouseX, mouseY, null);
        }
    }

    @Override
    public void initWidgets(GrimoireScreen screen, int side, int x, int y) {
        // Add a button to the screen for each discipline in the page contents,
        // stacking them vertically; the x offset shifts buttons onto the
        // right-hand page when side == 1.
        for (ResearchDiscipline discipline : this.getDisciplines()) {
            Component text = Component.translatable(discipline.getNameTranslationKey());
            DisciplineButton button = screen.addWidgetToScreen(new DisciplineButton(x + 12 + (side * 140), y, text, screen, discipline, true, true));
            y += button.getHeight();
        }
    }
}
alinous-core/codable-cash
src_test/blockchain/trx/test_address_manager.cpp
/*
 * test_address_manager.cpp
 *
 *  Created on: 2022/03/22
 *      Author: iizuka
 */
#include "test_utils/t_macros.h"

#include "base/StackRelease.h"
#include "base/UnicodeString.h"

#include "bc_address/BlockchainAddressManager.h"
#include "bc_address/BlockchainAddress.h"

#include "numeric/BigInteger.h"

#include "ecda/ScPublicKey.h"

using namespace alinous;
using namespace codablecash;

TEST_GROUP(TestBlockchainAddressManagerGroup) {
	TEST_SETUP(){}
	TEST_TEARDOWN(){}
};

// Exercises BlockchainAddressManager: create an address from a fixed-ish
// value, verify its string form, check digit, equality, and cloning.
// NOTE(review): the seed is random but the expected address string is fixed -
// this suggests createBlockchainAddress(12345678901) derives the address from
// the numeric argument only; confirm against the manager implementation.
TEST(TestBlockchainAddressManagerGroup, case01){
	BigInteger seed = BigInteger::ramdom();
	BlockchainAddressManager* mgr = new BlockchainAddressManager(10, &seed); __STP(mgr);

	BlockchainAddress* address = mgr->createBlockchainAddress(12345678901);

	// The string form must match the known-good encoding.
	UnicodeString* str = address->makeStringAddress(); __STP(str);
	UnicodeString ans(L"C8DYG5LxVMRgbSUh66ts5jQPKGdQ7oU6NBwq61fmtDU7o");
	CHECK(ans.equals(str));

	// Check-digit validation and reflexive equality.
	bool bl = address->checkCheckDigit();
	CHECK(bl)

	bl = address->equals(address);
	CHECK(bl)

	// A clone with a different shard id is still equal: equality ignores
	// the shard id (bl == false asserts the two are NOT equal after the
	// shard change - i.e. shard id IS part of equality).
	BlockchainAddress* address2 = dynamic_cast<BlockchainAddress*>(address->clone()); __STP(address2);
	address2->setShardId(1);

	bl = address->equals(address2);
	CHECK(bl == false)
}
DataSystemsGroupUT/ConformanceCheckingUsingTries
src/main/java/org/processmining/logfiltering/legacy/plugins/logfiltering/enumtypes/FilterLevel.java
package org.processmining.logfiltering.legacy.plugins.logfiltering.enumtypes;

/**
 * Granularity at which log filtering operates: on whole traces or on
 * individual events within traces.
 */
public enum FilterLevel {
	Trace, Event
}
KNMI/VERCE
verce-hpc-pe/src/mpi/create_movie_snapshot_tuned.py
#!/usr/bin/env python
# NOTE(review): this is Python 2 code (print statements, backtick repr) and
# will not run under Python 3 without porting.
import array
import numpy
import matplotlib.pyplot as plt
from scipy.interpolate import griddata
#from matplotlib.mlab import griddata
import gc
from mpl_toolkits.axes_grid1 import make_axes_locatable
import os


def createimg(f, step=1000, extreme=0.0008):
    # Render one movie-data snapshot file `f` into a 4-panel PNG
    # (Vx, Vy, Vz and PGV) saved next to the input as plt<name>.png.
    # `extreme` sets the symmetric color-scale limit in m/s.
    splitStr = f.split("/")
    outputFile = '/'.join(map(str, f.split("/")[:-1])) + '/plt' + splitStr[-1] + '.png'
    X, Y, zs, vxs, vys, vzs, pgv = read_moviedata(f)
    fig = plt.figure(figsize=[22, 15])
    # Panel 1: Vx on a diverging scale, with topography contours overlaid.
    ax1 = plt.subplot(221, aspect='equal')
    cs1 = ax1.contourf(X, Y, vxs, levels=numpy.linspace(-1 * extreme, extreme, 101), cmap='seismic', vmin=-1 * extreme, vmax=extreme, extend='both')
    divider = make_axes_locatable(ax1)
    cax1 = divider.append_axes("right", size="1%", pad=0.05)
    plt.colorbar(cs1, cax=cax1, ticks=[-1 * extreme, 0, extreme], extend='both')
    ax1.contour(X, Y, zs, levels=[-5, 0, 300, 2000], linewidths=0.5, colors='k')
    ax1.set_title('Vx, m/s')
    # Panel 2: Vy.
    ax2 = plt.subplot(222, aspect='equal')
    cs2 = ax2.contourf(X, Y, vys, levels=numpy.linspace(-1 * extreme, extreme, 101), cmap='seismic', vmin=-1 * extreme, vmax=extreme, extend='both')
    divider = make_axes_locatable(ax2)
    cax2 = divider.append_axes("right", size="1%", pad=0.05)
    plt.colorbar(cs2, cax=cax2, ticks=[-1 * extreme, 0, extreme], extend='both')
    ax2.contour(X, Y, zs, levels=[-5, 0, 300, 2000], linewidths=0.5, colors='k')
    ax2.set_title('Vy, m/s')
    # Panel 3: Vz.
    ax3 = plt.subplot(223, aspect='equal')
    cs3 = ax3.contourf(X, Y, vzs, levels=numpy.linspace(-1 * extreme, extreme, 101), cmap='seismic', vmin=-1 * extreme, vmax=extreme, extend='both')
    divider = make_axes_locatable(ax3)
    cax3 = divider.append_axes("right", size="1%", pad=0.05)
    plt.colorbar(cs3, cax=cax3, ticks=[-1 * extreme, 0, extreme], extend='both')
    ax3.contour(X, Y, zs, levels=[-5, 0, 300, 2000], linewidths=0.5, colors='k')
    ax3.set_title('Vz, m/s')
    # Panel 4: peak ground velocity on a sequential scale.
    ax4 = plt.subplot(224, aspect='equal')
    cs4 = ax4.contourf(X, Y, pgv, levels=numpy.linspace(0, extreme, 101), cmap='hot', vmin=0, vmax=extreme)
    divider = make_axes_locatable(ax4)
    cax4 = divider.append_axes("right", size="1%", pad=0.05)
    plt.colorbar(cs4, cax=cax4, ticks=[0, extreme], extend='both')
    ax4.contour(X, Y, zs, levels=[-5, 0, 300, 2000], linewidths=0.5, colors='white')
    ax4.set_title('PGV, m/s')
    fig.suptitle(splitStr[-1])
    plt.savefig(outputFile, dpi=300)
    plt.close(fig)
    gc.collect()


def read_moviedata(f, step=1000):
    # Read a raw binary snapshot (float32 records: x, y, z, vx, vy, vz),
    # interpolate each field onto a regular grid with spacing `step`,
    # and return (X, Y, zs, vxs, vys, vzs, pgv).
    inputf = open(f, 'rb')
    a = array.array('f')
    # Read in chunks of 2000 floats until EOF.
    while True:
        try:
            a.fromfile(inputf, 2000)
        except EOFError:
            break
    data = numpy.array(a)
    del a
    field = 6
    np = len(data) / field  # NOTE(review): Python-2 integer division; `np` shadows the usual numpy alias
    data.shape = (6, np)
    # Drop the first and last sample of each field (presumably header/footer
    # values - TODO confirm against the SPECFEM output format).
    x = data[0][1:-1]
    y = data[1][1:-1]
    z = data[2][1:-1]
    vx = data[3][1:-1]
    vy = data[4][1:-1]
    vz = data[5][1:-1]
    xs = numpy.arange(min(x), max(x), step)
    ys = numpy.arange(min(y), max(y), step)
    X, Y = numpy.meshgrid(xs, ys)
    vxs = griddata((x, y), vx, (X, Y), method='linear')
    vys = griddata((x, y), vy, (X, Y), method='linear')
    vzs = griddata((x, y), vz, (X, Y), method='linear')
    zs = griddata((x, y), z, (X, Y), method='linear')
    #vxs=griddata(x,y,vx,X,Y,interp='linear')
    #vys=griddata(x,y,vy,X,Y,interp='linear')
    #vzs=griddata(x,y,vz,X,Y,interp='linear')
    #zs=griddata(x,y,z,X,Y,interp='linear')
    # PGV = componentwise max of |vx|, |vy|, |vz| at each grid point.
    pgv = numpy.maximum(numpy.abs(vxs), numpy.abs(vys))
    pgv = numpy.maximum(pgv, numpy.abs(vzs))
    pgv.shape = vxs.shape
    gc.collect()
    return X, Y, zs, vxs, vys, vzs, pgv


def createmovie(Files, step=1000, extreme=0.0008, parallel=False, animation=False, videoname='animation.mp4', framerate=3):
    # Render all snapshot files to PNGs (optionally distributing the work
    # over MPI ranks) and optionally encode them into a video with ffmpeg.
    # NOTE(review): the parameter `animation` is immediately shadowed by the
    # matplotlib.animation import below; the boolean still works in `if
    # animation:` only because the module object is truthy - the flag value
    # itself is lost. Worth confirming/fixing upstream.
    import matplotlib.animation as animation
    if parallel:
        from mpi4py import MPI
        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()
        size = comm.Get_size()
        # Static block distribution: each rank converts noFilesToConvert
        # files, and the first `remainderFiles` ranks take one extra.
        noFiles = len(Files)
        noFilesToConvert = noFiles / size
        remainderFiles = noFiles % size
        startFile = (rank * noFilesToConvert)
        endFile = startFile + noFilesToConvert
        try:
            if rank < noFiles:
                for i in range(startFile, endFile):
                    print "index: " + `i`
                    createimg(Files[i], step=step, extreme=extreme)
                if rank < remainderFiles:
                    print "remainder index: " + `size * noFilesToConvert + rank`
                    createimg(Files[size * noFilesToConvert + rank], step=step, extreme=extreme)
        except IndexError:
            print "Index Error: Array out of bounds."
        #comm.Barrier()
        MPI.Finalize()
        if animation:
            #if rank==0:
            inputPNGFiles = '/'.join(map(str, Files[0].split("/")[:-1])) + '/pltmoviedata%*.png'
            print videoname
            print '"ffmpeg" "-framerate" "' + str(framerate) + '" "-i" "' + inputPNGFiles + '" "-s:v" "1280x720" "-c:v" "libx264" "-profile:v" "high" "-crf" "23" "-pix_fmt" "yuv420p" "-r" "30" ' + videoname
            os.system('"ffmpeg" "-framerate" "' + str(framerate) + '" "-i" "' + inputPNGFiles + '" "-s:v" "1280x720" "-c:v" "libx264" "-profile:v" "high" "-crf" "23" "-pix_fmt" "yuv420p" "-r" "30" ' + videoname)
            #MPI.Finalize()
    else:
        print 'serial'
        for f in Files:
            print f
            try:
                createimg(f, step=step, extreme=extreme)
            except:
                # NOTE(review): bare except silently skips files that fail to
                # render; deliberate best-effort, but it also hides real bugs.
                continue
        if animation:
            inputPNGFiles = '/'.join(map(str, Files[0].split("/")[:-1])) + '/pltmoviedata%*.png'
            print videoname
            print '"ffmpeg" "-framerate" "' + str(framerate) + '" "-i" "' + inputPNGFiles + '" "-s:v" "1280x720" "-c:v" "libx264" "-profile:v" "high" "-crf" "23" "-pix_fmt" "yuv420p" "-r" "30" ' + videoname
            os.system('"ffmpeg" "-framerate" "' + str(framerate) + '" "-i" ' + inputPNGFiles + ' "-s:v" "1280x720" "-c:v" "libx264" "-profile:v" "high" "-crf" "23" "-pix_fmt" "yuv420p" "-r" "30" ' + videoname)


def df(m1, m2):
    # Scale factor between two magnitudes m1 and m2 (10^(1.5 * dM) form).
    # NOTE(review): in Python 2, 3/2 == 1, so this computes 10^(m1 - m2),
    # not 10^(1.5*(m1 - m2)) - likely an upstream bug; confirm intent.
    d = 10 ** (3 / 2 * (m1 - m2))
    return d


if __name__ == '__main__':
    # Command-line driver: glob the snapshot files, derive the color-scale
    # extreme (from --ext, or from --mag via df(), or a default), derive the
    # frame rate from --dt/--nstep, then run createmovie().
    try:
        import optparse_gui
    except:
        pass
    import optparse
    import glob, sys
    if 1 == len(sys.argv):
        # No arguments: fall back to the GUI option parser when available.
        try:
            option_parser_class = optparse_gui.OptionParser
        except:
            option_parser_class = optparse.OptionParser
    else:
        option_parser_class = optparse.OptionParser
    usage = """
    """
    parser = option_parser_class(usage=usage)
    #print parser.usage
    parser.add_option("--files", dest="filenames", default=False, help="name of the inp files")
    parser.add_option("--ext", dest="extreme", default=False, help="extremes of the color scale")
    parser.add_option("--step", dest="step", default='1000', help="interpolation step")
    parser.add_option("--mag", dest="mag", default=False, help="magnitude of event")
    parser.add_option("--ani", dest="animation", action="store_true", default=False, help="create animation (it requires ffmeg)")
    parser.add_option("--videoname", dest="videoname", default="simple_finalvideo.mp4", help="name of output video")
    parser.add_option("--parallel", dest="parallel", action="store_true", default=False, help="Turn off parallelisation")
    parser.add_option("--nstep", dest="nstep", default='false', help="nstep for frame")
    parser.add_option("--dt", dest="dt", default='false', help="timestep")
    #parser.add_option("--nstep",dest="nstep", default=False,help="number of steps")
    #parser.add_option("--roll",dest="roll", default="-45",help="camera rotation around vertical visual axis (negative = right)")
    #parser.add_option("--elevation",dest="elevation", default="-60",help="camera rotation around x visual axis (negative = down for zenit)")
    #parser.add_option("--output",dest="output", default="output",help="output filename")
    #parser.add_option("--vmax",dest="vmax", default=False,help="maximum absolute value for color scale")
    #parser.add_option("--fr",dest="fr", default="3",help="frame rate [def: 3 frames per second]")
    #parser.add_option("--resolution",dest="resolution", default="426x240",help="resolution")
    #parser.add_option("--zscale",dest="zscale", default="1",help="scale z axis")
    #parser.add_option("--magnification",dest="magnification", default="3",help="magnify output")
    (options, args) = parser.parse_args()
    print options
    if not '/' in options.filenames:
        s = './' + options.filenames
    else:
        s = options.filenames
    Files = glob.glob(s)
    print Files
    if not options.extreme:
        if options.mag:
            # Derive the color-scale extreme from the event magnitude.
            m2 = 5
            m1 = float(options.mag)
            factor = df(m1, m2)
            ext = 0.003 * factor
        else:
            ext = 0.005
    else:
        ext = float(options.extreme)
    if options.dt and options.nstep:
        # One frame per snapshot interval (dt * nstep seconds).
        framerate = 1 / (float(options.dt) * float(options.nstep))
        print framerate
    else:
        framerate = 3
        print framerate
    createmovie(Files, extreme=ext, step=float(options.step), animation=options.animation, videoname=options.videoname, parallel=options.parallel, framerate=framerate)
ravalicosmic/comdex
x/stake/shares.go
<reponame>ravalicosmic/comdex package stake import ( sdk "github.com/cosmos/cosmos-sdk/types" ) // kind of shares type PoolShareKind byte // pool shares held by a validator type PoolShares struct { Status sdk.BondStatus `json:"status"` Amount sdk.Rat `json:"amount"` // total shares of type ShareKind } // only the vitals - does not check bond height of IntraTxCounter func (s PoolShares) Equal(s2 PoolShares) bool { return s.Status == s2.Status && s.Amount.Equal(s2.Amount) } func NewUnbondedShares(amount sdk.Rat) PoolShares { return PoolShares{ Status: sdk.Unbonded, Amount: amount, } } func NewUnbondingShares(amount sdk.Rat) PoolShares { return PoolShares{ Status: sdk.Unbonding, Amount: amount, } } func NewBondedShares(amount sdk.Rat) PoolShares { return PoolShares{ Status: sdk.Bonded, Amount: amount, } } //_________________________________________________________________________________________________________ // amount of unbonded shares func (s PoolShares) Unbonded() sdk.Rat { if s.Status == sdk.Unbonded { return s.Amount } return sdk.ZeroRat() } // amount of unbonding shares func (s PoolShares) Unbonding() sdk.Rat { if s.Status == sdk.Unbonding { return s.Amount } return sdk.ZeroRat() } // amount of bonded shares func (s PoolShares) Bonded() sdk.Rat { if s.Status == sdk.Bonded { return s.Amount } return sdk.ZeroRat() } //_________________________________________________________________________________________________________ // equivalent amount of shares if the shares were unbonded func (s PoolShares) ToUnbonded(p Pool) PoolShares { var amount sdk.Rat switch s.Status { case sdk.Bonded: exRate := p.bondedShareExRate().Quo(p.unbondedShareExRate()) // (tok/bondedshr)/(tok/unbondedshr) = unbondedshr/bondedshr amount = s.Amount.Mul(exRate) // bondedshr*unbondedshr/bondedshr = unbondedshr case sdk.Unbonding: exRate := p.unbondingShareExRate().Quo(p.unbondedShareExRate()) // (tok/unbondingshr)/(tok/unbondedshr) = unbondedshr/unbondingshr amount = s.Amount.Mul(exRate) // 
unbondingshr*unbondedshr/unbondingshr = unbondedshr case sdk.Unbonded: amount = s.Amount } return NewUnbondedShares(amount) } // equivalent amount of shares if the shares were unbonding func (s PoolShares) ToUnbonding(p Pool) PoolShares { var amount sdk.Rat switch s.Status { case sdk.Bonded: exRate := p.bondedShareExRate().Quo(p.unbondingShareExRate()) // (tok/bondedshr)/(tok/unbondingshr) = unbondingshr/bondedshr amount = s.Amount.Mul(exRate) // bondedshr*unbondingshr/bondedshr = unbondingshr case sdk.Unbonding: amount = s.Amount case sdk.Unbonded: exRate := p.unbondedShareExRate().Quo(p.unbondingShareExRate()) // (tok/unbondedshr)/(tok/unbondingshr) = unbondingshr/unbondedshr amount = s.Amount.Mul(exRate) // unbondedshr*unbondingshr/unbondedshr = unbondingshr } return NewUnbondingShares(amount) } // equivalent amount of shares if the shares were bonded func (s PoolShares) ToBonded(p Pool) PoolShares { var amount sdk.Rat switch s.Status { case sdk.Bonded: amount = s.Amount case sdk.Unbonding: exRate := p.unbondingShareExRate().Quo(p.bondedShareExRate()) // (tok/ubshr)/(tok/bshr) = bshr/ubshr amount = s.Amount.Mul(exRate) // ubshr*bshr/ubshr = bshr case sdk.Unbonded: exRate := p.unbondedShareExRate().Quo(p.bondedShareExRate()) // (tok/ubshr)/(tok/bshr) = bshr/ubshr amount = s.Amount.Mul(exRate) // ubshr*bshr/ubshr = bshr } return NewUnbondedShares(amount) } //_________________________________________________________________________________________________________ // TODO better tests // get the equivalent amount of tokens contained by the shares func (s PoolShares) Tokens(p Pool) sdk.Rat { switch s.Status { case sdk.Bonded: return p.bondedShareExRate().Mul(s.Amount) // (tokens/shares) * shares case sdk.Unbonding: return p.unbondingShareExRate().Mul(s.Amount) case sdk.Unbonded: return p.unbondedShareExRate().Mul(s.Amount) default: panic("unknown share kind") } }
lutoma/lflux
lfluxproject/lsubscribe/tasks.py
from django.conf import settings
from django.utils import translation
from celery.task import task

from .models import Subscription


@task
def send_daily_update():
    """Email every confirmed subscriber on the daily schedule.

    Activates the site's default language first so rendered emails are
    localized consistently regardless of the worker's locale.
    """
    translation.activate(settings.LANGUAGE_CODE)
    daily = Subscription.objects.filter(frequency='daily', confirmed_at__isnull=False)
    for sub in daily:
        sub.send_email()


@task
def send_weekly_update():
    """Email every confirmed subscriber on the weekly schedule.

    Mirrors send_daily_update but selects the weekly frequency.
    """
    translation.activate(settings.LANGUAGE_CODE)
    weekly = Subscription.objects.filter(frequency='weekly', confirmed_at__isnull=False)
    for sub in weekly:
        sub.send_email()
sireliah/polish-python
Lib/test/dis_module.py
# A simple module dla testing the dis module. def f(): dalej def g(): dalej
grishkam/QuickFigures
QuickFigures/appMenus/popupMenusForComplexObjects/MenuForMultiChannelDisplayLayer.java
/*******************************************************************************
 * Copyright (c) 2021 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
/**
 * Author: <NAME>
 * Date Modified: Jan 6, 2021
 * Version: 2021.1
 */
package popupMenusForComplexObjects;

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

import javax.swing.JMenu;
import javax.swing.JMenuItem;

import channelLabels.ChannelLabelManager;
import figureOrganizer.MultichannelDisplayLayer;
import figureOrganizer.PanelList;
import graphicActionToolbar.CurrentFigureSet;
import undo.AbstractUndoableEdit2;
import undo.CombinedEdit;
import undo.PanelManagerUndo;

/**
 * Popup menu for a multichannel display layer. Builds a tree of menu items
 * that edit, add, or remove channel labels via the supplied
 * {@link ChannelLabelManager}, registering undoable edits where the
 * individual actions provide one.
 */
public class MenuForMultiChannelDisplayLayer extends JMenu {

	/**
	 *
	 */
	private static final long serialVersionUID = 1L;

	// The display layer whose channel labels this menu edits.
	protected MultichannelDisplayLayer display;
	// The panel list for that display (kept for subclasses; not read here).
	protected PanelList stack;
	// Performs the actual label operations triggered by the menu items.
	protected ChannelLabelManager labelManager;

	/**
	 * Builds the menu with the given title and populates it.
	 *
	 * @param name  menu title (also used as the component name)
	 * @param panel the display layer being edited
	 * @param list  the display layer's panel list
	 * @param man   the label manager that performs each action
	 */
	public MenuForMultiChannelDisplayLayer(String name, MultichannelDisplayLayer panel, PanelList list, ChannelLabelManager man) {
		this.setText(name);
		this.setName(name);
		display=panel;
		stack=list;
		this.labelManager=man;
		generateIncludedMenuItems();
	}

	/** Populates this menu: two top-level items plus an "Expert Options" submenu. */
	public void generateIncludedMenuItems() {
		createMergeMenuItem() ;
		createAllLabelMenuItem();
		JMenu add=new JMenu("Add/Replace");
		JMenu rem=new JMenu("Remove");
		create1ChannelLabelItem(add);
		create1MergeLabelItem(add) ;
		createGenerateChannelLabelItem(add);
		createGenerateChannelLabelItem2(add);
		;
		createEliminateChannelLabelItem(rem) ;
		//createCopySavedChannelLabelItem();
		JMenu expert = new JMenu("Expert Options");
		createResetNameItem(expert);
		createNameChannelLabelItem(expert);
		expert.add(add);
		expert.add(rem);
		this.add(expert);
	}

	/** Adds the "Edit All Channel Labels" item (opens the edit-all dialog). */
	public SmartMenuItem2 createAllLabelMenuItem() {
		SmartMenuItem2 out=new SmartMenuItem2("Edit All Channel Labels") {
			private static final long serialVersionUID = 1L;

			@Override
			public void onAction() {
				labelManager.showEditAllChannelLabelsDialog();
			}
		};
		this.add(out);
		return out;
	}

	/** Adds the "Merge Label Menu" item (opens the merge-label properties dialog). */
	SmartMenuItem2 createMergeMenuItem() {
		SmartMenuItem2 out=new SmartMenuItem2("Merge Label Menu") {
			private static final long serialVersionUID = 1L;

			@Override
			public void onAction() {
				labelManager.showChannelLabelPropDialog();
			}
		};
		this.add(out);
		return out;
	}

	/** Adds an "Add 1 Merge Label" item to the given submenu; undoable. */
	SmartMenuItem2 create1MergeLabelItem(JMenu thi) {
		SmartMenuItem2 out=new SmartMenuItem2("Add 1 Merge Label") {
			private static final long serialVersionUID = 1L;

			@Override
			public void onAction() {
				// Capture the undo state before mutating the panel labels
				undo=PanelManagerUndo.createFor(display);
				labelManager.addSingleMergeLabel();
			}
		};
		thi.add(out);
		return out;
	}

	/** Adds an "Add 1 Channel Label" item to the given submenu; undoable. */
	SmartMenuItem2 create1ChannelLabelItem(JMenu thi) {
		SmartMenuItem2 out=new SmartMenuItem2("Add 1 Channel Label") {
			private static final long serialVersionUID = 1L;

			@Override
			public void onAction() {
				undo=PanelManagerUndo.createFor(display);
				labelManager.addSingleChannelLabel();
			}
		};
		thi.add(out);
		return out;
	}

	/** Adds an "Eliminate Channel Labels" item; the manager supplies the undo edit. */
	SmartMenuItem2 createEliminateChannelLabelItem(JMenu j) {
		SmartMenuItem2 out=new SmartMenuItem2("Eliminate Channel Labels") {
			private static final long serialVersionUID = 1L;

			@Override
			public void onAction(){
				undo=labelManager.eliminateChanLabels();
			}
		};
		j.add(out);
		return out;
	}

	/** Unused builder kept for reference (call is commented out above). Not undoable. */
	SmartMenuItem2 createCopySavedChannelLabelItem() {
		SmartMenuItem2 out=new SmartMenuItem2("Load Label Properties From Saved") {
			private static final long serialVersionUID = 1L;

			@Override
			public void onAction() {
				display.setLabalPropertiesToSaved();
			}
		};
		this.add(out);
		return out;
	}

	/** Adds a "Generate New Channel Labels" item to the given submenu; undoable. */
	SmartMenuItem2 createGenerateChannelLabelItem(JMenu thi) {
		SmartMenuItem2 out=new SmartMenuItem2("Generate New Channel Labels") {
			private static final long serialVersionUID = 1L;

			@Override
			public void onAction() {
				undo=PanelManagerUndo.createFor(display);
				labelManager.generateChannelLabels();
			}
		};
		thi.add(out);
		return out;
	}

	/** Adds the first-slice-only variant of label generation; undoable. */
	SmartMenuItem2 createGenerateChannelLabelItem2(JMenu thi) {
		SmartMenuItem2 out=new SmartMenuItem2("Generate New Channel Labels (first slice only)") {
			private static final long serialVersionUID = 1L;

			@Override
			public void onAction() {
				undo=PanelManagerUndo.createFor(display);
				labelManager.generateChannelLabels2();
			}
		};
		thi.add(out);
		return out;
	}

	/** Adds a "See Channel Labels" item (shows/renames channels via the manager). */
	SmartMenuItem2 createNameChannelLabelItem(JMenu th) {
		SmartMenuItem2 out=new SmartMenuItem2("See Channel Labels") {
			private static final long serialVersionUID = 1L;

			@Override
			public void onAction() {
				labelManager.nameChannels();
			}
		};
		th.add(out);
		return out;
	}

	/** Adds a "Reset Channel Names" item; re-reads real channel names and refreshes panels. Not undoable. */
	SmartMenuItem2 createResetNameItem(JMenu thi) {
		SmartMenuItem2 out=new SmartMenuItem2("Reset Channel Names") {
			private static final long serialVersionUID = 1L;

			@Override
			public void onAction() {
				display.getMultiChannelImage().renameBasedOnRealChannelName();;
				display.updatePanelsAndLabelsFromSource();
			}
		};
		thi.add(out);
		return out;
	}

	/**
	 * Base class for the menu items above. Subclasses implement onAction();
	 * after it runs, any CombinedEdit the action stored in {@code undo} is
	 * finalized and registered with the current figure's undo manager.
	 */
	protected abstract class SmartMenuItem2 extends JMenuItem implements ActionListener {

		/**
		 *
		 */
		private static final long serialVersionUID = 1L;
		// Set by onAction() implementations that want their work to be undoable.
		protected CombinedEdit undo;

		public SmartMenuItem2(String name) {
			super(name);
			this.addActionListener(this);
		}

		/** Registers an undoable edit with the currently displayed figure set. */
		public void addUndo(AbstractUndoableEdit2 e) {
			new CurrentFigureSet().addUndo(e);
		}

		public void actionPerformed(ActionEvent e) {
			this.onAction();
			// Actions that did not populate `undo` are simply not undoable.
			if(undo==null) return;
			undo.establishFinalState();
			addUndo(undo);
		}

		// The concrete work performed when the menu item is clicked.
		abstract void onAction() ;
	}

	/** Adds a "Re-Crop" item; the recrop dialog itself returns the undo edit. */
	SmartMenuItem2 createCropOption() {
		SmartMenuItem2 out=new SmartMenuItem2("Re-Crop") {
			private static final long serialVersionUID = 1L;

			@Override
			public void onAction() {
				addUndo(
						FigureOrganizingSuplierForPopup.showRecropDisplayDialog(display, null, null, null)
						);
			}
		};
		this.add(out);
		return out;
	}

	/** Adds a "Set Pixel Size (Set Scale...)" item opening the pixel-size dialog. */
	protected SmartMenuItem2 createSetScaleItem() {
		SmartMenuItem2 out=new SmartMenuItem2("Set Pixel Size (Set Scale...)") {
			private static final long serialVersionUID = 1L;

			@Override
			public void onAction() {
				new SetImagePixelSize(display).
				showPixelSizeSetDialog();
			}
		};
		this.add(out);
		return out;
	}
}
nyuichi/8cc
include/stddef.h
// Copyright 2012 <NAME> <<EMAIL>>
// This program is free software licensed under the MIT license.

// Minimal stddef.h bundled with this compiler (8cc): just the names the
// standard requires, with fixed target-specific underlying types.

#ifndef __STDDEF_H
#define __STDDEF_H

// Null pointer constant.
#define NULL ((void *)0)

typedef unsigned long size_t;    // type of sizeof results on this target
typedef long ptrdiff_t;          // type of pointer subtraction
typedef char wchar_t;            // NOTE(review): char-wide wchar_t is nonstandard;
                                 // presumably a deliberate simplification here — confirm
typedef long double max_align_t; // type with the strictest alignment

// Classic null-pointer formulation of offsetof: take the address of the
// member within an object "at" address 0 and reinterpret it as a byte offset.
#define offsetof(type, member) \
    ((size_t)&(((type *)NULL)->member))

#endif
costasdroid/project_euler
problem128/Node.java
// Searches the hexagonal-spiral tiling (Project Euler problem 128) for tiles
// whose three candidate neighbour-differences are all prime.
public class Node {

    // Ring of the spiral currently being examined; scanning starts at ring 2.
    private long row = 2;

    // Running tile number; after the constructor returns it holds the answer.
    // Seeded at 7 — presumably because the tiles before it are handled as the
    // two initial hits counted in `threes` below. TODO confirm.
    static long n = 7;

    // Count of qualifying tiles found so far; starts at 2 (two known hits
    // before the scan begins).
    private int threes = 2;

    /**
     * Walks ring after ring until the N-th tile with three prime neighbour
     * differences is found, leaving that tile's number in the static field
     * {@code n}. Only the first and last tile of each ring are candidates.
     *
     * @param N how many qualifying tiles to find before stopping
     */
    public Node(int N) {
        // There are only 2 cells on each row that can have 3 prime differences. The first and the last one.
        do {
            n = n + 1; // first cell of the current ring
            // at position 1 lies +6(row+1) + 1
            // at position 5 lies +6(2row + 3) - 1
            // at position 4 lies +6(row+1) - 1
            // NOTE(review): the tested expressions (6*row+1, 12*row+5, 6*row-1)
            // do not obviously match the position comments above — verify
            // against the ring geometry before relying on the comments.
            if (isPrime(6* row + 1) && isPrime(12*row + 5) && isPrime(6* row - 1)) {
                // System.out.print(n + " has neighbours ");
                // System.out.print((n + 6* row + 1) + "(" + (6* row + 1) + "), ");
                // System.out.print((n + 12*row + 5) + "(" + (12*row + 5) + "), ");
                // System.out.print((n + 6* row - 1) + "(" + (6* row - 1) + ")\n");
                threes++;
            }
            if (threes >= N) return;
            n = n + 6 * row - 1; // jump to the last cell of the same ring
            // at position 1 lies +1 - 6* row
            // at position 2 lies +1 - 6*(2*row -1)
            // at position 5 lies 6*(row+1) - 1
            if (isPrime(12*(row - 1) +5) && isPrime(6 * row - 1) && isPrime( 6*(row+1) - 1)) {
                // System.out.print(n + " has neighbours ");
                // System.out.print((n - 12*(row - 1) -5) + "(" + (12*(row - 1) +5) + "), ");
                // System.out.print((n - 6 * row + 1) + "(" + (6 * row - 1) + "), ");
                // System.out.print((n + 6*(row+1) - 1) + "(" + (6*(row+1) - 1) + ")\n");
                threes++;
            }
            if (threes >= N) return;
            row++;
        } while (true);
    }

    /**
     * Deterministic trial-division primality test using the 6k±1 wheel.
     * Note: the parameter shadows the static field {@code n}; inside this
     * method {@code n} always refers to the argument.
     */
    static boolean isPrime(long n) {
        if (n <= 1) return false;
        else if (n <= 3) return true;
        else if (n % 3 == 0 || n % 2 == 0) return false;
        long i = 5;
        while (i * i <= n) {
            if (n % i == 0 || n % (i + 2) == 0) return false;
            i += 6;
        }
        return true;
    }
}
CharmsStyler/openas2
src/main/java/org/openas2/lib/message/AS2Message.java
<reponame>CharmsStyler/openas2 package org.openas2.lib.message; import java.io.IOException; import java.io.InputStream; import javax.mail.MessagingException; import javax.mail.internet.MimeBodyPart; import org.openas2.lib.Info; public class AS2Message extends EDIINTMessage { public AS2Message() { super(); } public AS2Message(MimeBodyPart data, String contentType) throws MessagingException { super(data, contentType); } public AS2Message(InputStream in) throws IOException, MessagingException { super(in); } public String getSenderIDHeader() { return "AS2-From"; } public String getReceiverIDHeader() { return "AS2-To"; } public void setAS2From(String from) { setHeader("AS2-From", from); } public String getAS2From() { return getHeader("AS2-From"); } public void setAS2To(String to) { setHeader("AS2-To", to); } public String getAS2To() { return getHeader("AS2-To"); } public void setAS2Version(String version) { setHeader("AS2-Version", version); } public String getAS2Version() { return getHeader("AS2-Version"); } public void setDefaults() { super.setDefaults(); setAS2Version("1.1"); setUserAgent("OpenAS2"); setServer(Info.NAME_VERSION); } public void setDispositionNotificationOptions(String options) { setHeader("Disposition-Notification-Options", options); } public String getDispositionNotificationOptions() { return getHeader("Disposition-Notification-Options"); } public void setDispositionNotificationTo(String to) { setHeader("Disposition-Notification-To", to); } public String getDispositionNotificationTo() { return getHeader("Disposition-Notification-To"); } public void setReceiptDeliveryOption(String option) { setHeader("Receipt-Delivery-Option", option); } public String getReceiptDeliveryOption() { return getHeader("Receipt-Delivery-Option"); } public void setRecipientAddress(String address) { setHeader("Recipient-Address", address); } public String getRecipientAddress() { return getHeader("Recipient-Address"); } public void setServer(String server) { 
setHeader("Server", server); } public String getServer() { return getHeader("Server"); } public void setUserAgent(String agent) { setHeader("User-Agent", agent); } public String getUserAgent() { return getHeader("User-Agent"); } }
jcoreio/iron-pi-webapp
src/server/network-settings/NetworkSettingsHandler.js
// @flow import assert from 'assert' import child_process from 'child_process' import {readFile, writeFile} from 'fs-extra' import promisify from 'es6-promisify' import logger from 'log4jcore' import {validateNetworkSettingsForHandler} from '../../universal/network-settings/NetworkSettingsCommon' import type {NetworkSettings} from '../../universal/network-settings/NetworkSettingsCommon' const log = logger('NetworkSettingsHandler') const exec = promisify(child_process.exec) const INTERFACES_FILE = '/etc/network/interfaces' const ADDRESS = 'address' const NETMASK = 'netmask' const GATEWAY = 'gateway' const DNS_SERVERS = 'dns-nameservers' const AUTO_LINE = 'auto eth0' const DHCP_LINE = 'iface eth0 inet dhcp' const STATIC_LINE = 'iface eth0 inet static' const baseNetworkSettings = { ipAddress: '192.168.1.220', netmask: '255.255.255.0', gateway: '192.168.1.1', dnsServers: '192.168.1.1' } const STATIC_SETTINGS: NetworkSettings = { ...baseNetworkSettings, dhcpEnabled: false } const DHCP_SETTINGS: NetworkSettings = { ...baseNetworkSettings, dhcpEnabled: true } function settingsForMode(mode: string): NetworkSettings { switch (mode) { case 'static': return STATIC_SETTINGS case 'dhcp': return DHCP_SETTINGS default: throw new Error(`unrecognized value for mode: ${mode}, expected 'static' or 'dhcp'`) } } const isCommented = val => val.startsWith('#') const lineContent = val => (isCommented(val) ? 
val.substr(1) : val).trim() export interface NetworkSettingsHandler { setMode(mode: 'static' | 'dhcp'): void; getNetworkSettings(): Promise<NetworkSettings>; setNetworkSettings(settings: NetworkSettings): Promise<void>; getNetworkState(): Promise<NetworkSettings>; } export default class DeviceNetworkSettingsHandler implements NetworkSettingsHandler { _setModeInProgress: ?string; _setModeNext: ?string; setMode(mode: 'static' | 'dhcp') { const applyNetworkSettings = (settings: NetworkSettings) => { this.setNetworkSettings(settings) .then(() => { const nextModeName = this._setModeNext this._setModeNext = undefined if (nextModeName) { applyNetworkSettings(settingsForMode(nextModeName)) } else { this._setModeInProgress = undefined } }) .catch((err: Error) => { this._setModeInProgress = undefined this._setModeNext = undefined log.error(`could not set network mode to ${mode}: ${err.stack || (err: any)}`) }) } const settings = settingsForMode(mode) if (this._setModeInProgress) { if (this._setModeInProgress !== mode) this._setModeNext = mode return } applyNetworkSettings(settings) } /** * @returns {Promise<NetworkSettings>} Configured network settings, including DHCP * enabled, and IP address / netmask / DNS server info that is used in static IP * address mode. This method returns the settings stored in /etc/network/interfaces, * while getNetworkState() returns the results of `ifconfig` and similar commands. 
*/ async getNetworkSettings(): Promise<NetworkSettings> { const lines = await this._readNetworkSettingsFile() const networkSettingsLoc = locateNetworkSettings(lines) const ethLines = lines.slice(...networkSettingsLoc) let dhcpEnabled = true const fieldValues: Map<string, string> = new Map() ethLines.forEach((line: string) => { const commented = isCommented(line) const content = lineContent(line) if (STATIC_LINE === content) { dhcpEnabled = commented } else if (line[0] === ' ' || line[0] === '\t') { const firstSpace = content.indexOf(' ') if (firstSpace > 0) { const key = content.substring(0, firstSpace).trim() const value = content.substring(firstSpace + 1).trim() fieldValues.set(key, value) } } }) const fieldValue = field => fieldValues.get(field) || '' return { dhcpEnabled, ipAddress: fieldValue(ADDRESS), netmask: fieldValue(NETMASK), gateway: fieldValue(GATEWAY), dnsServers: fieldValue(DNS_SERVERS) } } async setNetworkSettings(settings: NetworkSettings): Promise<void> { const errors = validateNetworkSettingsForHandler(settings) if (errors.length) throw new Error(`Network settings are invalid:\n${errors.join('\n')}`) await this._writeNetworkSettings(settings) await this._restartNetworking() } async _writeNetworkSettings(settings: NetworkSettings): Promise<void> { const linesIn: Array<string> = await this._readNetworkSettingsFile() let networkSettingsLoc = [ linesIn.length, linesIn.length ] try { networkSettingsLoc = locateNetworkSettings(linesIn) } catch (err) { log.info('could not locate network settings. appending...') } const maybeCommented = (line, comm) => comm ? `#${line}` : line const maybeConfigLine = (key: string, value: ?string, commented: boolean = false) => { let returnValue = value ? 
` ${key} ${value}` : undefined if (returnValue && commented) returnValue = `#${returnValue}` return returnValue } const linesBefore = linesIn.slice(0, networkSettingsLoc[0]) let linesAfter = linesIn.slice(networkSettingsLoc[1]) // Ensure there's a blank line after the Ethernet section if (!linesAfter.length || linesAfter[0].trim() !== '') linesAfter = [ '', ...linesAfter ] const configLines = [ AUTO_LINE, maybeCommented(DHCP_LINE, !settings.dhcpEnabled), maybeCommented(STATIC_LINE, settings.dhcpEnabled), maybeConfigLine(ADDRESS, settings.ipAddress, settings.dhcpEnabled), maybeConfigLine(NETMASK, settings.netmask, settings.dhcpEnabled), maybeConfigLine(GATEWAY, settings.gateway, settings.dhcpEnabled), maybeConfigLine(DNS_SERVERS, settings.dnsServers, settings.dhcpEnabled) ].filter(line => line !== undefined) const lines = [ ...linesBefore, ...configLines, ...linesAfter ] await writeFile(INTERFACES_FILE, lines.join('\n')) } async _readNetworkSettingsFile(): Promise<Array<string>> { const strFile = await readFile(INTERFACES_FILE, 'utf8') return strFile.split('\n') } async _restartNetworking(): Promise<void> { log.info('restarting eth0...') await exec('ifdown eth0 && ifup eth0') } /** * @returns {Promise<NetworkSettings>} Current network state, including IP address, * netmask, and DNS server addresses. If the unit is in DHCP mode, this allows the * UI to fetch the current network settings assigned by the DHCP server. If the unit * is in static IP address mode, this function will still fetch the current network settings, * which should match the ones returned by getNetworkSettings(). */ async getNetworkState(): Promise<NetworkSettings> { // Reading /etc/network/interfaces seems to be the best way to determine if // DHCP is enabled. 
const {dhcpEnabled} = await this.getNetworkSettings() let ipAddress = '' let netmask = '' try { const result = parseIfconfig(await exec('ifconfig')) ipAddress = result.ipAddress netmask = result.netmask } catch (err) { log.error(`could not fetch IP address: ${err.stack}`) } let gateway = '' try { gateway = parseGateway(await exec('ip route')) } catch (err) { log.error(`could not fetch gateway: ${err.stack}`) } let dnsServers = '' try { dnsServers = parseDNSServers(await readFile('/etc/resolv.conf', 'utf8')) } catch (err) { log.error(`could not fetch DNS servers: ${err.stack}`) } return {dhcpEnabled, ipAddress, netmask, gateway, dnsServers} } } function locateNetworkSettings(lines: Array<string>): Array<number> { const begin = lines.findIndex(line => line.startsWith(AUTO_LINE)) if (begin < 0) throw new Error('could not find ethernet section') const nextWhitespaceOffset = lines.slice(begin).findIndex(line => !line.trim().length) const end = nextWhitespaceOffset < 0 ? lines.length : begin + nextWhitespaceOffset return [ begin, end ] } function parseIfconfig(result: string): {ipAddress: string, netmask: string} { const lines = result.split('\n') const eth0LineIndex = lines.findIndex(line => line.startsWith('eth0')) assert(eth0LineIndex >= 0, 'line beginning with "eth0" not found') const addrLine = lines[eth0LineIndex + 1] // the `inet <ip address> line is immediately after the eth0: line assert(addrLine, 'ip address line is missing') const addrLineParts = addrLine.trim().split(' ').filter(part => !!part) assert('inet' === addrLineParts[0] && 'netmask' === addrLineParts[2], 'unexpected inet line content:' + JSON.stringify(addrLineParts)) const ipAddress = addrLineParts[1] const netmask = addrLineParts[3] assert(ipAddress, 'ip address value is missing') assert(netmask, 'netmask value is missing') return {ipAddress, netmask} } function parseGateway(result: string): string { const lines = result.split('\n') const gatewayLine = lines[0] assert(gatewayLine, 'gateway value is 
missing') assert(gatewayLine.startsWith('default via'), `unexpected format for gateway value line: ${gatewayLine}`) const gatewayLineParts = gatewayLine.split(' ') const gateway = gatewayLineParts[2] assert(gateway, `missing gateway: ${gatewayLine}`) return gateway } function parseDNSServers(result: string): string { const lines = result.split('\n') const nameserverLines = lines.filter(line => line.startsWith('nameserver')) const nameservers = nameserverLines.map(line => line.split(' ')[1]).filter(server => !!server) return nameservers.join(' ') }
orange-cloudfoundry/elpaaso-system-test-apps
paas-probe-config/paas-probe-config-war/src/main/java/com/francetelecom/clara/cloud/sample/probe/ConfigProbeImpl.java
/**
 * Copyright (C) 2015 Orange
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.francetelecom.clara.cloud.sample.probe;

import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NameClassPair;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

//import org.ow2.carol.jndi.wrapping.UnicastJNDIReferenceWrapper;

/**
 * Probe that enumerates the JNDI root context and collects every binding
 * whose value is a String, treating those as configuration properties.
 * Collection is best-effort: lookup failures are logged and skipped, and a
 * failure to list the context yields an empty result rather than an error.
 */
public class ConfigProbeImpl implements ConfigProbe {

    // Logger
    private static final Logger logger = LoggerFactory.getLogger(ConfigProbeImpl.class);

    // Currently config properties are bound in root context
    // This constant might be useful if they are bound in a dedicated context
    static final String CONFIG_JNDI_CONTEXT = "";

    /**
     * Lists CONFIG_JNDI_CONTEXT and returns all String-valued bindings as a
     * ConfigProperties map. Never throws; returns an empty collection if the
     * context cannot be listed.
     */
    public ConfigProperties getConfigProperties() {
        logger.debug("getConfigProperties is called" );
        ConfigProperties configProperties = new ConfigProperties();
        Context context;
        // NOTE(review): this NamingEnumeration is never closed — consider
        // close() in a finally block to release provider resources.
        NamingEnumeration<NameClassPair> names;
        try {
            context = new InitialContext();
            names = context.list(CONFIG_JNDI_CONTEXT);
            while(names.hasMore()) {
                NameClassPair binding = names.next();
                String name = binding.getName();
                try {
                    Object value = context.lookup(name);
                    logger.debug(String.format("jndi name: %-25s object: %s",name,value.toString()));
                    if(value instanceof String) {
                        logger.info( String.format(" property: %-25s value: %s",name, value));
                        configProperties.put(name, (String)value);
                    } else {
                        // object type is not string: this can't be config property
                    }
                } catch(NamingException e) {
                    // Deliberate best-effort: a single unreadable binding is
                    // logged at debug level and skipped.
                    logger.debug(String.format("jndi name: %-25s lookup failed: %s",name, e));
                }
            }
        } catch (NamingException e) {
            logger.error("Exception "+e);
            // return an empty collection
        }
        return configProperties;
    }
}
StackScribe/keptn
resource-service/handler/service_handler_test.go
// Table-driven tests for ServiceHandler: one table per HTTP endpoint, each
// case describing the request, the mocked manager behavior, the parameters
// the manager is expected to receive (nil means "must not be called"), and
// the expected HTTP status code.
package handler

import (
	"bytes"
	"errors"
	"github.com/gin-gonic/gin"
	errors2 "github.com/keptn/keptn/resource-service/errors"
	handler_mock "github.com/keptn/keptn/resource-service/handler/fake"
	"github.com/keptn/keptn/resource-service/models"
	"github.com/stretchr/testify/require"
	"net/http"
	"net/http/httptest"
	"testing"
)

// Request bodies used by the CreateService cases.
const createServiceTestPayload = `{"serviceName": "my-service"}`
const createServiceWithoutNameTestPayload = `{"serviceName": ""}`

// TestServiceHandler_CreateService verifies POST /project/:projectName/stage/:stageName/service:
// parameter validation, error-to-status mapping, and that the manager is only
// invoked with valid input.
func TestServiceHandler_CreateService(t *testing.T) {
	type fields struct {
		ServiceManager *handler_mock.IServiceManagerMock
	}
	tests := []struct {
		name       string
		fields     fields
		request    *http.Request
		wantParams *models.CreateServiceParams // nil => manager must not have been called
		wantStatus int
	}{
		{
			name: "create service successful",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{CreateServiceFunc: func(params models.CreateServiceParams) error {
					return nil
				}},
			},
			request: httptest.NewRequest(http.MethodPost, "/project/my-project/stage/my-stage/service", bytes.NewBuffer([]byte(createServiceTestPayload))),
			wantParams: &models.CreateServiceParams{
				Project: models.Project{ProjectName: "my-project"},
				Stage:   models.Stage{StageName: "my-stage"},
				CreateServicePayload: models.CreateServicePayload{
					Service: models.Service{ServiceName: "my-service"},
				},
			},
			wantStatus: http.StatusNoContent,
		},
		{
			name: "project name not set",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{CreateServiceFunc: func(params models.CreateServiceParams) error {
					return errors.New("should not have been called")
				}},
			},
			request:    httptest.NewRequest(http.MethodPost, "/project/%20/stage/my-stage/service", bytes.NewBuffer([]byte(createServiceTestPayload))),
			wantParams: nil,
			wantStatus: http.StatusBadRequest,
		},
		{
			name: "stage name not set",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{CreateServiceFunc: func(params models.CreateServiceParams) error {
					return errors.New("should not have been called")
				}},
			},
			request:    httptest.NewRequest(http.MethodPost, "/project/my-project/stage/%20/service", bytes.NewBuffer([]byte(createServiceTestPayload))),
			wantParams: nil,
			wantStatus: http.StatusBadRequest,
		},
		{
			name: "service name not set",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{CreateServiceFunc: func(params models.CreateServiceParams) error {
					return errors.New("should not have been called")
				}},
			},
			request:    httptest.NewRequest(http.MethodPost, "/project/my-project/stage/my-stage/service", bytes.NewBuffer([]byte(createServiceWithoutNameTestPayload))),
			wantParams: nil,
			wantStatus: http.StatusBadRequest,
		},
		{
			name: "project not found",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{CreateServiceFunc: func(params models.CreateServiceParams) error {
					return errors2.ErrProjectNotFound
				}},
			},
			request: httptest.NewRequest(http.MethodPost, "/project/my-project/stage/my-stage/service", bytes.NewBuffer([]byte(createServiceTestPayload))),
			wantParams: &models.CreateServiceParams{
				Project: models.Project{ProjectName: "my-project"},
				Stage:   models.Stage{StageName: "my-stage"},
				CreateServicePayload: models.CreateServicePayload{
					Service: models.Service{ServiceName: "my-service"},
				},
			},
			wantStatus: http.StatusNotFound,
		},
		{
			name: "stage not found",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{CreateServiceFunc: func(params models.CreateServiceParams) error {
					return errors2.ErrStageNotFound
				}},
			},
			request: httptest.NewRequest(http.MethodPost, "/project/my-project/stage/my-stage/service", bytes.NewBuffer([]byte(createServiceTestPayload))),
			wantParams: &models.CreateServiceParams{
				Project: models.Project{ProjectName: "my-project"},
				Stage:   models.Stage{StageName: "my-stage"},
				CreateServicePayload: models.CreateServicePayload{
					Service: models.Service{ServiceName: "my-service"},
				},
			},
			wantStatus: http.StatusNotFound,
		},
		{
			name: "service already exists",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{CreateServiceFunc: func(params models.CreateServiceParams) error {
					return errors2.ErrServiceAlreadyExists
				}},
			},
			request: httptest.NewRequest(http.MethodPost, "/project/my-project/stage/my-stage/service", bytes.NewBuffer([]byte(createServiceTestPayload))),
			wantParams: &models.CreateServiceParams{
				Project: models.Project{ProjectName: "my-project"},
				Stage:   models.Stage{StageName: "my-stage"},
				CreateServicePayload: models.CreateServicePayload{
					Service: models.Service{ServiceName: "my-service"},
				},
			},
			wantStatus: http.StatusConflict,
		},
		{
			name: "internal error",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{CreateServiceFunc: func(params models.CreateServiceParams) error {
					return errors.New("oops")
				}},
			},
			request: httptest.NewRequest(http.MethodPost, "/project/my-project/stage/my-stage/service", bytes.NewBuffer([]byte(createServiceTestPayload))),
			wantParams: &models.CreateServiceParams{
				Project: models.Project{ProjectName: "my-project"},
				Stage:   models.Stage{StageName: "my-stage"},
				CreateServicePayload: models.CreateServicePayload{
					Service: models.Service{ServiceName: "my-service"},
				},
			},
			wantStatus: http.StatusInternalServerError,
		},
		{
			name: "upstream repo not found",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{CreateServiceFunc: func(params models.CreateServiceParams) error {
					return errors2.ErrRepositoryNotFound
				}},
			},
			request: httptest.NewRequest(http.MethodPost, "/project/my-project/stage/my-stage/service", bytes.NewBuffer([]byte(createServiceTestPayload))),
			wantParams: &models.CreateServiceParams{
				Project: models.Project{ProjectName: "my-project"},
				Stage:   models.Stage{StageName: "my-stage"},
				CreateServicePayload: models.CreateServicePayload{
					Service: models.Service{ServiceName: "my-service"},
				},
			},
			wantStatus: http.StatusNotFound,
		},
		{
			name: "invalid payload",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{CreateServiceFunc: func(params models.CreateServiceParams) error {
					return errors.New("should not have been called")
				}},
			},
			request:    httptest.NewRequest(http.MethodPost, "/project/my-project/stage/my-stage/service", bytes.NewBuffer([]byte("invalid"))),
			wantParams: nil,
			wantStatus: http.StatusBadRequest,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			sh := NewServiceHandler(tt.fields.ServiceManager)

			router := gin.Default()
			router.POST("/project/:projectName/stage/:stageName/service", sh.CreateService)

			resp := performRequest(router, tt.request)

			require.Equal(t, tt.wantStatus, resp.Code)

			// Verify the manager was called exactly once with the expected
			// parameters, or not at all when the request is invalid.
			if tt.wantParams != nil {
				require.Len(t, tt.fields.ServiceManager.CreateServiceCalls(), 1)
				require.Equal(t, *tt.wantParams, tt.fields.ServiceManager.CreateServiceCalls()[0].Params)
			} else {
				require.Empty(t, tt.fields.ServiceManager.CreateServiceCalls())
			}
		})
	}
}

// TestServiceHandler_DeleteService verifies DELETE
// /project/:projectName/stage/:stageName/service/:serviceName with the same
// table-driven structure as the create tests.
func TestServiceHandler_DeleteService(t *testing.T) {
	type fields struct {
		ServiceManager *handler_mock.IServiceManagerMock
	}
	tests := []struct {
		name       string
		fields     fields
		request    *http.Request
		wantParams *models.DeleteServiceParams // nil => manager must not have been called
		wantStatus int
	}{
		{
			name: "delete service successful",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{DeleteServiceFunc: func(params models.DeleteServiceParams) error {
					return nil
				}},
			},
			request: httptest.NewRequest(http.MethodDelete, "/project/my-project/stage/my-stage/service/my-service", nil),
			wantParams: &models.DeleteServiceParams{
				Project: models.Project{
					ProjectName: "my-project",
				},
				Stage: models.Stage{
					StageName: "my-stage",
				},
				Service: models.Service{
					ServiceName: "my-service",
				},
			},
			wantStatus: http.StatusNoContent,
		},
		{
			name: "project not found",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{DeleteServiceFunc: func(params models.DeleteServiceParams) error {
					return errors2.ErrProjectNotFound
				}},
			},
			request: httptest.NewRequest(http.MethodDelete, "/project/my-project/stage/my-stage/service/my-service", nil),
			wantParams: &models.DeleteServiceParams{
				Project: models.Project{
					ProjectName: "my-project",
				},
				Stage: models.Stage{
					StageName: "my-stage",
				},
				Service: models.Service{
					ServiceName: "my-service",
				},
			},
			wantStatus: http.StatusNotFound,
		},
		{
			name: "stage not found",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{DeleteServiceFunc: func(params models.DeleteServiceParams) error {
					return errors2.ErrStageNotFound
				}},
			},
			request: httptest.NewRequest(http.MethodDelete, "/project/my-project/stage/my-stage/service/my-service", nil),
			wantParams: &models.DeleteServiceParams{
				Project: models.Project{
					ProjectName: "my-project",
				},
				Stage: models.Stage{
					StageName: "my-stage",
				},
				Service: models.Service{
					ServiceName: "my-service",
				},
			},
			wantStatus: http.StatusNotFound,
		},
		{
			name: "service not found",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{DeleteServiceFunc: func(params models.DeleteServiceParams) error {
					return errors2.ErrServiceNotFound
				}},
			},
			request: httptest.NewRequest(http.MethodDelete, "/project/my-project/stage/my-stage/service/my-service", nil),
			wantParams: &models.DeleteServiceParams{
				Project: models.Project{
					ProjectName: "my-project",
				},
				Stage: models.Stage{
					StageName: "my-stage",
				},
				Service: models.Service{
					ServiceName: "my-service",
				},
			},
			wantStatus: http.StatusNotFound,
		},
		{
			name: "random error",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{DeleteServiceFunc: func(params models.DeleteServiceParams) error {
					return errors.New("oops")
				}},
			},
			request: httptest.NewRequest(http.MethodDelete, "/project/my-project/stage/my-stage/service/my-service", nil),
			wantParams: &models.DeleteServiceParams{
				Project: models.Project{
					ProjectName: "my-project",
				},
				Stage: models.Stage{
					StageName: "my-stage",
				},
				Service: models.Service{
					ServiceName: "my-service",
				},
			},
			wantStatus: http.StatusInternalServerError,
		},
		{
			name: "project name empty",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{DeleteServiceFunc: func(params models.DeleteServiceParams) error {
					return errors.New("should not have been called")
				}},
			},
			request:    httptest.NewRequest(http.MethodDelete, "/project/%20/stage/my-stage/service/my-service", nil),
			wantParams: nil,
			wantStatus: http.StatusBadRequest,
		},
		{
			name: "stage name empty",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{DeleteServiceFunc: func(params models.DeleteServiceParams) error {
					return errors.New("should not have been called")
				}},
			},
			request:    httptest.NewRequest(http.MethodDelete, "/project/my-project/stage/%20/service/my-service", nil),
			wantParams: nil,
			wantStatus: http.StatusBadRequest,
		},
		{
			name: "service name empty",
			fields: fields{
				ServiceManager: &handler_mock.IServiceManagerMock{DeleteServiceFunc: func(params models.DeleteServiceParams) error {
					return errors.New("should not have been called")
				}},
			},
			request:    httptest.NewRequest(http.MethodDelete, "/project/my-project/stage/my-stage/service/%20", nil),
			wantParams: nil,
			wantStatus: http.StatusBadRequest,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			sh := NewServiceHandler(tt.fields.ServiceManager)

			router := gin.Default()
			router.DELETE("/project/:projectName/stage/:stageName/service/:serviceName", sh.DeleteService)

			resp := performRequest(router, tt.request)

			require.Equal(t, tt.wantStatus, resp.Code)

			// Verify the manager was called exactly once with the expected
			// parameters, or not at all when the request is invalid.
			if tt.wantParams != nil {
				require.Len(t, tt.fields.ServiceManager.DeleteServiceCalls(), 1)
				require.Equal(t, *tt.wantParams, tt.fields.ServiceManager.DeleteServiceCalls()[0].Params)
			} else {
				require.Empty(t, tt.fields.ServiceManager.DeleteServiceCalls())
			}
		})
	}
}
ScalablyTyped/SlinkyTyped
m/microsoft-sdk-soap/src/main/scala/typingsSlinky/microsoftSdkSoap/mod/global/Sdk/Query/FetchExpression.scala
<gh_stars>10-100 package typingsSlinky.microsoftSdkSoap.mod.global.Sdk.Query import org.scalablytyped.runtime.StObject import scala.scalajs.js import scala.scalajs.js.`|` import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess} @JSGlobal("Sdk.Query.FetchExpression") @js.native class FetchExpression protected () extends StObject { /** * @param fetchXml The FetchXml to be used in a query. */ def this(fetchXml: String) = this() /** * Gets the FetchXml to be used in a query. */ def getFetchXml(): String = js.native /** * Sets the FetchXml to be used in a query. * @param fetchXml The FetchXml to be used in a query. */ def setFetchXml(fetchXml: String): Unit = js.native }
VinceW0/Leetcode_Python_solutions
Algorithms_medium/1492. The kth Factor of n.py
<filename>Algorithms_medium/1492. The kth Factor of n.py """ 1492. The kth Factor of n Medium Given two positive integers n and k. A factor of an integer n is defined as an integer i where n % i == 0. Consider a list of all factors of n sorted in ascending order, return the kth factor in this list or return -1 if n has less than k factors. Example 1: Input: n = 12, k = 3 Output: 3 Explanation: Factors list is [1, 2, 3, 4, 6, 12], the 3rd factor is 3. Example 2: Input: n = 7, k = 2 Output: 7 Explanation: Factors list is [1, 7], the 2nd factor is 7. Example 3: Input: n = 4, k = 4 Output: -1 Explanation: Factors list is [1, 2, 4], there is only 3 factors. We should return -1. Example 4: Input: n = 1, k = 1 Output: 1 Explanation: Factors list is [1], the 1st factor is 1. Example 5: Input: n = 1000, k = 3 Output: 4 Explanation: Factors list is [1, 2, 4, 5, 8, 10, 20, 25, 40, 50, 100, 125, 200, 250, 500, 1000]. Constraints: 1 <= k <= n <= 1000 """ class Solution: def kthFactor(self, n: int, k: int) -> int: for factor in range(1, n + 1): if n % factor == 0: k -= 1 if k == 0: return factor return -1
Sanbingo/8hnewsManager
src/pages/site/components/bind.js
import React, { Component } from 'react' import { Table, message, Modal } from 'antd' class SiteModal extends Component { state = { selectedRowKeys: [], // Check here to configure the default column } columns = [{ key: "id", dataIndex: "id", title: "ID" }, { key: 'userRealName', dataIndex: 'userRealName', title: '姓名' }, { key: 'userInfoGander', dataIndex: 'userInfoGander', title: '性别', render: (text) => { if (text) return '男' return '女' } }, { key: 'userPhoneNum', dataIndex: 'userPhoneNum', title: '手机' }] handleOk = () => { const { onOk, employees, selectKeys=[] } = this.props; // const { selectedRowKeys=[] } = this.state; const data = selectKeys.map(item => ({ userId: employees[item] && employees[item].id})) console.log('data', data) onOk(data) } onSelectChange = selectKeys => { // this.setState({ selectedRowKeys }); this.props.onSelectKeys(selectKeys) }; render() { const { onOk, employees=[], selectKeys=[], ...modalProps } = this.props // const { selectedRowKeys } = this.state; const rowSelection = { selectedRowKeys: selectKeys, onChange: this.onSelectChange, }; return ( <Modal {...modalProps} onOk={this.handleOk}> <Table size="small" rowSelection={rowSelection} columns={this.columns} dataSource={employees} pagination={false} /> </Modal> ) } } export default SiteModal
Bartman0/PDI-SQL-lineage
rest-api-specs/PurviewCatalogClient/Java/src/main/java/org/openapitools/client/model/JsonAtlasRelationshipAllOf.java
/*
 * Purview Catalog Service REST API Document
 * Purview Catalog Service is a fully managed cloud service whose users can discover the data sources they need and understand the data sources they find. At the same time, Data Catalog helps organizations get more value from their existing investments. This swagger defines REST API of the Hot Tier of Data Catalog Gen 2.
 *
 * The version of the OpenAPI document: 2020-12-01-preview
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

package org.openapitools.client.model;

import java.util.Objects;
import java.util.Arrays;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import org.openapitools.client.model.JsonAtlasClassification;
import org.openapitools.client.model.JsonAtlasObjectId;
import org.openapitools.client.model.JsonPropagateTags;
import org.openapitools.client.model.JsonStatusAtlasRelationship;

/**
 * JsonAtlasRelationshipAllOf — generated Gson-serialized model holding the
 * relationship-specific fields of an Atlas relationship (ends, GUID,
 * classification propagation, audit metadata). Mutators follow the fluent
 * builder style; do not edit manually.
 */
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", date = "2021-01-21T11:36:33.263002+01:00[Europe/Amsterdam]")
public class JsonAtlasRelationshipAllOf {
  public static final String SERIALIZED_NAME_BLOCKED_PROPAGATED_CLASSIFICATIONS = "blockedPropagatedClassifications";
  @SerializedName(SERIALIZED_NAME_BLOCKED_PROPAGATED_CLASSIFICATIONS)
  private List<JsonAtlasClassification> blockedPropagatedClassifications = null;

  public static final String SERIALIZED_NAME_CREATE_TIME = "createTime";
  @SerializedName(SERIALIZED_NAME_CREATE_TIME)
  private BigDecimal createTime;

  public static final String SERIALIZED_NAME_CREATED_BY = "createdBy";
  @SerializedName(SERIALIZED_NAME_CREATED_BY)
  private String createdBy;

  public static final String SERIALIZED_NAME_END1 = "end1";
  @SerializedName(SERIALIZED_NAME_END1)
  private JsonAtlasObjectId end1;

  public static final String SERIALIZED_NAME_END2 = "end2";
  @SerializedName(SERIALIZED_NAME_END2)
  private JsonAtlasObjectId end2;

  public static final String SERIALIZED_NAME_GUID = "guid";
  @SerializedName(SERIALIZED_NAME_GUID)
  private String guid;

  public static final String SERIALIZED_NAME_HOME_ID = "homeId";
  @SerializedName(SERIALIZED_NAME_HOME_ID)
  private String homeId;

  public static final String SERIALIZED_NAME_LABEL = "label";
  @SerializedName(SERIALIZED_NAME_LABEL)
  private String label;

  public static final String SERIALIZED_NAME_PROPAGATE_TAGS = "propagateTags";
  @SerializedName(SERIALIZED_NAME_PROPAGATE_TAGS)
  private JsonPropagateTags propagateTags;

  public static final String SERIALIZED_NAME_PROPAGATED_CLASSIFICATIONS = "propagatedClassifications";
  @SerializedName(SERIALIZED_NAME_PROPAGATED_CLASSIFICATIONS)
  private List<JsonAtlasClassification> propagatedClassifications = null;

  public static final String SERIALIZED_NAME_PROVENANCE_TYPE = "provenanceType";
  @SerializedName(SERIALIZED_NAME_PROVENANCE_TYPE)
  private BigDecimal provenanceType;

  public static final String SERIALIZED_NAME_STATUS = "status";
  @SerializedName(SERIALIZED_NAME_STATUS)
  private JsonStatusAtlasRelationship status;

  public static final String SERIALIZED_NAME_UPDATE_TIME = "updateTime";
  @SerializedName(SERIALIZED_NAME_UPDATE_TIME)
  private BigDecimal updateTime;

  public static final String SERIALIZED_NAME_UPDATED_BY = "updatedBy";
  @SerializedName(SERIALIZED_NAME_UPDATED_BY)
  private String updatedBy;

  public static final String SERIALIZED_NAME_VERSION = "version";
  @SerializedName(SERIALIZED_NAME_VERSION)
  private BigDecimal version;

  /** Fluent setter for blockedPropagatedClassifications. */
  public JsonAtlasRelationshipAllOf blockedPropagatedClassifications(List<JsonAtlasClassification> blockedPropagatedClassifications) {
    this.blockedPropagatedClassifications = blockedPropagatedClassifications;
    return this;
  }

  /** Appends one item, lazily creating the backing list. */
  public JsonAtlasRelationshipAllOf addBlockedPropagatedClassificationsItem(JsonAtlasClassification blockedPropagatedClassificationsItem) {
    if (this.blockedPropagatedClassifications == null) {
      this.blockedPropagatedClassifications = new ArrayList<JsonAtlasClassification>();
    }
    this.blockedPropagatedClassifications.add(blockedPropagatedClassificationsItem);
    return this;
  }

  /** An array of blocked propagated classifications. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "An array of blocked propagated classifications.")
  public List<JsonAtlasClassification> getBlockedPropagatedClassifications() {
    return blockedPropagatedClassifications;
  }

  public void setBlockedPropagatedClassifications(List<JsonAtlasClassification> blockedPropagatedClassifications) {
    this.blockedPropagatedClassifications = blockedPropagatedClassifications;
  }

  /** Fluent setter for createTime. */
  public JsonAtlasRelationshipAllOf createTime(BigDecimal createTime) {
    this.createTime = createTime;
    return this;
  }

  /** The created time of the record. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "The created time of the record.")
  public BigDecimal getCreateTime() {
    return createTime;
  }

  public void setCreateTime(BigDecimal createTime) {
    this.createTime = createTime;
  }

  /** Fluent setter for createdBy. */
  public JsonAtlasRelationshipAllOf createdBy(String createdBy) {
    this.createdBy = createdBy;
    return this;
  }

  /** The user who created the record. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "The user who created the record.")
  public String getCreatedBy() {
    return createdBy;
  }

  public void setCreatedBy(String createdBy) {
    this.createdBy = createdBy;
  }

  /** Fluent setter for end1. */
  public JsonAtlasRelationshipAllOf end1(JsonAtlasObjectId end1) {
    this.end1 = end1;
    return this;
  }

  /** First end of the relationship. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public JsonAtlasObjectId getEnd1() {
    return end1;
  }

  public void setEnd1(JsonAtlasObjectId end1) {
    this.end1 = end1;
  }

  /** Fluent setter for end2. */
  public JsonAtlasRelationshipAllOf end2(JsonAtlasObjectId end2) {
    this.end2 = end2;
    return this;
  }

  /** Second end of the relationship. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public JsonAtlasObjectId getEnd2() {
    return end2;
  }

  public void setEnd2(JsonAtlasObjectId end2) {
    this.end2 = end2;
  }

  /** Fluent setter for guid. */
  public JsonAtlasRelationshipAllOf guid(String guid) {
    this.guid = guid;
    return this;
  }

  /** The GUID of the relationship. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "The GUID of the relationship.")
  public String getGuid() {
    return guid;
  }

  public void setGuid(String guid) {
    this.guid = guid;
  }

  /** Fluent setter for homeId. */
  public JsonAtlasRelationshipAllOf homeId(String homeId) {
    this.homeId = homeId;
    return this;
  }

  /** The home ID of the relationship. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "The home ID of the relationship.")
  public String getHomeId() {
    return homeId;
  }

  public void setHomeId(String homeId) {
    this.homeId = homeId;
  }

  /** Fluent setter for label. */
  public JsonAtlasRelationshipAllOf label(String label) {
    this.label = label;
    return this;
  }

  /** The label of the relationship. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "The label of the relationship.")
  public String getLabel() {
    return label;
  }

  public void setLabel(String label) {
    this.label = label;
  }

  /** Fluent setter for propagateTags. */
  public JsonAtlasRelationshipAllOf propagateTags(JsonPropagateTags propagateTags) {
    this.propagateTags = propagateTags;
    return this;
  }

  /** Classification-propagation mode for this relationship. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public JsonPropagateTags getPropagateTags() {
    return propagateTags;
  }

  public void setPropagateTags(JsonPropagateTags propagateTags) {
    this.propagateTags = propagateTags;
  }

  /** Fluent setter for propagatedClassifications. */
  public JsonAtlasRelationshipAllOf propagatedClassifications(List<JsonAtlasClassification> propagatedClassifications) {
    this.propagatedClassifications = propagatedClassifications;
    return this;
  }

  /** Appends one item, lazily creating the backing list. */
  public JsonAtlasRelationshipAllOf addPropagatedClassificationsItem(JsonAtlasClassification propagatedClassificationsItem) {
    if (this.propagatedClassifications == null) {
      this.propagatedClassifications = new ArrayList<JsonAtlasClassification>();
    }
    this.propagatedClassifications.add(propagatedClassificationsItem);
    return this;
  }

  /** An array of propagated classifications. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "An array of propagated classifications.")
  public List<JsonAtlasClassification> getPropagatedClassifications() {
    return propagatedClassifications;
  }

  public void setPropagatedClassifications(List<JsonAtlasClassification> propagatedClassifications) {
    this.propagatedClassifications = propagatedClassifications;
  }

  /** Fluent setter for provenanceType. */
  public JsonAtlasRelationshipAllOf provenanceType(BigDecimal provenanceType) {
    this.provenanceType = provenanceType;
    return this;
  }

  /** Used to record the provenance of an instance of an entity or relationship. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "Used to record the provenance of an instance of an entity or relationship")
  public BigDecimal getProvenanceType() {
    return provenanceType;
  }

  public void setProvenanceType(BigDecimal provenanceType) {
    this.provenanceType = provenanceType;
  }

  /** Fluent setter for status. */
  public JsonAtlasRelationshipAllOf status(JsonStatusAtlasRelationship status) {
    this.status = status;
    return this;
  }

  /** Status of the relationship. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public JsonStatusAtlasRelationship getStatus() {
    return status;
  }

  public void setStatus(JsonStatusAtlasRelationship status) {
    this.status = status;
  }

  /** Fluent setter for updateTime. */
  public JsonAtlasRelationshipAllOf updateTime(BigDecimal updateTime) {
    this.updateTime = updateTime;
    return this;
  }

  /** The update time of the record. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "The update time of the record.")
  public BigDecimal getUpdateTime() {
    return updateTime;
  }

  public void setUpdateTime(BigDecimal updateTime) {
    this.updateTime = updateTime;
  }

  /** Fluent setter for updatedBy. */
  public JsonAtlasRelationshipAllOf updatedBy(String updatedBy) {
    this.updatedBy = updatedBy;
    return this;
  }

  /** The user who updated the record. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "The user who updated the record.")
  public String getUpdatedBy() {
    return updatedBy;
  }

  public void setUpdatedBy(String updatedBy) {
    this.updatedBy = updatedBy;
  }

  /** Fluent setter for version. */
  public JsonAtlasRelationshipAllOf version(BigDecimal version) {
    this.version = version;
    return this;
  }

  /** The version of the relationship. */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "The version of the relationship.")
  public BigDecimal getVersion() {
    return version;
  }

  public void setVersion(BigDecimal version) {
    this.version = version;
  }

  // Value-based equality over all serialized fields.
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    JsonAtlasRelationshipAllOf jsonAtlasRelationshipAllOf = (JsonAtlasRelationshipAllOf) o;
    return Objects.equals(this.blockedPropagatedClassifications, jsonAtlasRelationshipAllOf.blockedPropagatedClassifications) &&
        Objects.equals(this.createTime, jsonAtlasRelationshipAllOf.createTime) &&
        Objects.equals(this.createdBy, jsonAtlasRelationshipAllOf.createdBy) &&
        Objects.equals(this.end1, jsonAtlasRelationshipAllOf.end1) &&
        Objects.equals(this.end2, jsonAtlasRelationshipAllOf.end2) &&
        Objects.equals(this.guid, jsonAtlasRelationshipAllOf.guid) &&
        Objects.equals(this.homeId, jsonAtlasRelationshipAllOf.homeId) &&
        Objects.equals(this.label, jsonAtlasRelationshipAllOf.label) &&
        Objects.equals(this.propagateTags, jsonAtlasRelationshipAllOf.propagateTags) &&
        Objects.equals(this.propagatedClassifications, jsonAtlasRelationshipAllOf.propagatedClassifications) &&
        Objects.equals(this.provenanceType, jsonAtlasRelationshipAllOf.provenanceType) &&
        Objects.equals(this.status, jsonAtlasRelationshipAllOf.status) &&
        Objects.equals(this.updateTime, jsonAtlasRelationshipAllOf.updateTime) &&
        Objects.equals(this.updatedBy, jsonAtlasRelationshipAllOf.updatedBy) &&
        Objects.equals(this.version, jsonAtlasRelationshipAllOf.version);
  }

  @Override
  public int hashCode() {
    return Objects.hash(blockedPropagatedClassifications, createTime, createdBy, end1, end2, guid, homeId, label, propagateTags, propagatedClassifications, provenanceType, status, updateTime, updatedBy, version);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class JsonAtlasRelationshipAllOf {\n");
    sb.append(" blockedPropagatedClassifications: ").append(toIndentedString(blockedPropagatedClassifications)).append("\n");
    sb.append(" createTime: ").append(toIndentedString(createTime)).append("\n");
    sb.append(" createdBy: ").append(toIndentedString(createdBy)).append("\n");
    sb.append(" end1: ").append(toIndentedString(end1)).append("\n");
    sb.append(" end2: ").append(toIndentedString(end2)).append("\n");
    sb.append(" guid: ").append(toIndentedString(guid)).append("\n");
    sb.append(" homeId: ").append(toIndentedString(homeId)).append("\n");
    sb.append(" label: ").append(toIndentedString(label)).append("\n");
    sb.append(" propagateTags: ").append(toIndentedString(propagateTags)).append("\n");
    sb.append(" propagatedClassifications: ").append(toIndentedString(propagatedClassifications)).append("\n");
    sb.append(" provenanceType: ").append(toIndentedString(provenanceType)).append("\n");
    sb.append(" status: ").append(toIndentedString(status)).append("\n");
    sb.append(" updateTime: ").append(toIndentedString(updateTime)).append("\n");
    sb.append(" updatedBy: ").append(toIndentedString(updatedBy)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n ");
  }
}
curiousjgeorge/aws-sdk-cpp
aws-cpp-sdk-mediastore/include/aws/mediastore/model/CreateContainerResult.h
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
*  http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/

#pragma once
#include <aws/mediastore/MediaStore_EXPORTS.h>
#include <aws/mediastore/model/Container.h>
#include <utility>

namespace Aws
{
template<typename RESULT_TYPE>
class AmazonWebServiceResult;

namespace Utils
{
namespace Json
{
  class JsonValue;
} // namespace Json
} // namespace Utils
namespace MediaStore
{
namespace Model
{
  /**
   * Result of the CreateContainer operation. Carries the Container metadata
   * (ARN, name, creation time, status: CREATING/ACTIVE/DELETING) of the newly
   * created container. The container's endpoint is NOT included; obtain it via
   * DescribeContainer or ListContainers.
   */
  class AWS_MEDIASTORE_API CreateContainerResult
  {
  public:
    CreateContainerResult();
    CreateContainerResult(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result);
    CreateContainerResult& operator=(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result);


    /**
     * Metadata of the newly created container. ContainerARN format:
     * arn:aws:&lt;region&gt;:&lt;account that owns this
     * container&gt;:container/&lt;name of container&gt;, e.g.
     * arn:aws:mediastore:us-west-2:111122223333:container/movies. Status is
     * <code>CREATING</code> while the service is creating the container and
     * changes to <code>ACTIVE</code> when an endpoint is available. The endpoint
     * itself is not returned here; use <a>DescribeContainer</a> or
     * <a>ListContainers</a>.
     */
    inline const Container& GetContainer() const{ return m_container; }

    /**
     * Copy-assigns the container metadata (see GetContainer for field details).
     */
    inline void SetContainer(const Container& value) { m_container = value; }

    /**
     * Move-assigns the container metadata (see GetContainer for field details).
     */
    inline void SetContainer(Container&& value) { m_container = std::move(value); }

    /**
     * Fluent copy setter; returns *this for chaining.
     */
    inline CreateContainerResult& WithContainer(const Container& value) { SetContainer(value); return *this;}

    /**
     * Fluent move setter; returns *this for chaining.
     */
    inline CreateContainerResult& WithContainer(Container&& value) { SetContainer(std::move(value)); return *this;}

  private:

    Container m_container;
  };

} // namespace Model
} // namespace MediaStore
} // namespace Aws
byplacebo/spring-social-11st
spring-social-11st-example/src/main/java/org/springframework/social/es/user/UserRepository.java
package org.springframework.social.es.user;

import org.springframework.data.repository.CrudRepository;
import org.springframework.stereotype.Repository;

/**
 * Spring Data repository for {@code User} entities keyed by {@code Long} ids.
 * CRUD operations are inherited from {@link CrudRepository}; the query below
 * is derived by Spring Data from its method name.
 */
@Repository
public interface UserRepository extends CrudRepository<User, Long> {

    /**
     * Finds the user whose {@code email} property equals the given address.
     *
     * @param email the email address to match exactly
     * @return the matching user, or {@code null} if none exists
     */
    User findByEmail(String email);
}
osoco/better-ways-of-thinking-about-software
Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/common/lib/xmodule/xmodule/tests/test_resource_templates.py
""" Tests for xmodule.x_module.ResourceTemplates """ import unittest from xmodule.x_module import ResourceTemplates class ResourceTemplatesTests(unittest.TestCase): """ Tests for xmodule.x_module.ResourceTemplates """ def test_templates(self): expected = { 'latex_html.yaml', 'zooming_image.yaml', 'announcement.yaml', 'anon_user_id.yaml'} got = {t['template_id'] for t in TestClass.templates()} assert expected == got def test_templates_no_suchdir(self): assert len(TestClass2.templates()) == 0 def test_get_template(self): assert TestClass.get_template('latex_html.yaml')['template_id'] == 'latex_html.yaml' class TestClass(ResourceTemplates): """ Derives from the class under test for testing purposes. Since `ResourceTemplates` is intended to be used as a mixin, we need to derive a class from it in order to fill in some data it's expecting to find in its mro. """ template_packages = ['xmodule'] @classmethod def get_template_dir(cls): return 'templates/test' class TestClass2(TestClass): """ Like TestClass, but `get_template_dir` returns a directory that doesn't exist. See `TestClass`. """ @classmethod def get_template_dir(cls): return 'foo'
cowthan/PoorSpider
src-nutz/org/nutz/el/Operator.java
package org.nutz.el;

import java.util.Queue;

/**
 * An operator in the expression-language evaluator.
 *
 * @author juqkai(<EMAIL>)
 */
public interface Operator {
    /**
     * Returns the operator's precedence (priority) used when ordering
     * evaluation.
     */
    public int fetchPriority();

    /**
     * Wraps (binds) operand data.
     * Every operator has its corresponding operands; the purpose of this
     * method is to read the operands this operator needs from the given
     * queue, according to the operator's own requirements.
     *
     * @param operand the operand queue to consume from
     */
    public void wrap(Queue<Object> operand);

    /**
     * Evaluates this operator against its previously wrapped operands and
     * returns the result.
     */
    public Object calculate();
}
khanium/couchbase-rule-engine
src/main/java/com/couchbase/demo/request/RuleJobRequest.java
package com.couchbase.demo.request;

import com.couchbase.demo.rules.Rule;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.*;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.annotation.Id;

import java.util.Date;

import static com.couchbase.demo.config.RulerConfig.FORMAT_DATE;

/**
 * Request payload describing a scheduled rule-execution job.
 * Instances are persisted with an id of the form {@code request:<rule name>}
 * (see {@link #from(Rule)}). Dates are serialized as strings using the
 * application-wide {@code FORMAT_DATE} pattern.
 */
@ToString
@Builder
@AllArgsConstructor
@NoArgsConstructor
@Data
public class RuleJobRequest {
    // Document-id namespace prefix for rule job requests.
    private static final String PREFIX = "request:";

    @Id
    private String id;
    // Human-readable job name (taken from the rule name in from(Rule)).
    private String name;
    // Id of the rule this job executes.
    private String rule;
    // Start of the job's validity window.
    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = FORMAT_DATE)
    private Date startsAt;
    // End of the job's validity window.
    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = FORMAT_DATE)
    private Date endsAt;
    // How often the job repeats; defaults to ONCE both in the builder and in JSON.
    @Builder.Default
    @JsonProperty(defaultValue = "ONCE", required = true)
    private Recurrence recurrence = Recurrence.ONCE;
    // Optional cron expression for custom schedules.
    private String cronExpression;
    // Set automatically by Spring Data auditing on creation.
    @CreatedDate
    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = FORMAT_DATE)
    private Date createdAt;

    /** Supported job repetition intervals. */
    enum Recurrence {
        ONCE, DAILY, WEEKLY, MONTHLY
    }

    /**
     * Builds a job request from a rule, copying its name, validity window
     * and id. Note the request id is derived from the rule NAME, while the
     * {@code rule} field stores the rule ID.
     *
     * @param rule the source rule
     * @return a new request populated from the rule
     */
    public static RuleJobRequest from(Rule rule) {
        return RuleJobRequest.builder()
                .id(PREFIX+rule.getName())
                .name(rule.getName())
                .startsAt(rule.getAvailableFrom())
                .endsAt(rule.getExpiresAt())
                .rule(rule.getId())
                .build();
    }
}
Aden-Tao/LeetCode
0171 Excel Sheet Column Number/solution.cpp
#include "bits/stdc++.h" using namespace std; class Solution { public: int titleToNumber(string s) { int res = 0; for(int i = 0; i < s.size(); i++){ res += (s[i] - 'A' + 1) * pow(26, s.size() - i - 1); } return res; } }; int main(){ cout << Solution().titleToNumber("ZY") << endl; return 0; }
RyanLandDev/Colossus
src/main/java/net/ryanland/colossus/command/impl/DefaultHelpCommand.java
package net.ryanland.colossus.command.impl;

import net.dv8tion.jda.api.entities.Member;
import net.ryanland.colossus.Colossus;
import net.ryanland.colossus.command.*;
import net.ryanland.colossus.command.annotations.CommandBuilder;
import net.ryanland.colossus.command.arguments.ArgumentSet;
import net.ryanland.colossus.command.arguments.types.CommandArgument;
import net.ryanland.colossus.command.executor.CommandHandler;
import net.ryanland.colossus.command.info.HelpMaker;
import net.ryanland.colossus.events.CommandEvent;
import net.ryanland.colossus.sys.interactions.menu.TabMenuBuilder;
import net.ryanland.colossus.sys.interactions.menu.TabMenuPage;
import net.ryanland.colossus.sys.message.PresetBuilder;

import java.util.List;
import java.util.stream.Collectors;

/**
 * Built-in {@code help} command. With no argument it replies with a tabbed
 * menu of all command categories visible to the invoking member; with a
 * command argument it replies with detailed help for that command (including
 * a sub-menu per subcommand when the command holds subcommands).
 */
@CommandBuilder(
    name = "help",
    description = "Get a list of all commands or information about a specific one.",
    guildOnly = false
)
public final class DefaultHelpCommand extends DefaultCommand implements CombinedCommand {

    /**
     * Declares the single optional {@code command} argument that selects
     * which command to describe.
     */
    @Override
    public ArgumentSet getArguments() {
        return new ArgumentSet().addArguments(
            new CommandArgument()
                .id("command")
                .optional()
                .description("Command to get information of")
        );
    }

    /**
     * Entry point: dispatches to the full command list when no argument was
     * given, or to single-command help otherwise.
     */
    @Override
    public void execute(CommandEvent event) throws CommandException {
        Command command = event.getArgument("command");
        if (command == null) supplyCommandList(event);
        else supplyCommandHelp(event, command);
    }

    /**
     * Replies with a tab menu listing every category (and recursively its
     * subcategories) that contains at least one command the invoking member
     * may use.
     */
    private void supplyCommandList(CommandEvent event) throws CommandException {
        // setup
        TabMenuBuilder menu = new TabMenuBuilder().setHomePage(
            new PresetBuilder("Help",
                "Use the buttons below to navigate through all command categories.\n" +
                    "You can get help for a specific command using " + HelpMaker.formattedUsageCode(event) + ".")
                .addLogo()
        );
        // build the menu pages
        for (Category category : Colossus.getCategories()) {
            addCategoryPage(menu, null, category, event.getMember());
        }
        // send
        event.reply(menu.build());
    }

    /**
     * Adds a page for {@code category} (and, via recursion, all of its
     * subcategories) to the menu. When {@code page} is {@code null} the
     * category is a root category and is attached to the menu itself;
     * otherwise it is attached as a child of {@code page}. Categories with
     * no commands visible to {@code member} are omitted entirely.
     */
    private void addCategoryPage(TabMenuBuilder menu, TabMenuPage page, Category category, Member member) {
        // get all commands, and filter by category equal and member has sufficient permissions
        List<Command> commands = CommandHandler.getCommands().stream().filter(c ->
            c.getCategory().equals(category) && c.memberHasPermission(member)
        ).collect(Collectors.toList());
        // if no commands were left after the filter, do not include this category in the menu
        if (commands.isEmpty()) return;

        // create page object
        TabMenuPage categoryPage = new TabMenuPage(category.getName(),
            new PresetBuilder(category.getName(),
                category.getDescription() + "\n\n" + HelpMaker.formattedQuickCommandList(commands))
                .addLogo(), category.getEmoji(), false);

        // if page is null, this is a root category, so add it to the base menu
        // else, this is a subcategory, so add this page as a subpage to the parent category page
        if (page == null) menu.addPages(categoryPage);
        else page.addChildren(categoryPage);

        // add all subcategories and its subcategories (etc.) using recursion
        for (Category subcategory : category) addCategoryPage(menu, categoryPage, subcategory, member);
    }

    /**
     * Replies with detailed help for one command. Plain commands get a single
     * embed; {@link SubCommandHolder}s get a tab menu with one page per
     * subcommand and nested subpages for subcommands that are themselves
     * holders (one level of nesting).
     */
    private void supplyCommandHelp(CommandEvent event, Command command) throws CommandException {
        // base command
        PresetBuilder baseEmbed = generateCommandEmbed(event, command, null, null);

        // return base command info
        if (!(command instanceof SubCommandHolder)) {
            event.reply(baseEmbed);

        // if a subcommand holder is used, use a menu instead with buttons for the subcommands
        } else {
            TabMenuBuilder menu = new TabMenuBuilder().setHomePage(baseEmbed);

            for (SubCommand subcommand : command.getSubCommands()) {
                // for every subcommand, create a page
                TabMenuPage page = new TabMenuPage(((Command) subcommand).getName(),
                    generateCommandEmbed(event, (Command) subcommand, (SubCommandHolder) command, null), null, false);

                // if this subcommand contains nested subcommands, create subpages for those
                if (subcommand instanceof SubCommandHolder) {
                    for (SubCommand nestedSubcommand : ((SubCommandHolder) subcommand).getSubCommands()) {
                        page.addChildren(new TabMenuPage(((Command) nestedSubcommand).getName(),
                            generateCommandEmbed(event, (Command) nestedSubcommand, (SubCommandHolder) command, (SubCommandHolder) subcommand), null, false));
                    }
                }
                // add the page to the menu
                menu.addPages(page);
            }
            // send
            event.reply(menu.build());
        }
    }

    /**
     * Builds the help embed for a command. {@code headSubCommandHolder} is
     * the top-level holder when {@code command} is a subcommand (else null);
     * {@code nestedSubCommandHolder} is the intermediate holder when it is a
     * nested subcommand (else null). The title shows the root command name
     * plus the subcommand path, and "[Disabled]" when applicable.
     */
    private PresetBuilder generateCommandEmbed(CommandEvent event, Command command,
                                               SubCommandHolder headSubCommandHolder,
                                               SubCommandHolder nestedSubCommandHolder) {
        PresetBuilder embed = new PresetBuilder()
            .setTitle(((Command) (headSubCommandHolder == null ? command : headSubCommandHolder)).getUppercaseName() + " Command" +
                (headSubCommandHolder == null && nestedSubCommandHolder == null ? "" :
                    " - " + (headSubCommandHolder == null || nestedSubCommandHolder == null ?
                        command.getName() :
                        ((Command) nestedSubCommandHolder).getName() + " " + command.getName())
                ) + (command.isDisabled() ? " [Disabled]" : ""))
            .setDescription(command.getDescription() + "\n\u200b")
            .addLogo()
            .addField("Category", command.getCategory().getName())
            .addField("Usage", String.format("```html\n%s\n```",
                HelpMaker.formattedUsage(command, null, event.getUsedPrefix(), headSubCommandHolder, nestedSubCommandHolder)
            ));
        if (command.getPermission() != null && !command.getPermission().isEmpty()) {
            embed.addField("Permission Required", command.getPermission().getName());
        }
        return embed;
    }
}
SVemulapalli/DecentCMS
modules/core/tokens/services/markup-view-engine.js
// DecentCMS (c) 2014 <NAME>, under MIT. See LICENSE.txt for licensing details.
'use strict';

var fs = require('fs');

/**
 * @description
 * A view engine using the Token API, which uses Markup.js.
 * See https://github.com/adammark/Markup.js/ for details about
 * Markup.js.
 * @param {object} scope The dependency-resolution scope; must provide a
 *   `require` method that can resolve the 'token' service.
 * @constructor
 */
var MarkupViewEngine = function(scope) {
  this.scope = scope;
};
MarkupViewEngine.service = 'view-engine';
MarkupViewEngine.feature = 'markup-view-engine';
MarkupViewEngine.prototype.extension = 'markup';

/**
 * @description
 * Loads the rendering function from the provided path.
 * @param {string} templatePath The path to the template file.
 * @param {function} done The callback function to call when the template
 *   is loaded; it receives the template function.
 * @returns {undefined} The template function is delivered via `done`.
 */
MarkupViewEngine.prototype.load = function loadMarkupTemplate(templatePath, done) {
  var token = this.scope.require('token');
  fs.readFile(templatePath, function readTemplate(err, template) {
    // NOTE(review): throwing from an async callback cannot be caught by the
    // caller of `load`; consider an error-first callback instead — kept
    // as-is to preserve the existing engine contract.
    if (err) throw err;
    // NOTE(review): `template` is a Buffer (no encoding was passed to
    // readFile); token.interpolate appears to accept it — confirm, or pass
    // 'utf8' explicitly.
    done(function markupTemplate(shape, renderer, doneRendering) {
      renderer.write(token.interpolate(template, shape));
      doneRendering();
    });
  });
};

module.exports = MarkupViewEngine;
despo/apply-for-teacher-training
app/services/find_sync/sync_all_providers_from_find.rb
module FindSync
  # Pulls basic details for every provider from the Find API and syncs each
  # one into the local database, recording the time of a successful run.
  class SyncAllProvidersFromFind
    # The recruitment cycle year this sync is pinned to. Kept as a named
    # constant so the pinned year is stated once rather than as a magic
    # number in the API call.
    RECRUITMENT_CYCLE_YEAR = 2021

    def self.call
      # Request basic details for all providers
      #
      # For the full response, see:
      # https://api2.publish-teacher-training-courses.service.gov.uk/api/v3/recruitment_cycles/2021/providers
      sync_providers(
        FindAPI::Provider.recruitment_cycle(RECRUITMENT_CYCLE_YEAR).all,
      )

      # Only reached when every provider synced without an API error.
      FindSyncCheck.set_last_sync(Time.zone.now)
    rescue JsonApiClient::Errors::ApiError
      # Translate transport-level failures into the domain-specific error.
      raise FindSync::SyncError
    end

    # Syncs each provider returned by the Find API, one at a time.
    def self.sync_providers(find_providers)
      find_providers.each do |find_provider|
        SyncProviderFromFind.call(
          provider_name: find_provider.provider_name,
          provider_code: find_provider.provider_code,
          provider_recruitment_cycle_year: find_provider.recruitment_cycle_year,
        )
      end
    end

    private_class_method :sync_providers
  end
end
andrewraharjo/CAN-Bus-Hack_Prius_Focus
scripts/find_diag_id.py
import sys, getopt from SFF import SFFMessage verbose = True outputfile = "" optlist, args = getopt.getopt(sys.argv[1:], ':o:') if(len(args) < 1): print "Usage: %s [options] <inputfile> <id>" print "options:" print " -o <output file>" sys.exit(1) for o,a in optlist: if o == "-o": outputfile = a inputfile = args[0] search_byte = args[1] found_lines = [] fp = open(inputfile, "r") #look for the 2nd byte of the data for a particular byte for line in fp: msg = SFFMessage(line) #skip over multi-line transactions as we don't care if msg.data[0:1] == "2": continue if msg.data[3:5] == search_byte: found_lines.append(msg) fp.close() if(outputfile != ""): fp = open(outputfile, "w") for msg in found_lines: print str(msg) if(outputfile != ""): fp.write(str(msg) + '\n') fp.close()
Zqh-1999/CL
XuDengXuan/js/BEAUTY LIP-lgp-lunbotu1.js
/* BEAUTY LIP image carousel controls (jQuery).
 * The current slide is marked with class "on" on a .small_list thumbnail;
 * left_btn/right_btn step backward/forward with wrap-around, and Img()
 * shows the selected large image and scrolls the thumbnail strip.
 */

/* Left button: move to the previous slide (wraps to the last one). */
function left_btn(){
    var i;
    var l = $(".small_list").find("ul li").length;
    // Find the index of the thumbnail currently marked "on".
    $(".small_list").find("ul li").each(function(index){
        if($(this).hasClass("on")){
            i = index;
        }
    });
    i--;
    if(i < 0){
        i = l - 1;
    }
    // NOTE(review): `t` is an implicit global, presumably shared with an
    // autoplay timer elsewhere — confirm before scoping it with `var`.
    t = i;
    Img(i);
}

/* Right button: move to the next slide (wraps to the first one). */
function right_btn(){
    var i;
    var l = $(".small_list").find("ul li").length;
    $(".small_list").find("ul li").each(function(index){
        if($(this).hasClass("on")){
            i = index;
        }
    });
    i++;
    if(i > l-1){
        i = 0;
    }
    t = i; // see note in left_btn about the implicit global
    Img(i);
}

/* Show slide i: fade in the large image, mark its thumbnail "on", and
 * scroll the 110px-wide thumbnail strip so the selection stays visible
 * (clamped so the strip never scrolls past its last page of 5). */
function Img(i){
    var l = $(".small_list").find("ul li").length;
    var l_mean;
    if(l < 5){
        l_mean = 0;
    }else{
        l_mean = ((parseInt(l / 5) - 1) * 5) + (l % 5);
    }
    var w = 110;
    $(".large_box").find("ul li").eq(i).fadeIn().siblings().hide();
    $(".small_list").find("ul li").eq(i).addClass("on").siblings().removeClass("on");
    var ml = i * w;
    if(ml <= l_mean * w){
        $(".small_list").find("ul").stop().animate({
            marginLeft: -ml + "px"
        })
    }else{
        $(".small_list").find("ul").stop().animate({
            marginLeft: -(l_mean * w) + "px"
        })
    }
}
sabob/springboot-angular-starter
backend/src/main/java/my/sample/config/ServiceConfig.java
<reponame>sabob/springboot-angular-starter<gh_stars>0 package my.sample.config; import my.sample.client.external.ExternalServiceClient; import my.sample.client.external.confifg.ExternalServiceClientConfig; import my.sample.repo.SampleRepository; import my.sample.service.SampleService; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import java.util.logging.Logger; @Configuration public class ServiceConfig { private static final Logger LOGGER = Logger.getLogger( ServiceConfig.class.getName() ); @Autowired private ApplicationContext applicationContext; @Value( "${external.service.basic.auth.username}" ) private String externalAppBasicAuthUsername; @Value( "${external.service.basic.auth.password}" ) private String eternalAppBasicAuthPassword; @Value( "${external.service.context-path}" ) private String externalServiceContextPath; @Value( "${external.service.host}" ) private String externalServiceHost; @Value( "${external.service.api.path}" ) private String externalServiceApiPath; @Bean public SampleService getSampleService( SampleRepository repo ) { SampleService service = new SampleService( repo ); return service; } @Bean public ExternalServiceClient getExternalServiceClient() { externalServiceContextPath = StringUtils.prependIfMissing( externalServiceContextPath, "/" ); String externalServiceUrl = externalServiceHost + externalServiceContextPath + externalServiceApiPath; ExternalServiceClientConfig config = new ExternalServiceClientConfig(); config.setUsername( externalAppBasicAuthUsername ); config.setPassword( <PASSWORD> ); config.setServiceUrl( externalServiceUrl ); ExternalServiceClient client = new ExternalServiceClient( config ); return client; } }
P1umer/ChakraCore
lib/Backend/NativeEntryPointData.h
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#pragma once

#if ENABLE_NATIVE_CODEGEN
class NativeCodeData;
namespace Js
{
    class FunctionBody;
};

typedef JsUtil::List<NativeOffsetInlineeFramePair, HeapAllocator> InlineeFrameMap;
typedef JsUtil::List<LazyBailOutRecord, HeapAllocator> NativeLazyBailOutRecordList;

class JitTransferData;

// Per-entry-point bookkeeping for JIT-compiled code: the native/thunk
// addresses and code size, plus the guards, caches and type references that
// must be kept alive (or invalidated) for as long as the code is installed.
class NativeEntryPointData
{
public:
    NativeEntryPointData();

    JitTransferData* EnsureJitTransferData(Recycler* recycler);
    JitTransferData* GetJitTransferData() { return this->jitTransferData; }
    void FreeJitTransferData();

    void RecordNativeCode(Js::JavascriptMethod thunkAddress, Js::JavascriptMethod nativeAddress, ptrdiff_t codeSize, void * validationCookie);
    Js::JavascriptMethod GetNativeAddress() { return this->nativeAddress; }
    Js::JavascriptMethod GetThunkAddress() { return this->thunkAddress; }
    ptrdiff_t GetCodeSize() { return this->codeSize; }

    void SetTJNativeAddress(Js::JavascriptMethod nativeAddress, void * validationCookie);
    void SetTJCodeSize(ptrdiff_t codeSize);

    void AddWeakFuncRef(RecyclerWeakReference<Js::FunctionBody> *weakFuncRef, Recycler *recycler);

    Js::EntryPointPolymorphicInlineCacheInfo * EnsurePolymorphicInlineCacheInfo(Recycler * recycler, Js::FunctionBody * functionBody);
    Js::EntryPointPolymorphicInlineCacheInfo * GetPolymorphicInlineCacheInfo() { return polymorphicInlineCacheInfo; }

    void RegisterConstructorCache(Js::ConstructorCache* constructorCache, Recycler* recycler);
#if DBG
    uint GetConstructorCacheCount() const { return this->constructorCaches != nullptr ? this->constructorCaches->Count() : 0; }
#endif
    void PinTypeRefs(Recycler * recycler, size_t count, void ** typeRefs);

    Js::PropertyGuard* RegisterSharedPropertyGuard(Js::PropertyId propertyId, Js::ScriptContext* scriptContext);
    Js::PropertyId* GetSharedPropertyGuards(Recycler * recycler, _Out_ unsigned int& count);
    bool TryGetSharedPropertyGuard(Js::PropertyId propertyId, Js::PropertyGuard*& guard);

    Js::EquivalentTypeCache * EnsureEquivalentTypeCache(int guardCount, Js::ScriptContext * scriptContext, Js::EntryPointInfo * entryPointInfo);
    bool ClearEquivalentTypeCaches(Recycler * recycler);

    Field(Js::FakePropertyGuardWeakReference*) * EnsurePropertyGuardWeakRefs(int guardCount, Recycler * recycler);

    Js::SmallSpanSequence * GetNativeThrowSpanSequence() { return this->nativeThrowSpanSequence; }
    void SetNativeThrowSpanSequence(Js::SmallSpanSequence * seq) { this->nativeThrowSpanSequence = seq; }

    uint GetFrameHeight() { return frameHeight; }
    void SetFrameHeight(uint frameHeight) { this->frameHeight = frameHeight; }

    uint32 GetPendingPolymorphicCacheState() const { return this->pendingPolymorphicCacheState; }
    void SetPendingPolymorphicCacheState(uint32 state) { this->pendingPolymorphicCacheState = state; }

    BYTE GetPendingInlinerVersion() const { return this->pendingInlinerVersion; }
    void SetPendingInlinerVersion(BYTE version) { this->pendingInlinerVersion = version; }

    Js::ImplicitCallFlags GetPendingImplicitCallFlags() const { return this->pendingImplicitCallFlags; }
    void SetPendingImplicitCallFlags(Js::ImplicitCallFlags flags) { this->pendingImplicitCallFlags = flags; }

    void Cleanup(Js::ScriptContext * scriptContext, bool isShutdown, bool reset);
    void ClearTypeRefsAndGuards(Js::ScriptContext * scriptContext);

#if PDATA_ENABLED
    XDataAllocation* GetXDataInfo() { return this->xdataInfo; }
    void CleanupXDataInfo();
    void SetXDataInfo(XDataAllocation* xdataInfo) { this->xdataInfo = xdataInfo; }
#endif

private:
    void RegisterEquivalentTypeCaches(Js::ScriptContext * scriptContext, Js::EntryPointInfo * entryPointInfo);
    void UnregisterEquivalentTypeCaches(Js::ScriptContext * scriptContext);
    void FreePropertyGuards();
    void FreeNativeCode(Js::ScriptContext * scriptContext, bool isShutdown);

    FieldNoBarrier(Js::JavascriptMethod) nativeAddress;
    FieldNoBarrier(Js::JavascriptMethod) thunkAddress;
    Field(ptrdiff_t) codeSize;
    Field(void*) validationCookie;

    // This field holds any recycler allocated references that must be kept alive until
    // we install the entry point.  It is freed at that point, so anything that must survive
    // until the EntryPointInfo itself goes away, must be copied somewhere else.
    Field(JitTransferData*) jitTransferData;

    typedef JsUtil::BaseHashSet<RecyclerWeakReference<Js::FunctionBody>*, Recycler, PowerOf2SizePolicy> WeakFuncRefSet;
    Field(WeakFuncRefSet *) weakFuncRefSet;

    // Need to keep strong references to the guards here so they don't get collected while the entry point is alive.
    typedef JsUtil::BaseDictionary<Js::PropertyId, Js::PropertyGuard*, Recycler, PowerOf2SizePolicy> SharedPropertyGuardDictionary;
    Field(SharedPropertyGuardDictionary*) sharedPropertyGuards;

    typedef SListCounted<Js::ConstructorCache*, Recycler> ConstructorCacheList;
    Field(ConstructorCacheList*) constructorCaches;

    Field(Js::EntryPointPolymorphicInlineCacheInfo *) polymorphicInlineCacheInfo;

    // If we pin types this array contains strong references to types, otherwise it holds weak references.
    Field(Field(void*)*) runtimeTypeRefs;

    // This array holds fake weak references to type property guards. We need it to zero out the weak references when the
    // entry point is finalized and the guards are about to be freed. Otherwise, if one of the guards was to be invalidated
    // from the thread context, we would AV trying to access freed memory. Note that the guards themselves are allocated by
    // NativeCodeData::Allocator and are kept alive by the data field. The weak references are recycler allocated, and so
    // the array must be recycler allocated also, so that the recycler doesn't collect the weak references.
    Field(Field(Js::FakePropertyGuardWeakReference*)*) propertyGuardWeakRefs;
    Field(Js::EquivalentTypeCache*) equivalentTypeCaches;
    Field(Js::EntryPointInfo **) registeredEquivalentTypeCacheRef;

    FieldNoBarrier(Js::SmallSpanSequence *) nativeThrowSpanSequence;

#if PDATA_ENABLED
    Field(XDataAllocation *) xdataInfo;
#endif

    Field(int) propertyGuardCount;
    Field(int) equivalentTypeCacheCount;

    Field(uint) frameHeight;

    // TODO: these only applies to FunctionEntryPointInfo
    Field(BYTE) pendingInlinerVersion;
    Field(Js::ImplicitCallFlags) pendingImplicitCallFlags;
    Field(uint32) pendingPolymorphicCacheState;

#if DBG_DUMP || defined(VTUNE_PROFILING)
public:
    // NativeOffsetMap is public for DBG_DUMP, private for VTUNE_PROFILING
    struct NativeOffsetMap
    {
        uint32 statementIndex;
        regex::Interval nativeOffsetSpan;
    };
    typedef JsUtil::List<NativeOffsetMap, HeapAllocator> NativeOffsetMapListType;

    NativeOffsetMapListType& GetNativeOffsetMaps() { return nativeOffsetMaps; }
private:
    Field(NativeOffsetMapListType) nativeOffsetMaps;
#endif
};

// Entry-point data for code JIT-compiled in-process: additionally owns the
// NativeCodeData, inlinee frame map and lazy-bailout bookkeeping.
class InProcNativeEntryPointData : public NativeEntryPointData
{
public:
    InProcNativeEntryPointData();

    void SetNativeCodeData(NativeCodeData * nativeCodeData);

    InlineeFrameMap * GetInlineeFrameMap();
    void RecordInlineeFrameMap(JsUtil::List<NativeOffsetInlineeFramePair, ArenaAllocator>* tempInlineeFrameMap);

    NativeLazyBailOutRecordList * GetSortedLazyBailOutRecordList() const;
    void SetSortedLazyBailOutRecordList(JsUtil::List<LazyBailOutRecord, ArenaAllocator>* sortedLazyBailOutRecordList);

    void SetLazyBailOutRecordSlotOffset(int32 argSlotOffset);
    int32 GetLazyBailOutRecordSlotOffset() const;

    void SetLazyBailOutThunkOffset(uint32 thunkOffset);
    uint32 GetLazyBailOutThunkOffset() const;

#if !FLOATVAR
    void SetNumberChunks(CodeGenNumberChunk* chunks)
    {
        numberChunks = chunks;
    }
#endif
    void OnCleanup();
private:
    FieldNoBarrier(NativeCodeData *) nativeCodeData;
    FieldNoBarrier(InlineeFrameMap *) inlineeFrameMap;
    FieldNoBarrier(NativeLazyBailOutRecordList *) sortedLazyBailoutRecordList;
    FieldNoBarrier(int32) lazyBailOutRecordSlotOffset;
    FieldNoBarrier(uint32) lazyBailOutThunkOffset;
#if !FLOATVAR
    Field(CodeGenNumberChunk*) numberChunks;
#endif
};

// Entry-point data for code JIT-compiled out-of-process: the JIT serializes
// its results into a raw data buffer owned by this object.
class OOPNativeEntryPointData : public NativeEntryPointData
{
public:
    OOPNativeEntryPointData();

    static uint32 GetOffsetOfNativeDataBuffer();
    static void DeleteNativeDataBuffer(char * naitveDataBuffer);

    char* GetNativeDataBuffer();
    char** GetNativeDataBufferRef();
    void SetNativeDataBuffer(char *);

    uint GetInlineeFrameOffsetArrayOffset();
    uint GetInlineeFrameOffsetArrayCount();
    void RecordInlineeFrameOffsetsInfo(unsigned int offsetsArrayOffset, unsigned int offsetsArrayCount);

#if !FLOATVAR
    void ProcessNumberPageSegments(Js::ScriptContext * scriptContext);
    void SetNumberPageSegment(XProcNumberPageSegment * segments)
    {
        Assert(numberPageSegments == nullptr);
        numberPageSegments = segments;
    }
#endif

    void OnCleanup();
private:
    Field(uint) inlineeFrameOffsetArrayOffset;
    Field(uint) inlineeFrameOffsetArrayCount;

    FieldNoBarrier(char *) nativeDataBuffer;

#if !FLOATVAR
    Field(Field(Js::JavascriptNumber*)*) numberArray;
    Field(XProcNumberPageSegment*) numberPageSegments;
#endif
};

#endif
wouterboomsma/quickstep
external/boost_1_60_0/qsboost/fusion/algorithm/iteration/detail/fold.hpp
/*============================================================================= Copyright (c) 2001-2011 <NAME> Copyright (c) 2006 <NAME> Copyright (c) 2009-2010 <NAME> Copyright (c) 2015 <NAME> Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) ==============================================================================*/ #include <qsboost/preprocessor/cat.hpp> #define FUSION_HASH # #ifdef QSBOOST_FUSION_REVERSE_FOLD # ifdef QSBOOST_FUSION_ITER_FOLD # define QSBOOST_FUSION_FOLD_NAME reverse_iter_fold # else # define QSBOOST_FUSION_FOLD_NAME reverse_fold # endif # define QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_FUNCTION end # define QSBOOST_FUSION_FOLD_IMPL_NEXT_IT_FUNCTION prior # define QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_META_TRANSFORM(IT) \ typename fusion::result_of::prior<IT>::type # define QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_TRANSFORM(IT) fusion::prior(IT) #else # ifdef QSBOOST_FUSION_ITER_FOLD # define QSBOOST_FUSION_FOLD_NAME iter_fold # else # define QSBOOST_FUSION_FOLD_NAME fold # endif # define QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_FUNCTION begin # define QSBOOST_FUSION_FOLD_IMPL_NEXT_IT_FUNCTION next # define QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_META_TRANSFORM(IT) IT # define QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_TRANSFORM(IT) IT #endif #ifdef QSBOOST_FUSION_ITER_FOLD # define QSBOOST_FUSION_FOLD_IMPL_INVOKE_IT_META_TRANSFORM(IT) IT& # define QSBOOST_FUSION_FOLD_IMPL_INVOKE_IT_TRANSFORM(IT) IT #else # define QSBOOST_FUSION_FOLD_IMPL_INVOKE_IT_META_TRANSFORM(IT) \ typename fusion::result_of::deref<IT>::type # define QSBOOST_FUSION_FOLD_IMPL_INVOKE_IT_TRANSFORM(IT) fusion::deref(IT) #endif #if (defined(__WAVE__) && defined(QSBOOST_FUSION_CREATE_PREPROCESSED_FILES)) FUSION_HASH if QSBOOST_WORKAROUND QSBOOST_PREVENT_MACRO_SUBSTITUTION (QSBOOST_MSVC, < 1500) FUSION_HASH define QSBOOST_FUSION_FOLD_IMPL_ENABLER(T) void FUSION_HASH else FUSION_HASH define 
QSBOOST_FUSION_FOLD_IMPL_ENABLER(T) typename T::type FUSION_HASH endif #else # if QSBOOST_WORKAROUND(QSBOOST_MSVC, < 1500) # define QSBOOST_FUSION_FOLD_IMPL_ENABLER(T) void # else # define QSBOOST_FUSION_FOLD_IMPL_ENABLER(T) typename T::type # endif #endif namespace qsboost { namespace fusion { namespace detail { template<int SeqSize, typename It, typename State, typename F, typename = void #if defined(__WAVE__) && defined(QSBOOST_FUSION_CREATE_PREPROCESSED_FILES) FUSION_HASH if QSBOOST_WORKAROUND QSBOOST_PREVENT_MACRO_SUBSTITUTION (QSBOOST_MSVC, < 1500) #endif #if QSBOOST_WORKAROUND(QSBOOST_MSVC, < 1500) || \ (defined(__WAVE__) && defined(QSBOOST_FUSION_CREATE_PREPROCESSED_FILES)) // Dirty hack: those compilers cannot choose exactly one partial specialization. , bool = SeqSize == 0 #endif #if defined(__WAVE__) && defined(QSBOOST_FUSION_CREATE_PREPROCESSED_FILES) FUSION_HASH endif #endif > struct QSBOOST_PP_CAT(result_of_it_,QSBOOST_FUSION_FOLD_NAME) {}; template<typename It, typename State, typename F> struct QSBOOST_PP_CAT(result_of_it_,QSBOOST_FUSION_FOLD_NAME)<0,It,State,F , typename qsboost::enable_if_has_type<QSBOOST_FUSION_FOLD_IMPL_ENABLER(State)>::type #if defined(__WAVE__) && defined(QSBOOST_FUSION_CREATE_PREPROCESSED_FILES) FUSION_HASH if QSBOOST_WORKAROUND QSBOOST_PREVENT_MACRO_SUBSTITUTION (QSBOOST_MSVC, < 1500) #endif #if QSBOOST_WORKAROUND(QSBOOST_MSVC, < 1500) || \ (defined(__WAVE__) && defined(QSBOOST_FUSION_CREATE_PREPROCESSED_FILES)) , true #endif #if defined(__WAVE__) && defined(QSBOOST_FUSION_CREATE_PREPROCESSED_FILES) FUSION_HASH endif #endif > { typedef typename State::type type; }; template<int SeqSize, typename It, typename State, typename F> struct QSBOOST_PP_CAT(result_of_it_,QSBOOST_FUSION_FOLD_NAME)<SeqSize,It,State,F , typename qsboost::enable_if_has_type< #if defined(__WAVE__) && defined(QSBOOST_FUSION_CREATE_PREPROCESSED_FILES) FUSION_HASH if QSBOOST_WORKAROUND QSBOOST_PREVENT_MACRO_SUBSTITUTION (QSBOOST_MSVC, >= 1500) #endif #if 
QSBOOST_WORKAROUND(QSBOOST_MSVC, >= 1500) || \ (defined(__WAVE__) && defined(QSBOOST_FUSION_CREATE_PREPROCESSED_FILES)) // Following SFINAE enables to avoid MSVC 9's partial specialization // ambiguous bug but MSVC 8 don't compile, and moreover MSVC 8 style // workaround won't work with MSVC 9. typename qsboost::disable_if_c<SeqSize == 0, State>::type::type #if defined(__WAVE__) && defined(QSBOOST_FUSION_CREATE_PREPROCESSED_FILES) FUSION_HASH else QSBOOST_FUSION_FOLD_IMPL_ENABLER(State) #endif #else QSBOOST_FUSION_FOLD_IMPL_ENABLER(State) #endif #if defined(__WAVE__) && defined(QSBOOST_FUSION_CREATE_PREPROCESSED_FILES) FUSION_HASH endif #endif >::type #if defined(__WAVE__) && defined(QSBOOST_FUSION_CREATE_PREPROCESSED_FILES) FUSION_HASH if QSBOOST_WORKAROUND QSBOOST_PREVENT_MACRO_SUBSTITUTION (QSBOOST_MSVC, < 1500) #endif #if QSBOOST_WORKAROUND(QSBOOST_MSVC, < 1500) || \ (defined(__WAVE__) && defined(QSBOOST_FUSION_CREATE_PREPROCESSED_FILES)) , false #endif #if defined(__WAVE__) && defined(QSBOOST_FUSION_CREATE_PREPROCESSED_FILES) FUSION_HASH endif #endif > : QSBOOST_PP_CAT(result_of_it_,QSBOOST_FUSION_FOLD_NAME)< SeqSize-1 , typename result_of::QSBOOST_FUSION_FOLD_IMPL_NEXT_IT_FUNCTION<It>::type , qsboost::result_of< F( typename add_reference<typename State::type>::type, QSBOOST_FUSION_FOLD_IMPL_INVOKE_IT_META_TRANSFORM(It const) ) > , F > {}; template<typename It, typename State, typename F> QSBOOST_CONSTEXPR QSBOOST_FUSION_GPU_ENABLED inline typename QSBOOST_PP_CAT(result_of_it_,QSBOOST_FUSION_FOLD_NAME)< 0 , It , State , F >::type QSBOOST_PP_CAT(it_,QSBOOST_FUSION_FOLD_NAME)(mpl::int_<0>, It const&, typename State::type state, F&) { return state; } template<typename It, typename State, typename F, int SeqSize> QSBOOST_CONSTEXPR QSBOOST_FUSION_GPU_ENABLED inline typename lazy_enable_if_c< SeqSize != 0 , QSBOOST_PP_CAT(result_of_it_,QSBOOST_FUSION_FOLD_NAME)< SeqSize , It , State , F > >::type QSBOOST_PP_CAT(it_,QSBOOST_FUSION_FOLD_NAME)(mpl::int_<SeqSize>, It 
const& it, typename State::type state, F& f) { return QSBOOST_PP_CAT(it_,QSBOOST_FUSION_FOLD_NAME)< typename result_of::QSBOOST_FUSION_FOLD_IMPL_NEXT_IT_FUNCTION<It>::type , qsboost::result_of< F( typename add_reference<typename State::type>::type, QSBOOST_FUSION_FOLD_IMPL_INVOKE_IT_META_TRANSFORM(It const) ) > , F >( mpl::int_<SeqSize-1>() , fusion::QSBOOST_FUSION_FOLD_IMPL_NEXT_IT_FUNCTION(it) , f(state, QSBOOST_FUSION_FOLD_IMPL_INVOKE_IT_TRANSFORM(it)) , f ); } template<typename Seq, typename State, typename F , bool = traits::is_sequence<Seq>::value , bool = traits::is_segmented<Seq>::value> struct QSBOOST_PP_CAT(result_of_,QSBOOST_FUSION_FOLD_NAME) {}; template<typename Seq, typename State, typename F> struct QSBOOST_PP_CAT(result_of_,QSBOOST_FUSION_FOLD_NAME)<Seq, State, F, true, false> : QSBOOST_PP_CAT(result_of_it_,QSBOOST_FUSION_FOLD_NAME)< result_of::size<Seq>::value , QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_META_TRANSFORM( typename result_of::QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_FUNCTION<Seq>::type ) , add_reference<State> , F > {}; template<typename Seq, typename State, typename F> QSBOOST_CONSTEXPR QSBOOST_FUSION_GPU_ENABLED inline typename QSBOOST_PP_CAT(result_of_,QSBOOST_FUSION_FOLD_NAME)<Seq, State, F>::type QSBOOST_FUSION_FOLD_NAME(Seq& seq, State& state, F& f) { return QSBOOST_PP_CAT(it_,QSBOOST_FUSION_FOLD_NAME)< QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_META_TRANSFORM( typename result_of::QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_FUNCTION<Seq>::type ) , add_reference<State> , F >( typename result_of::size<Seq>::type() , QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_TRANSFORM( fusion::QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_FUNCTION(seq) ) , state , f ); } } namespace result_of { template<typename Seq, typename State, typename F> struct QSBOOST_FUSION_FOLD_NAME : detail::QSBOOST_PP_CAT(result_of_,QSBOOST_FUSION_FOLD_NAME)<Seq, State, F> {}; } template<typename Seq, typename State, typename F> QSBOOST_CONSTEXPR QSBOOST_FUSION_GPU_ENABLED inline typename 
result_of::QSBOOST_FUSION_FOLD_NAME< Seq , State const , F >::type QSBOOST_FUSION_FOLD_NAME(Seq& seq, State const& state, F f) { return detail::QSBOOST_FUSION_FOLD_NAME<Seq, State const, F>(seq, state, f); } template<typename Seq, typename State, typename F> QSBOOST_CONSTEXPR QSBOOST_FUSION_GPU_ENABLED inline typename result_of::QSBOOST_FUSION_FOLD_NAME< Seq const , State const , F >::type QSBOOST_FUSION_FOLD_NAME(Seq const& seq, State const& state, F f) { return detail::QSBOOST_FUSION_FOLD_NAME<Seq const, State const, F>(seq, state, f); } template<typename Seq, typename State, typename F> QSBOOST_CONSTEXPR QSBOOST_FUSION_GPU_ENABLED inline typename result_of::QSBOOST_FUSION_FOLD_NAME< Seq , State , F >::type QSBOOST_FUSION_FOLD_NAME(Seq& seq, State& state, F f) { return detail::QSBOOST_FUSION_FOLD_NAME<Seq, State, F>(seq, state, f); } template<typename Seq, typename State, typename F> QSBOOST_CONSTEXPR QSBOOST_FUSION_GPU_ENABLED inline typename result_of::QSBOOST_FUSION_FOLD_NAME< Seq const , State , F >::type QSBOOST_FUSION_FOLD_NAME(Seq const& seq, State& state, F f) { return detail::QSBOOST_FUSION_FOLD_NAME<Seq const, State, F>(seq, state, f); } }} #undef QSBOOST_FUSION_FOLD_NAME #undef QSBOOST_FUSION_FOLD_IMPL_ENABLER #undef QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_FUNCTION #undef QSBOOST_FUSION_FOLD_IMPL_NEXT_IT_FUNCTION #undef QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_META_TRANSFORM #undef QSBOOST_FUSION_FOLD_IMPL_FIRST_IT_TRANSFORM #undef QSBOOST_FUSION_FOLD_IMPL_INVOKE_IT_META_TRANSFORM #undef QSBOOST_FUSION_FOLD_IMPL_INVOKE_IT_TRANSFORM #undef FUSION_HASH
pradeep-gr/mbed-os5-onsemi
features/unsupported/USBHost/USBHostMIDI/USBHostMIDI.h
/* Copyright (c) 2014 mbed.org, MIT License * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software * and associated documentation files (the "Software"), to deal in the Software without * restriction, including without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or * substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ #ifndef USBHOSTMIDI_H #define USBHOSTMIDI_H #include "USBHostConf.h" #if USBHOST_MIDI #include "USBHost.h" /** * A class to communicate a USB MIDI device */ class USBHostMIDI : public IUSBEnumerator { public: /** * Constructor */ USBHostMIDI(); /** * Check if a USB MIDI device is connected * * @returns true if a midi device is connected */ bool connected(); /** * Try to connect a midi device * * @return true if connection was successful */ bool connect(); /** * Attach a callback called when miscellaneous function code is received * * @param ptr function pointer * prototype: void onMiscellaneousFunctionCode(uint8_t data1, uint8_t data2, uint8_t data3); */ inline void attachMiscellaneousFunctionCode(void (*fn)(uint8_t, uint8_t, uint8_t)) { miscellaneousFunctionCode = fn; } /** * Attach a callback called when cable event is received * * @param ptr function pointer * prototype: void onCableEvent(uint8_t data1, uint8_t data2, uint8_t data3); */ inline void attachCableEvent(void (*fn)(uint8_t, uint8_t, uint8_t)) { cableEvent = fn; } /** * Attach a callback called when system exclusive is received * * @param ptr function pointer * prototype: void onSystemCommonTwoBytes(uint8_t data1, uint8_t data2); */ inline void attachSystemCommonTwoBytes(void (*fn)(uint8_t, uint8_t)) { systemCommonTwoBytes = fn; } /** * Attach a callback called when system exclusive is received * * @param ptr function pointer * prototype: void onSystemCommonThreeBytes(uint8_t data1, uint8_t data2, uint8_t data3); */ inline void attachSystemCommonThreeBytes(void (*fn)(uint8_t, uint8_t, uint8_t)) { systemCommonThreeBytes = fn; } /** * Attach a callback called when system exclusive is received * * @param ptr function pointer * prototype: void onSystemExclusive(uint8_t *data, uint16_t length, bool hasNextData); */ inline void attachSystemExclusive(void (*fn)(uint8_t *, uint16_t, bool)) { systemExclusive = fn; } /** * Attach a callback called when note on is received * * @param ptr function pointer * 
prototype: void onNoteOn(uint8_t channel, uint8_t note, uint8_t velocity); */ inline void attachNoteOn(void (*fn)(uint8_t, uint8_t, uint8_t)) { noteOn = fn; } /** * Attach a callback called when note off is received * * @param ptr function pointer * prototype: void onNoteOff(uint8_t channel, uint8_t note, uint8_t velocity); */ inline void attachNoteOff(void (*fn)(uint8_t, uint8_t, uint8_t)) { noteOff = fn; } /** * Attach a callback called when poly keypress is received * * @param ptr function pointer * prototype: void onPolyKeyPress(uint8_t channel, uint8_t note, uint8_t pressure); */ inline void attachPolyKeyPress(void (*fn)(uint8_t, uint8_t, uint8_t)) { polyKeyPress = fn; } /** * Attach a callback called when control change is received * * @param ptr function pointer * prototype: void onControlChange(uint8_t channel, uint8_t key, uint8_t value); */ inline void attachControlChange(void (*fn)(uint8_t, uint8_t, uint8_t)) { controlChange = fn; } /** * Attach a callback called when program change is received * * @param ptr function pointer * prototype: void onProgramChange(uint8_t channel, uint8_t program); */ inline void attachProgramChange(void (*fn)(uint8_t, uint8_t)) { programChange = fn; } /** * Attach a callback called when channel pressure is received * * @param ptr function pointer * prototype: void onChannelPressure(uint8_t channel, uint8_t pressure); */ inline void attachChannelPressure(void (*fn)(uint8_t, uint8_t)) { channelPressure = fn; } /** * Attach a callback called when pitch bend is received * * @param ptr function pointer * prototype: void onPitchBend(uint8_t channel, uint16_t value); */ inline void attachPitchBend(void (*fn)(uint8_t, uint16_t)) { pitchBend = fn; } /** * Attach a callback called when single byte is received * * @param ptr function pointer * prototype: void onSingleByte(uint8_t value); */ inline void attachSingleByte(void (*fn)(uint8_t)) { singleByte = fn; } /** * Send a cable event with 3 bytes event * * @param data1 0-255 * @param 
data2 0-255 * @param data3 0-255 * @return true if message sent successfully */ bool sendMiscellaneousFunctionCode(uint8_t data1, uint8_t data2, uint8_t data3); /** * Send a cable event with 3 bytes event * * @param data1 0-255 * @param data2 0-255 * @param data3 0-255 * @return true if message sent successfully */ bool sendCableEvent(uint8_t data1, uint8_t data2, uint8_t data3); /** * Send a system common message with 2 bytes event * * @param data1 0-255 * @param data2 0-255 * @return true if message sent successfully */ bool sendSystemCommmonTwoBytes(uint8_t data1, uint8_t data2); /** * Send a system common message with 3 bytes event * * @param data1 0-255 * @param data2 0-255 * @param data3 0-255 * @return true if message sent successfully */ bool sendSystemCommmonThreeBytes(uint8_t data1, uint8_t data2, uint8_t data3); /** * Send a system exclusive event * * @param buffer, starts with 0xF0, and end with 0xf7 * @param length * @return true if message sent successfully */ bool sendSystemExclusive(uint8_t *buffer, int length); /** * Send a note off event * * @param channel 0-15 * @param note 0-127 * @param velocity 0-127 * @return true if message sent successfully */ bool sendNoteOff(uint8_t channel, uint8_t note, uint8_t velocity); /** * Send a note on event * * @param channel 0-15 * @param note 0-127 * @param velocity 0-127 (0 means note off) * @return true if message sent successfully */ bool sendNoteOn(uint8_t channel, uint8_t note, uint8_t velocity); /** * Send a poly keypress event * * @param channel 0-15 * @param note 0-127 * @param pressure 0-127 * @return true if message sent successfully */ bool sendPolyKeyPress(uint8_t channel, uint8_t note, uint8_t pressure); /** * Send a control change event * * @param channel 0-15 * @param key 0-127 * @param value 0-127 * @return true if message sent successfully */ bool sendControlChange(uint8_t channel, uint8_t key, uint8_t value); /** * Send a program change event * * @param channel 0-15 * @param program 0-127 * 
@return true if message sent successfully */ bool sendProgramChange(uint8_t channel, uint8_t program); /** * Send a channel pressure event * * @param channel 0-15 * @param pressure 0-127 * @return true if message sent successfully */ bool sendChannelPressure(uint8_t channel, uint8_t pressure); /** * Send a control change event * * @param channel 0-15 * @param key 0(lower)-8191(center)-16383(higher) * @return true if message sent successfully */ bool sendPitchBend(uint8_t channel, uint16_t value); /** * Send a single byte event * * @param data 0-255 * @return true if message sent successfully */ bool sendSingleByte(uint8_t data); protected: //From IUSBEnumerator virtual void setVidPid(uint16_t vid, uint16_t pid); virtual bool parseInterface(uint8_t intf_nb, uint8_t intf_class, uint8_t intf_subclass, uint8_t intf_protocol); //Must return true if the interface should be parsed virtual bool useEndpoint(uint8_t intf_nb, ENDPOINT_TYPE type, ENDPOINT_DIRECTION dir); //Must return true if the endpoint will be used private: USBHost * host; USBDeviceConnected * dev; USBEndpoint * bulk_in; USBEndpoint * bulk_out; uint32_t size_bulk_in; uint32_t size_bulk_out; bool dev_connected; void init(); uint8_t buf[64]; void rxHandler(); uint16_t sysExBufferPos; uint8_t sysExBuffer[64]; void (*miscellaneousFunctionCode)(uint8_t, uint8_t, uint8_t); void (*cableEvent)(uint8_t, uint8_t, uint8_t); void (*systemCommonTwoBytes)(uint8_t, uint8_t); void (*systemCommonThreeBytes)(uint8_t, uint8_t, uint8_t); void (*systemExclusive)(uint8_t *, uint16_t, bool); void (*noteOff)(uint8_t, uint8_t, uint8_t); void (*noteOn)(uint8_t, uint8_t, uint8_t); void (*polyKeyPress)(uint8_t, uint8_t, uint8_t); void (*controlChange)(uint8_t, uint8_t, uint8_t); void (*programChange)(uint8_t, uint8_t); void (*channelPressure)(uint8_t, uint8_t); void (*pitchBend)(uint8_t, uint16_t); void (*singleByte)(uint8_t); bool sendMidiBuffer(uint8_t data0, uint8_t data1, uint8_t data2, uint8_t data3); int midi_intf; bool 
midi_device_found; }; #endif /* USBHOST_MIDI */ #endif /* USBHOSTMIDI_H */
yury-s/v8-inspector
Source/chrome/tools/telemetry/telemetry/core/backends/remote/trybot_browser_finder.py
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Finds perf trybots that can run telemetry tests.""" import json import logging import os import re import subprocess import sys import urllib2 from telemetry.core import platform from telemetry.core.platform import trybot_device from telemetry.core import possible_browser from telemetry import decorators CHROMIUM_CONFIG_FILENAME = 'tools/run-perf-test.cfg' BLINK_CONFIG_FILENAME = 'Tools/run-perf-test.cfg' SUCCESS, NO_CHANGES, ERROR = range(3) # Unsupported Perf bisect bots. EXCLUDED_BOTS = { 'win_xp_perf_bisect', 'linux_perf_tester', 'linux_perf_bisector', 'win_perf_bisect_builder', 'win_x64_perf_bisect_builder', 'linux_perf_bisect_builder', 'mac_perf_bisect_builder', 'android_perf_bisect_builder' } INCLUDE_BOTS = [ 'trybot-all', 'trybot-all-win', 'trybot-all-mac', 'trybot-all-linux', 'trybot-all-android' ] class TrybotError(Exception): def __str__(self): return '%s\nError running tryjob.' 
% self.args[0] class PossibleTrybotBrowser(possible_browser.PossibleBrowser): """A script that sends a job to a trybot.""" def __init__(self, browser_type, _): target_os = browser_type.split('-')[1] self._builder_names = _GetBuilderNames(browser_type) super(PossibleTrybotBrowser, self).__init__(browser_type, target_os, True) def Create(self, finder_options): raise NotImplementedError() def SupportsOptions(self, finder_options): if ((finder_options.device and finder_options.device != 'trybot') or finder_options.chrome_root or finder_options.cros_remote or finder_options.extensions_to_load or finder_options.profile_dir): return False return True def IsRemote(self): return True def _RunProcess(self, cmd): logging.debug('Running process: "%s"', ' '.join(cmd)) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = proc.communicate() returncode = proc.poll() return (returncode, out, err) def _UpdateConfigAndRunTryjob(self, bot_platform, cfg_file_path): """Updates perf config file, uploads changes and excutes perf try job. Args: bot_platform: Name of the platform to be generated. cfg_file_path: Perf config file path. Returns: (result, msg) where result is one of: SUCCESS if a tryjob was sent NO_CHANGES if there was nothing to try, ERROR if a tryjob was attempted but an error encountered and msg is an error message if an error was encountered, or rietveld url if success, otherwise throws TrybotError exception. """ config = self._GetPerfConfig(bot_platform) try: config_file = open(cfg_file_path, 'w') except IOError: msg = 'Cannot find %s. Please run from src dir.' % cfg_file_path return (ERROR, msg) config_file.write('config = %s' % json.dumps( config, sort_keys=True, indent=2, separators=(',', ': '))) config_file.close() # Commit the config changes locally. 
returncode, out, err = self._RunProcess( ['git', 'commit', '-a', '-m', 'bisect config: %s' % bot_platform]) if returncode: raise TrybotError('Could not commit bisect config change for %s,' ' error %s' % (bot_platform, err)) # Upload the CL to rietveld and run a try job. returncode, out, err = self._RunProcess([ 'git', 'cl', 'upload', '-f', '--bypass-hooks', '-m', 'CL for perf tryjob on %s' % bot_platform ]) if returncode: raise TrybotError('Could upload to rietveld for %s, error %s' % (bot_platform, err)) match = re.search(r'https://codereview.chromium.org/[\d]+', out) if not match: raise TrybotError('Could not upload CL to rietveld for %s! Output %s' % (bot_platform, out)) rietveld_url = match.group(0) # Generate git try command for available bots. git_try_command = ['git', 'cl', 'try', '-m', 'tryserver.chromium.perf'] for bot in self._builder_names[bot_platform]: git_try_command.extend(['-b', bot]) returncode, out, err = self._RunProcess(git_try_command) if returncode: raise TrybotError('Could not try CL for %s, error %s' % (bot_platform, err)) return (SUCCESS, rietveld_url) def _GetPerfConfig(self, bot_platform): """Generates the perf config for try job. Args: bot_platform: Name of the platform to be generated. Returns: A dictionary with perf config parameters. 
""" # Generate the command line for the perf trybots target_arch = 'ia32' arguments = sys.argv if bot_platform in ['win', 'win-x64']: arguments[0] = 'python tools\\perf\\run_benchmark' else: arguments[0] = './tools/perf/run_benchmark' for index, arg in enumerate(arguments): if arg.startswith('--browser='): if bot_platform == 'android': arguments[index] = '--browser=android-chrome-shell' elif any('x64' in bot for bot in self._builder_names[bot_platform]): arguments[index] = '--browser=release_x64' target_arch = 'x64' else: arguments[index] = '--browser=release' command = ' '.join(arguments) return { 'command': command, 'repeat_count': '1', 'max_time_minutes': '120', 'truncate_percent': '0', 'target_arch': target_arch, } def _AttemptTryjob(self, cfg_file_path): """Attempts to run a tryjob from the current directory. This is run once for chromium, and if it returns NO_CHANGES, once for blink. Args: cfg_file_path: Path to the config file for the try job. Returns: Returns SUCCESS if a tryjob was sent, NO_CHANGES if there was nothing to try, ERROR if a tryjob was attempted but an error encountered. """ source_repo = 'chromium' if cfg_file_path == BLINK_CONFIG_FILENAME: source_repo = 'blink' # TODO(prasadv): This method is quite long, we should consider refactor # this by extracting to helper methods. returncode, original_branchname, err = self._RunProcess( ['git', 'rev-parse', '--abbrev-ref', 'HEAD']) if returncode: msg = 'Must be in a git repository to send changes to trybots.' if err: msg += '\nGit error: %s' % err logging.error(msg) return ERROR original_branchname = original_branchname.strip() # Check if the tree is dirty: make sure the index is up to date and then # run diff-index self._RunProcess(['git', 'update-index', '--refresh', '-q']) returncode, out, err = self._RunProcess(['git', 'diff-index', 'HEAD']) if out: logging.error( 'Cannot send a try job with a dirty tree. Commit locally first.') return ERROR # Make sure the tree does have local commits. 
returncode, out, err = self._RunProcess( ['git', 'log', 'origin/master..HEAD']) if not out: return NO_CHANGES # Create/check out the telemetry-tryjob branch, and edit the configs # for the tryjob there. returncode, out, err = self._RunProcess( ['git', 'checkout', '-b', 'telemetry-tryjob']) if returncode: logging.error('Error creating branch telemetry-tryjob. ' 'Please delete it if it exists.\n%s', err) return ERROR try: returncode, out, err = self._RunProcess( ['git', 'branch', '--set-upstream-to', 'origin/master']) if returncode: logging.error('Error in git branch --set-upstream-to: %s', err) return ERROR for bot_platform in self._builder_names: try: results, output = self._UpdateConfigAndRunTryjob( bot_platform, cfg_file_path) if results == ERROR: logging.error(output) return ERROR print ('Uploaded %s try job to rietveld for %s platform. ' 'View progress at %s' % (source_repo, bot_platform, output)) except TrybotError, err: print err logging.error(err) finally: # Checkout original branch and delete telemetry-tryjob branch. # TODO(prasadv): This finally block could be extracted out to be a # separate function called _CleanupBranch. returncode, out, err = self._RunProcess( ['git', 'checkout', original_branchname]) if returncode: logging.error('Could not check out %s. Please check it out and ' 'manually delete the telemetry-tryjob branch. ' ': %s', original_branchname, err) return ERROR # pylint: disable=lost-exception logging.info('Checked out original branch: %s', original_branchname) returncode, out, err = self._RunProcess( ['git', 'branch', '-D', 'telemetry-tryjob']) if returncode: logging.error('Could not delete telemetry-tryjob branch. ' 'Please delete it manually: %s', err) return ERROR # pylint: disable=lost-exception logging.info('Deleted temp branch: telemetry-tryjob') return SUCCESS def RunRemote(self): """Sends a tryjob to a perf trybot. 
This creates a branch, telemetry-tryjob, switches to that branch, edits the bisect config, commits it, uploads the CL to rietveld, and runs a tryjob on the given bot. """ # First check if there are chromium changes to upload. status = self._AttemptTryjob(CHROMIUM_CONFIG_FILENAME) if status not in [SUCCESS, ERROR]: # If we got here, there are no chromium changes to upload. Try blink. os.chdir('third_party/WebKit/') status = self._AttemptTryjob(BLINK_CONFIG_FILENAME) os.chdir('../..') if status not in [SUCCESS, ERROR]: logging.error('No local changes found in chromium or blink trees. ' 'browser=%s argument sends local changes to the ' 'perf trybot(s): %s.', self.browser_type, self._builder_names.values()) def _InitPlatformIfNeeded(self): if self._platform: return self._platform = platform.GetHostPlatform() # pylint: disable=W0212 self._platform_backend = self._platform._platform_backend def SelectDefaultBrowser(_): return None def CanFindAvailableBrowsers(): return True @decorators.Cache def _GetTrybotList(): f = urllib2.urlopen( 'http://build.chromium.org/p/tryserver.chromium.perf/json') builders = json.loads(f.read()).get('builders', {}).keys() builders = ['trybot-%s' % bot.replace('_perf_bisect', '').replace('_', '-') for bot in builders if bot not in EXCLUDED_BOTS] builders.extend(INCLUDE_BOTS) return sorted(builders) def _GetBuilderNames(browser_type): """ Return platform and its available bot name as dictionary.""" if 'all' not in browser_type: bot = ['%s_perf_bisect' % browser_type.replace( 'trybot-', '').replace('-', '_')] bot_platform = browser_type.split('-')[1] if 'x64' in browser_type: bot_platform += '-x64' return {bot_platform: bot} f = urllib2.urlopen( 'http://build.chromium.org/p/tryserver.chromium.perf/json') builders = json.loads(f.read()).get('builders', {}).keys() # Exclude unsupported bots like win xp and some dummy bots. 
builders = [bot for bot in builders if bot not in EXCLUDED_BOTS] platform_and_bots = {} for os_name in ['linux', 'android', 'mac', 'win']: platform_and_bots[os_name] = [bot for bot in builders if os_name in bot] # Special case for Windows x64, consider it as separate platform # config config should contain target_arch=x64 and --browser=release_x64. win_x64_bots = [platform_and_bots['win'].pop(i) for i, win_bot in enumerate(platform_and_bots['win']) if 'x64' in win_bot] platform_and_bots['win-x64'] = win_x64_bots if 'all-win' in browser_type: return {'win': platform_and_bots['win'], 'win-x64': platform_and_bots['win-x64']} if 'all-mac' in browser_type: return {'mac': platform_and_bots['mac']} if 'all-android' in browser_type: return {'android': platform_and_bots['android']} if 'all-linux' in browser_type: return {'linux': platform_and_bots['linux']} return platform_and_bots def FindAllBrowserTypes(finder_options): # Listing browsers requires an http request; only do this if the user is # running with browser=list or a browser=trybot-* argument. if (finder_options.browser_type and (finder_options.browser_type == 'list' or finder_options.browser_type.startswith('trybot'))): return _GetTrybotList() return [] def FindAllAvailableBrowsers(finder_options, device): """Find all perf trybots on tryserver.chromium.perf.""" if not isinstance(device, trybot_device.TrybotDevice): return [] return [PossibleTrybotBrowser(b, finder_options) for b in FindAllBrowserTypes(finder_options)]
smartguys/bidbuysell
client/app/components/Header/HeaderWithName.js
<filename>client/app/components/Header/HeaderWithName.js import React, { Component } from 'react'; import { Link } from 'react-router-dom'; import Button from 'react-bootstrap/Button' import { Navbar, Nav, NavItem, NavDropdown, MenuItem, Form, FormControl } from 'react-bootstrap'; import { Container, Grid, Row, Col } from 'react-bootstrap'; import { getJwt } from '../../helpers/getjwt' import Axios from 'axios' class HeaderWithName extends Component { constructor(props) { super(props); } render() { const { userName } = this.props return ( <Navbar bg="light" expand="lg"> <Link to='/'> <Navbar.Brand>BidBuySell</Navbar.Brand> </Link> <Navbar.Toggle aria-controls="basic-navbar-nav" /> <Navbar.Collapse id="basic-navbar-nav"> <Nav className="mr-auto"> </Nav> <Nav className="justify-content-end" activeKey="/home"> <Nav.Item> <Link to='/search'><Navbar.Brand>Buy</Navbar.Brand></Link> </Nav.Item> <Nav.Item> <Link to='/sell'><Navbar.Brand>Sell</Navbar.Brand></Link> </Nav.Item> <Link to={(userName === '')? "/login" : "/myaccount"}> <Button style={{ marginLeft: '5px' }} variant="primary">{(userName === '')? "Login" : "My Account"}</Button> </Link> </Nav> </Navbar.Collapse> </Navbar> ); } } export default HeaderWithName;
lemanhdung/learnstorybook
src/node_modules/@storybook/addon-toolbars/dist/cjs/utils/normalize-toolbar-arg-type.js
<gh_stars>0 "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.normalizeArgType = void 0; require("core-js/modules/es.object.assign.js"); require("core-js/modules/es.function.name.js"); require("core-js/modules/es.symbol.js"); require("core-js/modules/es.symbol.description.js"); require("core-js/modules/es.array.map.js"); var defaultItemValues = { type: 'item', value: '' }; var normalizeArgType = function normalizeArgType(key, argType) { return Object.assign({}, argType, { name: argType.name || key, description: argType.description || key, toolbar: Object.assign({}, argType.toolbar, { items: argType.toolbar.items.map(function (_item) { var item = typeof _item === 'string' ? { value: _item, title: _item } : _item; // Cater for the special type "reset" which will reset value and also icon // of toolbar button if any icon was present on toolbar to begin with if (item.type === 'reset' && argType.toolbar.icon) { item.icon = argType.toolbar.icon; item.hideIcon = true; } return Object.assign({}, defaultItemValues, item); }) }) }); }; exports.normalizeArgType = normalizeArgType;
MarceloJbCosta/Curso-Java-Loiane
Aula-Java-Loiane/pooLoianeG/src/ExerciciosPoo/Ex02.java
package ExerciciosPoo; import ExerciciosPoo.ContaCorrente02; public class Ex02 { public static void main(String[] args) { ContaCorrente02 conta = new ContaCorrente02(); /*Quando encapsulamos nao da pra acesssar diretamente o metodo * temos que usar o set... e o get * conta.numero = "123456"; conta.agencia = "1234"; conta.especial = true; conta.limiteEspecial = 500; conta.valorEspecialUsado = 0; conta.saldo = -10; */ conta.setNumero("18237-7"); conta.setAgencia("8251"); conta.setEspecial(true); conta.setLimiteEspecial(500); //conta.setValorEspecialUsado(valorEspecialUsado); conta.setSaldo(-10); System.out.println("Saldo conta "+ conta.getNumero() + " = " + conta.getSaldo()); System.out.println("Numero Agencia: "+ conta.getAgencia() + " Numero conta: " + conta.getNumero()); boolean saqueEfetuado = conta.realizarSaque(10); if(saqueEfetuado) { System.out.println("Saque efetuado com sucesso!"); System.out.println("Saldo Atual R$ " + conta.getSaldo()); }else { System.out.println("Nao foi possivel realizar saque., saldo insuficiente"); } saqueEfetuado = conta.realizarSaque(500); System.out.println("Tentatica de saque 500"); if(saqueEfetuado) { System.out.println("Saque efetuado com sucesso!"); System.out.println("Saldo Atual R$ " + conta.getSaldo()); }else { System.out.println("Nao foi possivel realizar saque., saldo insuficiente"); } System.out.println("Deposito de 500 reais"); conta.depositar(500); conta.consultarSaldo(); if(conta.verificarUsoChequeEspecial()) { System.out.println("Esta usando cheque especial"); }else { System.out.println("nao esta usando cheque especial"); } } }
ilyar/Evolve
evolve4src/src/evolve/OrganismFinder.h
<reponame>ilyar/Evolve #pragma once // // This class implements the Find feature. // // It will evaluate a find expression and set the radioactive tracer // flag for organisms that match the expression. // // The find expression is given in KFORTH notation with special // find instructions. // // To use: // 1. Create instance of this class, give the find_expression // 2. Check if error is TRUE or FALSE. // 3. if error is FALSE, then error_message will explain the problem // 4. Otherwise call execute() with the simulation to use. // 5. Now all organisms that match the expression will have a radioactive tracer. // // 'reset_tracers' will first clear any previous tracers. // class OrganismFinder { public: OrganismFinder(CString find_expression, bool reset_tracers); ~OrganismFinder(); bool error; CString error_message; void execute(UNIVERSE *u); ORGANISM *organism; int min_energy; int max_energy; int avg_energy; int min_generation; int max_generation; int avg_generation; int min_age; int max_age; int avg_age; int max_num_cells; private: bool evalute(KFORTH_MACHINE *kfm, ORGANISM *o); bool m_reset_tracers; KFORTH_PROGRAM *m_kfp; };
tomasfriz/Codo-a-Codo-Python
Codo a Codo/Proyectos de las clases/Clase 14/Clase14-JS2/bucles/bucle_do_while_1.js
var i = 0 do { document.write(i) i++ } while (i<11 )
umarmughal824/bootcamp-ecommerce
static/js/components/applications/detail_sections_test.js
<reponame>umarmughal824/bootcamp-ecommerce<filename>static/js/components/applications/detail_sections_test.js // @flow /* global SETTINGS: false */ import React from "react" import { shallow } from "enzyme" import sinon from "sinon" import { assert } from "chai" import moment from "moment" import { ProfileDetail, ResumeDetail, VideoInterviewDetail, ReviewDetail, PaymentDetail, BootcampStartDetail } from "./detail_sections" import { AWAITING_RESUME, AWAITING_USER_SUBMISSIONS, AWAITING_SUBMISSION_REVIEW, PAYMENT, PROFILE_VIEW, REVIEW_STATUS_APPROVED, REVIEW_STATUS_PENDING, REVIEW_STATUS_REJECTED, SUBMISSION_VIDEO } from "../../constants" import * as utils from "../../util/util" import IntegrationTestHelper from "../../util/integration_test_helper" import { makeIncompleteUser } from "../../factories/user" import { makeApplicationDetail, makeApplicationRunStep, makeApplicationSubmission } from "../../factories/application" import { isIf } from "../../lib/test_utils" describe("application detail section component", () => { const fakeFormattedDate = "Jan 1st, 2020" const isoDate = moment().format() let helper, openDrawerStub, defaultProps beforeEach(() => { helper = new IntegrationTestHelper() helper.sandbox .stub(utils, "formatReadableDateFromStr") .returns(fakeFormattedDate) openDrawerStub = sinon.spy() defaultProps = { ready: false, fulfilled: false, openDrawer: openDrawerStub } }) afterEach(() => { helper.cleanup() }) describe("ProfileDetail", () => { it("should include a link to view/edit a profile", () => { const wrapper = shallow( <ProfileDetail {...defaultProps} user={makeIncompleteUser()} /> ) wrapper.find("ProgressDetailRow button.btn-link").simulate("click") sinon.assert.calledWith(openDrawerStub, { type: PROFILE_VIEW }) }) }) describe("ResumeDetail", () => { let applicationDetail beforeEach(() => { applicationDetail = makeApplicationDetail() }) // ;[ [false, false, AWAITING_RESUME, undefined], [true, false, AWAITING_RESUME, "Add Resume or LinkedIn Profile"], 
[ true, false, AWAITING_USER_SUBMISSIONS, "Add Resume or LinkedIn Profile" ], [ true, false, AWAITING_SUBMISSION_REVIEW, "Add Resume or LinkedIn Profile" ], [true, false, "AWAITING_PAYMENT", undefined], [true, true, AWAITING_RESUME, "View/Edit Resume or LinkedIn Profile"], [ true, true, AWAITING_USER_SUBMISSIONS, "View/Edit Resume or LinkedIn Profile" ], [ true, true, AWAITING_SUBMISSION_REVIEW, "View/Edit Resume or LinkedIn Profile" ], [true, true, "AWAITING_PAYMENT", undefined] ].forEach(([ready, fulfilled, state, expLinkText]) => { it(`should show correct link if ready === ${String( ready )}, fulfilled === ${String(fulfilled)}, state === ${state}`, () => { applicationDetail.state = state const wrapper = shallow( <ResumeDetail {...defaultProps} ready={ready} fulfilled={fulfilled} applicationDetail={applicationDetail} /> ) const link = wrapper.find("ProgressDetailRow button.btn-link") assert.equal(link.exists(), expLinkText !== undefined) if (expLinkText !== undefined) { assert.equal(link.prop("children"), expLinkText) } }) }) }) describe("VideoInterviewDetail", () => { let step, submission, application beforeEach(() => { step = makeApplicationRunStep(SUBMISSION_VIDEO) submission = makeApplicationSubmission() application = makeApplicationDetail() }) // ;[ [false, false, false, undefined], [true, false, false, "Take Video Interview"], [true, true, false, undefined] ].forEach(([ready, fulfilled, submitted, expLinkText]) => { it(`should show correct link if ready === ${String( ready )}, fulfilled === ${String(fulfilled)}, and submitted = ${String( submitted )}`, () => { if (!submitted) { submission.interview_url = null } const wrapper = shallow( <VideoInterviewDetail {...defaultProps} ready={ready} fulfilled={fulfilled} step={step} submission={submission} applicationDetail={application} /> ) const link = wrapper.find( fulfilled ? 
"ProgressDetailRow a.btn-link" : "ProgressDetailRow button.btn-link" ) assert.equal(link.exists(), expLinkText !== undefined) if (expLinkText !== undefined) { assert.equal( fulfilled ? link.text() : link.prop("children"), expLinkText ) } }) }) }) describe("ReviewDetail", () => { let step, submission, application beforeEach(() => { step = makeApplicationRunStep(SUBMISSION_VIDEO) submission = makeApplicationSubmission() application = makeApplicationDetail() }) it("should show no status if the submission has no review", () => { const wrapper = shallow( <ReviewDetail {...defaultProps} step={step} submission={null} applicationDetail={application} /> ) assert.isFalse(wrapper.find("ProgressDetailRow .status-text").exists()) }) // ;[ [REVIEW_STATUS_PENDING, null, "Pending"], [REVIEW_STATUS_REJECTED, isoDate, "Rejected"], [REVIEW_STATUS_APPROVED, isoDate, "Approved"] ].forEach(([reviewStatus, reviewDate, expLinkText]) => { it(`should show correct status if review status = ${reviewStatus} and review date ${isIf( !!reviewDate )} set`, () => { submission.review_status = reviewStatus submission.review_status_date = reviewDate const wrapper = shallow( <ReviewDetail {...defaultProps} step={step} submission={submission} applicationDetail={application} /> ) assert.equal( wrapper.find("ProgressDetailRow .status-text").text(), `Status: ${expLinkText}` ) }) }) }) describe("PaymentDetail", () => { let applicationDetail beforeEach(() => { applicationDetail = makeApplicationDetail() }) // ;[ [false, false, undefined], [true, false, "Make a Payment"], [true, true, undefined] ].forEach(([ready, fulfilled, expLinkText]) => { it(`should show correct link if ready === ${String( ready )}, fulfilled === ${String(fulfilled)}`, () => { const wrapper = shallow( <PaymentDetail {...defaultProps} ready={ready} fulfilled={fulfilled} applicationDetail={applicationDetail} /> ) const link = wrapper.find("ProgressDetailRow button.btn-link") assert.equal(link.exists(), expLinkText !== undefined) if 
(expLinkText !== undefined) { assert.equal(link.prop("children"), expLinkText) } }) }) it("should open a drawer if the 'make a payment' link is clicked", () => { const wrapper = shallow( <PaymentDetail {...defaultProps} ready={true} fulfilled={false} applicationDetail={applicationDetail} /> ) wrapper.find("ProgressDetailRow button.btn-link").simulate("click") sinon.assert.calledWith(openDrawerStub, { type: PAYMENT, meta: { application: applicationDetail } }) }) }) describe("BootcampStartDetail", () => { let applicationDetail beforeEach(() => { applicationDetail = makeApplicationDetail() }) // ;[ [false, false, undefined], [true, false, undefined], [true, true, "Start Bootcamp"] ].forEach(([ready, fulfilled, expLinkText]) => { it(`should show correct link if ready === ${String( ready )}, fulfilled === ${String(fulfilled)}`, () => { SETTINGS.novoed_login_url = "https://novoed.com" const wrapper = shallow( <BootcampStartDetail {...defaultProps} ready={ready} fulfilled={fulfilled} applicationDetail={applicationDetail} /> ) const link = wrapper.find("ProgressDetailRow a") assert.equal(link.exists(), expLinkText !== undefined) if (expLinkText !== undefined) { assert.equal(link.prop("children"), expLinkText) assert.equal(link.prop("href"), SETTINGS.novoed_login_url) } }) }) }) })
dreamsxin/ultimatepp
uppdev/PullText/main.cpp
#include "PullText.h"

// Constructor: builds the window from the layout description and wires the
// two buttons to their handlers (THISBACK binds a member-function callback).
PullText::PullText()
{
	CtrlLayout(*this, "Window title");
	bt1 <<= THISBACK(OnButton1);
	bt2 <<= THISBACK(OnButton2);
}

// Button 1: round-trips the first editor's content through its QTF
// (U++ rich-text markup) representation, logging the markup for inspection.
void PullText::OnButton1(void)
{
	String qtf;
	qtf << txText1.GetQTF(CHARSET_ISO8859_1);
	//qtf << "[a NRE PARAGRAPH: &";
	DUMP(qtf);            // log the extracted QTF markup to the debug output
	txText1.SetQTF(qtf);  // write the markup back into the same editor
}

// Button 2: round-trips the second editor's content as a RichText object —
// copy up to position 10000, clear the editor, then paste the copy back.
void PullText::OnButton2(void)
{
	RichText clip=txText2.CopyText(0,10000);
	txText2.Clear();
	txText2.PasteText(clip);
}

// U++ application entry point: create the window and run its event loop.
GUI_APP_MAIN
{
	PullText().Run();
}
i-tub/rdkit
Code/GraphMol/MolDraw2D/DrawTextNotFT.cpp
//
//  Copyright (C) 2022 <NAME> and other RDKit contributors
//
//   @@ All Rights Reserved @@
//  This file is part of the RDKit.
//  The contents are covered by the terms of the BSD license
//  which is included in the file license.txt, found at the root
//  of the RDKit source tree.
//
// Original author: <NAME> (CozChemIx).
//

#include <GraphMol/MolDraw2D/DrawTextNotFT.h>
#include <GraphMol/MolDraw2D/MolDraw2DHelpers.h>

namespace RDKit {
namespace MolDraw2D_detail {

// ****************************************************************************
// Constructor: forwards the font-size bounds to the DrawText base class.
DrawTextNotFT::DrawTextNotFT(double max_fnt_sz, double min_fnt_sz)
    : DrawText(max_fnt_sz, min_fnt_sz) {}

// ****************************************************************************
DrawTextNotFT::~DrawTextNotFT() {}

// ****************************************************************************
// Shifts the per-character rectangles of a laid-out string so that the
// requested alignment point (START, END or MIDDLE) lands at the origin.
// `draw_modes[i]` classifies rects[i] as normal text or super-/subscript;
// only TextDrawNormal characters participate in choosing the anchor.
void DrawTextNotFT::alignString(
    TextAlignType talign, const std::vector<TextDrawType> &draw_modes,
    std::vector<std::shared_ptr<StringRect>> &rects) const {
  // std::string comes in with rects aligned with first char with its
  // left hand and bottom edges at 0 on y and x respectively.
  // Adjust relative to that so that the relative alignment point is at
  // (0,0).
  // A single normal character has no meaningful "middle": treat it as START.
  if (talign == TextAlignType::MIDDLE) {
    size_t num_norm = count(draw_modes.begin(), draw_modes.end(),
                            TextDrawType::TextDrawNormal);
    if (num_norm == 1) {
      talign = TextAlignType::START;
    }
  }

  Point2D align_trans, align_offset;
  if (talign == TextAlignType::START || talign == TextAlignType::END) {
    // Anchor on the first (START) or last (END) normal character.
    size_t align_char = 0;
    for (size_t i = 0; i < rects.size(); ++i) {
      if (draw_modes[i] == TextDrawType::TextDrawNormal) {
        align_char = i;
        if (talign == TextAlignType::START) {
          break;
        }
      }
    }
    align_trans = rects[align_char]->trans_;
    align_offset = rects[align_char]->offset_;
  } else {
    // centre on the middle of the Normal text. The super- or subscripts
    // should be at the ends.
    double x_min = std::numeric_limits<double>::max();
    double x_max = std::numeric_limits<double>::lowest();
    align_offset.x = align_offset.y = 0.0;
    int num_norm = 0;
    for (size_t i = 0; i < rects.size(); ++i) {
      if (draw_modes[i] == TextDrawType::TextDrawNormal) {
        Point2D tl, tr, br, bl;
        rects[i]->calcCorners(tl, tr, br, bl, 0.0);
        // sometimes the rect is in a coordinate frame where +ve y is down,
        // sometimes it's up. For these purposes, we don't care so long as
        // the y_max is larger than the y_min. We probably don't need to do
        // all the tests for x_min and x_max;
        x_min = std::min({bl.x, tr.x, x_min});
        x_max = std::max({bl.x, tr.x, x_max});
        align_offset += rects[i]->offset_;
        ++num_norm;
      }
    }
    align_trans.x = (x_max - x_min) / 2.0;
    align_trans.y = 0.0;
    // NOTE(review): divides by num_norm with no guard — assumes at least one
    // TextDrawNormal character is present when MIDDLE alignment is requested.
    align_offset /= num_norm;
  }

  // Apply the common shift/offset to every rectangle in the string.
  for (auto r : rects) {
    r->trans_ -= align_trans;
    r->offset_ = align_offset;
  }
}

}  // namespace MolDraw2D_detail
}  // namespace RDKit
chikara-chan/full-stack-js
buyer/server/controllers/school.js
import School from '../models/school'

/**
 * Koa controller: return every school document.
 *
 * Responds with `{ entry: [...] }` when at least one school exists, and
 * `{ status: false }` otherwise.
 *
 * @param {Object} ctx - Koa request context; the result is written to ctx.body.
 */
async function getSchoolInfo(ctx) {
  const school = await School.find({}).lean()
  // `Model.find()` resolves to an array, which is always truthy — the
  // original `if (school)` could never reach the else branch. Check the
  // length so an empty collection actually reports `status: false`.
  if (school && school.length > 0) {
    ctx.body = {
      entry: school
    }
  } else {
    ctx.body = {
      status: false
    }
  }
}

export default {
  getSchoolInfo
}
donhui/kubesphere
pkg/simple/client/monitoring/query_options.go
<gh_stars>1-10 package monitoring type Level int const ( LevelCluster = 1 << iota LevelNode LevelWorkspace LevelNamespace LevelWorkload LevelPod LevelContainer LevelPVC LevelComponent ) type QueryOption interface { Apply(*QueryOptions) } type QueryOptions struct { Level Level ResourceFilter string NodeName string WorkspaceName string NamespaceName string WorkloadKind string WorkloadName string PodName string ContainerName string StorageClassName string PersistentVolumeClaimName string } func NewQueryOptions() *QueryOptions { return &QueryOptions{} } type ClusterOption struct{} func (_ ClusterOption) Apply(o *QueryOptions) { o.Level = LevelCluster } type NodeOption struct { ResourceFilter string NodeName string } func (no NodeOption) Apply(o *QueryOptions) { o.Level = LevelNode o.ResourceFilter = no.ResourceFilter o.NodeName = no.NodeName } type WorkspaceOption struct { ResourceFilter string WorkspaceName string } func (wo WorkspaceOption) Apply(o *QueryOptions) { o.Level = LevelWorkspace o.ResourceFilter = wo.ResourceFilter o.WorkspaceName = wo.WorkspaceName } type NamespaceOption struct { ResourceFilter string WorkspaceName string NamespaceName string } func (no NamespaceOption) Apply(o *QueryOptions) { o.Level = LevelNamespace o.ResourceFilter = no.ResourceFilter o.WorkspaceName = no.WorkspaceName o.NamespaceName = no.NamespaceName } type WorkloadOption struct { ResourceFilter string NamespaceName string WorkloadKind string } func (wo WorkloadOption) Apply(o *QueryOptions) { o.Level = LevelWorkload o.ResourceFilter = wo.ResourceFilter o.NamespaceName = wo.NamespaceName o.WorkloadKind = wo.WorkloadKind } type PodOption struct { ResourceFilter string NodeName string NamespaceName string WorkloadKind string WorkloadName string PodName string } func (po PodOption) Apply(o *QueryOptions) { o.Level = LevelPod o.ResourceFilter = po.ResourceFilter o.NodeName = po.NodeName o.NamespaceName = po.NamespaceName o.WorkloadKind = po.WorkloadKind o.WorkloadName = po.WorkloadName 
o.PodName = po.PodName } type ContainerOption struct { ResourceFilter string NamespaceName string PodName string ContainerName string } func (co ContainerOption) Apply(o *QueryOptions) { o.Level = LevelContainer o.ResourceFilter = co.ResourceFilter o.NamespaceName = co.NamespaceName o.PodName = co.PodName o.ContainerName = co.ContainerName } type PVCOption struct { ResourceFilter string NamespaceName string StorageClassName string PersistentVolumeClaimName string } func (po PVCOption) Apply(o *QueryOptions) { o.Level = LevelPVC o.ResourceFilter = po.ResourceFilter o.NamespaceName = po.NamespaceName o.StorageClassName = po.StorageClassName o.PersistentVolumeClaimName = po.PersistentVolumeClaimName } type ComponentOption struct{} func (_ ComponentOption) Apply(o *QueryOptions) { o.Level = LevelComponent }
sumesh-aot/namex
solr-admin-app/solr_admin/models/restricted_word.py
from . import db # The class that corresponds to the database table for restricted words. class RestrictedWord(db.Model): __tablename__ = 'restricted_word' word_id = db.Column(db.Integer, primary_key=True, autoincrement=True) word_phrase = db.Column(db.VARCHAR(60))
johnzjq/ShrinkRepair
src/CityGML2SPoly/TestCityGML2SPoly/src/VUnicNodes.java
<filename>src/CityGML2SPoly/TestCityGML2SPoly/src/VUnicNodes.java import java.util.LinkedHashMap; //import java.util.ArrayList; import java.util.Map.Entry; /** * Is responsible for list of unique nodes that carry a facet (=planar surface * with or without holes). * @author kooijmanj1 */ public class VUnicNodes { private LinkedHashMap<VNode, Integer> nodes = new LinkedHashMap<VNode, Integer>(); //private ArrayList<VNode> nodes = new ArrayList<VNode>(); /** * Adds node only if not present yet in list of unique nodes * @param node */ public void addUnicNode(VNode node){ if (!this.nodes.containsKey(node)){ this.nodes.put(node, this.nodes.size()); } /*boolean notPresent = true; for( VNode n : nodes){ if( n.equals(node)){ notPresent = false; break; } } if (notPresent){ nodes.add(node); }*/ } /** * @param node of which the index in unicNodes is requested * @return the index of node in unicNodes */ public int getIndex(VNode node){ return nodes.get(node); /* int indexValue = 999999; int i = 0; for(VNode n : nodes){ if (n.equals(node)){ indexValue = i; break; } i++; } return indexValue;*/ } public LinkedHashMap<VNode, Integer> getUnicNodes(){ return nodes; } /* public ArrayList<VNode> getUnicNodes(){ return nodes; }*/ public int getSize(){ return nodes.size(); } /** * Concatenates the index of a node in UnicNodes instance with the respective ordinates * of the node as part of the contents of the poly file. */ public String toString(){ String lineSeparator = System.getProperty ( "line.separator" ); String str = ""; /*for (VNode node : nodes){ str = str + " " + (nodes.indexOf(node)+1) + " " + node.toString() + lineSeparator; }*/ for (Entry<VNode, Integer> node : nodes.entrySet()){ str = str + " " + (node.getValue()+1) + " " + node.getKey().toString() + lineSeparator; } return str; } }
dgolda/scalate
scalate-core/src/test/scala/org/fusesource/scalate/scuery/TransformTypedElementsTest.scala
/**
 * Copyright (C) 2009-2011 the original author or authors.
 * See the notice.md file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.fusesource.scalate.scuery

import _root_.org.fusesource.scalate.FunSuiteSupport
import xml.{ NodeSeq }

// Simple model types rendered into the <ul> template below.
case class Car(make: String, model: String, color: String)
case class Dog(name: String, breed: String, color: String, age: Int)

/**
 * Verifies that a Scuery Transformer can replace the children of a list with
 * one transformed template item per model object, dispatching on the runtime
 * type of each element (Car vs Dog).
 */
class TransformTypedElementsTest extends FunSuiteSupport {
  val car1 = Car("Ford", "SMax", "Silver")
  val car2 = Car("Porsche", "Carerra", "Black")
  // Two cars and one dog: the transformer should emit two car <li>s and one dog <li>.
  val things = List(car1, car2, Dog("Emma", "Labrador", "Golden", 9))

  // Template markup: one sample <li class="car"> and one sample <li class="dog">;
  // their field values are placeholders replaced by each model object's data.
  val xml = <ul class="items">
              <li class="car">
                <img src="/images/car.jpg"/>
                <div class="field">
                  <div class="label">Make</div><div class="make">BMW</div>
                </div>
                <div class="field">
                  <div class="label">Model</div><div class="model">525i</div>
                </div>
                <div class="field">
                  <div class="label">Color</div><div class="color">Blue</div>
                </div>
              </li>
              <li class="dog">
                <img src="/images/dog.jpg"/>
                <div class="field">
                  <div class="label">Name</div><div class="name">Cameron</div>
                </div>
                <div class="field">
                  <div class="label">Breed</div><div class="breed">Bishon Frieze</div>
                </div>
                <div class="field">
                  <div class="label">Color</div><div class="color">White</div>
                </div>
                <div class="field">
                  <div class="label">Age</div><div class="age">7</div>
                </div>
              </li>
            </ul>

  test("transform contents") {
    // For each model object, clone the matching template <li> and fill in its fields.
    object transformer extends Transformer {
      $("ul.items").contents { node =>
        things.flatMap {
          case c: Car =>
            transform(node.$("li.car")) { $ =>
              $(".make").contents = c.make
              $(".model").contents = c.model
              $(".color").contents = c.color
            }
          case d: Dog =>
            transform(node.$("li.dog")) { $ =>
              $(".name").contents = d.name
              $(".breed").contents = d.breed
              $(".color").contents = d.color
              $(".age").contents = d.age
            }
          case _ => Nil
        }
      }
    }

    val result = transformer(xml)
    debug("got result: " + result)

    // Structure: two car items, one dog item, three images in total.
    assertSize("li.car", result, 2)
    assertSize("li.car img", result, 2)
    assertSize("li.dog", result, 1)
    assertSize("li.dog img", result, 1)
    assertSize("img", result, 3)

    assertSize("li.car:first-child", result, 1)
    assertSize("li.car:nth-child(2)", result, 1)

    // Content: each item carries the data of the corresponding model object.
    assertCar("li.car:first-child", result, car1)
    assertCar("li.car:nth-child(2)", result, car2)

    assertText("li.dog .name", result, "Emma")
    assertText("li.dog .breed", result, "Labrador")
    assertText("li.dog .color", result, "Golden")
    assertText("li.dog .age", result, "9")
  }

  // Asserts that the element selected by `selector` renders the given Car's fields.
  protected def assertCar(selector: String, result: NodeSeq, car: Car): Unit = {
    val a = result.$(selector)
    assertResult(false, "nodes for " + selector + " should not be empty!") { a.isEmpty }

    assertText(".make", a, car.make)
    assertText(".model", a, car.model)
    assertText(".color", a, car.color)
  }
}
MingwangLi/u8server
src/main/java/com/u8/server/data/UAdminRole.java
package com.u8.server.data;

import net.sf.json.JSONArray;
import net.sf.json.JSONObject;

import javax.persistence.*;
import java.util.Date;

/**
 * Permission role entity.
 * Created by ant on 2016/7/28.
 */
@Entity
@Table(name = "uadminrole")
public class UAdminRole {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Integer id;          // ID, unique, primary key

    private String roleName;     // role name
    private String roleDesc;     // role description
    private String permission;   // permissions granted to this role
    private Date createTime;     // creation time
    private Integer creatorID;   // creator (administrator)
    private Integer topRole;     // whether this is the top-level role (1 = may manage all functions and all games)

    /**
     * Serializes the role for API responses; createTime and creatorID are
     * intentionally omitted from the JSON.
     */
    public JSONObject toJSON(){
        JSONObject json = new JSONObject();
        json.put("id", id);
        json.put("roleName", roleName);
        json.put("roleDesc", roleDesc);
        json.put("permission", permission);
        json.put("topRole", topRole);
        return json;
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public String getPermission() {
        return permission;
    }

    public void setPermission(String permission) {
        this.permission = permission;
    }

    public String getRoleName() {
        return roleName;
    }

    public void setRoleName(String roleName) {
        this.roleName = roleName;
    }

    public String getRoleDesc() {
        return roleDesc;
    }

    public void setRoleDesc(String roleDesc) {
        this.roleDesc = roleDesc;
    }

    public Integer getCreatorID() {
        return creatorID;
    }

    public void setCreatorID(Integer creatorID) {
        this.creatorID = creatorID;
    }

    public Integer getTopRole() {
        return topRole;
    }

    public void setTopRole(Integer topRole) {
        this.topRole = topRole;
    }
}
ErickAlcan/WYD-NIX
Source/TMSrv/Functions.cpp
<filename>Source/TMSrv/Functions.cpp #include <Windows.h> #include <stdio.h> #include "Hook.h" #include "Functions.h" #include "Send.h"
yyzsq/hetong
coral-common-model/src/main/java/com/gemframework/model/entity/vo/HandoverRecordVo.java
<gh_stars>1-10 package com.gemframework.model.entity.vo; import com.gemframework.model.common.BaseEntityVo; import lombok.Data; /** * @Title: HandoverRecordVo * @Date: 2020-05-15 10:31:46 * @Version: v1.0 * @Description: 交接记录VO对象 * @Author: yuanrise * @Email: <EMAIL> * @Copyright: Copyright (c) 2020 wanyong * @Company: www.gemframework.com */ @Data public class HandoverRecordVo extends BaseEntityVo { /** * 转接人 */ private String accounting; private String deliverPersonName; /** * 接收人 */ private String receiver; private String receiverName; private String customerId; private String contractId; /** * 客户编号 */ private String customerName; private String status; /** * */ private String other1; /** * */ private String other2; }
kaiCu/mapproxy
mapproxy/util/ext/dictspec/spec.py
# Copyright (c) 2011, <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

# Building blocks for declarative dict specifications, consumed by the
# companion validator module (see .validator.validate).

from mapproxy.compat import itervalues

import sys

# Python 2 has a separate `long` integer type; accept it there.
if sys.version_info[0] == 2:
    number_types = (float, int, long)
else:
    number_types = (float, int)


class required(str):
    """
    Mark a dictionary key as required.
    """
    pass


class anything(object):
    """
    Wildcard key or value for dictionaries.

    >>> from .validator import validate
    >>> validate({anything(): 1}, {'foo': 2, 'bar': 49})
    """
    def compare_type(self, data):
        # Matches any value.
        return True


class recursive(object):
    """
    Recursive types.

    >>> from .validator import validate
    >>> spec = recursive({'foo': recursive()})
    >>> validate(spec, {'foo': {'foo': {'foo':{}}}})
    """
    def __init__(self, spec=None):
        # spec: the sub-spec to re-apply at each level of nesting.
        self.spec = spec

    def compare_type(self, data):
        # Data matches when it has the same container type as the spec.
        return isinstance(data, type(self.spec))


class one_of(object):
    """
    One of the given types.

    >>> from .validator import validate
    >>> validate(one_of(str(), number()), 'foo')
    >>> validate(one_of(str(), number()), 32)
    """
    def __init__(self, *specs):
        self.specs = specs

# typo, backwards compatibility
one_off = one_of


def combined(*dicts):
    """
    Combine multiple dicts.

    >>> (combined({'a': 'foo'}, {'b': 'bar'})
    ...  == {'a': 'foo', 'b': 'bar'})
    True
    """
    # Later dicts win on duplicate keys.
    result = {}
    for d in dicts:
        result.update(d)
    return result


class number(object):
    """
    Any number.

    >>> from .validator import validate
    >>> validate(number(), 1)
    >>> validate(number(), -32.0)
    >>> validate(number(), 99999999999999)
    """
    def compare_type(self, data):
        # True/False are also instances of int, exclude them
        return isinstance(data, number_types) and not isinstance(data, bool)


class type_spec(object):
    """
    Spec whose sub-spec is chosen by the value of a discriminator key
    (``type_key``) in the data. Each sub-spec must be a dict; the
    discriminator key is added to each sub-spec if missing.
    """
    def __init__(self, type_key, specs):
        self.type_key = type_key
        self.specs = specs
        for v in itervalues(specs):
            if not isinstance(v, dict):
                raise ValueError('%s requires dict subspecs', self.__class__)
            if self.type_key not in v:
                v[self.type_key] = str()

    def subspec(self, data, context):
        """
        Return the sub-spec selected by ``data[self.type_key]``; raises
        ValueError when the key is absent or its value is unknown.
        """
        if self.type_key not in data:
            raise ValueError("'%s' not in %s" % (self.type_key, context.current_pos))
        key = data[self.type_key]
        if key not in self.specs:
            raise ValueError("unknown %s value '%s' in %s" % (self.type_key, key, context.current_pos))
        return self.specs[key]
pebble2015/cpoi
src/org/apache/poi/ss/formula/ptg/AttrPtg_SpaceType.cpp
// Generated from /POI/java/org/apache/poi/ss/formula/ptg/AttrPtg.java #include <org/apache/poi/ss/formula/ptg/AttrPtg_SpaceType.hpp> poi::ss::formula::ptg::AttrPtg_SpaceType::AttrPtg_SpaceType(const ::default_init_tag&) : super(*static_cast< ::default_init_tag* >(0)) { clinit(); } poi::ss::formula::ptg::AttrPtg_SpaceType::AttrPtg_SpaceType() : AttrPtg_SpaceType(*static_cast< ::default_init_tag* >(0)) { ctor(); } void poi::ss::formula::ptg::AttrPtg_SpaceType::ctor() { super::ctor(); } constexpr int32_t poi::ss::formula::ptg::AttrPtg_SpaceType::SPACE_BEFORE; constexpr int32_t poi::ss::formula::ptg::AttrPtg_SpaceType::CR_BEFORE; constexpr int32_t poi::ss::formula::ptg::AttrPtg_SpaceType::SPACE_BEFORE_OPEN_PAREN; constexpr int32_t poi::ss::formula::ptg::AttrPtg_SpaceType::CR_BEFORE_OPEN_PAREN; constexpr int32_t poi::ss::formula::ptg::AttrPtg_SpaceType::SPACE_BEFORE_CLOSE_PAREN; constexpr int32_t poi::ss::formula::ptg::AttrPtg_SpaceType::CR_BEFORE_CLOSE_PAREN; constexpr int32_t poi::ss::formula::ptg::AttrPtg_SpaceType::SPACE_AFTER_EQUALITY; extern java::lang::Class *class_(const char16_t *c, int n); java::lang::Class* poi::ss::formula::ptg::AttrPtg_SpaceType::class_() { static ::java::lang::Class* c = ::class_(u"org.apache.poi.ss.formula.ptg.AttrPtg.SpaceType", 47); return c; } java::lang::Class* poi::ss::formula::ptg::AttrPtg_SpaceType::getClass0() { return class_(); }
eve-gatt/eve-esi
src/main/java/net/troja/eve/esi/model/KillmailVictim.java
/*
 * EVE Swagger Interface
 * An OpenAPI for EVE Online
 *
 *
 *
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the class manually.
 */

package net.troja.eve.esi.model;

import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.List;
import net.troja.eve.esi.model.KillmailItem;
import net.troja.eve.esi.model.Position;
import java.io.Serializable;

/**
 * victim object
 *
 * Swagger-generated model for the victim section of an EVE killmail. Fields
 * map 1:1 to the snake_case JSON properties; setters return {@code this} for
 * fluent chaining.
 */
@ApiModel(description = "victim object")
public class KillmailVictim implements Serializable {
    private static final long serialVersionUID = 1L;

    @JsonProperty("alliance_id")
    private Integer allianceId = null;

    @JsonProperty("character_id")
    private Integer characterId = null;

    @JsonProperty("corporation_id")
    private Integer corporationId = null;

    @JsonProperty("damage_taken")
    private Integer damageTaken = null;

    @JsonProperty("faction_id")
    private Integer factionId = null;

    @JsonProperty("items")
    private List<KillmailItem> items = new ArrayList<KillmailItem>();

    @JsonProperty("position")
    private Position position = null;

    @JsonProperty("ship_type_id")
    private Integer shipTypeId = null;

    public KillmailVictim allianceId(Integer allianceId) {
        this.allianceId = allianceId;
        return this;
    }

    /**
     * alliance_id integer
     *
     * @return allianceId
     **/
    @ApiModelProperty(example = "null", value = "alliance_id integer")
    public Integer getAllianceId() {
        return allianceId;
    }

    public void setAllianceId(Integer allianceId) {
        this.allianceId = allianceId;
    }

    public KillmailVictim characterId(Integer characterId) {
        this.characterId = characterId;
        return this;
    }

    /**
     * character_id integer
     *
     * @return characterId
     **/
    @ApiModelProperty(example = "null", value = "character_id integer")
    public Integer getCharacterId() {
        return characterId;
    }

    public void setCharacterId(Integer characterId) {
        this.characterId = characterId;
    }

    public KillmailVictim corporationId(Integer corporationId) {
        this.corporationId = corporationId;
        return this;
    }

    /**
     * corporation_id integer
     *
     * @return corporationId
     **/
    @ApiModelProperty(example = "null", value = "corporation_id integer")
    public Integer getCorporationId() {
        return corporationId;
    }

    public void setCorporationId(Integer corporationId) {
        this.corporationId = corporationId;
    }

    public KillmailVictim damageTaken(Integer damageTaken) {
        this.damageTaken = damageTaken;
        return this;
    }

    /**
     * How much total damage was taken by the victim
     *
     * @return damageTaken
     **/
    @ApiModelProperty(example = "null", required = true, value = "How much total damage was taken by the victim ")
    public Integer getDamageTaken() {
        return damageTaken;
    }

    public void setDamageTaken(Integer damageTaken) {
        this.damageTaken = damageTaken;
    }

    public KillmailVictim factionId(Integer factionId) {
        this.factionId = factionId;
        return this;
    }

    /**
     * faction_id integer
     *
     * @return factionId
     **/
    @ApiModelProperty(example = "null", value = "faction_id integer")
    public Integer getFactionId() {
        return factionId;
    }

    public void setFactionId(Integer factionId) {
        this.factionId = factionId;
    }

    public KillmailVictim items(List<KillmailItem> items) {
        this.items = items;
        return this;
    }

    // Fluent helper: appends one item to the items list.
    public KillmailVictim addItemsItem(KillmailItem itemsItem) {
        this.items.add(itemsItem);
        return this;
    }

    /**
     * items array
     *
     * @return items
     **/
    @ApiModelProperty(example = "null", value = "items array")
    public List<KillmailItem> getItems() {
        return items;
    }

    public void setItems(List<KillmailItem> items) {
        this.items = items;
    }

    public KillmailVictim position(Position position) {
        this.position = position;
        return this;
    }

    /**
     * Get position
     *
     * @return position
     **/
    @ApiModelProperty(example = "null", value = "")
    public Position getPosition() {
        return position;
    }

    public void setPosition(Position position) {
        this.position = position;
    }

    public KillmailVictim shipTypeId(Integer shipTypeId) {
        this.shipTypeId = shipTypeId;
        return this;
    }

    /**
     * The ship that the victim was piloting and was destroyed
     *
     * @return shipTypeId
     **/
    @ApiModelProperty(example = "null", required = true, value = "The ship that the victim was piloting and was destroyed ")
    public Integer getShipTypeId() {
        return shipTypeId;
    }

    public void setShipTypeId(Integer shipTypeId) {
        this.shipTypeId = shipTypeId;
    }

    // Value-based equality over all eight fields.
    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        KillmailVictim killmailVictim = (KillmailVictim) o;
        return Objects.equals(this.allianceId, killmailVictim.allianceId)
                && Objects.equals(this.characterId, killmailVictim.characterId)
                && Objects.equals(this.corporationId, killmailVictim.corporationId)
                && Objects.equals(this.damageTaken, killmailVictim.damageTaken)
                && Objects.equals(this.factionId, killmailVictim.factionId)
                && Objects.equals(this.items, killmailVictim.items)
                && Objects.equals(this.position, killmailVictim.position)
                && Objects.equals(this.shipTypeId, killmailVictim.shipTypeId);
    }

    @Override
    public int hashCode() {
        return Objects
                .hash(allianceId, characterId, corporationId, damageTaken, factionId, items, position, shipTypeId);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class KillmailVictim {\n");

        sb.append("    allianceId: ").append(toIndentedString(allianceId)).append("\n");
        sb.append("    characterId: ").append(toIndentedString(characterId)).append("\n");
        sb.append("    corporationId: ").append(toIndentedString(corporationId)).append("\n");
        sb.append("    damageTaken: ").append(toIndentedString(damageTaken)).append("\n");
        sb.append("    factionId: ").append(toIndentedString(factionId)).append("\n");
        sb.append("    items: ").append(toIndentedString(items)).append("\n");
        sb.append("    position: ").append(toIndentedString(position)).append("\n");
        sb.append("    shipTypeId: ").append(toIndentedString(shipTypeId)).append("\n");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n    ");
    }
}
jaamal/overclocking
sources/Algorithms/src/patternMatching/fcpm/preprocessing/IProductsPreprocessor.java
package patternMatching.fcpm.preprocessing;

/**
 * Preprocessing stage of the FCPM pattern matcher: converts the external
 * data-contract product representation into the matcher's internal one.
 */
public interface IProductsPreprocessor {
    /**
     * @param products the input products from the data-contracts layer
     * @return the corresponding internal Product array
     *         (presumably one output per input — confirm in implementations)
     */
    Product[] execute(dataContracts.Product[] products);
}
RossComputerPerson/skift
libraries/libsystem/ringbuffer.c
<reponame>RossComputerPerson/skift /* Copyright © 2018-2019 <NAME>. */ /* This code is licensed under the MIT License. */ /* See: LICENSE.md */ /* ringbuffer.c: a fifo buffer. */ #include <libsystem/assert.h> #include <libsystem/ringbuffer.h> ringbuffer_t *ringbuffer(uint size) { ringbuffer_t *rb = MALLOC(ringbuffer_t); rb->size = size; rb->head = 0; rb->tail = 0; rb->buffer = malloc(size); return rb; } void ringbuffer_delete(ringbuffer_t *rb) { assert(rb); free(rb->buffer); free(rb); } int ringbuffer_is_empty(ringbuffer_t *this) { return this->tail == this->head; } int ringbuffer_is_full(ringbuffer_t *this) { return ((this->head + 1) % this->size) == this->tail; } int ringbuffer_read(ringbuffer_t *rb, void *buffer, uint size) { assert(rb); assert(buffer); int chr; uint offset = 0; do { chr = ringbuffer_getc(rb); if (chr != -1) { ((char *)buffer)[offset] = (char)chr; offset++; } } while (chr != -1 && offset < size); return offset; } int ringbuffer_write(ringbuffer_t *rb, const void *buffer, uint size) { assert(rb); assert(buffer); int chr = 0; uint offset = 0; while (chr != -1 && offset < size) { chr = ringbuffer_putc(rb, ((char *)buffer)[offset]); offset++; } return offset; } int ringbuffer_putc(ringbuffer_t *rb, int c) { assert(rb); if (!ringbuffer_is_full(rb)) { rb->buffer[rb->head] = (uchar)c; rb->head = (rb->head + 1) % rb->size; return (uchar)c; } else { return -1; } } int ringbuffer_getc(ringbuffer_t *rb) { assert(rb); if (!ringbuffer_is_empty(rb)) { int c = rb->buffer[rb->tail]; rb->tail = (rb->tail + 1) % rb->size; return c; } else { return -1; } }
louis-vs/spina-conferences-primer_theme-fork
test/controllers/spina/conferences/primer_theme/conferences/application_controller_test.rb
# frozen_string_literal: true

require 'test_helper'

module Spina
  module Conferences
    module PrimerTheme
      module Conferences
        # Integration test for the engine's frontend conferences controller.
        class ApplicationControllerTest < ActionDispatch::IntegrationTest
          include ::Spina::Engine.routes.url_helpers

          # The cookies-information page should render successfully.
          test 'should get cookies info' do
            get frontend_cookies_info_url
            assert_response :success
          end
        end
      end
    end
  end
end
hewguo/online-file-server
online-file/src/main/java/com/suolashare/file/domain/user/UserRole.java
package com.suolashare.file.domain.user;

import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;

import javax.persistence.*;

/**
 * Join entity linking a user to a role (table {@code user_role}).
 * Mapped for both JPA ({@code @Entity}/{@code @Table}) and MyBatis-Plus
 * ({@code @TableName}/{@code @TableId}); accessors are Lombok-generated.
 */
@Data
@Table(name = "user_role")
@Entity
@TableName("user_role")
public class UserRole {

    // Auto-incremented surrogate key (both JPA and MyBatis-Plus strategies).
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @TableId(type = IdType.AUTO)
    private Long userroleid;

    // Presumably a reference to the user's id — confirm against user table.
    private Long userId;

    // Presumably a reference to the role's id — confirm against role table.
    private Long roleId;
}
ggraham/blasr_libcpp
alignment/tuples/BaseTuple.cpp
#include <alignment/tuples/TupleMask.h>
#include <pbdata/Types.h>
#include <alignment/tuples/BaseTuple.hpp>
#include <alignment/tuples/TupleMetrics.hpp>

#include <cstdint>

// Hashes the tuple into a table of 4^nBits buckets (2*nBits bits of hash).
TupleData BaseTuple::HashPowerOfFour(int nBits, TupleMetrics &tm)
{
    //
    // When the hash can fit inside the entire tuple, just return the
    // tuple.
    //
    // NOTE(review): the test `tm.tupleSize > nBits` looks inverted relative
    // to the comment above (a larger tuple would NOT fit in nBits) — confirm
    // the intended direction against the callers.
    if (tm.tupleSize > nBits) {
        return tuple;
    } else {
        // Mix the masked low bits with (tuple mod 1063), folded into 2*nBits bits.
        return ((tuple & TupleMask[nBits]) + (tuple % 1063)) % (1 << (nBits * 2));
    }
}

// Ordering and equality compare the raw packed tuple values.
bool BaseTuple::operator<(const BaseTuple &rhs) const { return tuple < rhs.tuple; }

bool BaseTuple::operator==(const BaseTuple &rhs) const { return tuple == rhs.tuple; }

bool BaseTuple::operator!=(const BaseTuple &rhs) const { return tuple != rhs.tuple; }

// Shifts the packed tuple left by `shift` bits and re-masks to tuple width.
BaseTuple &BaseTuple::ShiftLeft(TupleMetrics &tm, int shift)
{
    tuple = tuple << shift;
    tuple = tuple & tm.tupleMask;
    return *this;
}

// Shifts the packed tuple right by `shift` bits.
BaseTuple &BaseTuple::ShiftRight(int shift)
{
    tuple = tuple >> shift;
    return *this;
}

// Appends an nBits-wide value at the low end, dropping bits shifted past the
// tuple width (rolling-window update).
BaseTuple &BaseTuple::Append(TupleData val, TupleMetrics &tm, int nBits)
{
    tuple = tuple << nBits;
    tuple = tuple & tm.tupleMask;
    tuple = tuple + val;
    return *this;
}

// Implicit access to the raw packed representation.
BaseTuple::operator TupleData() const { return tuple; }
pimling/internal
internal/internal/cheat/feature/context.hpp
#pragma once #include <jni.h> #include <memory> #include "../../game/classes/minecraft/minecraft.hpp" class c_context { public: JNIEnv* m_env; c_context( JNIEnv* env ) : m_env( env ) { } };
zipingguo/HuaWeiOJAlgorithm
TestProjects/HuaWei/decodePassword.js
/******
 *
 * 2019.04.24
 * Huawei intern written-exam, algorithm question 3.
 *
 * The original password is an ascending sequence of digits.
 * The existing "encryption" spells each digit out in English, concatenates
 * the words, then shuffles the letter order and letter case.
 *
 * ! Task: recover the original password from the encrypted string.
 *
 */

// Approach: lowercase the input and count marker letters.
// Some letters occur in exactly one digit word and identify it directly:
//   z -> zero, w -> two, u -> four, x -> six, g -> eight
// The rest are recovered by subtracting the digits already accounted for:
//   7 = count(s) - six          (s occurs in "six" and "seven")
//   3 = count(h) - eight        (h occurs in "three" and "eight")
//   5 = count(v) - seven        (v occurs in "five" and "seven")
//   1 = count(o) - zero - two - four
//   9 = count(i) - five - six - eight

/**
 * Recovers the original ascending digit string from its encoded form.
 *
 * @param {string} newPass - case-shuffled concatenation of digit words
 * @returns {string} the digits of the original password in ascending order
 */
function DecodePassword(newPass) {
  const lower = newPass.toLowerCase();

  // Tally every character once; missing letters count as zero.
  const letterCounts = {};
  for (const ch of lower) {
    letterCounts[ch] = (letterCounts[ch] || 0) + 1;
  }
  const count = (ch) => letterCounts[ch] || 0;

  const digits = new Array(10).fill(0);

  // Letters unique to a single digit word.
  digits[0] = count('z'); // zero
  digits[2] = count('w'); // two
  digits[4] = count('u'); // four
  digits[6] = count('x'); // six
  digits[8] = count('g'); // eight

  // Letters shared by several digit words — subtract what is already known.
  digits[7] = count('s') - digits[6];
  digits[3] = count('h') - digits[8];
  digits[5] = count('v') - digits[7];
  digits[1] = count('o') - digits[0] - digits[2] - digits[4];
  digits[9] = count('i') - digits[5] - digits[6] - digits[8];

  // The original password was ascending, so emit digits in increasing order.
  let result = '';
  for (let d = 0; d <= 9; d++) {
    result += String(d).repeat(digits[d]);
  }
  return result;
}

// const encode = 'NeNohuiroNNiNeteefersix'; // test case 1
const encode = 'oNEthrEEfoursixNiNENiEN'; // test case 2
console.log(DecodePassword(encode));
phrasehealth/spark-etl
spark-postgres/src/test/scala/io/frama/parisni/spark/postgres/DdlTest.scala
package io.frama.parisni.spark.postgres import java.sql.Timestamp import java.sql.Date import java.util import org.apache.spark.sql.QueryTest import org.postgresql.util.PSQLException import org.junit.Test class DdlTest extends QueryTest with SparkSessionTestWrapper { val testDate = new Date(9) val testTimestamp = new Timestamp(10L) @Test def verifySpark(): Unit = { spark.sql("select 1").show } @Test def verifyPostgres() { // Uses JUnit-style assertions println(pg.getEmbeddedPostgres.getJdbcUrl("postgres", "pg")) val con = pg.getEmbeddedPostgres.getPostgresDatabase.getConnection con.createStatement().executeUpdate("create table test(i int)") val res = con.createStatement().executeQuery("select 27") while (res.next()) println(res.getInt(1)) } @Test def verifySparkPostgres(): Unit = { val input = spark.sql("select 1 as t") input.write .format("io.frama.parisni.spark.postgres") .option("host", "localhost") .option("port", pg.getEmbeddedPostgres.getPort) .option("database", "postgres") .option("user", "postgres") .option("table", "test_table") .mode(org.apache.spark.sql.SaveMode.Overwrite) .save val output = spark.read .format("io.frama.parisni.spark.postgres") .option("host", "localhost") .option("port", pg.getEmbeddedPostgres.getPort) .option("database", "postgres") .option("user", "postgres") .option("query", "select * from test_table") .load checkAnswer(input, output) } @Test def verifySparkPostgresOldDatasource(): Unit = { val input = spark.sql("select 1 as t") input.write .format("postgres") .option("host", "localhost") .option("port", pg.getEmbeddedPostgres.getPort) .option("database", "postgres") .option("user", "postgres") .option("table", "test_table") .mode(org.apache.spark.sql.SaveMode.Overwrite) .save val output = spark.read .format("postgres") .option("host", "localhost") .option("port", pg.getEmbeddedPostgres.getPort) .option("database", "postgres") .option("user", "postgres") .option("query", "select * from test_table") .load checkAnswer(input, output) } 
// ---- Connection handling, table DDL, table-copy and bulk-load tests ----

  // The datasource accepts a complete JDBC url via the "url" option.
  @Test
  def verifyPostgresConnectionWithUrl(): Unit = {
    val input = spark.sql("select 2 as t")
    input.write
      .format("io.frama.parisni.spark.postgres")
      .option("url", getPgUrl)
      .option("table", "test_table")
      .mode(org.apache.spark.sql.SaveMode.Overwrite)
      .save
  }

  // PGTool can be built directly and run ad-hoc SQL.
  // Local renamed from `pg` to `tool`: the original shadowed the
  // embedded-postgres field of the same name.
  @Test
  def verifyPostgresConnection(): Unit = {
    val tool = PGTool(spark, getPgUrl, "/tmp")
      .setPassword("<PASSWORD>")
    tool.showPassword()
    tool.sqlExecWithResult("select 1").show
  }

  // Connecting with an unknown user must fail with an exception.
  @Test
  def verifyPostgresConnectionFailWhenBadPassword(): Unit = {
    assertThrows[Exception](
      spark
        .sql("select 2 as t")
        .write
        .format("io.frama.parisni.spark.postgres")
        .option("host", "localhost")
        .option("port", pg.getEmbeddedPostgres.getPort)
        .option("database", "postgres")
        .option("user", "idontknow")
        .option("password", "<PASSWORD>")
        .option("table", "test_table")
        .mode(org.apache.spark.sql.SaveMode.Overwrite)
        .save
    )
  }

  // tableCreate maps a Spark schema (including array columns) to a
  // PostgreSQL table without error.
  @Test
  def verifyPostgresCreateTable(): Unit = {
    import spark.implicits._
    val schema =
      ((1, "asdf", 1L, Array(1, 2, 3), Array("bob"), Array(1L, 2L)) :: Nil)
        .toDF(
          "int_col",
          "string_col",
          "long_col",
          "array_int_col",
          "array_string_col",
          "array_bigint_col"
        )
        .schema
    getPgTool().tableCreate("test_array", schema, isUnlogged = true)
  }

  // Upper-case (quoted) table and column names survive create + full load.
  @Test
  def verifyPostgresCreateSpecialTable(): Unit = {
    import spark.implicits._
    val data =
      ((1, "asdf", 1L, Array(1, 2, 3), Array("bob"), Array(1L, 2L)) :: Nil)
        .toDF(
          "INT_COL",
          "STRING_COL",
          "LONG_COL",
          "ARRAY_INT_COL",
          "ARRAY_STRING_COL",
          "ARRAY_BIGINT_COL"
        )
    val schema = data.schema
    getPgTool().tableCreate("TEST_ARRAY", schema, isUnlogged = true)
    data.write
      .format("io.frama.parisni.spark.postgres")
      .option("url", getPgUrl)
      .option("type", "full")
      .option("table", "TEST_ARRAY")
      .save
  }

  // tableExists reflects the table's presence before and after creation.
  @Test
  def verifyPostgresTableExists(): Unit = {
    import spark.implicits._
    val schema =
      ((1, "asdf", 1L, Array(1, 2, 3), Array("bob"), Array(1L, 2L)) :: Nil)
        .toDF(
          "int_col",
          "string_col",
          "long_col",
          "array_int_col",
          "array_string_col",
          "array_bigint_col"
        )
        .schema
    assert(!getPgTool().tableExists("TEST_ARRAY"))
    getPgTool().tableCreate("TEST_ARRAY", schema, isUnlogged = true)
    assert(getPgTool().tableExists("TEST_ARRAY"))
  }

  // tableCopy(copyConstraints = true) carries CHECK constraints over to the
  // copied table (name and source expression are compared via pg_constraint).
  @Test
  def verifyPostgresCopyTableConstraint(): Unit = {
    val db = pg.getEmbeddedPostgres.getPostgresDatabase
    val conn = db.getConnection()
    conn
      .createStatement()
      .execute(
        """
          |CREATE TABLE base_table_for_constraints(
          |  constraint_val INT CONSTRAINT on_constraint_value CHECK (constraint_val > 0)
          |)
        """.stripMargin
      )

    val expectedConstraintName = "on_constraint_value"
    val expectedConstraintSrc = "(constraint_val > 0)"

    val constraintSql =
      """
        |SELECT
        |  con.conname,
        |  con.consrc
        |FROM pg_catalog.pg_constraint con
        |  INNER JOIN pg_catalog.pg_class rel
        |  ON rel.oid = con.conrelid
        |  AND rel.relname = 'TABLE_NAME';
      """.stripMargin

    // Assert base table constraint info
    val rsBase = conn
      .createStatement()
      .executeQuery(
        constraintSql.replace("TABLE_NAME", "base_table_for_constraints")
      )
    rsBase.next()
    assert(rsBase.getString(1) == expectedConstraintName)
    assert(rsBase.getString(2) == expectedConstraintSrc)

    // Do the copy
    getPgTool().tableCopy(
      "base_table_for_constraints",
      "copy_table_for_constraints",
      copyConstraints = true
    )

    // Assert copied table carries the same constraint
    val rsCopy = conn
      .createStatement()
      .executeQuery(
        constraintSql.replace("TABLE_NAME", "copy_table_for_constraints")
      )
    rsCopy.next()
    assert(rsCopy.getString(1) == expectedConstraintName)
    assert(rsCopy.getString(2) == expectedConstraintSrc)
  }

  // tableCopy(copyIndexes = true) recreates a compound partial index on the
  // copy; the index definition is compared modulo the generated index name.
  @Test
  def verifyPostgresCopyTableIndexes(): Unit = {
    val db = pg.getEmbeddedPostgres.getPostgresDatabase
    val conn = db.getConnection()
    conn
      .createStatement()
      .execute(
        """
          |CREATE TABLE base_table_for_indexes(
          |  compounded_idx_1 INT,
          |  compounded_idx_2 VARCHAR(256)
          |)
        """.stripMargin
      )

    // Add complex index
    conn
      .createStatement()
      .execute(
        """
          |CREATE INDEX compounded_idx ON base_table_for_indexes USING btree(
          |  compounded_idx_1 ASC NULLS FIRST,
          |  compounded_idx_2 DESC
          |) WHERE LENGTH(compounded_idx_2) < 10
        """.stripMargin
      )

    // Canonical form PostgreSQL reports through pg_indexes.indexdef.
    val expectedIndexDef =
      "CREATE INDEX IDX_NAME ON public.TABLE_NAME USING btree " +
        "(compounded_idx_1 NULLS FIRST, compounded_idx_2 DESC) " +
        "WHERE (length((compounded_idx_2)::text) < 10)"

    // Assert base index info is correct
    val rsBase = conn
      .createStatement()
      .executeQuery(
        "SELECT indexname, indexdef FROM pg_indexes WHERE tablename = 'base_table_for_indexes'"
      )
    rsBase.next()
    assert(rsBase.getString(1) == "compounded_idx")
    assert(
      rsBase.getString(2) == expectedIndexDef
        .replace("IDX_NAME", "compounded_idx")
        .replace("TABLE_NAME", "base_table_for_indexes")
    )

    // Do the copy
    getPgTool().tableCopy(
      "base_table_for_indexes",
      "copy_table_for_indexes",
      copyIndexes = true
    )

    // Assert copied index info is correct (index name is generated).
    val rsCopy = conn
      .createStatement()
      .executeQuery(
        "SELECT indexname, indexdef FROM pg_indexes WHERE tablename = 'copy_table_for_indexes'"
      )
    rsCopy.next()
    val idxName = rsCopy.getString(1)
    assert(
      rsCopy.getString(2) == expectedIndexDef
        .replace("IDX_NAME", idxName)
        .replace("TABLE_NAME", "copy_table_for_indexes")
    )
  }

  // tableCopy(copyStorage = true) preserves a non-default column storage
  // mode ('p' = PLAIN; the varchar default is 'x' = EXTENDED).
  @Test
  def verifyPostgresCopyTableStorage(): Unit = {
    val db = pg.getEmbeddedPostgres.getPostgresDatabase
    val conn = db.getConnection()
    conn
      .createStatement()
      .execute("""
        |CREATE TABLE base_table_for_storage(
        |  toast_column VARCHAR(1024)
        |)
      """.stripMargin)

    val checkStorageSql =
      """
        |select t2.attstorage
        |from pg_class t1
        |inner join pg_attribute t2
        |  on t1.oid = t2.attrelid
        |  and t1.relname = 'TABLE_NAME'
        |  and t2.attname = 'COLUMN_NAME'
      """.stripMargin

    // Assert that original column has storage x
    val rsBaseOriginal = conn
      .createStatement()
      .executeQuery(
        checkStorageSql
          .replace("TABLE_NAME", "base_table_for_storage")
          .replace("COLUMN_NAME", "toast_column")
      )
    rsBaseOriginal.next()
    assert(rsBaseOriginal.getString(1) == "x")

    conn
      .createStatement()
      .execute("""
        |ALTER TABLE base_table_for_storage ALTER COLUMN toast_column SET STORAGE PLAIN
      """.stripMargin)

    // Assert that updated has storage p
    val rsBaseUpdated = conn
      .createStatement()
      .executeQuery(
        checkStorageSql
          .replace("TABLE_NAME", "base_table_for_storage")
          .replace("COLUMN_NAME", "toast_column")
      )
    rsBaseUpdated.next()
    assert(rsBaseUpdated.getString(1) == "p")

    // Do the copy
    getPgTool().tableCopy(
      "base_table_for_storage",
      "copy_table_for_storage",
      copyStorage = true
    )

    // Assert that copied-table column has storage p
    val rsCopy = conn
      .createStatement()
      .executeQuery(
        checkStorageSql
          .replace("TABLE_NAME", "copy_table_for_storage")
          .replace("COLUMN_NAME", "toast_column")
      )
    rsCopy.next()
    assert(rsCopy.getString(1) == "p")
  }

  // tableCopy(copyComments = true) carries column comments over to the copy.
  @Test
  def verifyPostgresCopyTableComments(): Unit = {
    val db = pg.getEmbeddedPostgres.getPostgresDatabase
    val conn = db.getConnection()
    conn
      .createStatement()
      .execute(
        """
          |CREATE TABLE base_table_for_comments(comment_val int)
        """.stripMargin
      )

    // Add comment
    conn
      .createStatement()
      .execute(
        """
          |COMMENT ON COLUMN base_table_for_comments.comment_val IS 'Test comment'
        """.stripMargin
      )

    val commentsSql =
      """
        |SELECT c.column_name,pgd.description
        |FROM pg_catalog.pg_description pgd
        |  INNER JOIN information_schema.columns c
        |  ON (pgd.objsubid=c.ordinal_position)
        |WHERE c.table_name='TABLE_NAME'
      """.stripMargin

    val expectedColumnName = "comment_val"
    val expectedColumnComment = "Test comment"

    // Assert base comment info is correct
    val rsBase = conn
      .createStatement()
      .executeQuery(
        commentsSql.replace("TABLE_NAME", "base_table_for_comments")
      )
    rsBase.next()
    assert(rsBase.getString(1) == expectedColumnName)
    assert(rsBase.getString(2) == expectedColumnComment)

    // Do the copy
    getPgTool().tableCopy(
      "base_table_for_comments",
      "copy_table_for_comments",
      copyComments = true
    )

    // Assert copied comment info is correct
    val rsCopy = conn
      .createStatement()
      .executeQuery(
        commentsSql.replace("TABLE_NAME", "copy_table_for_comments")
      )
    rsCopy.next()
    assert(rsCopy.getString(1) == expectedColumnName)
    assert(rsCopy.getString(2) == expectedColumnComment)
  }

  // tableCopy(copyOwner = true) preserves the table owner.
  // BUGFIX: the original ran `relowner` (a plain integer oid) through the
  // ACL-parsing chain .drop(1).dropRight(1).split(",") copy-pasted from the
  // permissions test; for an oid like "10" both sides reduced to Seq(""),
  // so the assertion was vacuously true. Compare the raw oids directly.
  @Test
  def verifyPostgresCopyTableOwner(): Unit = {
    val db = pg.getEmbeddedPostgres.getPostgresDatabase
    val conn = db.getConnection()
    conn.createStatement().execute("CREATE TABLE base_table_for_owner()")
    conn.createStatement().execute("CREATE ROLE ru1 LOGIN")
    conn
      .createStatement()
      .execute("ALTER TABLE base_table_for_owner OWNER TO ru1")

    getPgTool().tableCopy(
      "base_table_for_owner",
      "copy_table_for_owner",
      copyOwner = true
    )

    // Check table owner
    val rsBaseTableOwner = conn
      .createStatement()
      .executeQuery(
        "SELECT relowner FROM pg_class WHERE relname = 'base_table_for_owner'"
      )
    rsBaseTableOwner.next()
    val baseTableOwner = rsBaseTableOwner.getString(1)
    rsBaseTableOwner.close()

    val rsCopyTableOwner = conn
      .createStatement()
      .executeQuery(
        "SELECT relowner FROM pg_class WHERE relname = 'copy_table_for_owner'"
      )
    rsCopyTableOwner.next()
    val copyTableOwner = rsCopyTableOwner.getString(1)
    rsCopyTableOwner.close()

    assert(baseTableOwner == copyTableOwner)
  }

  // tableCopy(copyPermissions = true) carries both table-level and
  // column-level grants over to the copy. ACL entries are parsed from
  // pg_class.relacl / pg_attribute.attacl ("{grant/grantor,...}"), keeping
  // only the grant part before '/'.
  @Test
  def verifyPostgresCopyTablePermissions(): Unit = {
    val db = pg.getEmbeddedPostgres.getPostgresDatabase
    val conn = db.getConnection()
    conn
      .createStatement()
      .execute("CREATE TABLE base_table_for_perms(perm_col int)")
    conn.createStatement().execute("CREATE ROLE ru1 LOGIN INHERIT")
    conn.createStatement().execute("CREATE ROLE rg1 NOINHERIT")
    conn.createStatement().execute("CREATE ROLE rg2 NOINHERIT")
    conn.createStatement().execute("GRANT rg1 TO ru1")
    conn.createStatement().execute("GRANT rg2 TO rg1")
    conn
      .createStatement()
      .execute("GRANT SELECT ON base_table_for_perms TO PUBLIC")
    conn
      .createStatement()
      .execute("GRANT INSERT ON base_table_for_perms TO ru1")
    conn
      .createStatement()
      .execute("GRANT UPDATE ON base_table_for_perms TO ru1 WITH GRANT OPTION")
    conn
      .createStatement()
      .execute("GRANT INSERT ON base_table_for_perms TO rg1")
    conn
      .createStatement()
      .execute("GRANT TRUNCATE ON base_table_for_perms TO rg2")
    conn
      .createStatement()
      .execute("GRANT INSERT(perm_col) ON base_table_for_perms TO ru1")
    conn
      .createStatement()
      .execute("GRANT UPDATE(perm_col) ON base_table_for_perms TO rg1")

    getPgTool().tableCopy(
      "base_table_for_perms",
      "copy_table_for_perms",
      copyPermissions = true
    )

    // Check table perms
    val rsBaseTablePerms = conn
      .createStatement()
      .executeQuery(
        "SELECT relacl FROM pg_class WHERE relname = 'base_table_for_perms'"
      )
    rsBaseTablePerms.next()
    val baseTablePerms = rsBaseTablePerms
      .getString(1)
      .drop(1)
      .dropRight(1)
      .split(",")
      .map(p => p.split("/")(0))
      .toSeq
    rsBaseTablePerms.close()

    val rsCopyTablePerms = conn
      .createStatement()
      .executeQuery(
        "SELECT relacl FROM pg_class WHERE relname = 'copy_table_for_perms'"
      )
    rsCopyTablePerms.next()
    val copyTablePerms = rsCopyTablePerms
      .getString(1)
      .drop(1)
      .dropRight(1)
      .split(",")
      .map(p => p.split("/")(0))
      .toSeq
    rsCopyTablePerms.close()

    baseTablePerms.foreach(p => assert(copyTablePerms.contains(p)))

    // Check columns perms
    val rsBaseTableColumnsPerms = conn
      .createStatement()
      .executeQuery(""" SELECT attacl
        |FROM pg_attribute a
        |  JOIN pg_class c
        |  ON a.attrelid = c.oid
        |WHERE c.relname = 'base_table_for_perms'
        |  AND a.attname = 'perm_col'
      """.stripMargin)
    rsBaseTableColumnsPerms.next()
    val baseTableColumnPerms = rsBaseTableColumnsPerms
      .getString(1)
      .drop(1)
      .dropRight(1)
      .split(",")
      .map(p => p.split("/")(0))
      .toSeq
    rsBaseTableColumnsPerms.close()

    val rsCopyTableColumnPerms = conn
      .createStatement()
      .executeQuery(
        """ SELECT attacl
          |FROM pg_attribute a
          |  JOIN pg_class c
          |  ON a.attrelid = c.oid
          |WHERE c.relname = 'copy_table_for_perms'
          |  AND a.attname = 'perm_col'
        """.stripMargin
      )
    rsCopyTableColumnPerms.next()
    val copyTableColumnPerms = rsCopyTableColumnPerms
      .getString(1)
      .drop(1)
      .dropRight(1)
      .split(",")
      .map(p => p.split("/")(0))
      .toSeq
    rsCopyTableColumnPerms.close()

    baseTableColumnPerms.foreach(p => assert(copyTableColumnPerms.contains(p)))
  }

  // killLocks terminates the backend holding an ACCESS EXCLUSIVE lock; the
  // subsequent commit on the killed connection must then fail.
  @Test
  def verifyKillLocks(): Unit = {
    val db = pg.getEmbeddedPostgres.getPostgresDatabase
    val conn = db.getConnection()
    conn.createStatement().execute("create table lockable()")
    conn.setAutoCommit(false)
    try {
      conn.createStatement().execute("BEGIN TRANSACTION")
      conn.createStatement().execute("LOCK lockable IN ACCESS EXCLUSIVE MODE")
      assert(getPgTool().killLocks("lockable") == 1)
      conn.commit()
      fail()
    } catch {
      // Expected: the killed session cannot commit.
      case e: PSQLException => ()
    }
  }

  // tableRename removes the old name and makes the new one visible.
  @Test
  def verifyRename(): Unit = {
    val db = pg.getEmbeddedPostgres.getPostgresDatabase
    val conn = db.getConnection()
    conn.createStatement().execute("create table to_rename()")
    getPgTool().tableRename("to_rename", "renamed")

    var rs = conn
      .createStatement()
      .executeQuery(
        "SELECT EXISTS(SELECT FROM information_schema.tables WHERE table_name = 'to_rename')"
      )
    rs.next()
    assert(!rs.getBoolean(1))

    rs = conn
      .createStatement()
      .executeQuery(
        "SELECT EXISTS(SELECT FROM information_schema.tables WHERE table_name = 'renamed')"
      )
    rs.next()
    assert(rs.getBoolean(1))

    conn.close()
  }

  // "stream" bulk-load mode performs a full load without error.
  @Test
  def verifyPostgresStreamBulkLoadMode(): Unit = {
    import spark.implicits._
    val data =
      ((1, "asdf", 1L) :: Nil).toDF("INT_COL", "STRING_COL", "LONG_COL")
    val schema = data.schema
    getPgTool().tableCreate("TEST_STREAM_BULK_LOAD", schema, isUnlogged = true)
    data.write
      .format("io.frama.parisni.spark.postgres")
      .option("url", getPgUrl)
      .option("type", "full")
      .option("table", "TEST_STREAM_BULK_LOAD")
      .option("bulkLoadMode", "stream")
      .save
  }

  // "stream" bulk-load mode fails when the DataFrame has an extra column
  // the target table does not.
  @Test
  def verifyPostgresStreamBulkLoadModeError(): Unit = {
    import spark.implicits._
    val fakeData =
      ((1, "asdf", 1L) :: Nil).toDF("INT_COL", "STRING_COL", "LONG_COL")
    val schema = fakeData.schema
    val data = ((1, "asdf", 1L, "err") :: Nil).toDF(
      "INT_COL",
      "STRING_COL",
      "LONG_COL",
      "ERR_COL"
    )
    getPgTool().tableCreate("TEST_STREAM_BULK_LOAD", schema, isUnlogged = true)
    assertThrows[RuntimeException](
      data.write
        .format("io.frama.parisni.spark.postgres")
        .option("url", getPgUrl)
        .option("type", "full")
        .option("table", "TEST_STREAM_BULK_LOAD")
        .option("bulkLoadMode", "stream")
        .save
    )
  }

  @Test
  def verifyPostgresPgBinaryLoadStream(): Unit =
    verifyPostgresPgBinaryLoadMode("PgBinaryStream")

  @Test
  def verifyPostgresPgBinaryLoadFiles(): Unit =
    verifyPostgresPgBinaryLoadMode("PgBinaryFiles")

  // Shared body: full load of a simple row using the given binary mode.
  def verifyPostgresPgBinaryLoadMode(loadMode: String): Unit = {
    import spark.implicits._
    val data =
      ((1, "asdf", 1L) :: Nil).toDF("INT_COL", "STRING_COL", "LONG_COL")
    val schema = data.schema
    val tableName = s"TEST_PG_BINARY_LOAD_$loadMode"
    getPgTool().tableCreate(tableName, schema, isUnlogged = true)
    data.write
      .format("io.frama.parisni.spark.postgres")
      .option("url", getPgUrl)
      .option("type", "full")
      .option("table", tableName)
      .option("bulkLoadMode", loadMode)
      .save
  }

  @Test
  def verifyPostgresPgBinaryStreamCreateSpecialTable(): Unit =
    verifyPostgresPgBinaryCreateSpecialTable("PgBinaryStream")

  @Test
  def verifyPostgresPgBinaryFilesCreateSpecialTable(): Unit =
    verifyPostgresPgBinaryCreateSpecialTable("PgBinaryFiles")

  // Shared body: binary full load into a table with quoted upper-case
  // names and array columns.
  def verifyPostgresPgBinaryCreateSpecialTable(loadMode: String): Unit = {
    import spark.implicits._
    val data =
      ((1, "asdf", 1L, Array(1, 2, 3), Array("bob"), Array(1L, 2L)) :: Nil)
        .toDF(
          "INT_COL",
          "STRING_COL",
          "LONG_COL",
          "ARRAY_INT_COL",
          "ARRAY_STRING_COL",
          "ARRAY_BIGINT_COL"
        )
    val schema = data.schema
    val tableName = s"TEST_ARRAY_$loadMode"
    getPgTool().tableCreate(tableName, schema, isUnlogged = true)
    data.write
      .format("io.frama.parisni.spark.postgres")
      .option("url", getPgUrl)
      .option("type", "full")
      .option("table", tableName)
      .option("bulkLoadMode", loadMode)
      .save
  }

  @Test
  def verifyPgBinaryLoadStreamModeError(): Unit =
    verifyPgBinaryLoadModeError("PgBinaryStream")

  @Test
  def verifyPgBinaryLoadFilesModeError(): Unit =
    verifyPgBinaryLoadModeError("PgBinaryFiles")

  // Shared body: binary load fails on a schema mismatch (extra column).
  def verifyPgBinaryLoadModeError(loadMode: String): Unit = {
    import spark.implicits._
    val fakeData =
      ((1, "asdf", 1L) :: Nil).toDF("INT_COL", "STRING_COL", "LONG_COL")
    val schema = fakeData.schema
    val data = ((1, "asdf", 1L, "err") :: Nil).toDF(
      "INT_COL",
      "STRING_COL",
      "LONG_COL",
      "ERR_COL"
    )
    val tableName = s"TEST_PG_BINARY_ERR_$loadMode"
    getPgTool().tableCreate(tableName, schema, isUnlogged = true)
    assertThrows[RuntimeException](
      data.write
        .format("io.frama.parisni.spark.postgres")
        .option("url", getPgUrl)
        .option("type", "full")
        .option("table", tableName)
        .option("bulkLoadMode", loadMode)
        .save
    )
  }

  @Test
  def verifyPgBinaryStreamLoadPrimitiveTypes(): Unit =
    verifyPgBinaryLoadPrimitiveTypes("PgBinaryStream")

  // Shared body: every supported primitive type round-trips through the
  // binary load and reads back with its expected value, including map and
  // struct columns serialized as JSON.
  def verifyPgBinaryLoadPrimitiveTypes(loadMode: String): Unit = {
    import spark.implicits._
    // Needed for the InnerStruct class to be converted to DataFrame through implicit
    org.apache.spark.sql.catalyst.encoders.OuterScopes.addOuterScope(this)

    val data = Seq(
      (
        true,
        2.toByte,
        3.toShort,
        4,
        5L,
        6.0f,
        7.0d,
        "8",
        testDate,
        testTimestamp,
        "11".getBytes(),
        BigDecimal(12.0),
        Map("test_key" -> 13L),
        InnerStruct(14, "test_fi2")
      )
    ).toDF(
      "BOOL_COL",
      "BYTE_COL",
      "SHORT_COL",
      "INT_COL",
      "LONG_COL",
      "FLOAT_COL",
      "DOUBLE_COL",
      "STRING_COL",
      "DATE_COL",
      "TIMESTAMP_COL",
      "BYTEA_COL",
      "BIGD_COL",
      "MAP_COL",
      "STRUCT_COL"
    )
    val schema = data.schema
    val tableName = s"TEST_PG_BINARY_LOAD_PRIMITIVES_$loadMode"
    getPgTool().tableCreate(tableName, schema, isUnlogged = true)
    data.write
      .format("io.frama.parisni.spark.postgres")
      .option("url", getPgUrl)
      .option("type", "full")
      .option("table", tableName)
      .option("bulkLoadMode", loadMode)
      .save

    val db = pg.getEmbeddedPostgres.getPostgresDatabase
    val conn = db.getConnection()
    val rs =
      conn.createStatement().executeQuery(s"""SELECT * FROM "$tableName" """)
    rs.next()
    assert(rs.getBoolean(1))
    assert(rs.getByte(2) == 2)
    assert(rs.getShort(3) == 3)
    assert(rs.getInt(4) == 4)
    assert(rs.getLong(5) == 5L)
    assert(rs.getFloat(6) == 6.0f)
    assert(rs.getDouble(7) == 7.0d)
    assert(rs.getString(8) == "8")
    assert(rs.getDate(9).toLocalDate.equals(testDate.toLocalDate))
    assert(rs.getTimestamp(10).equals(testTimestamp))
    assert(util.Arrays.equals(rs.getBytes(11), "11".getBytes()))
    assert(rs.getDouble(12) == 12.0d)
    assert(rs.getString(13) == "{\"test_key\": 13}")
    assert(rs.getString(14) == "{\"fi1\": 14, \"fi2\": \"test_fi2\"}")
  }
// ---- Null handling for binary loads, swap-load overwrite, fixtures ----

  @Test
  def verifyPgBinaryFilesLoadPrimitiveTypes(): Unit =
    verifyPgBinaryLoadPrimitiveTypes("PgBinaryFiles")

  @Test
  def verifyPgBinaryStreamLoadNullPrimitives(): Unit =
    verifyPgBinaryLoadNullPrimitives("PgBinaryStream")

  // Shared body: a single all-None row of every supported primitive type
  // loads cleanly and every column reads back as SQL NULL.
  def verifyPgBinaryLoadNullPrimitives(loadMode: String): Unit = {
    import spark.implicits._
    // Needed for the InnerStruct class to be converted to DataFrame through implicit
    org.apache.spark.sql.catalyst.encoders.OuterScopes.addOuterScope(this)

    val data = Seq(
      (
        None.asInstanceOf[Option[Boolean]],
        None.asInstanceOf[Option[Byte]],
        None.asInstanceOf[Option[Short]],
        None.asInstanceOf[Option[Int]],
        None.asInstanceOf[Option[Long]],
        None.asInstanceOf[Option[Float]],
        None.asInstanceOf[Option[Double]],
        None.asInstanceOf[Option[String]],
        None.asInstanceOf[Option[Date]],
        None.asInstanceOf[Option[Timestamp]],
        None.asInstanceOf[Option[Array[Byte]]],
        None.asInstanceOf[Option[BigDecimal]],
        None.asInstanceOf[Option[Map[String, Long]]],
        None.asInstanceOf[Option[InnerStruct]]
      )
    ).toDF(
      "BOOL_COL",
      "BYTE_COL",
      "SHORT_COL",
      "INT_COL",
      "LONG_COL",
      "FLOAT_COL",
      "DOUBLE_COL",
      "STRING_COL",
      "DATE_COL",
      "TIMESTAMP_COL",
      "BYTEA_COL",
      "BIGD_COL",
      "MAP_COL",
      "STRUCT_COL"
    )
    val schema = data.schema
    val tableName = s"TEST_PG_BINARY_LOAD_NULL_PRIMITIVES_$loadMode"
    getPgTool().tableCreate(tableName, schema, isUnlogged = true)
    data.write
      .format("io.frama.parisni.spark.postgres")
      .option("url", getPgUrl)
      .option("type", "full")
      .option("table", tableName)
      .option("bulkLoadMode", loadMode)
      .save

    val db = pg.getEmbeddedPostgres.getPostgresDatabase
    val conn = db.getConnection()
    val rs =
      conn.createStatement().executeQuery(s"""SELECT * FROM "$tableName" """)
    rs.next()
    // getString returns null for SQL NULL regardless of column type.
    (1 to 14).foreach(i => assert(rs.getString(i) == null))
  }

  @Test
  def verifyPgBinaryFilesLoadNullPrimitives(): Unit =
    verifyPgBinaryLoadNullPrimitives("PgBinaryFiles")

  @Test
  def verifyPgBinaryStreamLoadNullInArrays(): Unit =
    verifyPgBinaryLoadNullInArrays("PgBinaryStream")

  @Test
  def verifyPgBinaryFilesLoadNullInArrays(): Unit =
    verifyPgBinaryLoadNullInArrays("PgBinaryFiles")

  // Shared body: arrays whose second element is None load cleanly; element
  // [1] of each array keeps its value and element [2] reads back as NULL.
  def verifyPgBinaryLoadNullInArrays(loadMode: String): Unit = {
    import spark.implicits._
    // Needed for the InnerStruct class to be converted to DataFrame through implicit
    org.apache.spark.sql.catalyst.encoders.OuterScopes.addOuterScope(this)

    val colNames = Seq(
      "BOOL_COL",
      "BYTE_COL",
      "SHORT_COL",
      "INT_COL",
      "LONG_COL",
      "FLOAT_COL",
      "DOUBLE_COL",
      "STRING_COL",
      "DATE_COL",
      "TIMESTAMP_COL",
      "BYTEA_COL",
      "BIGD_COL"
    )
    val data = Seq(
      (
        Array(Some(true), None),
        Array(Some(2.toByte), None),
        Array(Some(3.toShort), None),
        Array(Some(4), None),
        Array(Some(5L), None),
        Array(Some(6.0f), None),
        Array(Some(7.0d), None),
        Array(Some("8"), None),
        Array(Some(testDate), None),
        Array(Some(testTimestamp), None),
        Array(Some("11".getBytes()), None),
        Array(Some(BigDecimal(12)), None)
      )
    ).toDF(colNames: _*)
    val schema = data.schema
    val tableName = s"TEST_PG_BULK_INSERT_LOAD_NULL_IN_ARRAYS_$loadMode"
    getPgTool().tableCreate(tableName, schema, isUnlogged = true)
    data.write
      .format("io.frama.parisni.spark.postgres")
      .option("url", getPgUrl)
      .option("type", "full")
      .option("table", tableName)
      .option("bulkLoadMode", loadMode)
      .save

    val db = pg.getEmbeddedPostgres.getPostgresDatabase
    val conn = db.getConnection()
    // Select element [1] (value) then element [2] (NULL) of every array.
    val selectFirstValues =
      colNames.map(n => PGTool.sanP(n) + "[1]").mkString(", ")
    val selectSecondValues =
      colNames.map(n => PGTool.sanP(n) + "[2]").mkString(", ")
    val rs = conn
      .createStatement()
      .executeQuery(s"""
        |SELECT
        |  $selectFirstValues,
        |  $selectSecondValues
        |FROM "$tableName"
      """.stripMargin)
    rs.next()
    assert(rs.getBoolean(1))
    // Consistency fix: read the BYTE column with getByte (the original used
    // getShort here, unlike verifyPgBinaryLoadPrimitiveTypes).
    assert(rs.getByte(2) == 2)
    assert(rs.getShort(3) == 3)
    assert(rs.getInt(4) == 4)
    assert(rs.getLong(5) == 5L)
    assert(rs.getFloat(6) == 6.0f)
    assert(rs.getDouble(7) == 7.0d)
    assert(rs.getString(8) == "8")
    assert(rs.getDate(9).toLocalDate.equals(testDate.toLocalDate))
    assert(
      rs.getTimestamp(10).toLocalDateTime.equals(testTimestamp.toLocalDateTime)
    )
    assert(util.Arrays.equals(rs.getBytes(11), "11".getBytes))
    assert(rs.getDouble(12) == 12.0d)
    // Second element of every array must be NULL.
    (13 to 24).foreach(i => assert(rs.getString(i) == null))
  }

  // Overwrite with swapLoad = true succeeds on a table that already holds
  // data (load into a temp table, then swap).
  @Test
  def verifyOverwriteSwapLoad(): Unit = {
    import spark.implicits._
    val data = ((1, "asdf", 1L, "err") :: Nil).toDF(
      "INT_COL",
      "STRING_COL",
      "LONG_COL",
      "ERR_COL"
    )
    val schema = data.schema
    val tableName = "TEST_SWAP_BASE"
    getPgTool().tableCreate(tableName, schema, isUnlogged = true)
    data.write
      .format("io.frama.parisni.spark.postgres")
      .option("url", getPgUrl)
      .option("type", "full")
      .option("table", tableName)
      .option("bulkLoadMode", "PgBinaryStream")
      .save
    data.write
      .format("io.frama.parisni.spark.postgres")
      .option("url", getPgUrl)
      .option("type", "full")
      .option("table", tableName)
      .option("swapLoad", "true")
      .option("bulkLoadMode", "PgBinaryStream")
      .mode(org.apache.spark.sql.SaveMode.Overwrite)
      .save
  }

  // Row fixture used by the struct-column binary-load tests.
  case class InnerStruct(fi1: Int, fi2: String)
}