text
stringlengths 31
1.04M
|
---|
//! Contains the definiton and logic for all the
//! output sections required to build the final file.
const std = @import("std");
const Symbol = @import("Symbol.zig");
const Object = @import("Object.zig");
const types = @import("types.zig");
const Wasm = @import("Wasm.zig");
const Allocator = std.mem.Allocator;
const log = std.log.scoped(.zwld);
/// Accepts a slice with mutable elements and sets the field `field_name`'s value
/// to the index within the list, based on the given `offset`.
fn setIndex(comptime field_name: []const u8, slice: anytype, offset: u32) void {
    // Capture by pointer: a by-value capture is an immutable copy, so
    // assigning to its field could never update the element in the slice.
    for (slice) |*item, index| {
        @field(item, field_name) = @intCast(u32, index + offset);
    }
}
/// Output function section, holding a list of all
/// function with indexes to their type
pub const Functions = struct {
    /// Holds the list of function type indexes.
    /// The list is built from merging all defined functions into this single list.
    /// Once appended, it becomes immutable and should not be mutated outside this list.
    items: std.ArrayListUnmanaged(std.wasm.Func) = .{},

    /// Appends `func` to the section and returns its final function index,
    /// i.e. `offset` (the number of imported functions) plus its position in
    /// this list.
    /// NOTE(review): the original comment claimed this also sets the function
    /// index on the `Func` itself, but no field of `func` is written here --
    /// confirm callers do not rely on that.
    pub fn append(self: *Functions, gpa: Allocator, offset: u32, func: std.wasm.Func) !u32 {
        const index = offset + self.count();
        try self.items.append(gpa, func);
        return index;
    }

    /// Returns the count of entries within the function section
    pub fn count(self: *Functions) u32 {
        return @intCast(u32, self.items.items.len);
    }

    /// Releases all memory; the section must not be used afterwards.
    pub fn deinit(self: *Functions, gpa: Allocator) void {
        self.items.deinit(gpa);
        self.* = undefined;
    }
};
/// Output import section, containing all the various import types
pub const Imports = struct {
    /// Table where the key is represented by an import.
    /// Each entry represents an imported function where the value contains the index of the function
    /// as well as the index of the type.
    imported_functions: std.ArrayHashMapUnmanaged(
        ImportKey,
        struct { index: u32, type: u32 },
        ImportKey.Ctx,
        true,
    ) = .{},
    /// Table where the key is represented by an import.
    /// Each entry represents an imported global from the host environment and maps to the index
    /// within this map.
    imported_globals: std.ArrayHashMapUnmanaged(
        ImportKey,
        struct { index: u32, global: std.wasm.GlobalType },
        ImportKey.Ctx,
        true,
    ) = .{},
    /// Table where the key is represented by an import.
    /// Each entry represents an imported table from the host environment and maps to the index
    /// within this map.
    imported_tables: std.ArrayHashMapUnmanaged(
        ImportKey,
        struct { index: u32, table: std.wasm.Table },
        ImportKey.Ctx,
        true,
    ) = .{},
    /// A list of symbols representing objects that have been imported.
    imported_symbols: std.ArrayListUnmanaged(Wasm.SymbolWithLoc) = .{},

    const ImportKey = struct {
        module_name: []const u8,
        name: []const u8,

        const Ctx = struct {
            pub fn hash(ctx: Ctx, key: ImportKey) u32 {
                _ = ctx;
                var hasher = std.hash.Wyhash.init(0);
                // Hash the string *contents*, not the slice pointers:
                // identical names loaded from different object files live at
                // different addresses, so hashing `.ptr` (as the previous
                // version did) gave equal keys different hashes while `eql`
                // reported them equal -- breaking the map's hash/eql contract
                // and allowing duplicate imports / missed lookups.
                // The length acts as a separator so ("ab","c") and ("a","bc")
                // cannot collide through concatenation.
                std.hash.autoHash(&hasher, key.module_name.len);
                hasher.update(key.module_name);
                hasher.update(key.name);
                return @truncate(u32, hasher.final());
            }

            pub fn eql(ctx: Ctx, lhs: ImportKey, rhs: ImportKey, index: usize) bool {
                _ = ctx;
                _ = index;
                return std.mem.eql(u8, lhs.name, rhs.name) and
                    std.mem.eql(u8, lhs.module_name, rhs.module_name);
            }
        };
    };

    const max_load = std.hash_map.default_max_load_percentage;

    /// Appends an import symbol into the list of imports. Based on the type, also appends it
    /// to their respective import list (such as imported_functions).
    /// Also rewrites `symbol.index` to the import's index within its kind.
    ///
    /// NOTE: The given symbol must reside within the given `Object`.
    pub fn appendSymbol(
        self: *Imports,
        gpa: Allocator,
        wasm: *const Wasm,
        sym_with_loc: Wasm.SymbolWithLoc,
    ) !void {
        const object: *Object = &wasm.objects.items[sym_with_loc.file.?];
        const symbol = &object.symtable[sym_with_loc.sym_index];
        const import = object.findImport(symbol.externalType(), symbol.index);
        const module_name = import.module_name;
        const import_name = symbol.name;
        switch (symbol.tag) {
            .function => {
                const ret = try self.imported_functions.getOrPut(gpa, .{
                    .module_name = module_name,
                    .name = import_name,
                });
                if (!ret.found_existing) {
                    try self.imported_symbols.append(gpa, sym_with_loc);
                    // The entry was just inserted, so count - 1 is its index.
                    ret.value_ptr.* = .{
                        .index = self.functionCount() - 1,
                        .type = import.kind.function,
                    };
                }
                symbol.index = ret.value_ptr.*.index;
                log.debug("Imported function '{s}' at index ({d})", .{ import_name, symbol.index });
            },
            .global => {
                const ret = try self.imported_globals.getOrPut(gpa, .{
                    .module_name = module_name,
                    .name = import_name,
                });
                if (!ret.found_existing) {
                    try self.imported_symbols.append(gpa, sym_with_loc);
                    ret.value_ptr.* = .{
                        .index = self.globalCount() - 1,
                        .global = import.kind.global,
                    };
                }
                symbol.index = ret.value_ptr.*.index;
                log.debug("Imported global '{s}' at index ({d})", .{ import_name, symbol.index });
            },
            .table => {
                const ret = try self.imported_tables.getOrPut(gpa, .{
                    .module_name = module_name,
                    .name = import_name,
                });
                if (!ret.found_existing) {
                    try self.imported_symbols.append(gpa, sym_with_loc);
                    ret.value_ptr.* = .{
                        .index = self.tableCount() - 1,
                        .table = import.kind.table,
                    };
                }
                symbol.index = ret.value_ptr.*.index;
                log.debug("Imported table '{s}' at index ({d})", .{ import_name, symbol.index });
            },
            else => unreachable, // programmer error: Given symbol cannot be imported
        }
    }

    /// Returns the count of functions that have been imported (so far)
    pub fn functionCount(self: Imports) u32 {
        return @intCast(u32, self.imported_functions.count());
    }

    /// Returns the count of tables that have been imported (so far)
    pub fn tableCount(self: Imports) u32 {
        return @intCast(u32, self.imported_tables.count());
    }

    /// Returns the count of globals that have been imported (so far)
    pub fn globalCount(self: Imports) u32 {
        return @intCast(u32, self.imported_globals.count());
    }

    pub fn deinit(self: *Imports, gpa: Allocator) void {
        self.imported_functions.deinit(gpa);
        self.imported_globals.deinit(gpa);
        self.imported_tables.deinit(gpa);
        self.imported_symbols.deinit(gpa);
        self.* = undefined;
    }

    /// Returns a slice of the symbols that have been imported.
    pub fn symbols(self: Imports) []const Wasm.SymbolWithLoc {
        return self.imported_symbols.items;
    }

    /// Returns the count of symbols which have been imported
    pub fn symbolCount(self: Imports) u32 {
        return @intCast(u32, self.imported_symbols.items.len);
    }
};
/// Represents the output global section, containing a list of globals
pub const Globals = struct {
    /// A list of `wasm.Global`s
    /// Once appended to this list, they should no longer be mutated
    items: std.ArrayListUnmanaged(std.wasm.Global) = .{},
    /// List of internal GOT symbols
    got_symbols: std.ArrayListUnmanaged(*Symbol) = .{},

    /// Appends `global` and returns its section index: `offset` (number of
    /// imported globals) plus its position within this list.
    /// NOTE(review): despite the original comment, `global_idx` is NOT written
    /// here; indexes are assigned later through `setIndexes` -- confirm callers
    /// do not expect it to be set already.
    pub fn append(self: *Globals, gpa: Allocator, offset: u32, global: std.wasm.Global) !u32 {
        const index = offset + @intCast(u32, self.items.items.len);
        try self.items.append(gpa, global);
        return index;
    }

    /// Appends a new entry to the internal GOT.
    /// Function symbols are not supported yet: for those this ensures the
    /// indirect function table exists and then panics (TODO).
    pub fn addGOTEntry(self: *Globals, gpa: Allocator, symbol: *Symbol, wasm_bin: *Wasm) !void {
        if (symbol.kind == .function) {
            try wasm_bin.tables.createIndirectFunctionTable(gpa, wasm_bin);
            // try wasm_bin.elements.appendSymbol(gpa, symbol);
            @panic("TODO: Implement GOT entries");
        }
        try self.got_symbols.append(gpa, symbol);
    }

    /// Returns the total amount of globals of the global section
    pub fn count(self: Globals) u32 {
        return @intCast(u32, self.items.items.len);
    }

    /// Creates a new linker-defined global with the given mutability and value type.
    /// Also appends the new global to the output global section and returns a pointer
    /// to the newly created global.
    ///
    /// This will automatically set `init` to `null` and can manually be updated at a later point using
    /// the returned pointer.
    /// NOTE(review): the returned pointer is invalidated by any later append to
    /// `items` (ArrayList growth may reallocate) -- verify no global is added
    /// while the pointer is held. Also, `items` is declared with
    /// `std.wasm.Global` while this returns `types.Global`; presumably the two
    /// coincide in this project's std fork -- confirm.
    pub fn create(self: *Globals, gpa: Allocator, mutability: enum { mutable, immutable }, valtype: types.ValueType) !*types.Global {
        const index = self.count();
        try self.items.append(gpa, .{
            .valtype = valtype,
            .mutable = mutability == .mutable,
            .init = null,
            .global_idx = index,
        });
        return &self.items.items[index];
    }

    /// Assigns indexes to all globals based on the given `offset`:
    /// element 0 receives index `offset`, element 1 `offset + 1`, and so on.
    pub fn setIndexes(self: *Globals, offset: u32) void {
        setIndex("global_idx", self.items.items, offset);
    }

    /// Releases all memory; the section must not be used afterwards.
    pub fn deinit(self: *Globals, gpa: Allocator) void {
        self.items.deinit(gpa);
        self.got_symbols.deinit(gpa);
        self.* = undefined;
    }
};
/// Represents the type section, containing a list of
/// wasm signature types.
pub const Types = struct {
    /// A list of `wasm.FuncType`; appending through `append` keeps the
    /// list free of duplicates.
    ///
    /// TODO: Would a hashmap be more efficient?
    items: std.ArrayListUnmanaged(std.wasm.Type) = .{},

    /// Appends `func_type` unless an identical signature already exists.
    /// In both cases the index of the (existing or new) entry is returned.
    pub fn append(self: *Types, gpa: Allocator, func_type: std.wasm.Type) !u32 {
        if (self.find(func_type)) |existing| return existing;
        const new_index = self.count();
        try self.items.append(gpa, func_type);
        return new_index;
    }

    /// Returns a pointer to the function type at given `index`.
    /// Asserts the index is within bounds. The pointer is invalidated by
    /// any subsequent `append` that grows the list.
    pub fn get(self: Types, index: u32) *std.wasm.Type {
        return &self.items.items[index];
    }

    /// Linearly scans for a signature whose parameters and results both
    /// match `func_type`; returns its index, or `null` when absent.
    pub fn find(self: Types, func_type: std.wasm.Type) ?u32 {
        var index: u32 = 0;
        while (index < self.items.items.len) : (index += 1) {
            const ty = self.items.items[index];
            if (std.mem.eql(std.wasm.Valtype, ty.params, func_type.params) and
                std.mem.eql(std.wasm.Valtype, ty.returns, func_type.returns))
            {
                return index;
            }
        }
        return null;
    }

    /// Returns the amount of entries in the type section
    pub fn count(self: Types) u32 {
        return @intCast(u32, self.items.items.len);
    }

    /// Releases all memory; the section must not be used afterwards.
    pub fn deinit(self: *Types, gpa: Allocator) void {
        self.items.deinit(gpa);
        self.* = undefined;
    }
};
/// Represents the table section, containing a list
/// of tables, as well as the definition of linker-defined
/// tables such as the indirect function table
pub const Tables = struct {
    /// The list of tables that have been merged from all
    /// object files. This does not include any linker-defined
    /// tables. Once inserted in this list, the object becomes immutable.
    items: std.ArrayListUnmanaged(std.wasm.Table) = .{},

    /// Appends `table` and returns its index: `offset` (number of imported
    /// tables) plus its position within this list.
    pub fn append(self: *Tables, gpa: Allocator, offset: u32, table: std.wasm.Table) !u32 {
        const index = offset + self.count();
        try self.items.append(gpa, table);
        return index;
    }

    /// Returns the amount of entries in the table section
    pub fn count(self: Tables) u32 {
        return @intCast(u32, self.items.items.len);
    }

    /// Sets the table indexes of all table elements relative to their position within
    /// the list, starting from `offset` rather than '0'.
    pub fn setIndexes(self: *Tables, offset: u32) void {
        setIndex("table_idx", self.items.items, offset);
    }

    /// Creates a synthetic symbol for the indirect function table and appends it into the
    /// table list.
    /// NOTE(review): the existence check consults *global* linker-defined
    /// state, so at most one indirect function table exists per process --
    /// confirm this is intended when linking more than one output.
    pub fn createIndirectFunctionTable(self: *Tables, gpa: Allocator, wasm_bin: *Wasm) !void {
        // Only create it if it doesn't exist yet
        if (Symbol.linker_defined.indirect_function_table != null) {
            log.debug("Indirect function table already exists, skipping creation...", .{});
            return;
        }
        const index = self.count();
        try self.items.append(gpa, .{
            .limits = .{ .min = 0, .max = null },
            .reftype = .funcref,
            .table_idx = index,
        });
        var symbol: Symbol = .{
            .flags = 0, // created defined symbol
            .name = Symbol.linker_defined.names.indirect_function_table, // __indirect_function_table
            // NOTE(review): stores a pointer into `self.items`; a later append
            // may reallocate the list and invalidate it -- verify no tables
            // are added after this call.
            .kind = .{ .table = .{ .index = index, .table = &self.items.items[index] } },
        };
        try wasm_bin.synthetic_symbols.append(gpa, symbol);
        // NOTE(review): same invalidation caveat applies to this pointer when
        // `synthetic_symbols` grows afterwards.
        Symbol.linker_defined.indirect_function_table = &wasm_bin.synthetic_symbols.items[wasm_bin.synthetic_symbols.items.len - 1];
        log.debug("Created indirect function table at index {d}", .{index});
    }

    /// Releases all memory; the section must not be used afterwards.
    pub fn deinit(self: *Tables, gpa: Allocator) void {
        self.items.deinit(gpa);
        self.* = undefined;
    }
};
/// Represents the exports section, built from explicit exports
/// from all object files, as well as global defined symbols that are
/// non-hidden.
pub const Exports = struct {
    /// List of exports, containing both merged exports
    /// as linker-defined exports such as __stack_pointer.
    items: std.ArrayListUnmanaged(std.wasm.Export) = .{},
    /// Contains a list of pointers to symbols
    /// TODO: Do we really need this list?
    symbols: std.ArrayListUnmanaged(*Symbol) = .{},

    /// Stores `exp` in the list of output exports.
    pub fn append(self: *Exports, gpa: Allocator, exp: std.wasm.Export) !void {
        try self.items.append(gpa, exp);
    }

    /// Tracks `symbol` as belonging to an export.
    pub fn appendSymbol(self: *Exports, gpa: Allocator, symbol: *Symbol) !void {
        try self.symbols.append(gpa, symbol);
    }

    /// Returns the amount of entries in the export section.
    pub fn count(self: Exports) u32 {
        const total = self.items.items.len;
        return @intCast(u32, total);
    }

    /// Releases all memory; the section must not be used afterwards.
    pub fn deinit(self: *Exports, gpa: Allocator) void {
        self.symbols.deinit(gpa);
        self.items.deinit(gpa);
        self.* = undefined;
    }
};
pub const Elements = struct {
    /// A list of symbols for indirect function calls where the key
    /// represents the symbol location, and the value represents the table index.
    indirect_functions: std.AutoArrayHashMapUnmanaged(Wasm.SymbolWithLoc, u32) = .{},

    /// Appends a function symbol to the list of indirect function calls.
    /// Table indexes are 1-based: index '0' is reserved as the invalid
    /// function pointer. Appending the same symbol twice is a no-op.
    ///
    /// Asserts symbol represents a function.
    pub fn appendSymbol(self: *Elements, gpa: Allocator, symbol_loc: Wasm.SymbolWithLoc) !void {
        const gop = try self.indirect_functions.getOrPut(gpa, symbol_loc);
        if (gop.found_existing) return;
        // `getOrPut` already inserted the new entry, so `functionCount()` is
        // this symbol's 1-based position. The previous `functionCount() + 1`
        // was off by one: the first symbol received index 2, leaving a hole
        // at index 1 even though the comment promised to start there.
        gop.value_ptr.* = self.functionCount();
    }

    /// Returns the number of indirect function entries.
    pub fn functionCount(self: Elements) u32 {
        return @intCast(u32, self.indirect_functions.count());
    }

    /// Releases all memory; the list must not be used afterwards.
    pub fn deinit(self: *Elements, gpa: Allocator) void {
        self.indirect_functions.deinit(gpa);
        self.* = undefined;
    }
};
|
#lang scribble/doc
@(require "common.rkt")
@(tools-title "eval")
@(tools-include "eval")
|
#ifndef SETTING_H
#define SETTING_H

#include <QSize>
#include <QString>

// Value object holding user-configurable application options:
// theme, auto-generation flag, frame visibility, three step sizes,
// grid shape (cols x rows) and a colour name.
class setting
{
public:
    // Default-constructed settings (step/grid defaults below; flags
    // uninitialized -- see the private members).
    setting();
    // Fully-specified settings.
    // theme: assumed to select the dark/light theme -- TODO confirm meaning.
    setting(bool theme,bool autogen,bool showFrame,int step_1,int step_2,int step_3,int cols, int rows, QString color);
    // Returns a copy of this settings object.
    setting get_setting();
    bool get_theme();
    bool get_autogen();
    bool get_showFrame();
    int get_step_1();
    int get_step_2();
    int get_step_3();
    int get_cols();
    int get_rows();
    QString get_color();
private:
    // NOTE(review): the three bools have no in-class default, so a
    // default-constructed object reads indeterminate values unless the
    // default constructor initializes them -- confirm in setting.cpp.
    bool theme;
    bool autogen;
    bool showFrame;
    int step_1 = 5;
    int step_2 = 10;
    int step_3 = 25;
    int cols = 3;
    int rows = 3;
    QString color;
};

#endif // SETTING_H
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
parser grammar AtlasDSLParser;

options { tokenVocab=AtlasDSLLexer; }

// Core rules
identifier: ID ;

// Binary comparison operators usable in where-clauses.
operator: (K_LT | K_LTE | K_EQ | K_NEQ | K_GT | K_GTE | K_LIKE) ;

sortOrder: K_ASC | K_DESC ;

// Bracketed, comma-separated list of identifiers, e.g. [a, b, c].
valueArray: K_LBRACKET ID (K_COMMA ID)* K_RBRACKET ;

literal: BOOL | NUMBER | FLOATING_NUMBER | (ID | valueArray) ;

// Composite rules
limitClause: K_LIMIT NUMBER ;
offsetClause: K_OFFSET NUMBER ;

// Arithmetic expression grammar: atom -> multiplicative -> additive.
atomE: (identifier | literal) | K_LPAREN expr K_RPAREN ;
multiERight: (K_STAR | K_DIV) atomE ;
multiE: atomE multiERight* ;
arithERight: (K_PLUS | K_MINUS) multiE ;
arithE: multiE arithERight* ;

comparisonClause: arithE operator arithE ;
isClause: arithE (K_ISA | K_IS) (identifier | expr ) ;
hasTermClause: arithE K_HASTERM (identifier | expr );
hasClause: arithE K_HAS identifier ;

// Aggregate functions.
countClause: K_COUNT K_LPAREN K_RPAREN ;
maxClause: K_MAX K_LPAREN expr K_RPAREN ;
minClause: K_MIN K_LPAREN expr K_RPAREN ;
sumClause: K_SUM K_LPAREN expr K_RPAREN ;

// Boolean combination of comparison expressions.
exprRight: (K_AND | K_OR) compE ;
compE: comparisonClause
    | isClause
    | hasClause
    | arithE
    | countClause
    | maxClause
    | minClause
    | sumClause
    | hasTermClause
    ;
expr: compE exprRight* ;

limitOffset: limitClause offsetClause? ;
selectExpression: expr (K_AS identifier)? ;
selectExpr: selectExpression (K_COMMA selectExpression)* ;
aliasExpr: (identifier | literal) K_AS identifier ;
orderByExpr: K_ORDERBY expr sortOrder? ;
fromSrc: aliasExpr | (identifier | literal) ;
whereClause: K_WHERE expr ;
fromExpression: fromSrc whereClause? ;
fromClause: K_FROM fromExpression ;
selectClause: K_SELECT selectExpr ;

// A query source can be an explicit from/where or a bare expression.
singleQrySrc: fromClause | whereClause | fromExpression | expr ;
groupByExpression: K_GROUPBY K_LPAREN selectExpr K_RPAREN ;
commaDelimitedQueries: singleQrySrc (K_COMMA singleQrySrc)* ;
spaceDelimitedQueries: singleQrySrc singleQrySrc* ;
querySrc: commaDelimitedQueries | spaceDelimitedQueries ;

// Top-level rule: source, then optional group-by/select/order-by/limit.
query: querySrc groupByExpression?
    selectClause?
    orderByExpr?
    limitOffset? EOF;
********************************************************************************
********************************************************************************
* ERROR IN CODE CORRECTION
********************************************************************************
********************************************************************************

* Add correction or drop survey with code error

* Added 7th January: the ID was entered manually on the first day
drop if hhid == 681 & today ==d(27dec2020)

* Added 22nd January
/*
Supervisor Barka swapped the partner ID of Najwa (39837) with the partner ID
of Manoubia (39838), and vice versa.
*/
/*
g code_correct =.
replace code_correct = 39838 if key == ""
replace code_correct = 39837 if key == ""
g code_replace = 0
replace code_replace = 1 if key == "" | key == ""
replace code = code_correct if code_replace == 1
*/

* Added 2nd February
/* Error in code, correct code already included in the dataset */
drop if key == "uuid:484e8e00-bcc4-4764-9124-23ee4781aad0" & hhid == 3349
|
%{
#include <stdio.h>
int yylex(void);
//int yydebug = 1;
%}

/* Make the semantic-value types from docopt.h visible in the generated
   header as well as the parser itself. */
%code requires {
#include "docopt.h"
}

%union {
struct str str;
struct arg *arg;
}

%locations
%define parse.error verbose
%parse-param { struct ctx *ctx }

/* Tokens carrying a string payload (<str>). */
%token <str> ARG OPTARG POSARG POSARG_DDD WORD
%token EOL

%start input

%%

/* A usage document is one or more lines. */
input: input line
| line

/* A line is empty, a command name followed by its argument list,
   or a parse error recovered at end-of-line. */
line: EOL { ctx_oneol(ctx); }
| ARG { CTX_NEWCMD(ctx); } list-args EOL { ctx_onparsed(ctx); }
| error EOL { CTX_ONERROR(ctx); }

/* '|' separates alternatives inside an argument list. */
list-args: list-args '|' { ARG_SET(ctx, F_SEP); } arg
| list-args arg
| arg

/* One argument: required group, optional group, positional argument
   (optionally repeated "..."), bare flag, flag with value, or word. */
arg: { CMD_PUSH(ctx, T_REQGRP, NULL, 0); } '(' list-args ')' { CMD_POP(ctx); }
| { CMD_PUSH(ctx, T_OPTGRP, NULL, 0); } '[' list-args ']' { CMD_POP(ctx); }
| POSARG { CMD_PUSH(ctx, T_STR, $1.ptr, $1.len); }
| POSARG_DDD { CMD_PUSH(ctx, T_STR | F_ARR, $1.ptr, $1.len); }
| OPTARG { CMD_PUSH(ctx, T_FLAG, $1.ptr, $1.len); }
| OPTARG '=' POSARG { CMD_PUSH(ctx, T_STR | F_VAL, $1.ptr, $1.len); }
| OPTARG '=' POSARG_DDD { CMD_PUSH(ctx, T_STR | F_VAL | F_ARR, $1.ptr, $1.len); }
| ARG { CMD_PUSH(ctx, T_FLAG, $1.ptr, $1.len); }

%%
|
package user
import (
"github.com/Rhizomyidae/rat-server/app"
)
// Controller is the user controller; it embeds the application base controller.
type Controller struct {
	app.Controller
}

// SignUpInput defines the validated registration parameters.
// The `v` tags enforce: username/password required with length 6-16,
// and Password2 must equal Password (messages are user-facing Chinese text).
type SignUpInput struct {
	Username string `v:"required|length:6,16#账号不能为空|账号长度应当在:min到:max之间"`
	Password string `v:"required|length:6,16#请输入确认密码|密码长度应当在:min到:max之间"`
	Password2 string `v:"required|length:6,16|same:Password#密码不能为空|密码长度应当在:min到:max之间|两次密码输入不相等"`
	Nickname string
}

// SignUpRequest is the sign-up request payload.
type SignUpRequest struct {
	SignUpInput
}

// SignInRequest defines the login parameters (both fields required).
type SignInRequest struct {
	Username string `v:"required#账号不能为空"`
	Password string `v:"required#密码不能为空"`
}
|
---
title: "Analysis of posterior samples - compare two groups across different model outputs"
output:
pdf_document: default
html_document:
highlight: pygments
theme: spacelab
---
```{r setup, echo =FALSE, include=FALSE}
knitr::opts_chunk$set(echo = TRUE, fig.pos = 'h', fig.align = 'center')
knitr::opts_chunk$set(fig.cap = "", fig.path = "Plot")
library(ggplot2)
require(MCMCpack)
library(RColorBrewer)
```
* * *
```{r}
source("../../utils/load_data.R")
source('../../utils/metadata.R')
source('../../utils/compare_two_groups.R')
```
## MAR section
```{r, fig.width=3, fig.height=6}
# dem of 1st seed pair
filepath_nonmissing = '../simulation_results/fully_observed_11.RData'
filepath_missing = '../simulation_results/MAR30_21.RData'
group_nonmissing = 3
group_missing = 6
group_desc = 'DEM'
compare_two_groups(filepath_nonmissing, group_nonmissing, filepath_missing, group_missing, group_desc)
```
```{r, fig.width=3, fig.height=6}
# rep of 1st seed pair
filepath_nonmissing = '../simulation_results/fully_observed_11.RData'
filepath_missing = '../simulation_results/MAR30_21.RData'
group_nonmissing = 1
group_missing = 3
group_desc = 'REP'
compare_two_groups(filepath_nonmissing, group_nonmissing, filepath_missing, group_missing, group_desc)
```
## MCAR section
```{r, fig.width=3, fig.height=6}
# democrat of 1st seed pair
filepath_nonmissing = '../simulation_results/fully_observed_11.RData'
filepath_missing = '../simulation_results/MCAR30_16.RData'
group_nonmissing = 3
group_missing = 1
group_desc = 'DEM'
compare_two_groups(filepath_nonmissing, group_nonmissing, filepath_missing, group_missing, group_desc)
```
```{r, fig.width=3, fig.height=6}
# rep of 1st seed pair
filepath_nonmissing = '../simulation_results/fully_observed_11.RData'
filepath_missing = '../simulation_results/MCAR30_16.RData'
group_nonmissing = 1
group_missing = 5
group_desc = 'REP'
compare_two_groups(filepath_nonmissing, group_nonmissing, filepath_missing, group_missing, group_desc)
```
* * *
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Timeline;
// Plays background music driven by the day/night cycle events of DayCicle.
public class MusicPlay : MonoBehaviour
{
    public AudioSource music;    // day-time music source
    public AudioClip nightsound; // one-shot clip played at nightfall
    public DayCicle daycontrol;  // publisher of morning/night callbacks

    // Subscribe to the day/night callbacks once at startup.
    void Start()
    {
        daycontrol.myMorningCall += MorningPlay;
        daycontrol.myNightCall += NightPlay;
    }

    // Unsubscribe so a destroyed component is never invoked by a
    // still-alive DayCicle (the previous version leaked the handlers).
    // The empty per-frame Update() was removed: Unity invokes it every
    // frame even when the body is empty, costing a call for nothing.
    void OnDestroy()
    {
        if (daycontrol != null)
        {
            daycontrol.myMorningCall -= MorningPlay;
            daycontrol.myNightCall -= NightPlay;
        }
    }

    // Starts the regular (morning) music.
    public void MorningPlay()
    {
        music.Play();
    }

    // Plays the night sound once on top of the current music.
    public void NightPlay()
    {
        music.PlayOneShot(nightsound);
    }
}
|
html {
    /* Always reserve scrollbar space so the layout does not shift
       between short and long pages. */
    overflow-y: scroll;
}

/* Embedded CodeMirror editor sizing. */
.CodeMirror {
    font-size:13px;
    height: 350px;
}

/* Subtle block background that brightens on hover. */
.light-gray-block {
    background-color: #F8F8F8;
}
.light-gray-block:hover {
    background-color: white;
}

.input-group {
    margin-bottom: 5px;
    width: 100%;
}

/* Small "add" icon aligned with surrounding text. */
.add-new-icon {
    width: 25px;
    vertical-align: inherit;
}

/* Middle button of a joined input group: square, no side borders. */
.input-middle-btn button{
    border-radius: 0px;
    border-left: 0px;
    border-right: 0px;
}
<?php
defined('BASEPATH') OR exit('No direct script access allowed');
/**
* Description of Categorias_model
*
* @author ralf
*/
class Categorias_model extends CI_Model{

    // Map of logical table names to physical table names.
    protected $table = array(
        'tb_categorias'=>'macategorias',
    );

    public function __construct(){
        parent::__construct();
    }

    /**
     * Total number of rows in the categories table.
     */
    public function get_total(){
        return $this->db->count_all_results($this->table['tb_categorias']);
    }

    /**
     * Fetch one page of category rows.
     * Returns an array of row objects, or false when the page is empty.
     */
    public function get_current_page_records($limit, $start){
        $this->db->limit($limit, $start);
        $result_set = $this->db->get($this->table['tb_categorias']);
        if ($result_set->num_rows() <= 0){
            return false;
        }
        $records = array();
        foreach ($result_set->result() as $row){
            $records[] = $row;
        }
        return $records;
    }

    /**
     * Fetch a single category by id; returns an array of matching rows.
     */
    public function selectbyid($params){
        $query = $this->db->get_where($this->table['tb_categorias'], array(
            'id'=>$params['categoria_id']
        ));
        return $query->result_array();
    }

    /**
     * Insert a new category; returns the generated id.
     */
    public function insert($params){
        $this->db->insert($this->table['tb_categorias'], array(
            'nombre'=>$params['nombre']
        ));
        return $this->db->insert_id();
    }

    /**
     * Rename an existing category; returns the number of affected rows.
     */
    public function update($params){
        $this->db->where('id', $params['id']);
        $this->db->update($this->table['tb_categorias'], array(
            'nombre'=>$params['nombre']
        ));
        return $this->db->affected_rows();
    }

    /**
     * Delete a category by id; returns the number of affected rows.
     */
    public function delete($id){
        $this->db->delete($this->table['tb_categorias'], array(
            'id'=>$id
        ));
        return $this->db->affected_rows();
    }

    /**
     * All categories as an array of associative arrays.
     */
    public function select_all(){
        $query = $this->db->get($this->table['tb_categorias']);
        return $query->result_array();
    }
}
|
package services.cybersource
import javax.inject.Inject
import javax.inject.Singleton
import akka.actor.ActorSystem
import com.decidir.coretx.messaging.KafkaConfigurationProvider
import play.api.Configuration
import akka.actor.Props
import play.Logger
import com.decidir.coretx.messaging.ConfigurableKafkaConsumerActor
import akka.actor.ActorRef
import com.decidir.coretx.api.ReviewCS
import play.api.libs.json.Json
import com.decidir.coretx.api.CybersourceJsonMessages.reviewStateReads
/** Constants for the Cybersource state-change listener. */
object CybersourceListener {
  val topic = "cybersource-topic"
}

/**
 * Eagerly-instantiated singleton that wires a Kafka consumer to
 * `CybersourceService`: every message on `cybersource-topic` is parsed as a
 * `ReviewCS` and forwarded as a state change.
 *
 * NOTE(review): construction throws when `sps.kafka.bootstrapServers` is
 * missing, which aborts application startup -- confirm this is intended.
 */
@Singleton
class CybersourceListenerFactory @Inject() (configuration: Configuration,
    kafkaConfigurationProvider: KafkaConfigurationProvider,
    actorSystem: ActorSystem,
    cybersourceService: CybersourceService) {

  Logger.info("Iniciando listener de cambio de estado para Cybersource")

  val topics = List(CybersourceListener.topic)
  // Fail fast if the Kafka bootstrap servers are not configured.
  val bootstrapServers = configuration.getString("sps.kafka.bootstrapServers").getOrElse(throw new Exception("No se definieron bootstrap servers"))

  // Consumer actor: invokes `onRecord` for every record received on `topics`.
  val listener = actorSystem.actorOf(Props(
    new ConfigurableKafkaConsumerActor(
      kafkaConfigurationProvider.consumerConf("cybersource", true),
      topics,
      onRecord, None)))

  Logger.info("Listener de cambio de estado para Cybersource")

  // Parse each Kafka message as a ReviewCS and hand it to the service.
  // Malformed messages are logged and dropped (no retry / dead-letter).
  def onRecord(key: Option[String], msg: String, consumerActor: Option[ActorRef]) = {
    Json.parse(msg).validate[ReviewCS].fold(
      errors => Logger.error("Error cs message: " + errors),
      reviewState => {
        Logger.info("Recive cs message: " + msg)
        cybersourceService.changeState(reviewState)
      }
    )
  }
}
#!/usr/bin/env bash
# Fail on errors, unset variables, and failures anywhere in a pipeline.
set -o errexit
set -o nounset
set -o pipefail

# Bash built-in timer: reset here, read at the bottom for the elapsed report.
SECONDS=0

# always set in stage params
SCRIPT_ENV=${SCRIPT_ENV:-dev}

# Check basic params
case "$SCRIPT_ENV" in
    dev)
        echo "RUNNING IN DEV ENV"
        ;;
    prod)
        echo "RUNNING IN PROD ENV"
        ;;
    docker)
        echo "RUNNING IN DOCKER ENV"
        ;;
    *)
        echo >&2 "Must set SCRIPT_ENV = (prod|dev|docker)"
        exit 2
        ;;
esac
function setup_tmp_dir() {
    # mktemp -d both creates the directory and prints its absolute path.
    # The previous "/var/$(mktemp -d)" built a path mktemp never created
    # (leaking the real temp dir) and only worked because a later
    # `mkdir -p` happened to create the bogus path.
    LOCAL_TMP_DIR="$(mktemp -d)"
}
setup_tmp_dir # CALLING RIGHT AWAY (to avoid issues with unbound var later)
# Print where the temp dir lives (name kept: callers use this spelling).
function echo_tmp_dir_locaton() {
    echo "TEMP DIR IS AT $LOCAL_TMP_DIR"
}

# Remove the temp dir if it still exists; idempotent, safe from the EXIT trap.
function remove_tmp_dir() {
    if [[ -d "$LOCAL_TMP_DIR" ]]; then
        rm -r "$LOCAL_TMP_DIR"
    fi
}

#####
## ## REGISTER CLEANUP HOOKS
#####

# All cleanup steps, run on any exit (success or failure).
function cleanup_hooks() {
    remove_tmp_dir
}
trap cleanup_hooks EXIT

#####
## ## SETUP LOCAL WORK DIRECTORIES
#####

# Create the working directory layout underneath the temp dir.
function setup_local_vars_and_dirs() {
    LOCAL_GC_REPO_BASE_DIR="$LOCAL_TMP_DIR/app-repo"
    LOCAL_GC_REPO_TGZ_PATH="$LOCAL_GC_REPO_BASE_DIR/repo.tgz"
    LOCAL_GC_DIR="$LOCAL_TMP_DIR/gc"
    mkdir -p "$LOCAL_GC_REPO_BASE_DIR"
    mkdir -p "$LOCAL_GC_DIR"
}
#####
## ## Commands to use Python and AWS
#####
# Resolve the Python interpreter, AWS CLI invocation and Elasticsearch
# endpoint for the current SCRIPT_ENV.
function setup_aws_and_python_exec_commands() {
    case "$SCRIPT_ENV" in
        dev)
            PYTHON_CMD="/opt/gc-venv/bin/python"
            AWS_CMD="aws"
            # Required external setting; fail with a clear message if unset
            # (a bare ${ES_HOST} would abort anyway under `set -o nounset`).
            ES_HOST="${ES_HOST?ES_HOST must be set for dev env}"
            ES_PORT=443
            ;;
        prod)
            PYTHON_CMD="/opt/gc-venv/bin/python"
            AWS_CMD="aws"
            ES_HOST="${ES_HOST?ES_HOST must be set for prod env}"
            ES_PORT=443
            ;;
        docker)
            PYTHON_CMD="/home/sdc/app-venv/bin/python"
            AWS_CMD="aws --endpoint-url http://localstack:4572"
            ES_HOST=elasticsearch
            ES_PORT=9200
            ;;
        *)
            # Keep the accepted values in sync with the cases above (the old
            # message wrongly advertised "test" instead of "docker").
            echo >&2 "Must set SCRIPT_ENV = (prod|dev|docker)"
            exit 2
            ;;
    esac
    echo "Using Python: $PYTHON_CMD"
    echo "Using AWS: $AWS_CMD"
    echo "Elasticsearch Update URL: $ES_HOST:$ES_PORT"
}
#####
## ## S3 ENV Vars
#####
# NOTE(review): relies on S3_GC_REPO_TGZ_PATH being exported by the stage
# environment -- it is never assigned in this script; confirm.
function setup_s3_vars_and_dirs() {
    echo "S3 GC Path Orchestartion App $S3_GC_REPO_TGZ_PATH"
}

#####
## ## Copy Gamechanger code from S3 to locally
#####

# Download the app repo tarball from S3 and unpack it into the temp dir.
function setup_local_repo_copy() {
    echo "FETCHING REPO"
    export AWS_DEFAULT_REGION=$AWS_REGION
    #echo $AWS_CMD s3 cp "s3://$S3_GC_REPO_TGZ_PATH" "$LOCAL_GC_REPO_TGZ_PATH"
    $AWS_CMD s3 cp "s3://$S3_GC_REPO_TGZ_PATH" "$LOCAL_GC_REPO_TGZ_PATH"
    tar -xvzf "$LOCAL_GC_REPO_TGZ_PATH" -C "$LOCAL_GC_REPO_BASE_DIR"
}

# Subsequent python -m invocations assume the repo root as CWD.
function change_into_local_repo_dir() {
    cd "$LOCAL_GC_REPO_BASE_DIR"
}
#####
## ## Run Gamechanger Symphony
#####
# Fetch the input CSV from S3 and run the ingestion pipeline.
function gamechanger_symphony() {
    echo "RUNNING Gamechanger Symphony"
    #export PYTHONPATH=$LOCAL_TMP_DIR/app-repo/gamechanger
    #export GC_APP_CONFIG_NAME=$SCRIPT_ENV
    # $AWS_CMD stays unquoted on purpose: in the docker env it carries extra
    # arguments (--endpoint-url ...). All path/value expansions are quoted so
    # spaces in tmp dirs or prefixes cannot word-split the commands.
    $AWS_CMD s3 cp "s3://advana-data-zone/${AWS_S3_CSV_PREFIX}${destination}" "$LOCAL_TMP_DIR/test.csv"
    "$PYTHON_CMD" -m configuration init "$SCRIPT_ENV"
    "$PYTHON_CMD" -m configuration check-connections
    "$PYTHON_CMD" -m dataPipelines.gc_hermes.gc_pipeline --alias "hermes" --staging-folder staging --es-index "$ES_INDEX_NAME" --es-host "$ES_HOST" --es-port "$ES_PORT" -d "$LOCAL_TMP_DIR/test.csv"
    rm -r "$LOCAL_TMP_DIR"
}
echo "***************************** Start *****************************"
# Resolve env-specific commands, then prepare local dirs and fetch the repo.
setup_aws_and_python_exec_commands
echo_tmp_dir_locaton
setup_local_vars_and_dirs
setup_s3_vars_and_dirs
# setup repo
setup_local_repo_copy
change_into_local_repo_dir
# Gamechanger Symphony
gamechanger_symphony
# Report wall-clock runtime (SECONDS was reset at the top of the script).
duration=$SECONDS
echo "$(($duration / 60)) minutes and $(($duration % 60)) seconds elapsed."
echo "***************************** Done *****************************"
-- List every event with all of its attribute name/value pairs.
-- Left joins keep events that have no attributes (NULL attribute columns).
select * from event as e
left join event_attribute_value as eav on e.id = eav.event_id
left join event_attribute_name as ean on eav.event_attribute_name_id = ean.id
order by e.id
|
package main
import (
"bytes"
"runtime"
"text/template"
)
var (
// Version is version
Version = "0.3.0"
// BuildTime is BuildTime
BuildTime = "2019/07/19"
)
// VersionOptions include version
type VersionOptions struct {
GitCommit string
Version string
BuildTime string
GoVersion string
Os string
Arch string
}
var versionTemplate = ` Version: {{.Version}}
Go version: {{.GoVersion}}
Built: {{.BuildTime}}
OS/Arch: {{.Os}}/{{.Arch}}
`
func getVersion() string {
var doc bytes.Buffer
vo := VersionOptions{
Version: Version,
BuildTime: BuildTime,
GoVersion: runtime.Version(),
Os: runtime.GOOS,
Arch: runtime.GOARCH,
}
tmpl, _ := template.New("version").Parse(versionTemplate)
tmpl.Execute(&doc, vo)
return doc.String()
}
|
################################################################################
# Automatically-generated file. Do not edit!
################################################################################
# Add inputs and outputs from these tool invocations to the build variables
# C sources contributed by the newlib support directory.
C_SRCS += \
../system/src/newlib/_exit.c \
../system/src/newlib/_sbrk.c \
../system/src/newlib/_startup.c \
../system/src/newlib/_syscalls.c \
../system/src/newlib/assert.c
# C++ sources from the same directory.
CPP_SRCS += \
../system/src/newlib/_cxx.cpp
# Object files produced from the sources above (build-tree paths).
OBJS += \
./system/src/newlib/_cxx.o \
./system/src/newlib/_exit.o \
./system/src/newlib/_sbrk.o \
./system/src/newlib/_startup.o \
./system/src/newlib/_syscalls.o \
./system/src/newlib/assert.o
# Dependency files emitted by the -MMD flags in the recipes below.
C_DEPS += \
./system/src/newlib/_exit.d \
./system/src/newlib/_sbrk.d \
./system/src/newlib/_startup.d \
./system/src/newlib/_syscalls.d \
./system/src/newlib/assert.d
CPP_DEPS += \
./system/src/newlib/_cxx.d
# Each subdirectory must supply rules for building sources it contributes
# Pattern rule: compile a newlib C++ source with the cross g++ for Cortex-M3.
system/src/newlib/%.o: ../system/src/newlib/%.cpp
@echo 'Building file: $<'
@echo 'Invoking: Cross ARM GNU C++ Compiler'
arm-none-eabi-g++ -mcpu=cortex-m3 -mthumb -Og -fmessage-length=0 -fsigned-char -ffunction-sections -fdata-sections -ffreestanding -fno-move-loop-invariants -Wall -Wextra -g3 -DDEBUG -DUSE_FULL_ASSERT -DSTM32F10X_MD -DUSE_STDPERIPH_DRIVER -DHSE_VALUE=8000000 -I"../include" -I"../system/include" -I"../system/include/cmsis" -I"../system/include/stm32f1-stdperiph" -std=gnu++11 -fabi-version=0 -fno-exceptions -fno-rtti -fno-use-cxa-atexit -fno-threadsafe-statics -MMD -MP -MF"$(@:%.o=%.d)" -MT"$(@)" -c -o "$@" "$<"
@echo 'Finished building: $<'
@echo ' '
# Pattern rule: compile a newlib C source with the cross gcc, same target flags.
system/src/newlib/%.o: ../system/src/newlib/%.c
@echo 'Building file: $<'
@echo 'Invoking: Cross ARM GNU C Compiler'
arm-none-eabi-gcc -mcpu=cortex-m3 -mthumb -Og -fmessage-length=0 -fsigned-char -ffunction-sections -fdata-sections -ffreestanding -fno-move-loop-invariants -Wall -Wextra -g3 -DDEBUG -DUSE_FULL_ASSERT -DSTM32F10X_MD -DUSE_STDPERIPH_DRIVER -DHSE_VALUE=8000000 -I"../include" -I"../system/include" -I"../system/include/cmsis" -I"../system/include/stm32f1-stdperiph" -std=gnu11 -MMD -MP -MF"$(@:%.o=%.d)" -MT"$(@)" -c -o "$@" "$<"
@echo 'Finished building: $<'
@echo ' '
|
SET DEFINE OFF;
-- Foreign key: AFW_12_STRUC_APLIC.REF_PAGE_FORML references AFW_13_PAGE.SEQNC.
-- Deleting the referenced page nulls the reference on dependent rows rather
-- than blocking the delete (ON DELETE SET NULL).
ALTER TABLE AFW_12_STRUC_APLIC ADD (
CONSTRAINT AFW_12_STRUC_APLIC_FK4
FOREIGN KEY (REF_PAGE_FORML)
REFERENCES AFW_13_PAGE (SEQNC)
ON DELETE SET NULL
ENABLE VALIDATE)
/
|
---
layout: post
title: "Práctica 04"
main-class: 'practica'
permalink: /MuestreoySeriesdeTiempo/MyST:title.html
tags:
introduction: |
Ejercicios sobre modelos paramétricos: <br>
- Identificación. <br>
- Selección de modelos. <br>
- Ajuste de modelos. <br>
- Validación de supuestos. <br>
- Pronóstico de modelos.
header-includes:
- \usepackage{amsmath,amssymb,amsthm,amsfonts}
- \usepackage[sectionbib]{natbib}
- \usepackage[hidelinks]{hyperref}
output:
md_document:
variant: markdown_strict+backtick_code_blocks+autolink_bare_uris+ascii_identifiers+tex_math_single_backslash
preserve_yaml: TRUE
always_allow_html: yes
knit: (function(inputFile, encoding) {
rmarkdown::render(inputFile, encoding = encoding,
output_dir = "../../MuestreoySeriesdeTiempo/_posts/", output_format = "all" ) })
bibliography: "../../referencias.bib"
csl: "../../apa.csl"
---
```{r knitr_init, echo=FALSE, cache=FALSE}
library(knitr)
## Global options
## Chunk defaults for the whole post: show code, hide messages/warnings,
## and route figures/cache under the MuestreoySeriesdeTiempo site tree.
opts_chunk$set(echo=TRUE,
cache=TRUE,
prompt=FALSE,
tidy=TRUE,
comment=NA,
message=FALSE,
warning=FALSE,
fig.path = paste0("../../MuestreoySeriesdeTiempo/images/", "Practica04"),
cache.path = "../../MuestreoySeriesdeTiempo/cache/",
## NOTE(review): `cache` is passed twice in this call (TRUE above, FALSE
## here) -- confirm which value knitr actually applies.
cache = FALSE)
```
# Lista de ejercicios
A partir de las series de tiempo suministradas en los archivos de los siguientes enlaces
* [Generación CO2](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/CO2.xlsx){:target="_blank"}
* [Consumo de Gas en España](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/GasEspaña.xlsx){:target="_blank"}
* [Tasa Desempleo Canada](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/CanadianUnemploymentRate.xlsx){:target="_blank"}
* [Muertos en Accidentes en USA](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/USAccDeaths.xlsx){:target="_blank"}
* [Lago Huron](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/LakeHuron.xlsx){:target="_blank"}
* [Serie 1](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/MySTSim1.xlsx){:target="_blank"}
* [Serie 2](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/MySTSim2.xlsx){:target="_blank"}
* [Serie 3](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/MySTSim3.xlsx){:target="_blank"}
* [Serie 4](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/MySTSim4.xlsx){:target="_blank"}
* [Serie 5](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/MySTSim5.xlsx){:target="_blank"}
* [Serie 6](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/MySTSim6.xlsx){:target="_blank"}
* [Serie 7](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/MySTSim7.xlsx){:target="_blank"}
* [Serie 8](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/MySTSim8.xlsx){:target="_blank"}
* [Serie 9](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/MySTSim9.xlsx){:target="_blank"}
* [Serie 10](https://github.com/jiperezga/jiperezga.github.io/raw/master/Dataset/MySTSim10.xlsx){:target="_blank"}
y con cada una de ellas
<ol type = "a">
<li> Divida la serie temporal en dos, la primera parte serán las observaciones con las cuales se realizará el proceso de estimación, la segunda con la cual se realizará la validación del modelo. Para la parte de validación use las observaciones del último año o últimos dos años dependiendo de la periodicidad de la serie y la longitud de la serie temporal. </li>
<li> Identifique las componentes que posee la serie temporal. </li>
<li> Determine si es necesario realizar transformaciones a la serie temporal en caso de que la serie no sea estacionaria. </li>
<li> Determine si es necesario realizar diferenciaciones regulares y/o estacionales a la serie temporal en caso de que la serie no sea estacionaria. </li>
<li> Identifique aquellos modelos que pueden ajustar la serie temporal, y describa los hallazgos encontrados. </li>
<li> Para los modelos ajustados, realice las pruebas de significancia de los parámetros y realice los análisis correspondientes. </li>
<li> Para los modelos ajustados, realice la validación de los supuestos de los modelos con los estadísticos y gráficos que considere pertinentes. </li>
<li> Para aquellos modelos que cumplen los supuestos, realice los pronósticos para uno o dos años en el futuro, dependiendo de la cantidad de datos que dejó para validación. </li>
<li> Compare los resultados obtenidos en los pronósticos respecto a los datos de validación, calculando las medidas de error que considere pertinentes, y explique cuál de todos es el mejor modelo para la serie de tiempo. </li>
<li> Compare los resultados obtenidos con el modelo no paramétrico más adecuado y determine cuál de las dos metodologías es mejor en cada caso. </li>
</ol>
|
HOW TO SET UP
1. set your database name in config/databases.php
2. set the host, username, password, and database name
3. open config/config.php and change base_url
4. open config/migration.php and change the version to the latest timestamp among the migration files in the migrations folder
5. run the migrations at base_url/migrate
6. seed the master data at base_url/seed/masterdata
7. open config/rest.php and add the URI front url
|
import { observable, runInAction } from 'mobx'
import BankAccountModel from './BankAccountModel'
import PurchaserModel from './PurchaserModel'
import InvoiceRowModel from './InvoiceRowModel'
import {
DEFAULT_ORDER_NUMBER_FORMAT,
DEFAULT_LANGUAGE,
DEFAULT_CURRENCY,
DEFAULT_DUE_PERIOD,
VAT_AMOUNT,
} from 'consts'
export default class SupplierModel {
id
@observable label
@observable logo
@observable order_number_format
@observable default_language
@observable identification_text
@observable registered_for_vat
@observable default_currency
@observable default_due_date_period
@observable purchasers
@observable bank_accounts
@observable footer
@observable show_qr_code
@observable default_invoice_rows
constructor({
id,
label,
logo,
order_number_format,
default_language,
identification_text,
registered_for_vat,
default_currency,
default_due_date_period,
purchasers,
bank_accounts,
footer,
show_qr_code,
default_invoice_rows,
} = {}) {
runInAction(() => {
const idIsDefined = id !== void 0
this.id = id
this.logo = logo || ''
this.order_number_format = order_number_format || DEFAULT_ORDER_NUMBER_FORMAT
this.label = label || ''
this.default_language = default_language || DEFAULT_LANGUAGE
this.identification_text = identification_text || ''
this.registered_for_vat = registered_for_vat || false
this.default_currency = default_currency || DEFAULT_CURRENCY
this.default_due_date_period = default_due_date_period || DEFAULT_DUE_PERIOD
this.purchasers = purchasers || ( idIsDefined ? [] : [new PurchaserModel()] )
this.bank_accounts = bank_accounts || ( idIsDefined ? [] : [new BankAccountModel()] )
this.footer = footer || ''
this.show_qr_code = show_qr_code !== void 0 ? show_qr_code : true
this.default_invoice_rows = default_invoice_rows || ( idIsDefined ? [] : [new InvoiceRowModel({ vat: registered_for_vat ? VAT_AMOUNT : 0 })] )
})
}
} |
import 'package:flutter/material.dart';
import 'package:invoiceninja_flutter/constants.dart';
/// A named set of accent colors used to skin the app.
class ColorTheme {
/// Creates a theme; the two gray tones default to fixed light/dark values.
ColorTheme(
{this.colorPrimary,
this.colorInfo,
this.colorSuccess,
this.colorWarning,
this.colorDanger,
this.colorLightGray = const Color(0xff888888),
this.colorDarkGray = const Color(0xff333333)});
Color colorPrimary;
Color colorInfo;
Color colorSuccess;
Color colorWarning;
Color colorDanger;
Color colorLightGray;
Color colorDarkGray;
}
/// Available color themes keyed by name. The first two entries use the
/// `kColorThemeLight`/`kColorThemeDark` constants from `constants.dart`;
/// the remaining keys are string literals naming each theme.
Map<String, ColorTheme> colorThemesMap = {
kColorThemeLight: ColorTheme(
colorPrimary: const Color(0xff324da1),
colorInfo: const Color(0xff57a6e4),
colorSuccess: const Color(0xff4c9a1c),
colorWarning: const Color(0xffcd8900),
colorDanger: const Color(0xffb93700),
),
kColorThemeDark: ColorTheme(
colorPrimary: const Color(0xff0c45a3),
colorInfo: const Color(0xff298aab),
colorSuccess: const Color(0xff407535),
colorWarning: const Color(0xffa87000),
colorDanger: const Color(0xff8b3d40),
),
'cerulean': ColorTheme(
colorPrimary: const Color(0xff2fa4e7),
colorInfo: const Color(0xff033c73),
colorSuccess: const Color(0xff73a839),
colorWarning: const Color(0xffdd5600),
colorDanger: const Color(0xffc71c22),
),
'cosmo': ColorTheme(
colorPrimary: const Color(0xff2780e3),
colorInfo: const Color(0xff9954bb),
colorSuccess: const Color(0xff3fb618),
colorWarning: const Color(0xffff7518),
colorDanger: const Color(0xffff0039),
),
'cyborg': ColorTheme(
colorPrimary: const Color(0xff2a9fd6),
colorInfo: const Color(0xff9933cc),
colorSuccess: const Color(0xff77b300),
colorWarning: const Color(0xffff8800),
colorDanger: const Color(0xffcc0000),
),
'darkly': ColorTheme(
colorPrimary: const Color(0xff375a7f),
colorInfo: const Color(0xff3498db),
colorSuccess: const Color(0xff00bc8c),
colorWarning: const Color(0xfff39c12),
colorDanger: const Color(0xffe74c3c),
),
'flatly': ColorTheme(
colorPrimary: const Color(0xff2c3e50),
colorInfo: const Color(0xff3498db),
colorSuccess: const Color(0xff18bc9c),
colorWarning: const Color(0xfff39c12),
colorDanger: const Color(0xffe74c3c),
),
'journal': ColorTheme(
colorPrimary: const Color(0xffeb6864),
colorInfo: const Color(0xff336699),
colorSuccess: const Color(0xff22b24c),
colorWarning: const Color(0xfff5e625),
colorDanger: const Color(0xfff57a00),
),
'litera': ColorTheme(
colorPrimary: const Color(0xff4582ec),
colorInfo: const Color(0xff17a2b8),
colorSuccess: const Color(0xff02b875),
colorWarning: const Color(0xfff0ad4e),
colorDanger: const Color(0xffd9534f),
),
'lumen': ColorTheme(
colorPrimary: const Color(0xff158cba),
colorInfo: const Color(0xff75caeb),
colorSuccess: const Color(0xff28b62c),
colorWarning: const Color(0xffff851b),
colorDanger: const Color(0xffff4136),
),
'lux': ColorTheme(
colorPrimary: const Color(0xff1a1a1a),
colorInfo: const Color(0xff1f9bcf),
colorSuccess: const Color(0xff4bbf73),
colorWarning: const Color(0xfff0ad4e),
colorDanger: const Color(0xffd9534f),
),
'materia': ColorTheme(
colorPrimary: const Color(0xff2196f3),
colorInfo: const Color(0xff9c27b0),
colorSuccess: const Color(0xff4caf50),
colorWarning: const Color(0xffff9800),
colorDanger: const Color(0xffe51c23),
),
'minty': ColorTheme(
colorPrimary: const Color(0xff78c2ad),
colorInfo: const Color(0xff6cc3d5),
colorSuccess: const Color(0xff56cc9d),
colorWarning: const Color(0xffffce67),
colorDanger: const Color(0xffff7851),
),
'pulse': ColorTheme(
colorPrimary: const Color(0xff593196),
colorInfo: const Color(0xff009cdc),
colorSuccess: const Color(0xff13b955),
colorWarning: const Color(0xffefa31d),
colorDanger: const Color(0xfffc3939),
),
'sandstone': ColorTheme(
colorPrimary: const Color(0xff325d88),
colorInfo: const Color(0xff29abe0),
colorSuccess: const Color(0xff93c54b),
colorWarning: const Color(0xfff47c3c),
colorDanger: const Color(0xffd9534f),
),
'simplex': ColorTheme(
colorPrimary: const Color(0xffd9230f),
colorInfo: const Color(0xff029acf),
colorSuccess: const Color(0xff469408),
colorWarning: const Color(0xffd9831f),
colorDanger: const Color(0xff9b479f),
),
'sketchy': ColorTheme(
colorPrimary: const Color(0xff333333),
colorInfo: const Color(0xff17a2b8),
colorSuccess: const Color(0xff28a745),
colorWarning: const Color(0xffffc107),
colorDanger: const Color(0xffdc3545),
),
'slate': ColorTheme(
colorPrimary: const Color(0xff3a3f44),
colorInfo: const Color(0xff5bc0de),
colorSuccess: const Color(0xff62c462),
colorWarning: const Color(0xfff89406),
colorDanger: const Color(0xffee5f5b),
),
'solar': ColorTheme(
colorPrimary: const Color(0xffb58900),
colorInfo: const Color(0xff268bd2),
colorSuccess: const Color(0xff2aa198),
colorWarning: const Color(0xffcb4b16),
colorDanger: const Color(0xffd33682),
),
'spacelab': ColorTheme(
colorPrimary: const Color(0xff446e9b),
colorInfo: const Color(0xff3399f3),
colorSuccess: const Color(0xff3cb521),
colorWarning: const Color(0xffd47500),
colorDanger: const Color(0xffcd0200),
),
'superhero': ColorTheme(
colorPrimary: const Color(0xffdf691a),
colorInfo: const Color(0xff5bc0de),
colorSuccess: const Color(0xff5cb85c),
colorWarning: const Color(0xfff0ad4e),
colorDanger: const Color(0xffd9534f),
),
'united': ColorTheme(
colorPrimary: const Color(0xffe95420),
colorInfo: const Color(0xff17a2b8),
colorSuccess: const Color(0xff38b44a),
colorWarning: const Color(0xffefb73e),
colorDanger: const Color(0xffdf382c),
),
'yeti': ColorTheme(
colorPrimary: const Color(0xff008cba),
colorInfo: const Color(0xff5bc0de),
colorSuccess: const Color(0xff43ac6a),
colorWarning: const Color(0xffe99002),
colorDanger: const Color(0xfff04124),
),
};
|
:: Copy this mod folder into the local RimWorld Mods directory.
:: build_path keeps its surrounding quotes, so it is expanded unquoted below.
SET build_path="C:\Program Files (x86)\Steam\steamapps\common\RimWorld\Mods"
:: /E copies subdirectories (including empty ones), /Y overwrites without
:: prompting, /EXCLUDE skips files listed in C:\excludeFiles.txt.
xcopy "%CD%" %build_path% /E /Y /EXCLUDE:C:\excludeFiles.txt
################################################################################
# Copyright (C) 2017 Advanced Micro Devices, Inc.
################################################################################
# Test: the project's version tweak component is derived from git metadata.
find_program(GIT NAMES git)
# Build a throwaway git repository under the temp dir to drive the version logic.
file(MAKE_DIRECTORY ${TMP_DIR}/repo)
test_exec(COMMAND ${GIT} init WORKING_DIRECTORY ${TMP_DIR}/repo)
# Declare version 1.0 and assert each component; the tweak must match
# "1-<hex>" once the repo has a commit (checked by the regex below).
write_version_cmake(${TMP_DIR}/repo 1.0 "
test_expect_eq(\${PROJECT_VERSION_MAJOR} 1)
test_expect_eq(\${PROJECT_VERSION_MINOR} 0)
test_expect_eq(\${PROJECT_VERSION_PATCH} 0)
test_expect_eq(\${PROJECT_VERSION} \${PROJECT_VERSION_MAJOR}.\${PROJECT_VERSION_MINOR}.\${PROJECT_VERSION_PATCH}.\${PROJECT_VERSION_TWEAK})
test_expect_matches(\${PROJECT_VERSION_TWEAK} ^1-[0-9a-f]+\$)
")
# Commit so the repo has history for the tweak to be computed from.
test_exec(COMMAND ${GIT} add . WORKING_DIRECTORY ${TMP_DIR}/repo)
test_exec(COMMAND ${GIT} commit -am "Init" WORKING_DIRECTORY ${TMP_DIR}/repo)
install_dir(${TMP_DIR}/repo)
|
REBOL [
System: "REBOL [R3] Language Interpreter and Run-time Environment"
Title: "Generate OS host API headers"
Rights: {
Copyright 2012 REBOL Technologies
REBOL is a trademark of REBOL Technologies
}
License: {
Licensed under the Apache License, Version 2.0
See: http://www.apache.org/licenses/LICENSE-2.0
}
Author: "Carl Sassenrath"
Needs: 2.100.100
]
; When true, `process` prints each file as it scans it.
verbose: false
; The third component of the boot version becomes the host-lib version.
version: load %../boot/version.r
lib-version: version/3
print ["--- Make OS Ext Lib --- Version:" lib-version]
; Set platform TARGET
do %systems.r
target: config-system/os-dir
do %form-header.r
; Work from the platform-specific OS source directory.
change-dir append %../os/ target
; C sources scanned for OS_* function headers.
files: [
%host-lib.c
%../host-device.c
]
; If it is graphics enabled:
if all [
not find any [system/options/args []] "no-gfx"
find [3] system/version/4
][
append files [%host-window.c]
]
; Number of exported OS_* functions found so far (becomes HOST_LIB_SIZE).
cnt: 0
; Output accumulators: extern declarations (xlib), struct members (rlib),
; macros (mlib), vector-table entries (dlib), and the concatenated specs
; that are checksummed into HOST_LIB_SUM (xsum).
xlib: make string! 20000
rlib: make string! 1000
mlib: make string! 1000
dlib: make string! 1000
xsum: make string! 1000
; One appender per accumulator; each adds a newline after the data.
emit: func [d] [append repend xlib d newline]
remit: func [d] [append repend rlib d newline]
demit: func [d] [append repend dlib d newline]
memit: func [d /nol] [
repend mlib d
if not nol [append mlib newline]
]
; Build an argument list "(a,b,...)" with one letter per `c` separator in `s`.
; NOTE(review): `out` is not in the /local list, so it escapes into the
; enclosing context -- confirm this is intentional.
count: func [s c /local n] [
if find ["()" "(void)"] s [return "()"]
out: copy "(a"
n: 1
while [s: find/tail s c][
repend out [#"," #"a" + n]
n: n + 1
]
append out ")"
]
; Pad with spaces from the tail of `start` out to column `col`.
pads: func [start col] [
col: col - offset? start tail start
head insert/dup clear "" #" " col
]
; Parse rule: locate each "/***...*/" banner, capture the C function spec
; line that follows, and for every non-static OS_* function emit its extern
; declaration, vector-table member, table entry and call macro.
func-header: [
[
thru "/***" 10 100 "*" newline
thru "*/"
copy spec to newline
(if all [
spec
trim spec
not find spec "static"
fn: any [ ; make sure we got only functions with "OS_" at the beginning
find spec " *OS_"
find spec " OS_"
]
fn: find spec "OS_"
find spec #"("
][
emit ["extern " spec "; // " the-file]
append xsum spec
p1: copy/part spec fn
p3: find fn #"("
p2: copy/part fn p3
p2u: uppercase copy p2
p2l: lowercase copy p2
demit [tab p2 ","]
remit [tab p1 "(*" p2l ")" p3 ";"]
args: count p3 #","
m: tail mlib
memit/nol ["#define " p2u args]
memit [pads m 35 " Host_Lib->" p2l args]
cnt: cnt + 1
]
)
newline
[
"/*" ; must be in func header section, not file banner
any [
thru "**"
[#" " | #"^-"]
copy line thru newline
]
thru "*/"
|
none
]
]
]
; Scan one C source file for OS_* function headers (see func-header above).
; Side effect: sets `the-file`, which the emitted extern comments reference.
process: func [file] [
if verbose [?? file]
data: read the-file: file
data: to-string data ; R3
parse/all data [
any func-header
]
]
;process %mem_string.c halt
; Open the REBOL_Host_Lib struct; per-function members were added via remit.
remit {
typedef struct REBOL_Host_Lib ^{
int size;
unsigned int ver_sum;
REBDEV **devices;}
memit {
extern REBOL_HOST_LIB *Host_Lib;
}
; Scan every .c file in the list built earlier.
foreach file files [
print ["scanning" file]
if all [
%.c = suffix? file
][process file]
]
remit "} REBOL_HOST_LIB;"
; Assemble host-lib.h: version/checksum defines, the struct, externs
; (host side), the init table, and the call macros (REBOL side).
out: reduce [
form-header/gen "Host Access Library" %host-lib.h %make-os-ext.r
{
#define HOST_LIB_VER } lib-version {
#define HOST_LIB_SUM } checksum/tcp to-binary xsum {
#define HOST_LIB_SIZE } cnt {
extern REBDEV *Devices[];
}
rlib
{
//** Included by HOST *********************************************
#ifndef REB_DEF
}
xlib
{
#ifdef OS_LIB_TABLE
REBOL_HOST_LIB *Host_Lib;
REBOL_HOST_LIB Host_Lib_Init = ^{ // Host library function vector table.
HOST_LIB_SIZE,
(HOST_LIB_VER << 16) + HOST_LIB_SUM,
(REBDEV**)&Devices,
}
dlib
{^};
#endif //OS_LIB_TABLE
#else //REB_DEF
//** Included by REBOL ********************************************
}
mlib
{
#endif //REB_DEF
}
]
;print out ;halt
;print ['checksum checksum/tcp xsum]
write %../../include/host-lib.h out
;ask "Done"
print " "
|
package io.github.kavahub.learnjava.format;
import java.util.List;
import java.util.Locale;
import java.util.ResourceBundle;
import lombok.extern.slf4j.Slf4j;
/**
*
* 国际化
*
* @author PinWei Wan
* @since 1.0.0
*/
@Slf4j
public class Localization {

    /** Looks up the "label" entry of the "messages" bundle for the given locale. */
    public static String getLabel(Locale locale) {
        return ResourceBundle.getBundle("messages", locale).getString("label");
    }

    /** Logs the localized label once per supplied locale. */
    public static void run(List<Locale> locales) {
        for (Locale locale : locales) {
            log.info(getLabel(locale));
        }
    }
}
|
#lang scribble/manual
@require[mechanics/private/numerics/integer/farey]
@require[scribble/eval]
@title[#:tag "farey"]{Farey Sequences}
@defmodule[mechanics/private/numerics/integer/farey]
@(define farey-eval
(make-base-eval
'(begin (require mechanics/private/numerics/integer/farey))))
@hyperlink["http://mathworld.wolfram.com/FareySequence.html"]{Farey Sequences} enumerate reduced fractions.
@defproc[(farey [lo integer?] [hi integer?]) (-> integer? (listof integer?))]{
Given a lower and upper bound, return a function on naturals @racket[n] that enumerates all fractions between @racket[lo] and @racket[hi] inclusive, with denominators no greater than @racket[n].
}
@examples[#:eval farey-eval
((farey 0 1) 1)
((farey 0 1) 5)
]
|
// Copyright © 2019-2021 VMware, Inc. All Rights Reserved.
// SPDX-License-Identifier: BSD-2-Clause
// Auto generated code. DO NOT EDIT.
// Data type definitions file for service: Bgp.
// Includes binding types of a structures and enumerations defined in the service.
// Shared by client-side stubs and server-side skeletons to ensure type
// compatibility.
package locale_services
import (
"github.com/vmware/vsphere-automation-sdk-go/runtime/bindings"
"github.com/vmware/vsphere-automation-sdk-go/runtime/data"
"github.com/vmware/vsphere-automation-sdk-go/runtime/protocol"
"github.com/vmware/vsphere-automation-sdk-go/services/nsxt-gm/model"
"reflect"
)
// bgpGetInputType builds the vAPI input struct for the Bgp "get" operation:
// two required string fields identifying the tier-0 router and its locale
// service.
func bgpGetInputType() bindings.StructType {
fields := make(map[string]bindings.BindingType)
fieldNameMap := make(map[string]string)
fields["tier0_id"] = bindings.NewStringType()
fields["locale_service_id"] = bindings.NewStringType()
fieldNameMap["tier0_id"] = "Tier0Id"
fieldNameMap["locale_service_id"] = "LocaleServiceId"
var validators = []bindings.Validator{}
return bindings.NewStructType("operation-input", fields, reflect.TypeOf(data.StructValue{}), fieldNameMap, validators)
}
// bgpGetOutputType: the operation returns a BgpRoutingConfig reference.
func bgpGetOutputType() bindings.BindingType {
return bindings.NewReferenceType(model.BgpRoutingConfigBindingType)
}
// bgpGetRestMetadata maps the operation onto
// GET /global-manager/api/v1/global-infra/tier-0s/{tier0Id}/locale-services/{localeServiceId}/bgp
// with success status 200; both input fields are bound as path parameters,
// and the final map translates vAPI standard errors to HTTP status codes.
func bgpGetRestMetadata() protocol.OperationRestMetadata {
fields := map[string]bindings.BindingType{}
fieldNameMap := map[string]string{}
paramsTypeMap := map[string]bindings.BindingType{}
pathParams := map[string]string{}
queryParams := map[string]string{}
headerParams := map[string]string{}
dispatchHeaderParams := map[string]string{}
bodyFieldsMap := map[string]string{}
fields["tier0_id"] = bindings.NewStringType()
fields["locale_service_id"] = bindings.NewStringType()
fieldNameMap["tier0_id"] = "Tier0Id"
fieldNameMap["locale_service_id"] = "LocaleServiceId"
paramsTypeMap["tier0_id"] = bindings.NewStringType()
paramsTypeMap["locale_service_id"] = bindings.NewStringType()
paramsTypeMap["tier0Id"] = bindings.NewStringType()
paramsTypeMap["localeServiceId"] = bindings.NewStringType()
pathParams["tier0_id"] = "tier0Id"
pathParams["locale_service_id"] = "localeServiceId"
resultHeaders := map[string]string{}
errorHeaders := map[string]map[string]string{}
return protocol.NewOperationRestMetadata(
fields,
fieldNameMap,
paramsTypeMap,
pathParams,
queryParams,
headerParams,
dispatchHeaderParams,
bodyFieldsMap,
"",
"",
"GET",
"/global-manager/api/v1/global-infra/tier-0s/{tier0Id}/locale-services/{localeServiceId}/bgp",
"",
resultHeaders,
200,
"",
errorHeaders,
map[string]int{"com.vmware.vapi.std.errors.invalid_request": 400, "com.vmware.vapi.std.errors.unauthorized": 403, "com.vmware.vapi.std.errors.service_unavailable": 503, "com.vmware.vapi.std.errors.internal_server_error": 500, "com.vmware.vapi.std.errors.not_found": 404})
}
// bgpPatchInputType builds the vAPI input struct for the Bgp "patch"
// operation: the two path identifiers, the BgpRoutingConfig payload, and an
// optional boolean "override".
func bgpPatchInputType() bindings.StructType {
fields := make(map[string]bindings.BindingType)
fieldNameMap := make(map[string]string)
fields["tier0_id"] = bindings.NewStringType()
fields["locale_service_id"] = bindings.NewStringType()
fields["bgp_routing_config"] = bindings.NewReferenceType(model.BgpRoutingConfigBindingType)
fields["override"] = bindings.NewOptionalType(bindings.NewBooleanType())
fieldNameMap["tier0_id"] = "Tier0Id"
fieldNameMap["locale_service_id"] = "LocaleServiceId"
fieldNameMap["bgp_routing_config"] = "BgpRoutingConfig"
fieldNameMap["override"] = "Override"
var validators = []bindings.Validator{}
return bindings.NewStructType("operation-input", fields, reflect.TypeOf(data.StructValue{}), fieldNameMap, validators)
}
// bgpPatchOutputType: the patch operation returns no payload.
func bgpPatchOutputType() bindings.BindingType {
return bindings.NewVoidType()
}
// bgpPatchRestMetadata maps the operation onto
// PATCH /global-manager/api/v1/global-infra/tier-0s/{tier0Id}/locale-services/{localeServiceId}/bgp
// with success status 204; "bgp_routing_config" is the request body,
// "override" a query parameter, and the ids path parameters.
func bgpPatchRestMetadata() protocol.OperationRestMetadata {
fields := map[string]bindings.BindingType{}
fieldNameMap := map[string]string{}
paramsTypeMap := map[string]bindings.BindingType{}
pathParams := map[string]string{}
queryParams := map[string]string{}
headerParams := map[string]string{}
dispatchHeaderParams := map[string]string{}
bodyFieldsMap := map[string]string{}
fields["tier0_id"] = bindings.NewStringType()
fields["locale_service_id"] = bindings.NewStringType()
fields["bgp_routing_config"] = bindings.NewReferenceType(model.BgpRoutingConfigBindingType)
fields["override"] = bindings.NewOptionalType(bindings.NewBooleanType())
fieldNameMap["tier0_id"] = "Tier0Id"
fieldNameMap["locale_service_id"] = "LocaleServiceId"
fieldNameMap["bgp_routing_config"] = "BgpRoutingConfig"
fieldNameMap["override"] = "Override"
paramsTypeMap["tier0_id"] = bindings.NewStringType()
paramsTypeMap["locale_service_id"] = bindings.NewStringType()
paramsTypeMap["override"] = bindings.NewOptionalType(bindings.NewBooleanType())
paramsTypeMap["bgp_routing_config"] = bindings.NewReferenceType(model.BgpRoutingConfigBindingType)
paramsTypeMap["tier0Id"] = bindings.NewStringType()
paramsTypeMap["localeServiceId"] = bindings.NewStringType()
pathParams["tier0_id"] = "tier0Id"
pathParams["locale_service_id"] = "localeServiceId"
queryParams["override"] = "override"
resultHeaders := map[string]string{}
errorHeaders := map[string]map[string]string{}
return protocol.NewOperationRestMetadata(
fields,
fieldNameMap,
paramsTypeMap,
pathParams,
queryParams,
headerParams,
dispatchHeaderParams,
bodyFieldsMap,
"",
"bgp_routing_config",
"PATCH",
"/global-manager/api/v1/global-infra/tier-0s/{tier0Id}/locale-services/{localeServiceId}/bgp",
"",
resultHeaders,
204,
"",
errorHeaders,
map[string]int{"com.vmware.vapi.std.errors.invalid_request": 400, "com.vmware.vapi.std.errors.unauthorized": 403, "com.vmware.vapi.std.errors.service_unavailable": 503, "com.vmware.vapi.std.errors.internal_server_error": 500, "com.vmware.vapi.std.errors.not_found": 404})
}
// bgpUpdateInputType builds the vAPI input struct for the Bgp "update"
// operation; the field set is identical to the patch operation's input.
func bgpUpdateInputType() bindings.StructType {
fields := make(map[string]bindings.BindingType)
fieldNameMap := make(map[string]string)
fields["tier0_id"] = bindings.NewStringType()
fields["locale_service_id"] = bindings.NewStringType()
fields["bgp_routing_config"] = bindings.NewReferenceType(model.BgpRoutingConfigBindingType)
fields["override"] = bindings.NewOptionalType(bindings.NewBooleanType())
fieldNameMap["tier0_id"] = "Tier0Id"
fieldNameMap["locale_service_id"] = "LocaleServiceId"
fieldNameMap["bgp_routing_config"] = "BgpRoutingConfig"
fieldNameMap["override"] = "Override"
var validators = []bindings.Validator{}
return bindings.NewStructType("operation-input", fields, reflect.TypeOf(data.StructValue{}), fieldNameMap, validators)
}
// bgpUpdateOutputType: the update operation returns the resulting
// BgpRoutingConfig.
func bgpUpdateOutputType() bindings.BindingType {
return bindings.NewReferenceType(model.BgpRoutingConfigBindingType)
}
// bgpUpdateRestMetadata maps the operation onto
// PUT /global-manager/api/v1/global-infra/tier-0s/{tier0Id}/locale-services/{localeServiceId}/bgp
// with success status 200; body, query and path parameter wiring mirrors
// the patch operation above.
func bgpUpdateRestMetadata() protocol.OperationRestMetadata {
fields := map[string]bindings.BindingType{}
fieldNameMap := map[string]string{}
paramsTypeMap := map[string]bindings.BindingType{}
pathParams := map[string]string{}
queryParams := map[string]string{}
headerParams := map[string]string{}
dispatchHeaderParams := map[string]string{}
bodyFieldsMap := map[string]string{}
fields["tier0_id"] = bindings.NewStringType()
fields["locale_service_id"] = bindings.NewStringType()
fields["bgp_routing_config"] = bindings.NewReferenceType(model.BgpRoutingConfigBindingType)
fields["override"] = bindings.NewOptionalType(bindings.NewBooleanType())
fieldNameMap["tier0_id"] = "Tier0Id"
fieldNameMap["locale_service_id"] = "LocaleServiceId"
fieldNameMap["bgp_routing_config"] = "BgpRoutingConfig"
fieldNameMap["override"] = "Override"
paramsTypeMap["tier0_id"] = bindings.NewStringType()
paramsTypeMap["locale_service_id"] = bindings.NewStringType()
paramsTypeMap["override"] = bindings.NewOptionalType(bindings.NewBooleanType())
paramsTypeMap["bgp_routing_config"] = bindings.NewReferenceType(model.BgpRoutingConfigBindingType)
paramsTypeMap["tier0Id"] = bindings.NewStringType()
paramsTypeMap["localeServiceId"] = bindings.NewStringType()
pathParams["tier0_id"] = "tier0Id"
pathParams["locale_service_id"] = "localeServiceId"
queryParams["override"] = "override"
resultHeaders := map[string]string{}
errorHeaders := map[string]map[string]string{}
return protocol.NewOperationRestMetadata(
fields,
fieldNameMap,
paramsTypeMap,
pathParams,
queryParams,
headerParams,
dispatchHeaderParams,
bodyFieldsMap,
"",
"bgp_routing_config",
"PUT",
"/global-manager/api/v1/global-infra/tier-0s/{tier0Id}/locale-services/{localeServiceId}/bgp",
"",
resultHeaders,
200,
"",
errorHeaders,
map[string]int{"com.vmware.vapi.std.errors.invalid_request": 400, "com.vmware.vapi.std.errors.unauthorized": 403, "com.vmware.vapi.std.errors.service_unavailable": 503, "com.vmware.vapi.std.errors.internal_server_error": 500, "com.vmware.vapi.std.errors.not_found": 404})
}
|
//SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
import "hardhat/console.sol";
import "@openzeppelin/contracts/utils/Context.sol";
import "@openzeppelin/contracts/utils/Counters.sol";
import "@openzeppelin/contracts/token/ERC721/ERC721.sol";
import "@openzeppelin/contracts/access/AccessControlEnumerable.sol";
import "@openzeppelin/contracts/token/ERC721/extensions/ERC721Enumerable.sol";
import "@openzeppelin/contracts/token/ERC721/extensions/ERC721Burnable.sol";
import "@openzeppelin/contracts/token/ERC721/extensions/ERC721Pausable.sol";
/**
* @dev {ERC721} token, including:
*
* - ability for holders to burn (destroy) their tokens
* - a minter role that allows for token minting (creation)
* - a pauser role that allows to stop all token transfers
* - token ID and URI autogeneration
*
* This contract uses {AccessControl} to lock permissioned functions using the
* different roles - head to its documentation for details.
*
* The account that deploys the contract will be granted the minter and pauser
* roles, as well as the default admin role, which will let it grant both minter
* and pauser roles to other accounts.
*/
contract NetSepio is
Context,
AccessControlEnumerable,
ERC721Enumerable,
ERC721Burnable,
ERC721Pausable
{
using Counters for Counters.Counter;
bytes32 public constant VOTER_ROLE = keccak256("VOTER_ROLE");
bytes32 public constant MODERATOR_ROLE = keccak256("MODERATOR_ROLE");
Counters.Counter private _tokenIdTracker;
string private _baseTokenURI;
struct WebsiteReview {
string domainName;
string websiteURL;
string websiteType;
string websiteTag;
string websiteSafety;
string metadataHash;
}
mapping(uint256 => WebsiteReview) public WebsiteReviews;
event ReviewCreation(address indexed minter, uint256 indexed tokenId, uint256 indexed timestamp);
event ReviewDeletion(address indexed ownerOrApproved, uint256 indexed tokenId, uint256 indexed timestamp);
event ReviewUpdate(address indexed ownerOrApproved, uint256 indexed tokenId, string oldMetadataHash, string newMetadatHash, uint256 indexed timestamp);
/**
* @dev Grants `DEFAULT_ADMIN_ROLE`, `VOTER_ROLE` and `MODERATOR_ROLE` to the
* account that deploys the contract.
*
* Token URIs will be autogenerated based on `baseURI` and their token IDs.
* See {ERC721-tokenURI}.
*/
constructor(
string memory name,
string memory symbol,
string memory baseTokenURI
) ERC721(name, symbol) {
_baseTokenURI = baseTokenURI;
_setupRole(DEFAULT_ADMIN_ROLE, _msgSender());
_setupRole(VOTER_ROLE, _msgSender());
_setupRole(MODERATOR_ROLE, _msgSender());
}
function _baseURI() internal view virtual override returns (string memory) {
return _baseTokenURI;
}
/**
* @dev Creates a new token for `to`. Its token ID will be automatically
* assigned (and available on the emitted {IERC721-Transfer} event), and the token
* URI autogenerated based on the base URI passed at construction.
*
* See {ERC721-_safeMint}.
*
* Requirements:
*
* - the caller must have the `VOTER_ROLE`.
*/
function createReview(string memory _domainName, string memory _websiteURL, string memory _websiteType, string memory _websiteTag, string memory _websiteSafety, string memory _metadataHash) public virtual {
require(hasRole(VOTER_ROLE, _msgSender()), "NetSepio: must have voter role to submit review");
// We cannot just use balanceOf to create the new tokenId because tokens
// can be burned (destroyed), so we need a separate counter.
uint256 tokenId = _tokenIdTracker.current();
_safeMint(_msgSender(), tokenId);
// Create Mapping
WebsiteReview memory websiteReview = WebsiteReview({
domainName: _domainName,
websiteURL: _websiteURL,
websiteType: _websiteType,
websiteTag: _websiteTag,
websiteSafety: _websiteSafety,
metadataHash: _metadataHash
});
WebsiteReviews[tokenId] = websiteReview;
_tokenIdTracker.increment();
emit ReviewCreation(_msgSender(), tokenId, block.timestamp);
}
/**
* @dev Destroys (Burns) an existing `tokenId`. See {ERC721-_burn}.
*
* Requirements:
*
* - The caller must own `tokenId` or be an approved operator.
*/
function deleteReview(uint256 tokenId) public virtual {
require(_isApprovedOrOwner(_msgSender(), tokenId), "NetSepio: caller is not owner nor approved to delete review");
// destroy (burn) the token.
_burn(tokenId);
emit ReviewDeletion(_msgSender(), tokenId, block.timestamp);
}
/**
 * @dev Reads the metadata of a specified token. Returns the current data in
 * storage of `tokenId`.
 *
 * @param tokenId The token to read the data off.
 *
 * @return A string representing the current metadataHash mapped with the tokenId.
 */
function readMetadata(uint256 tokenId) public virtual view returns (string memory) {
    // Returns an empty string for token IDs that were never minted or whose
    // review was never populated.
    return WebsiteReviews[tokenId].metadataHash;
}
/**
 * @dev Updates the metadata of a specified token. Writes `newMetadataHash` into storage
 * of `tokenId`.
 *
 * @param tokenId The token to write metadata to.
 * @param newMetadataHash The metadata to be written to the token.
 *
 * Requirements:
 *
 * - the caller must have the `VOTER_ROLE`.
 *
 * Emits a `ReviewUpdate` event.
 */
function updateReview(uint256 tokenId, string memory newMetadataHash) public virtual {
    // Fixed revert message: the access check here is on VOTER_ROLE, not on
    // token ownership/approval, so the old "owner nor approved" text was misleading.
    require(hasRole(VOTER_ROLE, _msgSender()), "NetSepio: must have voter role to update review");
    // Emit the old hash alongside the new one before overwriting storage.
    emit ReviewUpdate(_msgSender(), tokenId, WebsiteReviews[tokenId].metadataHash, newMetadataHash, block.timestamp);
    WebsiteReviews[tokenId].metadataHash = newMetadataHash;
}
/**
 * @dev Pauses all token transfers.
 *
 * See {ERC721Pausable} and {Pausable-_pause}.
 *
 * Requirements:
 *
 * - the caller must have the `MODERATOR_ROLE`.
 */
function pause() public virtual {
    require(hasRole(MODERATOR_ROLE, _msgSender()), "NetSepio: must have moderator role to pause");
    // Pausable flips the paused flag; ERC721Pausable blocks transfers while set.
    _pause();
}
/**
 * @dev Unpauses all token transfers.
 *
 * See {ERC721Pausable} and {Pausable-_unpause}.
 *
 * Requirements:
 *
 * - the caller must have the `MODERATOR_ROLE`.
 */
function unpause() public virtual {
    require(hasRole(MODERATOR_ROLE, _msgSender()), "NetSepio: must have moderator role to unpause");
    // Clears the paused flag set by pause(), re-enabling transfers.
    _unpause();
}
/// @dev Hook required by Solidity to disambiguate between the parent
/// implementations (enumeration bookkeeping and the pausable transfer check).
function _beforeTokenTransfer(
    address from,
    address to,
    uint256 tokenId
) internal virtual override(ERC721, ERC721Enumerable, ERC721Pausable) {
    super._beforeTokenTransfer(from, to, tokenId);
}
/**
 * @dev See {IERC165-supportsInterface}.
 */
function supportsInterface(bytes4 interfaceId)
    public
    view
    virtual
    override(AccessControlEnumerable, ERC721, ERC721Enumerable)
    returns (bool)
{
    // Delegates to the combined parents' ERC165 support.
    return super.supportsInterface(interfaceId);
}
} |
<?php
namespace Database\Seeders;
use Carbon\Traits\Date;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Seeder;
use Illuminate\Support\Carbon;
use Illuminate\Support\Facades\DB;
class EmployeeTypeSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * Truncates the employee_types table and re-inserts the canonical set of
     * employee types in a single batched INSERT instead of one query per row.
     *
     * @return void
     */
    public function run()
    {
        // DB::statement('SET FOREIGN_KEY_CHECKS=0;');
        DB::table('employee_types')->truncate();
        $types = ['system admin', 'doctor', 'nurse', 'cleaner', 'security'];
        // One shared timestamp for the whole seeding run.
        $now = Carbon::now();
        DB::table('employee_types')->insert(array_map(static fn ($type) => [
            'name' => $type,
            'created_at' => $now,
            'updated_at' => $now,
        ], $types));
    }
}
|
/* saira-vietnamese-900-italic */
/* Saira, Vietnamese subset, weight 900 (Black), italic.
   font-display: swap keeps text visible in a fallback font while the
   webfont loads; woff2 is preferred with woff as fallback. */
@font-face {
    font-family: 'Saira';
    font-style: italic;
    font-display: swap;
    font-weight: 900;
    src: url('./files/saira-vietnamese-900-italic.woff2') format('woff2'), url('./files/saira-vietnamese-900-italic.woff') format('woff');
}
|
require "UnLua"
local Screen = require "Tutorials.Screen"
local FVector2D = UE.FVector2D
local FLinearColor = UE.FLinearColor
local M = Class()

--- Moves this widget to a random position in the viewport.
-- Generalized: optional maxX/maxY upper bounds default to the original
-- hard-coded 1920x960 area, so existing no-argument callers behave the same.
-- @param maxX (optional) maximum X coordinate, defaults to 1920
-- @param maxY (optional) maximum Y coordinate, defaults to 960
function M:RandomPosition(maxX, maxY)
    local x = math.random(0, maxX or 1920)
    local y = math.random(0, maxY or 960)
    self:SetPositionInViewport(FVector2D(x, y))
end

return M
|
Imports Gradients = org.deeplearning4j.rl4j.agent.learning.update.Gradients
Imports NeuralNetUpdaterConfiguration = org.deeplearning4j.rl4j.agent.learning.update.updater.NeuralNetUpdaterConfiguration
Imports org.deeplearning4j.rl4j.network
Imports Tag = org.junit.jupiter.api.Tag
Imports Test = org.junit.jupiter.api.Test
Imports RunWith = org.junit.runner.RunWith
Imports Mock = org.mockito.Mock
Imports MockitoJUnitRunner = org.mockito.junit.MockitoJUnitRunner
Imports NativeTag = org.nd4j.common.tests.tags.NativeTag
Imports TagNames = org.nd4j.common.tests.tags.TagNames
import static org.junit.jupiter.api.Assertions.assertTrue
import static org.junit.jupiter.api.Assertions.fail
import static org.mockito.ArgumentMatchers.any
Imports org.mockito.Mockito
'
' * ******************************************************************************
' * *
' * *
' * * This program and the accompanying materials are made available under the
' * * terms of the Apache License, Version 2.0 which is available at
' * * https://www.apache.org/licenses/LICENSE-2.0.
' * *
' * * See the NOTICE file distributed with this work for additional
' * * information regarding copyright ownership.
' * * Unless required by applicable law or agreed to in writing, software
' * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
' * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
' * * License for the specific language governing permissions and limitations
' * * under the License.
' * *
' * * SPDX-License-Identifier: Apache-2.0
' * *****************************************************************************
'
Namespace org.deeplearning4j.rl4j.agent.learning.update.updater.async
'JAVA TO VB CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
'ORIGINAL LINE: @RunWith(MockitoJUnitRunner.class) @Tag(TagNames.FILE_IO) @NativeTag public class AsyncSharedNetworksUpdateHandlerTest
''' <summary>
''' Converted JUnit/Mockito tests for AsyncSharedNetworksUpdateHandler:
''' verifies gradient application to the global network and periodic target
''' network synchronization.
''' NOTE(review): the Java @Test/@Mock/@RunWith annotations were dropped by the
''' converter (see TODO comments below), so these methods are not wired to any
''' .NET test runner as-is — confirm the intended test framework.
''' </summary>
Public Class AsyncSharedNetworksUpdateHandlerTest
    'JAVA TO VB CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
    'ORIGINAL LINE: @Mock ITrainableNeuralNet globalCurrentMock;
    ' Mock of the shared ("global") network that receives gradients.
    Friend globalCurrentMock As ITrainableNeuralNet
    'JAVA TO VB CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
    'ORIGINAL LINE: @Mock ITrainableNeuralNet targetMock;
    ' Mock of the target network that is periodically synced from the global one.
    Friend targetMock As ITrainableNeuralNet
    'JAVA TO VB CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
    'ORIGINAL LINE: @Test public void when_handleGradientsIsCalledWithoutTarget_expect_gradientsAppliedOnGlobalCurrent()
    Public Overridable Sub when_handleGradientsIsCalledWithoutTarget_expect_gradientsAppliedOnGlobalCurrent()
        ' Arrange
        Dim configuration As NeuralNetUpdaterConfiguration = NeuralNetUpdaterConfiguration.builder().build()
        Dim sut As New AsyncSharedNetworksUpdateHandler(globalCurrentMock, configuration)
        Dim gradients As New Gradients(10)
        ' Act
        sut.handleGradients(gradients)
        ' Assert: without a target network, gradients go straight to the global network.
        verify(globalCurrentMock, times(1)).applyGradients(gradients)
    End Sub
    'JAVA TO VB CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
    'ORIGINAL LINE: @Test public void when_handleGradientsIsCalledWithTarget_expect_gradientsAppliedOnGlobalCurrentAndTargetUpdated()
    Public Overridable Sub when_handleGradientsIsCalledWithTarget_expect_gradientsAppliedOnGlobalCurrentAndTargetUpdated()
        ' Arrange: target sync every 2 updates.
        Dim configuration As NeuralNetUpdaterConfiguration = NeuralNetUpdaterConfiguration.builder().targetUpdateFrequency(2).build()
        Dim sut As New AsyncSharedNetworksUpdateHandler(globalCurrentMock, targetMock, configuration)
        Dim gradients As New Gradients(10)
        ' Act: two updates should trigger exactly one target sync.
        sut.handleGradients(gradients)
        sut.handleGradients(gradients)
        ' Assert
        verify(globalCurrentMock, times(2)).applyGradients(gradients)
        verify(targetMock, times(1)).copyFrom(globalCurrentMock)
    End Sub
    'JAVA TO VB CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
    'ORIGINAL LINE: @Test public void when_configurationHasInvalidFrequency_expect_Exception()
    Public Overridable Sub when_configurationHasInvalidFrequency_expect_Exception()
        Try
            ' A non-positive targetUpdateFrequency must be rejected by the constructor.
            ' NOTE(review): the fail() message mentions NullPointerException but the
            ' Catch is for ArgumentException — carried over from the source; verify
            ' against the original Java test.
            Dim configuration As NeuralNetUpdaterConfiguration = NeuralNetUpdaterConfiguration.builder().targetUpdateFrequency(0).build()
            Dim sut As New AsyncSharedNetworksUpdateHandler(globalCurrentMock, targetMock, configuration)
            fail("NullPointerException should have been thrown")
        Catch exception As System.ArgumentException
            Dim expectedMessage As String = "Configuration: targetUpdateFrequency must be greater than 0, got: [0]"
            Dim actualMessage As String = exception.Message
            assertTrue(actualMessage.Contains(expectedMessage))
        End Try
    End Sub
End Class
End Namespace |
private
formatClass: aClass selector: aSymbol formatter: aFormatterClass
	"Round-trip check: parse the method aSymbol of aClass, format the
	 resulting AST with aFormatterClass, and assert that the formatted
	 source parses back to an equal tree (formatting preserves structure)."

	| source tree1 tree2 |
	source := aClass sourceCodeAt: aSymbol.
	tree1 := RBParser parseMethod: source.
	tree2 := RBParser
		parseMethod: (aFormatterClass new format: tree1)
		onError: [ :err :pos | self assert: false ].
	self assert: tree1 = tree2
// SPDX-License-Identifier: MIT
// solhint-disable const-name-snakecase
pragma solidity 0.6.10;
/**
* @title OwnedUpgradeabilityProxy
* @dev This contract combines an upgradeability proxy with basic authorization control functionalities
*/
contract OwnedUpgradeabilityProxy {
    /**
     * @dev Event to show ownership has been transferred
     * @param previousOwner representing the address of the previous owner
     * @param newOwner representing the address of the new owner
     */
    event ProxyOwnershipTransferred(address indexed previousOwner, address indexed newOwner);
    /**
     * @dev Event to show ownership transfer is pending
     * @param currentOwner representing the address of the current owner
     * @param pendingOwner representing the address of the pending owner
     */
    event NewPendingOwner(address currentOwner, address pendingOwner);
    // Storage position of the owner and pendingOwner of the contract.
    // Fixed, hashed slots (unstructured storage) so the proxy's own state can
    // never collide with the implementation contract's storage layout.
    bytes32 private constant proxyOwnerPosition = 0x6279e8199720cf3557ecd8b58d667c8edc486bd1cf3ad59ea9ebdfcae0d0dfac; //keccak256("trueUSD.proxy.owner");
    bytes32 private constant pendingProxyOwnerPosition = 0x8ddbac328deee8d986ec3a7b933a196f96986cb4ee030d86cc56431c728b83f4; //keccak256("trueUSD.pending.proxy.owner");
    /**
     * @dev the constructor sets the original owner of the contract to the sender account.
     */
    constructor() public {
        _setUpgradeabilityOwner(msg.sender);
    }
    /**
     * @dev Throws if called by any account other than the owner.
     */
    modifier onlyProxyOwner() {
        require(msg.sender == proxyOwner(), "only Proxy Owner");
        _;
    }
    /**
     * @dev Throws if called by any account other than the pending owner.
     */
    modifier onlyPendingProxyOwner() {
        require(msg.sender == pendingProxyOwner(), "only pending Proxy Owner");
        _;
    }
    /**
     * @dev Tells the address of the owner
     * @return owner the address of the owner
     */
    function proxyOwner() public view returns (address owner) {
        bytes32 position = proxyOwnerPosition;
        assembly {
            // Read the owner address from its fixed storage slot.
            owner := sload(position)
        }
    }
    /**
     * @dev Tells the address of the owner
     * @return pendingOwner the address of the pending owner
     */
    function pendingProxyOwner() public view returns (address pendingOwner) {
        bytes32 position = pendingProxyOwnerPosition;
        assembly {
            pendingOwner := sload(position)
        }
    }
    /**
     * @dev Sets the address of the owner
     */
    function _setUpgradeabilityOwner(address newProxyOwner) internal {
        bytes32 position = proxyOwnerPosition;
        assembly {
            sstore(position, newProxyOwner)
        }
    }
    /**
     * @dev Sets the address of the owner
     */
    function _setPendingUpgradeabilityOwner(address newPendingProxyOwner) internal {
        bytes32 position = pendingProxyOwnerPosition;
        assembly {
            sstore(position, newPendingProxyOwner)
        }
    }
    /**
     * @dev Allows the current owner to transfer control of the contract to a newOwner.
     *changes the pending owner to newOwner. But doesn't actually transfer
     * @param newOwner The address to transfer ownership to.
     */
    function transferProxyOwnership(address newOwner) external onlyProxyOwner {
        require(newOwner != address(0));
        _setPendingUpgradeabilityOwner(newOwner);
        emit NewPendingOwner(proxyOwner(), newOwner);
    }
    /**
     * @dev Allows the pendingOwner to claim ownership of the proxy
     */
    function claimProxyOwnership() external onlyPendingProxyOwner {
        // Two-step transfer: only the pending owner can finalize, which
        // guards against transferring ownership to a mistyped address.
        emit ProxyOwnershipTransferred(proxyOwner(), pendingProxyOwner());
        _setUpgradeabilityOwner(pendingProxyOwner());
        _setPendingUpgradeabilityOwner(address(0));
    }
    /**
     * @dev Allows the proxy owner to upgrade the current version of the proxy.
     * @param implementation representing the address of the new implementation to be set.
     */
    function upgradeTo(address implementation) public virtual onlyProxyOwner {
        address currentImplementation;
        bytes32 position = implementationPosition;
        assembly {
            currentImplementation := sload(position)
        }
        // Reject no-op upgrades to the already-active implementation.
        require(currentImplementation != implementation);
        assembly {
            sstore(position, implementation)
        }
        emit Upgraded(implementation);
    }
    /**
     * @dev This event will be emitted every time the implementation gets upgraded
     * @param implementation representing the address of the upgraded implementation
     */
    event Upgraded(address indexed implementation);
    // Storage position of the address of the current implementation
    bytes32 private constant implementationPosition = 0x6e41e0fbe643dfdb6043698bf865aada82dc46b953f754a3468eaa272a362dc7; //keccak256("trueUSD.proxy.implementation");
    /// @dev Tells the address of the current implementation contract.
    function implementation() public view returns (address impl) {
        bytes32 position = implementationPosition;
        assembly {
            impl := sload(position)
        }
    }
    /**
     * @dev Fallback functions allowing to perform a delegatecall to the given implementation.
     * This function will return whatever the implementation call returns
     */
    fallback() external payable {
        proxyCall();
    }
    receive() external payable {
        proxyCall();
    }
    /// @dev Forwards the current call to the implementation via delegatecall
    /// and bubbles up the return or revert data unchanged.
    function proxyCall() internal {
        bytes32 position = implementationPosition;
        assembly {
            // Copy the full calldata to free memory. returndatasize() is
            // used as a cheap zero constant: no external call has been made
            // yet in this context, so it is guaranteed to be 0.
            let ptr := mload(0x40)
            calldatacopy(ptr, returndatasize(), calldatasize())
            // Delegatecall into the implementation, forwarding all gas; the
            // implementation executes against this proxy's storage.
            let result := delegatecall(gas(), sload(position), ptr, calldatasize(), returndatasize(), returndatasize())
            // Copy whatever the implementation returned (or its revert data).
            returndatacopy(ptr, 0, returndatasize())
            switch result
            // result == 0: the delegatecall reverted; propagate the revert data.
            case 0 {
                revert(ptr, returndatasize())
            }
            default {
                return(ptr, returndatasize())
            }
        }
    }
}
|
package main
import (
"context"
"github.com/CESARBR/knot-cloud-storage/pkg/data"
"github.com/CESARBR/knot-cloud-storage/pkg/logging"
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
"time"
)
// disconnectClient closes the MongoDB client's connections using the given
// context. A failure to disconnect is treated as fatal (panics), since it
// only happens during shutdown.
func disconnectClient(ctx context.Context, client *mongo.Client) {
	if err := client.Disconnect(ctx); err != nil {
		panic(err)
	}
}
// setupDatabase connects to MongoDB at databaseURI and returns the named
// database handle, a context for subsequent operations, and the client
// (caller is responsible for disconnecting it).
//
// Fix: the previous version returned the 10s-timeout context after deferring
// cancel(), so callers (e.g. a later Disconnect) received an already-cancelled
// context. The bounded context is now used for the connection attempt only,
// and a long-lived context is returned instead.
func setupDatabase(databaseURI string, databaseName string, logger logging.Logger) (*mongo.Database, context.Context, *mongo.Client) {
	// Bound only the connection attempt; release the timer when done.
	connectCtx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	client, err := mongo.Connect(connectCtx, options.Client().ApplyURI(databaseURI))
	failOnError(err, "Failed to connect to MongoDB")
	return client.Database(databaseName), context.Background(), client
}
// setupStore wraps data.NewStore, building the storage layer on top of the
// given database handle. intValue is passed straight through to NewStore
// (its meaning is defined by the data package — TODO confirm, likely an
// expiration/TTL setting).
func setupStore(newDatabase *mongo.Database, logger logging.Logger, intValue int32) data.Store {
	return data.NewStore(newDatabase, logger, intValue)
}
|
defmodule TPUServe.MixProject do
  @moduledoc """
  Mix project definition for TPUServe. Builds with elixir_make (native code)
  and packages single-binary releases via Burrito for x86_64 Linux.
  """
  use Mix.Project

  def project do
    [
      app: :tpuserve,
      version: "0.1.0",
      releases: releases(),
      elixir: "~> 1.12",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      # elixir_make runs the Makefile before the Elixir compiler.
      compilers: [:elixir_make] ++ Mix.compilers()
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger],
      mod: {TPUServe.Application, []}
    ]
  end

  # Release configuration: Burrito wraps the assembled release into a
  # self-contained executable for the listed targets.
  def releases do
    [
      tpuserve: [
        steps: [:assemble, &Burrito.wrap/1],
        burrito: [
          targets: [
            linux: [os: :linux, cpu: :x86_64]
          ]
        ]
      ]
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:plug_cowboy, "~> 2.0"},
      {:jason, "~> 1.2"},
      {:elixir_make, "~> 0.6", runtime: false},
      {:msgpax, "~> 2.3.0"},
      {:burrito, github: "burrito-elixir/burrito"},
      # exla/nx are pinned to a shared branch; nx is overridden so both
      # resolve to the same source tree.
      {:exla, "~> 0.1.0-dev",
       github: "elixir-nx/nx", sparse: "exla", branch: "sm-exla-export", only: [:test]},
      {:nx, "~> 0.1.0-dev",
       github: "elixir-nx/nx", sparse: "nx", branch: "sm-exla-export", override: true}
    ]
  end
end
|
-- packages/acs-events/sql/postgresql/test/timespan-test.sql
--
-- Regression tests for timespan API
-- Separated from time_interval-test.sql
--
-- @author jowell@jsabino.com
-- @creation-date 2001-06-26
--
-- $Id: timespan-test.sql,v 1.4 2015/12/04 13:50:03 cvs Exp $
-- Note: These tests use the semi-ported utPLSQL regression package
-- Load the semi-ported utPLSQL regression framework helpers.
\i utest-create.sql
-- Set-up the regression test
-- Creates shadow tables mirroring timespans plus snapshots of the keys that
-- existed before the test, so teardown can restore the original state.
CREATE OR REPLACE FUNCTION ut__setup() RETURNS integer AS $$
BEGIN
    raise notice 'Setting up timespans test...';
    -- create copies of the tables (shadow tables) to verify API operations
    -- No need for execute here?
    create table ut_timespans as select * from timespans;
    -- For testing purposes, both tables should still be empty
    PERFORM ut_assert__eqtable ('Comparing copied data for time interval',
                                'timespans',
                                'ut_timespans'
    );
    -- Store keys that are in the table prior to the regression test
    create table ut_interval_ids as select interval_id from time_intervals;
    create table ut_timespan_ids as select timespan_id from timespans;
    return 0;
END;
$$ LANGUAGE plpgsql;
-- Clean up the mess that regression testing did
-- Removes all rows created by the tests (identified as keys absent from the
-- pre-test snapshots) and drops the shadow tables.
CREATE OR REPLACE FUNCTION ut__teardown() RETURNS integer AS $$
BEGIN
    raise notice 'Tearing down timespans test...';
    -- Delete intervals added by tests
    -- cascade delete in timespans should delete corresponding entries in that table
    -- Note that we exclude deleting rows that existed prior to regression test
    delete from timespans
    where timespan_id not in (select timespan_id
                              from ut_timespan_ids);
    -- This is sufficient, actually.
    delete from time_intervals
    where interval_id not in (select interval_id
                              from ut_interval_ids);
    -- Drop test tables
    -- cascade option does not work?
    drop table ut_timespans;
    drop table ut_interval_ids;
    drop table ut_timespan_ids;
    return 0;
END;
$$ LANGUAGE plpgsql;
-- Postgres has this weird behavior that you cannot change a row twice
-- within a transaction.
-- We test the creation of a time interval entry
-- added
select define_function_args('ut__new','interval_id');
--
-- procedure ut__new/1
--
-- Creates a timespan from an existing interval, mirrors the result into the
-- shadow table, and verifies both the interval copy and the timespans row.
CREATE OR REPLACE FUNCTION ut__new(
    new__interval_id integer
) RETURNS integer AS $$
DECLARE
    v_interval_id  time_intervals.interval_id%TYPE;
    v_timespan_id  timespans.timespan_id%TYPE;
BEGIN
    -- The new function will create a copy on the time_intervals table
    v_timespan_id := timespan__new(new__interval_id);
    -- Since the timespan__new function creates a copy of the interval
    -- we need the copied interval_id
    select interval_id into v_interval_id
    from timespans
    where timespan_id = v_timespan_id;
    -- Create shadow entries, too.
    insert into ut_timespans (timespan_id,interval_id)
    values (v_timespan_id,v_interval_id);
    -- The new function will create a copy on the time_intervals table
    -- We do two test. First, we check whether the copying mechanism is ok
    PERFORM ut_assert__eq ('Test of timespan__new copying mechanism: ',
                           time_interval__eq(v_interval_id, new__interval_id),
                           true
    );
    -- Second, we check whether the timespans table is properly populated
    PERFORM ut_assert__eqtable ('Test of timespan__new entry in timespans table: ',
                                'ut_timespans',
                                'timespans'
    );
    -- If successful, interval id is correct
    return v_timespan_id;
END;
$$ LANGUAGE plpgsql;
-- We test the creation of a time interval entry
--
-- procedure ut__new/2
--
-- Convenience overload: creates a fresh interval from two dates, then
-- delegates to ut__new/1.
CREATE OR REPLACE FUNCTION ut__new(
    new__date1 timestamptz,
    new__date2 timestamptz
) RETURNS integer AS $$
DECLARE
    v_interval_id  time_intervals.interval_id%TYPE;
BEGIN
    -- We first want to create an entry in the time interval table
    -- because the timespan_new function copies this interval
    v_interval_id := time_interval__new(new__date1, new__date2);
    -- Create a new timespan using the function above
    return ut__new(v_interval_id);
END;
$$ LANGUAGE plpgsql;
-- Check the deletion of a time interval
-- added
select define_function_args('ut__delete','timespan_id');
--
-- procedure ut__delete/1
--
-- Deletes a timespan and compares the real table against the shadow table.
-- The shadow-table delete itself must happen outside this transaction (see
-- the explanation below), so the final recheck is done by the driver script.
CREATE OR REPLACE FUNCTION ut__delete(
    delete__timespan_id integer
) RETURNS integer AS $$
DECLARE
BEGIN
    -- Delete the row from actual table
    PERFORM timespan__delete(delete__timespan_id);
    PERFORM ut_assert__eqtable ('Testing timespan__delete: ',
                                'ut_timespans',
                                'timespans'
    );
    -- Delete entry from shadow table
    -- JS: Aha, a demonstration of the effect of transactions to foreign keys
    -- JS: It seems that while timespan__delete would remove the row from
    -- JS: time_intervals, the cascade delete removal of the corresponding row
    -- JS: in timespans is not yet done until the transation is complete. Thus,
    -- JS: deleting the row in the shadow table within this function/transaction
    -- JS: will cause the comparison of the timespans table and the shadow table
    -- JS: to fail (since delete will immediately remove the row from the shadow
    -- JS: table). We do the delete outside this function/transaction instead.
    -- Delete from shadow table
    -- delete from ut_timespans
    -- where timespan_id = delete__timespan_id;
    -- If successful, interval id is correct
    return 0;
END;
$$ LANGUAGE plpgsql;
-- added
select define_function_args('ut__join_interval','timespan_id,interval_id,copy_p');
--
-- procedure ut__join_interval/3
--
-- Joins an interval into an existing timespan, mirrors the result into the
-- shadow table, and verifies both the per-timespan row count and full table
-- equality. Cleanup: removed the unused v_interval_id_ck declaration and the
-- dead "select interval_id into v_interval_id" query whose result was never
-- read.
CREATE OR REPLACE FUNCTION ut__join_interval(
    join_interval__timespan_id integer,
    join_interval__interval_id integer,
    join_interval__copy_p boolean
) RETURNS integer AS $$
DECLARE
    v_interval_id_cp  time_intervals.interval_id%TYPE;
BEGIN
    -- Join the supplied interval with existing interval
    -- Return the interval_id being joined (will be different if copy_p = true)
    v_interval_id_cp := timespan__join_interval(join_interval__timespan_id,
                                                join_interval__interval_id,
                                                join_interval__copy_p);
    -- Dont forget to put the newly created timespan into the shadow table
    insert into ut_timespans (timespan_id,interval_id)
    values (join_interval__timespan_id,v_interval_id_cp);
    -- Check if there are now two intervals with the same timespan_id in timespans table
    PERFORM ut_assert__eqquery ('Testing timespan__join with two intervals (2 entries): ',
                                'select count(*)
                                 from timespans
                                 where timespan_id = ' || join_interval__timespan_id,
                                'select 2 from dual'
    );
    -- This is probably a more robust check, since we want to compare the resulting timespan table
    PERFORM ut_assert__eqtable ('Testing timespan__join: table comparison test: ',
                                'ut_timespans',
                                'timespans'
    );
    -- Did not do the interval check since it is dependent upon join_interval__copy_p
    -- Besides, it seems silly to me: since there are only two intervals, checking table equality
    -- AND checking that only two intervals are in the time span should be enough!
    return 0;
END;
$$ LANGUAGE plpgsql;
-- added
--
-- procedure ut__join/2
--
-- Joins all intervals of timespan 2 into timespan 1, mirrors each joined
-- interval into the shadow table, then checks table equality.
CREATE OR REPLACE FUNCTION ut__join(
    join__timespan_id_1 integer,
    join__timespan_id_2 integer
) RETURNS integer AS $$
DECLARE
    rec_timespan  record;
BEGIN
    PERFORM timespan__join(join__timespan_id_1,join__timespan_id_2);
    -- Joining means that the intervals in join__timespan_id_2 are
    -- included in the intervals in join__timespan_id_1
    FOR rec_timespan IN
        select *
        from timespans
        where timespan_id = join__timespan_id_2
    LOOP
        insert into ut_timespans (timespan_id,interval_id)
        values (join__timespan_id_1,rec_timespan.interval_id);
    END LOOP;
    -- Check equality of tables
    PERFORM ut_assert__eqtable ('Testing timespan__join by specifying timespan_id: ',
                                'ut_timespans',
                                'timespans'
    );
    return 0;
END;
$$ LANGUAGE plpgsql;
-- added
select define_function_args('ut__join','timespan_id,start_date,end_date');
--
-- procedure ut__join/3
--
-- Overload: joins a brand-new interval (given by start/end dates) into the
-- timespan, mirrors it into the shadow table, then checks table equality.
CREATE OR REPLACE FUNCTION ut__join(
    join__timespan_id integer,
    join__start_date timestamptz,
    join__end_date timestamptz
) RETURNS integer AS $$
DECLARE
    v_interval_id  time_intervals.interval_id%TYPE;
BEGIN
    v_interval_id := timespan__join(join__timespan_id,join__start_date,join__end_date);
    -- Joining means that the interval becomes part
    -- of the timespan specified by join__timespan_id
    insert into ut_timespans (timespan_id,interval_id)
    values (join__timespan_id,v_interval_id);
    -- Check equality of tables
    PERFORM ut_assert__eqtable ('Testing timespan__join by specifying start and end dates: ',
                                'ut_timespans',
                                'timespans'
    );
    return 0;
END;
$$ LANGUAGE plpgsql;
-- added
select define_function_args('ut__copy','timespan_id,offset');
--
-- procedure ut__copy/2
--
-- Copies a whole timespan (optionally shifted by copy__offset), mirrors every
-- interval of the copy into the shadow table, then checks table equality.
CREATE OR REPLACE FUNCTION ut__copy(
    copy__timespan_id integer,
    copy__offset interval
) RETURNS integer AS $$
DECLARE
    v_timespan_id    timespans.timespan_id%TYPE;
    v_interval_id    time_intervals.interval_id%TYPE;
    v_interval_id_ck time_intervals.interval_id%TYPE;
    rec_timespan     record;
BEGIN
    v_timespan_id := timespan__copy(copy__timespan_id,copy__offset);
    -- Put copy in shadow table. There may be more than one interval in a
    -- time interval so we need to loop through all
    for rec_timespan in
        select *
        from timespans
        where timespan_id = v_timespan_id
    loop
        -- Populate the shadow table
        insert into ut_timespans (timespan_id,interval_id)
        values (rec_timespan.timespan_id,rec_timespan.interval_id);
    end loop;
    -- Check proper population of shadow table
    PERFORM ut_assert__eqtable ('Testing timespan__copy: ',
                                'ut_timespans',
                                'timespans'
    );
    return v_timespan_id;
END;
$$ LANGUAGE plpgsql;
-- added
select define_function_args('ut__exists_p','timespan_id,result');
--
-- procedure ut__exists_p/2
--
-- Asserts that timespan__exists_p returns the expected boolean.
CREATE OR REPLACE FUNCTION ut__exists_p(
    exists_p__timespan_id integer,
    exists_p__result boolean
) RETURNS integer AS $$
DECLARE
BEGIN
    PERFORM ut_assert__eq ('Testing timespan__exists_p: ',
                           timespan__exists_p(exists_p__timespan_id),
                           exists_p__result
    );
    return 0;
END;
$$ LANGUAGE plpgsql;
-- added
select define_function_args('ut__multi_interval_p','timespan_id,result');
--
-- procedure ut__multi_interval_p/2
--
-- Asserts that timespan__multi_interval_p returns the expected boolean.
CREATE OR REPLACE FUNCTION ut__multi_interval_p(
    multi_interval_p__timespan_id integer,
    multi_interval_p__result boolean
) RETURNS integer AS $$
DECLARE
BEGIN
    return ut_assert__eq ('Testing timespan__multi_interval_p: ',
                          timespan__multi_interval_p(multi_interval_p__timespan_id),
                          multi_interval_p__result
    );
END;
$$ LANGUAGE plpgsql;
-- added
select define_function_args('ut__overlaps_interval_p','timespan_id,interval_id,result');
--
-- procedure ut__overlaps_interval_p/3
--
-- Asserts that timespan__overlaps_interval_p returns the expected boolean.
CREATE OR REPLACE FUNCTION ut__overlaps_interval_p(
    overlaps_interval_p__timespan_id integer,
    overlaps_interval_p__interval_id integer,
    overlaps_interval_p__result boolean
) RETURNS integer AS $$
DECLARE
BEGIN
    return ut_assert__eq ('Testing timespan__overlaps_interval_p: ',
                          timespan__overlaps_interval_p(overlaps_interval_p__timespan_id,
                                                        overlaps_interval_p__interval_id),
                          overlaps_interval_p__result
    );
END;
$$ LANGUAGE plpgsql;
-- added
--
-- procedure ut__overlaps_p/3
--
-- Asserts timespan-vs-timespan overlap matches the expected boolean.
CREATE OR REPLACE FUNCTION ut__overlaps_p(
    overlaps_p__timespan_1_id integer,
    overlaps_p__timespan_2_id integer,
    overlaps_p__result boolean
) RETURNS integer AS $$
DECLARE
BEGIN
    return ut_assert__eq ('Testing timespan__overlaps_p, timespan vs. timespan: ',
                          timespan__overlaps_p(overlaps_p__timespan_1_id,
                                               overlaps_p__timespan_2_id),
                          overlaps_p__result
    );
END;
$$ LANGUAGE plpgsql;
-- added
select define_function_args('ut__overlaps_p','timespan_id,start_date,end_date,result');
--
-- procedure ut__overlaps_p/4
--
-- Asserts timespan-vs-date-range overlap matches the expected boolean.
CREATE OR REPLACE FUNCTION ut__overlaps_p(
    overlaps_p__timespan_id integer,
    overlaps_p__start_date timestamptz,
    overlaps_p__end_date timestamptz,
    overlaps_p__result boolean
) RETURNS integer AS $$
DECLARE
BEGIN
    return ut_assert__eq ('Test of timespan__overlaps_p, timespan vs. start and end dates: ',
                          timespan__overlaps_p(overlaps_p__timespan_id,
                                               overlaps_p__start_date,
                                               overlaps_p__end_date),
                          overlaps_p__result
    );
END;
$$ LANGUAGE plpgsql;
-- added
select define_function_args('ut__interval_delete','timespan_id,interval_id');
--
-- procedure ut__interval_delete/2
--
-- Removes one interval from a timespan, applies the same removal to the
-- shadow table, and checks table equality.
CREATE OR REPLACE FUNCTION ut__interval_delete(
    interval_delete__timespan_id integer,
    interval_delete__interval_id integer
) RETURNS integer AS $$
DECLARE
BEGIN
    PERFORM timespan__interval_delete(interval_delete__timespan_id,interval_delete__interval_id);
    -- Remove from shadow table
    delete from ut_timespans
    where timespan_id = interval_delete__timespan_id
    and
          interval_id = interval_delete__interval_id;
    return ut_assert__eqtable('Testing timespan__interval_delete: ',
                              'ut_timespans',
                              'timespans'
    );
END;
$$ LANGUAGE plpgsql;
--
-- procedure ut__regression1/0
--
-- Part 1 of the regression suite: exercises creation, joins, copies, overlap
-- predicates and interval deletion. Deletes are deferred to ut__regression2
-- because PostgreSQL disallows changing a referenced primary key twice in
-- one transaction.
CREATE OR REPLACE FUNCTION ut__regression1(
) RETURNS integer AS $$
DECLARE
    v_result         integer := 0;
    v_interval_id    time_intervals.interval_id%TYPE;
    v_interval_id_ck time_intervals.interval_id%TYPE;
    v_timespan_id    timespans.timespan_id%TYPE;
    v_timespan_id_ck timespans.timespan_id%TYPE;
BEGIN
    raise notice 'Regression test, part 1 (creates and edits).';
    -- First create an interval
    v_interval_id := time_interval__new(timestamptz '2001-01-01',timestamptz '2001-01-20');
    --Check if creation of timespans work by supplying an interval id to be copied
    PERFORM ut__new(v_interval_id);
    -- We first check if the creation of timespans work
    -- This should be equivalent to what we have above
    v_timespan_id := ut__new(timestamptz '2001-01-25',timestamptz '2001-02-02');
    -- Test if timespan exists
    PERFORM ut__exists_p(v_timespan_id,true);
    -- Unfortunately, we cannot delete the timespan and then check its non-existence
    -- (transactions). So we check for a known non-existent timespan
    PERFORM ut__exists_p(v_timespan_id+100,false);
    -- Check if multi-interval (obviously not)
    PERFORM ut__multi_interval_p(v_timespan_id,false);
    -- The interval does not overlap the timespan
    PERFORM ut__overlaps_interval_p(v_timespan_id,v_interval_id,false);
    -- Join the first interval with the second, without making a copy
    PERFORM ut__join_interval(v_timespan_id,v_interval_id,false);
    -- Should now be a multi-interval timespan
    PERFORM ut__multi_interval_p(v_timespan_id,true);
    -- Now that the interval is part of the timespan, they should overlap
    PERFORM ut__overlaps_interval_p(v_timespan_id,v_interval_id,true);
    -- A new timespans
    v_timespan_id := ut__new(timestamptz '2001-03-05',timestamptz '2001-03-31');
    v_timespan_id_ck := ut__new(timestamptz '2001-06-05',timestamptz '2001-06-30');
    -- These timespans should not overlap
    PERFORM ut__overlaps_p(v_timespan_id,v_timespan_id_ck,false);
    -- Check overlaps against these known dates
    PERFORM ut__overlaps_p(v_timespan_id,timestamptz '2001-02-06',timestamptz '2001-03-25',true);
    PERFORM ut__overlaps_p(v_timespan_id,timestamptz '2001-03-07',timestamptz '2001-04-01',true);
    PERFORM ut__overlaps_p(v_timespan_id,timestamptz '2001-01-01',timestamptz '2001-03-20',true);
    PERFORM ut__overlaps_p(v_timespan_id,timestamptz '2001-01-01',null,true);
    PERFORM ut__overlaps_p(v_timespan_id,null,timestamptz '2001-04-01',true);
    PERFORM ut__overlaps_p(v_timespan_id,timestamptz '2001-04-01',timestamptz '2001-04-30',false);
    PERFORM ut__overlaps_p(v_timespan_id,timestamptz '2001-02-01',timestamptz '2001-02-27',false);
    -- Join the first interval with the second, making a copy
    PERFORM ut__join_interval(v_timespan_id,v_interval_id,true);
    -- Join the two (the joined interval is longer)
    PERFORM ut__join(v_timespan_id_ck,v_timespan_id);
    -- These timespans should now overlap
    PERFORM ut__overlaps_p(v_timespan_id,v_timespan_id_ck,true);
    -- Join an interval instead
    PERFORM ut__join(v_timespan_id_ck,timestamptz '2001-12-01',timestamptz '2001-12-31');
    -- Copy a timespan (will only contain two)
    PERFORM ut__copy(v_timespan_id,interval '0 days');
    -- Now try to delete the interval just joined
    PERFORM ut__interval_delete(v_timespan_id,v_interval_id);
    -- We will improve the regression test so there is reporting
    -- of individual test results. For now, reaching this far is
    -- enough to declare success.
    return v_result;
END;
$$ LANGUAGE plpgsql;
--
-- procedure ut__regression2/0
--
-- Part 2 of the regression suite: deletes every timespan created by part 1
-- (anything not in the pre-test snapshot), exercising the delete path.
CREATE OR REPLACE FUNCTION ut__regression2(
) RETURNS integer AS $$
DECLARE
    v_result     integer := 0;
    rec_timespan record;
BEGIN
    raise notice 'Regression test, part 2 (deletes).';
    -- Remove all entries made by regression test
    -- This also tests the deletion mechanism
    FOR rec_timespan IN
        select * from timespans
        where timespan_id not in (select timespan_id from ut_timespan_ids)
    LOOP
        PERFORM ut__delete(rec_timespan.timespan_id);
    END LOOP;
    -- We will improve the regression test so there is reporting
    -- of individual test results. For now, reaching this far is
    -- enough to declare success.
    return v_result;
END;
$$ LANGUAGE plpgsql;
--------------------------------------------------------------------------------
-- Main regression test. PostgreSQL does not allow multiple changes made to a
-- primary key inside a transaction if the primary key is referenced by another
-- table (e.g., insert and delete). As a fix, we break down the regression test
-- so that row creations and edits are separate from row deletions
--------------------------------------------------------------------------------
select (case when ut__setup() = 0
             then
                 'Regression test properly set up.'
        end) as setup_result;
select (case when ut__regression1() = 0
             then
                 'Regression test, part 1 successful.'
        end) as test_result;
-- Dump intermediate state for manual inspection between the two parts.
select * from time_intervals;
select * from timespans;
select * from ut_timespans;
select (case when ut__regression2() = 0
             then
                 'Regression test, part 2 successful.'
        end) as test_result;
-- Unfortunately, we need to recheck the deletion since we cannot put
-- actual deletion of entries in the shadow table inside the ut__delete
-- function due to the transactional nature of the functions
delete from ut_timespans
where timespan_id not in (select timespan_id from ut_timespan_ids);
select (case when ut_assert__eqtable('Recheck of deletion','timespans','ut_timespans') = 0
             then
                 'Recheck of deletion successful.'
        end) as recheck_result;
select (case when ut__teardown() = 0
             then
                 'Regression test properly torn down.'
        end) as teardown_result;
-- Clean up created functions.
-- This depends on openacs4 installed.
select drop_package('ut');
--------------------------------------------------------------------------------
-- End of regression test
--------------------------------------------------------------------------------
\i utest-drop.sql
|
! Main program: declares an external C routine hello() via an ISO C binding
! interface and invokes it. (Fortran allows a main program without an
! explicit "program" statement.)
implicit none
interface
    ! hello() is implemented in C; bind(c) gives it C linkage/naming.
    subroutine hello() bind (c)
    end subroutine hello
end interface
call hello()
end program
|
pragma solidity >=0.4.24;
/// Exchange/settlement interface (lowercased fixture). Only signatures are
/// declared here; semantics below are inferred from names -- confirm against
/// the implementing contract.
interface iexchanger {
    /// View: presumably returns `amount` adjusted for `refunded` settlement
    /// for `from` in `currencykey` -- TODO confirm against implementation.
    function calculateamountaftersettlement(
        address from,
        bytes32 currencykey,
        uint amount,
        uint refunded
    ) external view returns (uint amountaftersettlement);

    /// View: seconds remaining in the waiting period for the account/currency.
    function maxsecsleftinwaitingperiod(address account, bytes32 currencykey) external view returns (uint);

    /// View: outstanding settlement amounts owed to/by the account.
    function settlementowing(address account, bytes32 currencykey)
        external
        view
        returns (
            uint reclaimamount,
            uint rebateamount,
            uint numentries
        );

    /// View: fee rate for exchanging between the two currency keys.
    function feerateforexchange(bytes32 sourcecurrencykey, bytes32 destinationcurrencykey)
        external
        view
        returns (uint exchangefeerate);

    /// View: quote for an exchange -- amount received, fee, and rate used.
    function getamountsforexchange(
        uint sourceamount,
        bytes32 sourcecurrencykey,
        bytes32 destinationcurrencykey
    )
        external
        view
        returns (
            uint amountreceived,
            uint fee,
            uint exchangefeerate
        );

    /// Mutating: performs an exchange on behalf of `from`.
    function exchange(
        address from,
        bytes32 sourcecurrencykey,
        uint sourceamount,
        bytes32 destinationcurrencykey,
        address destinationaddress
    ) external returns (uint amountreceived);

    /// Mutating: exchange initiated by a delegate for `exchangeforaddress`.
    function exchangeonbehalf(
        address exchangeforaddress,
        address from,
        bytes32 sourcecurrencykey,
        uint sourceamount,
        bytes32 destinationcurrencykey
    ) external returns (uint amountreceived);

    /// Mutating: settles outstanding exchanges for the account/currency.
    function settle(address from, bytes32 currencykey)
        external
        returns (
            uint reclaimed,
            uint refunded,
            uint numentries
        );
}
|
import { fetchPost } from '@/conf/fetch'
/**
 * API wrappers for violation-behavior management. Every call is delegated
 * to the shared fetchPost helper and returns its promise.
 */

/** Load the full list of violation behaviors. */
function loadSituations() {
  return fetchPost('/wsproject/situation/loadSituations', {})
}

/**
 * Create or update a violation behavior.
 * @param {*} situation - object carrying id, name and sort
 */
function dealSituation(situation) {
  const { id, name, sort } = situation
  return fetchPost('/wsproject/situation/dealSituation', { id, name, sort })
}

/**
 * Delete a violation behavior by its id.
 * @param {*} id
 */
function delSituation(id) {
  return fetchPost('/wsproject/situation/delSituation', { id })
}

export {
  loadSituations,
  dealSituation,
  delSituation,
}
Imports System
Imports System.Reflection
Imports System.Runtime.InteropServices

' General information about an assembly is controlled through the following
' set of attributes. Change these attribute values to modify the information
' associated with an assembly.

' Review the values of the assembly attributes
<Assembly: AssemblyTitle("Test")>
<Assembly: AssemblyDescription("")>
<Assembly: AssemblyCompany("")>
<Assembly: AssemblyProduct("Test")>
<Assembly: AssemblyCopyright("Copyright © 2021")>
<Assembly: AssemblyTrademark("")>

<Assembly: ComVisible(False)>

' The following GUID is for the ID of the typelib if this project is exposed to COM
<Assembly: Guid("97826f82-8101-4234-9196-913ec194c530")>

' Version information for an assembly consists of the following four values:
'
'      Major Version
'      Minor Version
'      Build Number
'      Revision
'
' You can specify all the values, or default the Build and Revision numbers
' by using the "*" as shown below:
' <Assembly: AssemblyVersion("1.0.*")>

<Assembly: AssemblyVersion("1.0.0.0")>
<Assembly: AssemblyFileVersion("1.0.0.0")>
|
alias Data.Repo
alias Data.Channel
alias Data.Character
alias Data.Class
alias Data.ClassSkill
alias Data.Config
alias Data.Exit
alias Data.HelpTopic
alias Data.Item
alias Data.NPC
alias Data.NPCItem
alias Data.NPCSpawner
alias Data.Quest
alias Data.QuestStep
alias Data.Race
alias Data.Room
alias Data.RoomItem
alias Data.Script
alias Data.Skill
alias Data.Social
alias Data.User
alias Data.Zone
defmodule Helpers do
  # Seed-script constructors: thin wrappers around the Data.* changesets.
  # Every create_*/update_* helper validates through the schema changeset and
  # raises (Repo.insert!/update!) if the attributes are invalid.

  # Inserts a RoomItem join row; on success, prepends an instantiated copy of
  # the item to the room's `items` list and persists the room update.
  def add_item_to_room(room, item, attributes) do
    changeset = %RoomItem{} |> RoomItem.changeset(Map.merge(attributes, %{room_id: room.id, item_id: item.id}))

    case changeset |> Repo.insert do
      {:ok, _room_item} ->
        room |> update_room(%{items: [Item.instantiate(item) | room.items]})

      _ ->
        raise "Error creating room item"
    end
  end

  # Attaches an item to an NPC via the :npc_items association.
  def add_item_to_npc(npc, item, params) do
    npc
    |> Ecto.build_assoc(:npc_items)
    |> NPCItem.changeset(Map.put(params, :item_id, item.id))
    |> Repo.insert!()
  end

  # Creates an NPCSpawner row tying the NPC to a zone; room and spawn
  # interval come in through `attributes`.
  def add_npc_to_zone(zone, npc, attributes) do
    %NPCSpawner{}
    |> NPCSpawner.changeset(Map.merge(attributes, %{npc_id: npc.id, zone_id: zone.id}))
    |> Repo.insert!()
  end

  # Stores a single name/value configuration row.
  def create_config(name, value) do
    %Config{}
    |> Config.changeset(%{name: name, value: value})
    |> Repo.insert!()
  end

  def create_item(attributes) do
    %Item{}
    |> Item.changeset(attributes)
    |> Repo.insert!()
  end

  def create_npc(attributes) do
    %NPC{}
    |> NPC.changeset(attributes)
    |> Repo.insert!()
  end

  def update_npc(npc, attributes) do
    npc
    |> NPC.changeset(attributes)
    |> Repo.update!()
  end

  # Rooms always belong to a zone; zone_id is merged into the attributes.
  def create_room(zone, attributes) do
    %Room{}
    |> Room.changeset(Map.merge(attributes, %{zone_id: zone.id}))
    |> Repo.insert!()
  end

  def update_room(room, attributes) do
    room
    |> Room.changeset(attributes)
    |> Repo.update!()
  end

  # Creates the requested exit AND its reverse (via Exit.opposite/1), so the
  # two rooms are connected in both directions. Returns the exit built from
  # `attributes` (the reverse row is inserted first).
  def create_exit(attributes) do
    reverse_attributes = %{
      start_room_id: attributes.finish_room_id,
      finish_room_id: attributes.start_room_id,
      direction: to_string(Exit.opposite(attributes.direction)),
    }

    %Exit{}
    |> Exit.changeset(reverse_attributes)
    |> Repo.insert!

    %Exit{}
    |> Exit.changeset(attributes)
    |> Repo.insert!()
  end

  def create_user(attributes) do
    %User{}
    |> User.changeset(attributes)
    |> Repo.insert!()
  end

  # Characters hang off an existing user via the :characters association.
  def create_character(user, attributes) do
    user
    |> Ecto.build_assoc(:characters)
    |> Character.changeset(attributes)
    |> Repo.insert!()
  end

  def create_zone(attributes) do
    %Zone{}
    |> Zone.changeset(attributes)
    |> Repo.insert!()
  end

  def create_race(attributes) do
    %Race{}
    |> Race.changeset(attributes)
    |> Repo.insert!()
  end

  def create_class(attributes) do
    %Class{}
    |> Class.changeset(attributes)
    |> Repo.insert!()
  end

  def create_skill(attributes) do
    %Skill{}
    |> Skill.changeset(attributes)
    |> Repo.insert!()
  end

  # Join row linking a class to one of its skills.
  def create_class_skill(class, skill) do
    %ClassSkill{}
    |> ClassSkill.changeset(%{class_id: class.id, skill_id: skill.id})
    |> Repo.insert!()
  end

  def create_help_topic(attributes) do
    %HelpTopic{}
    |> HelpTopic.changeset(attributes)
    |> Repo.insert!()
  end

  def create_social(attributes) do
    %Social{}
    |> Social.changeset(attributes)
    |> Repo.insert!()
  end

  # Chat channel; color defaults to "red".
  def create_channel(name, color \\ "red") do
    %Channel{}
    |> Channel.changeset(%{name: name, color: color})
    |> Repo.insert!()
  end

  def create_quest(params) do
    %Quest{}
    |> Quest.changeset(params)
    |> Repo.insert!()
  end

  # Steps always belong to a quest; quest_id is merged into the params.
  def create_quest_step(quest, params) do
    %QuestStep{}
    |> QuestStep.changeset(Map.merge(params, %{quest_id: quest.id}))
    |> Repo.insert!
  end
end
defmodule Seeds do
import Helpers
def run do
bandit_hideout = create_zone(%{name: "Bandit Hideout", description: "A place for bandits to hide out"})
village = create_zone(%{name: "Village", description: "The local village"})
entrance = create_room(bandit_hideout, %{
name: "Entrance",
description: "A large square room with rough hewn walls.",
currency: 0,
x: 4,
y: 1,
map_layer: 1,
})
hallway = create_room(bandit_hideout, %{
name: "Hallway",
description: "As you go further west, the hallway descends downward.",
currency: 0,
x: 3,
y: 1,
map_layer: 1,
})
create_exit(%{direction: "west", start_room_id: entrance.id, finish_room_id: hallway.id})
hallway_turn = create_room(bandit_hideout, %{
name: "Hallway",
description: "The hallway bends south, continuing sloping down.",
currency: 0,
x: 2,
y: 1,
map_layer: 1,
})
create_exit(%{direction: "west", start_room_id: hallway.id, finish_room_id: hallway_turn.id})
hallway_south = create_room(bandit_hideout, %{
name: "Hallway",
description: "The south end of the hall has a wooden door embedded in the rock wall.",
currency: 0,
x: 2,
y: 2,
map_layer: 1,
})
create_exit(%{direction: "south", start_room_id: hallway_turn.id, finish_room_id: hallway_south.id})
great_room = create_room(bandit_hideout, %{
name: "Great Room",
description: "The great room of the bandit hideout. There are several tables along the walls with chairs pulled up. Cards are on the table along with mugs.",
currency: 0,
x: 2,
y: 3,
map_layer: 1,
})
create_exit(%{direction: "south", start_room_id: hallway_south.id, finish_room_id: great_room.id})
dorm = create_room(bandit_hideout, %{
name: "Bedroom",
description: "There is a bed in the corner with a dirty blanket on top. A chair sits in the corner by a small fire pit.",
currency: 0,
x: 1,
y: 3,
map_layer: 1,
})
create_exit(%{direction: "west", start_room_id: great_room.id, finish_room_id: dorm.id})
kitchen = create_room(bandit_hideout, %{
name: "Kitchen",
description: "A large cooking fire is at this end of the great room. A pot boils away at over the flame.",
currency: 0,
x: 3,
y: 3,
map_layer: 1,
})
create_exit(%{direction: "east", start_room_id: great_room.id, finish_room_id: kitchen.id})
shack = create_room(village, %{
name: "Shack",
description: "A small shack built against the rock walls of a small cliff.",
currency: 0,
x: 1,
y: 1,
map_layer: 1,
})
create_exit(%{direction: "east", start_room_id: entrance.id, finish_room_id: shack.id})
forest_path = create_room(village, %{
name: "Forest Path",
description: "A small path that leads away from the village to the mountain",
currency: 0,
x: 2,
y: 1,
map_layer: 1,
})
create_exit(%{direction: "east", start_room_id: shack.id, finish_room_id: forest_path.id})
stats = %{
health_points: 50,
max_health_points: 50,
skill_points: 50,
max_skill_points: 50,
endurance_points: 50,
max_endurance_points: 50,
strength: 10,
agility: 10,
intelligence: 10,
awareness: 10,
vitality: 10,
willpower: 10,
}
bran = create_npc(%{
name: "Bran",
level: 1,
currency: 0,
experience_points: 124,
stats: stats,
events: [],
is_quest_giver: true,
})
add_npc_to_zone(bandit_hideout, bran, %{
room_id: entrance.id,
spawn_interval: 15,
})
bandit = create_npc(%{
name: "Bandit",
level: 2,
currency: 100,
experience_points: 230,
stats: stats,
events: [
%{
id: UUID.uuid4(),
type: "room/entered",
actions: [
%{type: "commands/target", options: %{player: true}}
],
},
%{
id: UUID.uuid4(),
type: "combat/ticked",
options: %{
weight: 10,
},
actions: [
%{type: "commands/skill", options: %{skill: "slash"}}
]
},
],
})
add_npc_to_zone(bandit_hideout, bandit, %{
room_id: great_room.id,
spawn_interval: 15,
})
add_npc_to_zone(bandit_hideout, bandit, %{
room_id: kitchen.id,
spawn_interval: 15,
})
sword = create_item(%{
name: "Short Sword",
description: "A simple blade",
type: "weapon",
stats: %{},
effects: [],
keywords: ["sword"],
})
entrance = entrance |> add_item_to_room(sword, %{spawn_interval: 15})
leather_armor = create_item(%{
name: "Leather Armor",
description: "A simple chestpiece made out of leather",
type: "armor",
stats: %{slot: :chest, armor: 5},
effects: [],
keywords: ["leather"],
})
entrance = entrance |> add_item_to_room(leather_armor, %{spawn_interval: 15})
elven_armor = create_item(%{
name: "Elven armor",
description: "An elven chest piece.",
type: "armor",
stats: %{slot: :chest, armor: 10},
effects: [%{kind: "stats", field: :agility, amount: 5, mode: "add"}, %{kind: "stats", field: :strength, amount: 5, mode: "add"}],
keywords: ["elven"],
})
entrance = entrance |> add_item_to_room(elven_armor, %{spawn_interval: 15})
potion = create_item(%{
name: "Potion",
description: "A healing potion, recover health points",
type: "basic",
stats: %{},
effects: [%{kind: "recover", type: "health", amount: 10}],
whitelist_effects: ["recover", "stats"],
is_usable: true,
amount: 1,
keywords: [],
})
bandit |> add_item_to_npc(potion, %{drop_rate: 80})
elixir = create_item(%{
name: "Elixir",
description: "A healing elixir, recover skill points",
type: "basic",
stats: %{},
effects: [%{kind: "recover", type: "skill", amount: 10}],
whitelist_effects: ["recover", "stats"],
is_usable: true,
amount: 1,
keywords: [],
})
bandit |> add_item_to_npc(elixir, %{drop_rate: 80})
save = %Data.Save{
version: 1,
room_id: entrance.id,
config: %{},
stats: %{},
channels: ["global", "newbie"],
level: 1,
level_stats: %{},
currency: 0,
experience_points: 0,
spent_experience_points: 0,
items: [Item.instantiate(sword)],
wearing: %{},
wielding: %{},
}
create_config("game_name", "ExVenture MUD")
create_config("motd", "Welcome to the {white}MUD{/white}")
create_config("after_sign_in_message", "Thanks for checking out the game!")
create_config("starting_save", save |> Poison.encode!)
create_config("regen_tick_count", "7")
create_race(%{
name: "Human",
description: "A human",
starting_stats: %{
health_points: 50,
max_health_points: 50,
skill_points: 50,
max_skill_points: 50,
endurance_points: 50,
max_endurance_points: 50,
strength: 10,
agility: 10,
intelligence: 10,
awareness: 10,
vitality: 10,
willpower: 10,
},
})
dwarf = create_race(%{
name: "Dwarf",
description: "A dwarf",
starting_stats: %{
health_points: 50,
max_health_points: 50,
skill_points: 50,
max_skill_points: 50,
endurance_points: 50,
max_endurance_points: 50,
strength: 12,
agility: 8,
intelligence: 10,
awareness: 10,
vitality: 10,
willpower: 10,
},
})
create_race(%{
name: "Elf",
description: "An elf",
starting_stats: %{
health_points: 50,
max_health_points: 50,
skill_points: 50,
max_skill_points: 50,
endurance_points: 50,
max_endurance_points: 50,
strength: 8,
agility: 12,
intelligence: 10,
awareness: 10,
vitality: 10,
willpower: 10,
},
})
fighter = create_class(%{
name: "Fighter",
description: "Uses strength and swords to overcome.",
})
mage = create_class(%{
name: "Mage",
description: "Uses intelligence and magic to overcome.",
})
slash = create_skill(%{
level: 1,
name: "Slash",
description: "Use your weapon to slash at your target",
points: 1,
user_text: "You slash at [target].",
usee_text: "You were slashed at by [user].",
command: "slash",
whitelist_effects: ["damage", "damage/type", "stats"],
effects: [
%{kind: "damage", type: "slashing", amount: 10},
%{kind: "damage/type", types: ["slashing"]},
],
})
magic_missile = create_skill(%{
level: 1,
name: "Magic Missile",
description: "You shoot a bolt of arcane energy out of your hand",
points: 2,
user_text: "You shoot a bolt of arcane energy at [target].",
usee_text: "[user] shoots a bolt of arcane energy at you.",
command: "magic missile",
whitelist_effects: ["damage", "damage/type", "stats"],
effects: [
%{kind: "damage", type: "arcane", amount: 10},
%{kind: "damage/type", types: ["arcane"]},
],
})
create_skill(%{
level: 1,
name: "Heal",
is_global: true,
description: "Heal yourself a small amount",
points: 1,
user_text: "You heal [target].",
usee_text: "You were healed by [user].",
command: "heal",
whitelist_effects: ["recover", "stats"],
effects: [
%{kind: "recover", type: "health", amount: 10},
],
})
create_class_skill(fighter, slash)
create_class_skill(mage, magic_missile)
create_help_topic(%{name: "Fighter", keywords: ["fighter"], body: "This class uses physical skills"})
create_help_topic(%{name: "Mage", keywords: ["mage"], body: "This class uses arcane skills"})
create_social(%{
name: "Smile",
command: "smile",
with_target: "[user] smiles at [target].",
without_target: "[user] smiles.",
})
create_channel("global")
create_channel("newbie", "cyan")
quest = create_quest(%{
giver_id: bran.id,
name: "Finding a Guard",
description: "You must take out the bandits further down the cave.",
completed_message: "You did it!",
script: [
%Script.Line{
key: "start",
message: "Can you take out some bandits?",
listeners: [
%{phrase: "yes|bandit", key: "accept"},
],
},
%Script.Line{
key: "accept",
message: "Great!",
trigger: "quest",
},
],
level: 1,
experience: 400,
currency: 100,
})
create_quest_step(quest, %{type: "npc/kill", count: 3, npc_id: bandit.id})
save =
Game.Config.starting_save()
|> Map.put(:stats, dwarf.starting_stats())
|> Map.put(:config, %{
hints: true,
prompt: "%h/%Hhp %s/%Ssp %e/%Eep %x",
pager_size: 20,
regen_notifications: true,
})
|> Map.put(:version, 11)
user = create_user(%{
name: "admin",
password: "password",
flags: ["admin"],
})
create_character(user, %{
name: "admin",
race_id: dwarf.id,
class_id: mage.id,
save: save,
})
end
end
defmodule Seeds.LargeScale do
  # Stress-test seeds: 100 zones, each a fully connected 12x12 room grid,
  # with 10 NPC definitions per zone spawned into 20 random rooms each.
  import Helpers

  # Builds the 12x12 grid of rooms for `zone` and returns the flat list.
  defp generate_rooms(zone) do
    Enum.flat_map(1..12, fn x ->
      Enum.map(1..12, fn y ->
        create_room(zone, %{
          name: "Room #{x}-#{y}",
          description: "A room",
          currency: 0,
          x: x,
          y: y,
          map_layer: 1,
        })
      end)
    end)
  end

  # Connects each room to its west and north neighbours. create_exit also
  # inserts the reverse direction, so the grid ends up fully connected.
  defp generate_exits(rooms) do
    Enum.each(1..12, fn x ->
      Enum.each(1..12, fn y ->
        room = Enum.find(rooms, &(&1.x == x && &1.y == y))
        north_room = Enum.find(rooms, &(&1.x == x && &1.y == y - 1))
        west_room = Enum.find(rooms, &(&1.x == x - 1 && &1.y == y))

        if west_room, do: create_exit(%{direction: "west", finish_room_id: west_room.id, start_room_id: room.id})
        if north_room, do: create_exit(%{direction: "north", finish_room_id: north_room.id, start_room_id: room.id})
      end)
    end)
  end

  def run do
    Enum.each(1..100, fn zone_index ->
      zone = create_zone(%{name: "Zone #{zone_index}", description: "A zone"})

      rooms = generate_rooms(zone)
      generate_exits(rooms)

      stats = %{
        health_points: 25,
        max_health_points: 25,
        skill_points: 10,
        max_skill_points: 10,
        endurance_points: 10,
        max_endurance_points: 10,
        strength: 13,
        agility: 10,
        intelligence: 10,
        awareness: 10,
        vitality: 10,
        willpower: 10,
      }

      # Tick event making NPCs wander (wait/chance units defined by the
      # game engine -- TODO confirm whether these are seconds).
      move_event = %{
        type: "tick",
        id: "d80a37c1-6f7b-4e55-a102-0c1549bab5bd",
        action: %{
          wait: 120,
          type: "move",
          max_distance: 2,
          chance: 120,
        }
      }

      # Tick event making NPCs emote periodically.
      emote_event = %{
        type: "tick",
        id: "5660c186-5fbc-4448-9dc6-20ef5c922d0a",
        action: %{
          wait: 60,
          type: "emote",
          message: "emotes something",
          chance: 60
        }
      }

      Enum.each(1..10, fn npc_index ->
        npc = create_npc(%{
          name: "NPC #{zone.id}-#{npc_index}",
          level: 1,
          currency: 0,
          experience_points: 124,
          stats: stats,
          events: [move_event, emote_event],
          is_quest_giver: false,
        })

        # 20 spawners per NPC, each in a randomly chosen room of the zone.
        Enum.each(1..20, fn _spawn_index ->
          room = Enum.random(rooms)

          add_npc_to_zone(zone, npc, %{
            room_id: room.id,
            spawn_interval: 15,
          })
        end)
      end)
    end)
  end
end
Seeds.run()
#Seeds.LargeScale.run()
|
-- Recover a database whose log file is lost: flag it, detach it, then
-- re-attach from the .mdf alone, letting SQL Server rebuild the log.
ALTER DATABASE [RNB_Sup] SET EMERGENCY;
ALTER DATABASE [RNB_Sup] SET MULTI_USER;
EXEC sp_detach_db [RNB_Sup];
/* DELETE THE LOG FILE */
-- NOTE(review): this attaches RCSQL while the statements above operate on
-- RNB_Sup -- confirm the database names are intended. sp_attach_single_file_db
-- is deprecated; CREATE DATABASE ... FOR ATTACH is the supported path.
EXEC sp_attach_single_file_db @dbname = [RCSQL], @physname = N'D:\Databases\RCSQL.mdf';
-- Fix: "ATTACH_FORCE_REBUILD_LOG" is not a valid T-SQL clause; the correct
-- keyword is ATTACH_REBUILD_LOG, which rebuilds the missing log file.
CREATE DATABASE RCSQL ON (FILENAME = 'D:\Databases\RCSQL.mdf')
FOR ATTACH_REBUILD_LOG;
{
"id": "de692a8c-e43e-4e93-8ca1-7226f791ac97",
"modelName": "GMFolder",
"mvc": "1.1",
"name": "de692a8c-e43e-4e93-8ca1-7226f791ac97",
"children": [
],
"filterType": "GMPath",
"folderName": "paths",
"isDefaultView": false,
"localisedFolderName": "ResourceTree_Paths"
} |
{
"id": "6581212e-6a6e-45dc-b976-52e0ad50235b",
"modelName": "GMObject",
"mvc": "1.0",
"name": "obj_obstacle",
"eventList": [
{
"id": "ebe36f49-b9e9-4a7d-9ad4-08f30dff7f95",
"modelName": "GMEvent",
"mvc": "1.0",
"IsDnD": false,
"collisionObjectId": "00000000-0000-0000-0000-000000000000",
"enumb": 0,
"eventtype": 0,
"m_owner": "6581212e-6a6e-45dc-b976-52e0ad50235b"
}
],
"maskSpriteId": "00000000-0000-0000-0000-000000000000",
"overriddenProperties": null,
"parentObjectId": "00000000-0000-0000-0000-000000000000",
"persistent": false,
"physicsAngularDamping": 0.1,
"physicsDensity": 0.5,
"physicsFriction": 0.2,
"physicsGroup": 0,
"physicsKinematic": false,
"physicsLinearDamping": 0.1,
"physicsObject": false,
"physicsRestitution": 0.1,
"physicsSensor": false,
"physicsShape": 1,
"physicsShapePoints": null,
"physicsStartAwake": true,
"properties": null,
"solid": false,
"spriteId": "0002e410-45eb-4a36-8a09-921a94a4bc1f",
"visible": true
} |
// Copyright (c) 2016-2019 Uber Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package nginx
import (
"bytes"
"errors"
"fmt"
"io/ioutil"
"os"
"os/exec"
"path"
"path/filepath"
"text/template"
"github.com/uber/kraken/nginx/config"
"github.com/uber/kraken/utils/httputil"
"github.com/uber/kraken/utils/log"
)
const (
	// _genDir is the root directory for all files this package generates
	// (rendered nginx config, client CA bundle).
	_genDir = "/tmp/nginx"
)

// _clientCABundle is the path of the concatenated client CA bundle
// written by Run.
var _clientCABundle = path.Join(_genDir, "ca.crt")
// Config defines nginx configuration.
type Config struct {
	// Root, if true, prefixes the nginx invocation with sudo (see Run).
	Root bool `yaml:"root"`

	// Name defines the default nginx template for each component.
	Name string `yaml:"name"`

	// TemplatePath takes precedence over Name, overwrites default template.
	TemplatePath string `yaml:"template_path"`

	// CacheDir is nginx's cache directory; created by Run if missing.
	CacheDir string `yaml:"cache_dir"`

	// LogDir is where Run writes nginx-stdout.log.
	LogDir string `yaml:"log_dir"`

	// tls is injected via the WithTLS option.
	tls httputil.TLSConfig
}
// inject copies the reserved cache_dir/log_dir values from the config into
// params, failing if the caller already supplied either key.
func (c *Config) inject(params map[string]interface{}) error {
	reserved := []struct {
		key   string
		value string
	}{
		{"cache_dir", c.CacheDir},
		{"log_dir", c.LogDir},
	}
	for _, r := range reserved {
		if _, taken := params[r.key]; taken {
			return fmt.Errorf("invalid params: %s is reserved", r.key)
		}
	}
	for _, r := range reserved {
		params[r.key] = r.value
	}
	return nil
}
// getTemplate resolves the template body: an explicit TemplatePath wins
// over the component's named default template.
func (c *Config) getTemplate() (string, error) {
	if c.TemplatePath == "" {
		tmpl, err := config.GetDefaultTemplate(c.Name)
		if err != nil {
			return "", fmt.Errorf("get default template: %s", err)
		}
		return tmpl, nil
	}
	b, err := ioutil.ReadFile(c.TemplatePath)
	if err != nil {
		return "", fmt.Errorf("read template: %s", err)
	}
	return string(b), nil
}
// Build builds nginx config: it renders the component-specific template
// with params, then embeds the result in the shared "base" template along
// with the TLS settings. Returns the final config bytes.
func (c *Config) Build(params map[string]interface{}) ([]byte, error) {
	siteTmpl, err := c.getTemplate()
	if err != nil {
		return nil, fmt.Errorf("get template: %s", err)
	}

	// Default client verification unless the caller overrode it.
	if _, ok := params["client_verification"]; !ok {
		params["client_verification"] = config.DefaultClientVerification
	}

	site, err := populateTemplate(siteTmpl, params)
	if err != nil {
		return nil, fmt.Errorf("populate template: %s", err)
	}

	// Wrap the rendered site in the base template.
	baseTmpl, err := config.GetDefaultTemplate("base")
	if err != nil {
		return nil, fmt.Errorf("get default base template: %s", err)
	}

	baseParams := map[string]interface{}{
		"site":                   string(site),
		"ssl_enabled":            !c.tls.Server.Disabled,
		"ssl_certificate":        c.tls.Server.Cert.Path,
		"ssl_certificate_key":    c.tls.Server.Key.Path,
		"ssl_password_file":      c.tls.Server.Passphrase.Path,
		"ssl_client_certificate": _clientCABundle,
	}
	src, err := populateTemplate(baseTmpl, baseParams)
	if err != nil {
		return nil, fmt.Errorf("populate base: %s", err)
	}
	return src, nil
}
// Option allows setting optional nginx configuration.
// Options are applied by Run after its required-field validation.
type Option func(*Config)

// WithTLS configures nginx configuration with tls.
// The TLS material is validated when Run starts nginx.
func WithTLS(tls httputil.TLSConfig) Option {
	return func(c *Config) { c.tls = tls }
}
// Run injects params into an nginx configuration template and runs it.
// It validates the config, generates the client CA bundle and rendered
// config under _genDir, then blocks until the nginx process exits,
// returning the process error, if any.
func Run(config Config, params map[string]interface{}, opts ...Option) error {
	if config.Name == "" && config.TemplatePath == "" {
		return errors.New("invalid config: name or template_path required")
	}
	if config.CacheDir == "" {
		return errors.New("invalid config: cache_dir required")
	}
	if config.LogDir == "" {
		return errors.New("invalid config: log_dir required")
	}
	for _, opt := range opts {
		opt(&config)
	}

	// Create root directory for generated files for nginx.
	if err := os.MkdirAll(_genDir, 0775); err != nil {
		return err
	}

	if config.tls.Server.Disabled {
		log.Warn("Server TLS is disabled")
	} else {
		// Every referenced TLS file must exist before nginx starts.
		for _, s := range append(
			config.tls.CAs,
			config.tls.Server.Cert,
			config.tls.Server.Key,
			config.tls.Server.Passphrase) {
			if _, err := os.Stat(s.Path); err != nil {
				return fmt.Errorf("invalid TLS config: %s", err)
			}
		}

		// Concat all ca files into bundle.
		cabundle, err := os.Create(_clientCABundle)
		if err != nil {
			return fmt.Errorf("create cabundle: %s", err)
		}
		if err := config.tls.WriteCABundle(cabundle); err != nil {
			cabundle.Close() // Fix: fd leaked on this error path.
			return fmt.Errorf("write cabundle: %s", err)
		}
		// Fix: Close errors were previously ignored; a failed close can
		// mean the bundle was not fully flushed to disk.
		if err := cabundle.Close(); err != nil {
			return fmt.Errorf("close cabundle: %s", err)
		}
	}

	if err := os.MkdirAll(config.CacheDir, 0775); err != nil {
		return err
	}

	if err := config.inject(params); err != nil {
		return err
	}

	src, err := config.Build(params)
	if err != nil {
		return fmt.Errorf("build nginx config: %s", err)
	}

	// NOTE(review): if only TemplatePath is set, Name is empty and this
	// path is _genDir itself -- confirm callers always set Name.
	conf := filepath.Join(_genDir, config.Name)
	if err := ioutil.WriteFile(conf, src, 0755); err != nil {
		return fmt.Errorf("write src: %s", err)
	}

	stdoutLog := path.Join(config.LogDir, "nginx-stdout.log")
	stdout, err := os.OpenFile(stdoutLog, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		return fmt.Errorf("open stdout log: %s", err)
	}
	// Fix: the log file handle was previously never closed.
	defer stdout.Close()

	args := []string{"/usr/sbin/nginx", "-g", "daemon off;", "-c", conf}
	if config.Root {
		args = append([]string{"sudo"}, args...)
	}
	cmd := exec.Command(args[0], args[1:]...)
	cmd.Stdout = stdout
	cmd.Stderr = stdout
	return cmd.Run()
}
// populateTemplate parses tmpl as a text/template and executes it with
// args, returning the rendered bytes.
func populateTemplate(tmpl string, args map[string]interface{}) ([]byte, error) {
	parsed, err := template.New("nginx").Parse(tmpl)
	if err != nil {
		return nil, fmt.Errorf("parse: %s", err)
	}
	var rendered bytes.Buffer
	if err := parsed.Execute(&rendered, args); err != nil {
		return nil, fmt.Errorf("exec: %s", err)
	}
	return rendered.Bytes(), nil
}
// GetServer returns a string for an nginx server directive value.
// Unix sockets get the "unix:" prefix nginx expects; anything else is
// passed through unchanged.
func GetServer(net, addr string) string {
	switch net {
	case "unix":
		return "unix:" + addr
	default:
		return addr
	}
}
|
%%
%% $Header$
%% $Name$
%%
Package ``seaice'' provides a dynamic and thermodynamic interactive sea-ice
model. Sea-ice model thermodynamics are based on Hibler (see Modeling a Variable Thickness Sea-Ice Cover, Hibler, Monthly Weather Review, 1980),
that is, a 2-category model that simulates ice thickness and concentration.
Snow is simulated as per Zhang et al. (see Arctic ice-ocean modeling with
and without climate restoring, Zhang et al., Journal of Physical Oceanography,
1998).
Sea-ice dynamics is based
on a viscous-plastic model solved using the alternating-direction-implicit (ADI) method of
Zhang and Rothrock (see Modeling Arctic Sea Ice with an Efficient Plastic
Solution, Zhang and Rothrock, Journal of Geophysical Research, 2000).
|
// THIS FILE IS COPIED FROM FBTHRIFT, DO NOT MODIFY ITS CONTENTS DIRECTLY
// generated-by : fbcode/common/hs/thrift/exactprint/tests/sync-fbthrift-tests.sh
// source: thrift/compiler/test/fixtures/*
// @generated
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace cpp2 apache.thrift.test
namespace py3 thrift.test.lazy_deserialization
// Baseline struct: no lazy fields; used as the reference for the lazy
// variants below.
struct Foo {
  1: list<double> field1; // fast to skip in CompactProtocol
  2: list<i32> field2; // slow to skip in CompactProtocol
  3: list<double> field3; // fast to skip in CompactProtocol
  4: list<i32> field4; // slow to skip in CompactProtocol
}

// Identical to Foo, except field3 and field4 are lazy
struct LazyFoo {
  1: list<double> field1;
  2: list<i32> field2;
  3: list<double> field3 (cpp.experimental.lazy);
  4: list<i32> field4 (cpp.experimental.lazy);
}

// Foo with every field optional (no lazy annotations).
struct OptionalFoo {
  1: optional list<double> field1;
  2: optional list<i32> field2;
  3: optional list<double> field3;
  4: optional list<i32> field4;
}

// LazyFoo with every field optional.
struct OptionalLazyFoo {
  1: optional list<double> field1;
  2: optional list<i32> field2;
  3: optional list<double> field3 (cpp.experimental.lazy);
  4: optional list<i32> field4 (cpp.experimental.lazy);
}

// Lazy fields combined with each of the C++ reference types.
struct LazyCppRef {
  1: optional list<i32> field1 (cpp.ref_type = "unique", cpp.experimental.lazy);
  2: optional list<i32> field2 (cpp.ref_type = "shared", cpp.experimental.lazy);
  3: optional list<i32> field3 (
    cpp.ref_type = "shared_const",
    cpp.experimental.lazy,
  );
}

// Same as Foo, except adding index field explicitly
// Since we can't use negative as index field, we will change id
// in serialized data manually
struct IndexedFoo {
  100: double serialized_data_size;
  1: list<double> field1;
  2: list<i32> field2;
  3: list<double> field3;
  4: list<i32> field4;
  101: map<i16, i64> field_id_to_size;
}

struct OptionalIndexedFoo {
  100: double serialized_data_size;
  1: optional list<double> field1;
  2: optional list<i32> field2;
  3: optional list<double> field3;
  4: optional list<i32> field4;
  101: map<i16, i64> field_id_to_size;
}

// Empty struct used as a degenerate test case.
struct Empty {
}

// Field ids of the two index fields above (see IndexedFoo).
const i32 kSizeId = 100;
const i32 kIndexId = 101;
|
(in-package :cudd)
;; Defines a zero-argument wrapper NAME whose body calls INTERFACE on the
;; current manager %mp%; DOC (a docstring form) becomes the function's
;; documentation.
(defmacro define-simple-managed-function (name interface &body doc)
  `(defun ,name ()
     ,@doc
     (,interface
      %mp%)))
(define-simple-managed-function disable-gc cudd-disable-garbage-collection
"Disables garbage collection. Garbage
collection is initially enabled. This function may be called to
disable it. However, garbage collection will still occur when a new
node must be created and no memory is left, or when garbage collection
is required for correctness. (E.g., before reordering.)")
(define-simple-managed-function enable-gc cudd-enable-garbage-collection
"Enables garbage collection. Garbage collection is
initially enabled. Therefore it is necessary to call this function
only if garbage collection has been explicitly disabled.")
(define-simple-managed-function peak-node-count cudd-read-peak-node-count
"Reports the peak number of nodes.
This number includes node on the free list. At the peak,
the number of nodes on the free list is guaranteed to be less than
DD_MEM_CHUNK. ")
(define-simple-managed-function peak-live-node-count cudd-read-peak-live-node-count
"Reports the peak number of live nodes.")
(define-simple-managed-function node-count cudd-read-node-count
"Reports the number of nodes in BDDs and ADDs.
This number does not include the isolated projection
functions and the unused constants. These nodes that are not counted
are not part of the DDs manipulated by the application. ")
(define-simple-managed-function zdd-node-count cudd-zdd-read-node-count
"Reports the number of nodes in ZDDs.
This number always includes the two constants 1 and 0. ")
(defun set-background (bck)
  "Sets the background constant of the manager. It assumes
that the DdNode pointer bck is already referenced."
  (cudd-set-background %mp% bck))

(defun count-leaves (node)
  "Counts the number of leaves in a DD."
  (cudd-count-leaves (node-pointer node)))

(defun dag-size (node)
  "Counts the number of nodes in a DD."
  ;; ZDD nodes use the dedicated ZDD size function; ADD and BDD nodes
  ;; share cudd-dag-size.
  (etypecase node
    (zdd-node (cudd-zdd-dag-size (node-pointer node)))
    (add-node (cudd-dag-size (node-pointer node)))
    (bdd-node (cudd-dag-size (node-pointer node)))))
(define-simple-managed-function bdd-variables cudd-bdd-variables
"Return the number of BDD variables")
(define-simple-managed-function zdd-variables cudd-zdd-variables
"Return the number of ZDD variables")
(define-simple-managed-function bdd-max-variables cudd-bdd-max-variables
"Return the maximum number of BDD variables")
(define-simple-managed-function zdd-max-variables cudd-zdd-max-variables
"Return the maximum number of ZDD variables")
|
;;; config/literate/cli.el -*- lexical-binding: t; -*-

;; CLI-side setup for the literate module: load its autoloaded helpers.
(load! "autoload")

;; Tangle the user's config.org before 'doom sync' runs
;; (left disabled here; enable by uncommenting)
;; (add-hook 'doom-sync-pre-hook #'+literate-tangle-h)
#!/bin/guile -s
!#
;; Mapping over trees: a direct recursive version and a map-based version.

;; square: x squared.
(define (square x)
  (* x x))

;; square-tree: squares every leaf of TREE using explicit car/cdr recursion.
(define (square-tree tree)
  (cond ((null? tree) '())
        ((not (pair? tree)) (square tree))
        (else
         (cons (square-tree (car tree))
               (square-tree (cdr tree))))))

;; square-tree-map: same result, expressed with map over sub-trees.
(define (square-tree-map tree)
  (map (lambda (sub-tree)
         (if (pair? sub-tree)
             (square-tree-map sub-tree)
             (square sub-tree)))
       tree))

;; Tests: both variants should print (1 (4 (9 16) 25) (36 49)).
(display (square-tree (list 1 (list 2 (list 3 4) 5) (list 6 7))))
(newline)
(display (square-tree-map (list 1 (list 2 (list 3 4) 5) (list 6 7))))
(newline)
<?xml version="1.0" encoding="UTF-8"?>
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
    xmlns:xs="http://www.w3.org/2001/XMLSchema"
    xmlns:math="http://www.w3.org/2005/xpath-functions/math"
    exclude-result-prefixes="xs math"
    xmlns="http://www.w3.org/1999/xhtml"
    version="3.0">
    <xsl:output method="xhtml" html-version="5" omit-xml-declaration="yes"
        include-content-type="no" indent="yes"/>
    <!-- Root template: emit an empty XHTML5 document skeleton. -->
    <xsl:template match="/">
        <html>
            <head>
            </head>
            <!-- Fix: <body> was previously emitted after </html>, i.e. as a
                 sibling of the root element, producing invalid (X)HTML with
                 two top-level elements. It must be a child of <html>. -->
            <body>
            </body>
        </html>
    </xsl:template>
</xsl:stylesheet>
/*
* Copyright (c) 2011 the original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradlefx.ide.tasks
import org.gradle.api.Project
import org.gradle.testfixtures.ProjectBuilder
import org.gradlefx.configuration.Configurations
import org.gradlefx.configuration.sdk.SdkType
import org.gradlefx.conventions.FlexType
import org.gradlefx.conventions.FrameworkLinkage
import org.gradlefx.conventions.GradleFxConvention
import org.gradlefx.ide.tasks.idea.IdeaProject
import spock.lang.Specification
/**
* @author <a href="mailto:drykovanov@wiley.com">Denis Rykovanov</a>
*/
/**
 * Specification for the {@code ideafx} task: verifies the IntelliJ IDEA
 * project files (.idea/modules.xml) and module descriptor (*.iml) generated
 * for the different GradleFx project types (swc, swf, air, mobile, swcAir).
 *
 * Fix: getModuleConfNode()/getModuleRootMgrNode() previously evaluated
 * {@code configManager != null} as a discarded expression — a no-op in a
 * plain Groovy method (only Spock then:/expect: lines are implicit
 * assertions) — so a missing component produced a confusing NPE later.
 * They now assert it explicitly.
 */
class IdeaProjectModuleTest extends Specification {

    /** Lazily creates the task under test with a minimal GradleFx convention. */
    IdeaProject getIdeaProjectTask() {
        if (_ideaFxProjectTask == null) {
            _ideaFxProjectTask = project.tasks.create("ideafx", IdeaProject)
            GradleFxConvention pluginConvention = new GradleFxConvention(project)
            _ideaFxProjectTask.flexConvention = pluginConvention
            _ideaFxProjectTask.flexConvention.playerVersion = "11.5"
        }
        return _ideaFxProjectTask;
    }

    Project project
    IdeaProject _ideaFxProjectTask
    String imlFileContent
    String testResourceDir = './src/test/resources/'

    def setup() {
    }

    def "test generation idea project directory"() {
        given:
        setupProjectWithName "test"
        when:
        ideaProjectTask.createProjectConfig()
        then:
        File modulesFile = project.file(".idea/modules.xml")
        modulesFile.exists()
    }

    def "test module is added"() {
        given:
        setupProjectWithName "test"
        when:
        ideaProjectTask.createProjectConfig()
        then:
        File modulesFile = project.file(".idea/modules.xml")
        def xml = new XmlParser().parse(modulesFile);
        String filepath = xml.component.modules.module.'@filepath'.text()
        String expectedFilepath = "\$PROJECT_DIR\$/${project.name}.iml"
        filepath.equals(expectedFilepath)
    }

    def "test generation empty project"() {
        given:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = "swc"
        when:
        ideaProjectTask.createProjectConfig()
        then:
        File imlFile = project.file("${project.name}.iml")
        imlFile.exists()
    }

    def "config for pure web lib"() {
        given:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = "swc"
        ideaProjectTask.flexConvention.sdkTypes.add(SdkType.AIR)
        when:
        ideaProjectTask.createProjectConfig()
        then:
        def configuration = getModuleConfNode()
        configuration.'@name'.text() == 'test'
        configuration.'@output-type'.text() == "Library"
        configuration.'@pure-as'.text() == "true"
    }

    def "config for flex web lib"() {
        given:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = "swc"
        ideaProjectTask.flexConvention.frameworkLinkage = FrameworkLinkage.external
        ideaProjectTask.flexConvention.sdkTypes.add(SdkType.Flex)
        when:
        ideaProjectTask.createProjectConfig()
        then:
        def configuration = getModuleConfNode()
        configuration.'@name'.text() == 'test'
        configuration.'@output-type'.text() == "Library"
        configuration.'@pure-as'.text() == "false"
    }

    def "config with swc dependency"() {
        given:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = 'swc'
        project.getDependencies().add(Configurations.MERGE_CONFIGURATION_NAME.configName(), project.files('lib/some.swc'))
        when:
        ideaProjectTask.createProjectConfig()
        then:
        def configuration = getModuleConfNode()
        def moduleId = configuration.dependencies.entries.entry.'@library-id'.text();
        moduleId != null
        configuration.dependencies.entries.entry.dependency.'@linkage'.text() == 'Merged'
        def moduleMgr = getModuleRootMgrNode()
        def orderEntry = moduleMgr.orderEntry.find { it.'@type' == "module-library" }
        orderEntry.library.'@type'.text() == 'flex'
        orderEntry.library.properties.'@id'.text() == moduleId
        orderEntry.library.CLASSES.root.'@url'.text() == 'jar://$MODULE_DIR$/lib/some.swc!/'
    }

    def "config with project dependency"() {
        given:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = 'swc'
        project.getDependencies().add(Configurations.MERGE_CONFIGURATION_NAME.configName(), project.project(':util'))
        when:
        ideaProjectTask.createProjectConfig()
        then:
        def configuration = getModuleConfNode()
        def entry = configuration.dependencies.entries.entry.first();
        entry.'@module-name' == 'util'
        entry.'@build-configuration-name' == 'util'
        entry.dependency.'@linkage'.text() == 'Merged'
        def moduleMgr = getModuleRootMgrNode()
        def orderEntry = moduleMgr.orderEntry.find { it.'@type' == "module" }
        //<orderEntry type="module" module-name="util" />
        orderEntry.'@module-name' == 'util'
    }

    def "setup dependency type"() {
        setup:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = 'swc'
        project.getDependencies().add(configName, project.project(':util'))
        ideaProjectTask.createProjectConfig()
        expect:
        def configuration = getModuleConfNode()
        def entry = configuration.dependencies.entries.entry.first();
        entry.dependency.'@linkage'.text() == linkageType
        where:
        configName << [Configurations.MERGE_CONFIGURATION_NAME.configName(),
                       Configurations.INTERNAL_CONFIGURATION_NAME.configName(),
                       Configurations.EXTERNAL_CONFIGURATION_NAME.configName(),
                       Configurations.TEST_CONFIGURATION_NAME.configName()]
        linkageType << ['Merged', 'Include', 'External', 'Test']
    }

    def "setup flex sdk"() {
        setup:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.frameworkLinkage = frameworkLinkage
        ideaProjectTask.flexConvention.sdkTypes.add(SdkType.Flex)
        ideaProjectTask.createProjectConfig()
        expect:
        def configuration = getModuleConfNode()
        configuration.dependencies.sdk.'@name'.text() == 'default_flex_sdk'
        configuration.dependencies.'@framework-linkage'.text() == ideaSdkLinkage
        getModuleRootMgrNode().orderEntry.find { it.'@type' == "jdk" }.'@jdkName' == 'default_flex_sdk'
        where:
        frameworkLinkage << [FrameworkLinkage.merged, FrameworkLinkage.rsl]
        ideaSdkLinkage << ['Merged', 'Runtime']
    }

    def "setup flex sdk with custom name"() {
        given:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.flexSdkName = 'customname_flex_sdk'
        when:
        ideaProjectTask.createProjectConfig()
        then:
        def configuration = getModuleConfNode()
        configuration.dependencies.sdk.'@name'.text() == 'customname_flex_sdk'
        getModuleRootMgrNode().orderEntry.find { it.'@type' == 'jdk' }.'@jdkName' == 'customname_flex_sdk'
    }

    def "setup web app project"() {
        given:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = 'swf'
        ideaProjectTask.flexConvention.mainClass = 'subpackage/WebContainer.as'
        when:
        ideaProjectTask.createProjectConfig();
        then:
        getModuleConfNode().'@main-class'.text() == "subpackage.WebContainer"
        getModuleConfNode().'@target-platform'.text() == ""
    }

    def "setup empty air app project "() {
        given:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = 'air'
        ideaProjectTask.flexConvention.sdkTypes.add(SdkType.AIR)
        when:
        ideaProjectTask.createProjectConfig()
        then:
        getModuleConfNode().'@main-class'.text() == "Main"
        getModuleConfNode().'@target-platform'.text() == "Desktop"
        getModuleConfNode().'@output-type'.text() == ""
        getModuleConfNode().'@output-file'.text() == "test.swf"
        getModuleConfNode().'packaging-air-desktop'.'@package-file-name'.text() == 'test'
        getModuleConfNode().'packaging-air-desktop'.'@use-generated-descriptor'.text() == 'false'
        getModuleConfNode().'packaging-air-desktop'.'@custom-descriptor-path'.text() == '$MODULE_DIR$/src/main/actionscript/test.xml'
    }

    def "setup air app project"() {
        given:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = 'air'
        ideaProjectTask.flexConvention.mainClass = 'subpackage/AirContainer.mxml'
        ideaProjectTask.flexConvention.output = 'customOutput'
        ideaProjectTask.flexConvention.air.applicationDescriptor = 'src/main/actionscript/air.xml'
        when:
        ideaProjectTask.createProjectConfig()
        then:
        //todo files included in package
        getModuleConfNode().'@main-class'.text() == "subpackage.AirContainer"
        getModuleConfNode().'@target-platform'.text() == "Desktop"
        getModuleConfNode().'@output-type'.text() == ""
        getModuleConfNode().'@output-file'.text() == "customOutput.swf"
        getModuleConfNode().'packaging-air-desktop'.'@package-file-name'.text() == 'customOutput'
        getModuleConfNode().'packaging-air-desktop'.'@use-generated-descriptor'.text() == 'false'
        getModuleConfNode().'packaging-air-desktop'.'@custom-descriptor-path'.text() == '$MODULE_DIR$/src/main/actionscript/air.xml'
    }

    def "setup air mobile project"() {
        setup:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = FlexType.mobile
        ideaProjectTask.flexConvention.mainClass = 'subpackage/AirContainer.mxml'
        ideaProjectTask.flexConvention.output = 'customOutput'
        ideaProjectTask.flexConvention.air.applicationDescriptor = 'src/main/actionscript/apk.xml'
        ideaProjectTask.flexConvention.airMobile.platform = platform
        ideaProjectTask.createProjectConfig()
        expect:
        //todo files included in package
        getModuleConfNode().'@main-class'.text() == "subpackage.AirContainer"
        getModuleConfNode().'@target-platform'.text() == "Mobile"
        getModuleConfNode().'@output-type'.text() == ""
        getModuleConfNode().'@output-file'.text() == "customOutput.swf"
        getModuleConfNode()["packaging-$packagin_suffix"].'@enabled'.text() == 'true'
        getModuleConfNode()["packaging-$packagin_suffix"].'@package-file-name'.text() == 'customOutput'
        getModuleConfNode()["packaging-$packagin_suffix"].'@use-generated-descriptor'.text() == 'false'
        getModuleConfNode()["packaging-$packagin_suffix"].'@custom-descriptor-path'.text() == '$MODULE_DIR$/src/main/actionscript/apk.xml'
        where:
        platform << ['android', 'ios']
        packagin_suffix << ['android', 'ios']
    }

    def "setup air mobile ios specific values"() {
        given:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = FlexType.mobile
        ideaProjectTask.flexConvention.airMobile.platform = 'ios'
        ideaProjectTask.flexConvention.airMobile.platformSdk = '/ios_sdk'
        ideaProjectTask.flexConvention.airMobile.provisioningProfile = 'provisioning-profile.mobileprovision'
        ideaProjectTask.flexConvention.air.keystore = 'somecert.p12'
        when:
        ideaProjectTask.createProjectConfig()
        then:
        //check platform sdk
        //check cert check provision file
        //<AirSigningOptions sdk="app sdk" keystore-path="key" provisioning-profile-path="profision file" />
        def configuration = getModuleConfNode()
        configuration["packaging-ios"].AirSigningOptions.'@keystore-path'.text() == '$MODULE_DIR$/somecert.p12'
        configuration["packaging-ios"].AirSigningOptions.'@use-temp-certificate'.text() == 'false'
        configuration["packaging-ios"].AirSigningOptions.'@sdk'.text() == '/ios_sdk'
        configuration["packaging-ios"].AirSigningOptions.'@provisioning-profile-path'.text() == '$MODULE_DIR$/provisioning-profile.mobileprovision'
    }

    def "setup swcAir project"() {
        given:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = FlexType.swcAir
        ideaProjectTask.flexConvention.sdkTypes.add(SdkType.Flex)
        when:
        ideaProjectTask.createProjectConfig()
        then:
        def configuration = getModuleConfNode()
        configuration.'@name'.text() == 'test'
        configuration.'@output-type'.text() == "Library"
        configuration.'@pure-as'.text() == "false"
        configuration.'@output-file'.text() == "test.swc"
    }

    def "setup air lib project"() {
        given:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = FlexType.swc
        ideaProjectTask.flexConvention.sdkTypes.add(SdkType.Flex)
        ideaProjectTask.flexConvention.additionalCompilerOptions << '+configname=air'
        when:
        ideaProjectTask.createProjectConfig()
        then:
        def configuration = getModuleConfNode()
        configuration.'@name'.text() == 'test'
        configuration.'@output-type'.text() == "Library"
        configuration.'@target-platform'.text() == "Desktop"
        configuration.'@pure-as'.text() == "false"
    }

    def "setup air certificate options for android and air"() { //todo cover ios
        setup:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = type
        ideaProjectTask.flexConvention.air.keystore = 'somecert.p12'
        ideaProjectTask.createProjectConfig()
        expect:
        def configuration = getModuleConfNode()
        configuration["packaging-$suffix"].AirSigningOptions.'@keystore-path'.text() == '$MODULE_DIR$/somecert.p12'
        configuration["packaging-$suffix"].AirSigningOptions.'@use-temp-certificate'.text() == 'false'
        where:
        type << [FlexType.air, FlexType.mobile]
        suffix << ['air-desktop', 'android']
    }

    def "include file in air package"() {
        setup:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = type
        ideaProjectTask.flexConvention.airMobile.platform = platform
        ideaProjectTask.flexConvention.air.includeFileTrees = [project.fileTree(dir: "sub-resource-dir", include: '**/*.*')]
        ideaProjectTask.createProjectConfig()
        expect:
        // project.fileTree(dir: "sub-resource-dir", include: '*.*').files.empty == false
        // def configuration = getModuleConfNode()
        // configuration["packaging-$suffix"].'files-to-package'.empty == false
        where:
        type | platform | suffix
        FlexType.air | null | 'air-desktop'
        FlexType.mobile | 'android' | 'android'
        FlexType.mobile | 'ios' | 'ios'
    }

    def "additional compiler options"() {
        setup:
        setupProjectWithName "test"
        ideaProjectTask.flexConvention.type = FlexType.swc
        ideaProjectTask.flexConvention.additionalCompilerOptions << '+configname=air'
        ideaProjectTask.flexConvention.additionalCompilerOptions << '-tools-locale="en"' << '-default-background-color=0xcccccc'
        when:
        ideaProjectTask.createProjectConfig()
        then:
        def configuration = getModuleConfNode()
        configuration.'compiler-options'.'option'.find{ it.'@name' == "additionalOptions" }.'@value' == '+configname=air -tools-locale="en" -default-background-color=0xcccccc'
    }

    /**
     * Builds a root project with a ':util' sibling and the named project under
     * test, then registers all GradleFx configurations on it.
     */
    def setupProjectWithName(String projectName) {
        //todo extract
        File projectDir = new File(this.getClass().getResource("/stub-project-dir/intellij-dummy.xml").toURI())
        Project root = ProjectBuilder.builder().withProjectDir(projectDir.parentFile).withName('root').build()
        Project utilProject = ProjectBuilder.builder().withProjectDir(projectDir.getParentFile()).withParent(root).withName('util').build()
        this.project = ProjectBuilder.builder().withProjectDir(projectDir.getParentFile()).withParent(root).withName(projectName).build()
        ideaProjectTask.flexConvention.type = 'swc'
        [
            Configurations.INTERNAL_CONFIGURATION_NAME.configName(),
            Configurations.EXTERNAL_CONFIGURATION_NAME.configName(),
            Configurations.MERGE_CONFIGURATION_NAME.configName(),
            Configurations.RSL_CONFIGURATION_NAME.configName(),
            Configurations.THEME_CONFIGURATION_NAME.configName(),
            Configurations.TEST_CONFIGURATION_NAME.configName()
        ].each { project.configurations.create(it) }
    }

    /** Parses the generated .iml and returns its FlexBuildConfigurationManager configuration node. */
    def getModuleConfNode() {
        File imlFile = project.file("${project.name}.iml")
        def xml = new XmlParser().parse(imlFile);
        def configManager = xml.component.find { it ->
            it.@name == "FlexBuildConfigurationManager" }
        // Fix: was a discarded expression; assert so a missing component fails here.
        assert configManager != null
        return configManager.configurations.configuration
    }

    /** Parses the generated .iml and returns its NewModuleRootManager component node. */
    def getModuleRootMgrNode() {
        File imlFile = project.file("${project.name}.iml")
        def xml = new XmlParser().parse(imlFile);
        def configManager = xml.component.find { it ->
            it.@name == "NewModuleRootManager" }
        // Fix: was a discarded expression; assert so a missing component fails here.
        assert configManager != null
        return configManager
    }
}
|
#version 330 core
// Vertex shader: transforms the vertex and forwards texture coordinates,
// world-space normal/position and eye-space position to the fragment stage.
in vec3 in_Vertex;
in vec2 in_TexCoord0;
in vec3 in_Normals;
uniform mat4 modelViewProjection;
uniform mat4 modelView;
// Matrix for transforming normals (presumably the inverse-transpose of the
// model matrix -- TODO(review): confirm against the CPU-side setup).
uniform mat4 normalsMatrix;
uniform mat4 modelMatrix;
out vec2 coordTexture;
smooth out vec3 vertexNormal;
out vec3 worldPos;
out vec4 eyeSpacePos;
void main()
{
// Clip-space position.
gl_Position = modelViewProjection* vec4(in_Vertex,1.0);
coordTexture = in_TexCoord0;
// w = 0.0 so the normal is transformed as a direction (no translation).
vec4 normal = normalize(normalsMatrix*vec4(in_Normals,0.0));
// w = 1.0 so the position receives the model translation.
vec4 pos = modelMatrix*vec4(in_Vertex,1.0);
//vec4 normal = vec4(in_Normals,0.0);
vertexNormal = normal.xyz;
worldPos = pos.xyz;
eyeSpacePos = modelView*vec4(in_Vertex, 1.0);
//vertexNormal = in_Normals;
}
|
{-# OPTIONS --without-K #-}
{-
Imports everything that is not imported by something else.
This is not supposed to be used anywhere, this is just a simple way to
do `make all'
This file is intentionally named index.agda so that
Agda will generate index.html.
-}
-- Umbrella module: type-checking it type-checks the whole library.
module index where
import Base
import Spaces.IntervalProps
import Algebra.F2NotCommutative
import Spaces.LoopSpaceCircle
import Spaces.LoopSpaceDecidableWedgeCircles
import Homotopy.PullbackIsPullback
import Homotopy.PushoutIsPushout
import Homotopy.Truncation
import Sets.QuotientUP
import Spaces.PikSn
import Homotopy.VanKampen
import Homotopy.Cover
import Homotopy.Cover.ExamplePi1Circle
|
/*
* Copyright 2013 Moving Blocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Debug/utility fragment shader: writes the interpolated texture coordinates
// into the RG channels of render target 0, a constant 0.9 in blue, and the
// incoming vertex-color alpha.
void main(){
gl_FragData[0].rgba = vec4(gl_TexCoord[0].xy, 0.9, gl_Color.a);
}
|
///======= Copyright (c) Stereolabs Corporation, All rights reserved. ===============
///
/// Basic wireframe shader that can be used for rendering spatial mapping meshes.
///
// NOTE(review): "WireframeViedoOverlay" is misspelled ("Viedo" -> "Video"),
// but the name is the shader's lookup path, so renaming would break
// Shader.Find/material references; left unchanged.
Shader "Custom/Spatial Mapping/ WireframeViedoOverlay"
{
Properties
{
_WireColor("Wire color", Color) = (1.0, 1.0, 1.0, 1.0)
}
SubShader
{
Tags{ "RenderType" = "Opaque" }
Lighting Off
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
// Distance from the camera, computed per-vertex and interpolated.
float dist : TEXCOORD1;
};
sampler2D _MainTex;
float4 _MainTex_ST;
float4 _WireColor;
// Transforms the vertex and records the object-space view distance
// used to fade the wire in the fragment stage.
v2f vert(appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
o.dist = length(ObjSpaceViewDir(v.vertex));
return o;
}
// Outputs the wire color with alpha attenuated by squared distance.
float4 frag(v2f i) : SV_Target
{
float4 color = _WireColor;
//#if ALWAYS
color.a = 10 / (0.1 + i.dist*i.dist);
//#endif
return color;
}
ENDCG
}
}
Fallback off
}
|
#!/usr/bin/env bash
load bats-extra.bash
# local version: 1.2.0.0
# Exercism-style Bats suite for grains.sh (grains on a chessboard: 2^(n-1)
# per square). Only the first test runs by default; every other test is
# skipped unless BATS_RUN_SKIPPED=true — note test "1" intentionally keeps
# its guard commented out.
@test "1" {
#[[ $BATS_RUN_SKIPPED == "true" ]] || skip
run bash grains.sh 1
assert_success
assert_output "1"
}
@test "2" {
[[ $BATS_RUN_SKIPPED == "true" ]] || skip
run bash grains.sh 2
assert_success
assert_output "2"
}
@test "3" {
[[ $BATS_RUN_SKIPPED == "true" ]] || skip
run bash grains.sh 3
assert_success
assert_output "4"
}
@test "4" {
[[ $BATS_RUN_SKIPPED == "true" ]] || skip
run bash grains.sh 4
assert_success
assert_output "8"
}
@test "16" {
[[ $BATS_RUN_SKIPPED == "true" ]] || skip
run bash grains.sh 16
assert_success
assert_output "32768"
}
@test "32" {
[[ $BATS_RUN_SKIPPED == "true" ]] || skip
run bash grains.sh 32
assert_success
assert_output "2147483648"
}
# 2^63 exceeds the signed 64-bit range; grains.sh must handle it (e.g. via bc).
@test "64" {
[[ $BATS_RUN_SKIPPED == "true" ]] || skip
run bash grains.sh 64
assert_success
assert_output "9223372036854775808"
}
@test "square 0 raises an exception" {
[[ $BATS_RUN_SKIPPED == "true" ]] || skip
run bash grains.sh 0
assert_failure
assert_output "Error: invalid input"
}
@test "negative square raises an exception" {
[[ $BATS_RUN_SKIPPED == "true" ]] || skip
run bash grains.sh -1
assert_failure
assert_output "Error: invalid input"
}
@test "square greater than 64 raises an exception" {
[[ $BATS_RUN_SKIPPED == "true" ]] || skip
run bash grains.sh 65
assert_failure
assert_output "Error: invalid input"
}
# Sum over all 64 squares: 2^64 - 1.
@test "returns the total number of grains on the board" {
[[ $BATS_RUN_SKIPPED == "true" ]] || skip
run bash grains.sh total
assert_success
assert_output "18446744073709551615"
}
|
defmodule PhoenixAuthKata.AuthController do
# OAuth2 controller: GET /auth/:provider redirects to the provider's consent
# page; the callback exchanges the code for a token, fetches the user profile
# and stores both in the session. Only the "google" provider is implemented;
# `Google` is presumably an OAuth2 strategy module defined elsewhere -- confirm.
use PhoenixAuthKata.Web, :controller
# Kick off the OAuth flow by redirecting to the provider's authorize URL.
def index(conn, %{ "provider" => provider }) do
redirect conn, external: authorize_url!(provider)
end
# Provider redirects back here with a code; exchange it, load the user,
# persist user and access token in the session, then go home.
def callback( conn, %{ "provider" => provider, "code" => code }) do
token = get_token!(provider, code)
user = get_user!(provider, token)
conn
|> put_session(:current_user, user)
|> put_session(:access_token, token.access_token)
|> redirect(to: "/")
end
defp authorize_url!("google") do
Google.authorize_url!(scope: "email profile")
end
# Any provider other than "google" is unsupported.
defp authorize_url!(_) do
raise "No matching provider available"
end
defp get_token!("google", code) do
Google.get_token!(code: code)
end
defp get_token!(_, _) do
raise "No matching provider available"
end
# Fetch the authenticated user's profile from Google's OpenID Connect endpoint.
defp get_user!("google", token) do
user_url = "https://www.googleapis.com/plus/v1/people/me/openIdConnect"
OAuth2.AccessToken.get!(token, user_url)
end
end
|
/*
:name: typedef_test_13
:description: Test
:should_fail: 0
:tags: 6.18
*/
typedef bit data_t;
parameter k = 6;
parameter j = 5;
parameter l = 2;
// Associative array of data_t keyed by two packed multi-dimensional
// bit types: bit[31:0][k:0] and bit[j:0][l:0] (IEEE 1800 section 6.18).
typedef data_t my_ar_t [bit[31:0][k:0]][bit[j:0][l:0]];
|
"
Unit tests for HighstockPlotOptionsSeriesPointEvents: each test sets one
point-event handler and verifies the generated JavaScript configuration.
"
Class {
#name : #HighstockPlotOptionsSeriesPointEventsTest,
#superclass : #TestCase,
#category : 'HighstockStV6-Tests'
}
{ #category : #'tests-accessing' }
HighstockPlotOptionsSeriesPointEventsTest >> testClick [
"click: must serialize as a 'click' key in the emitted configuration."
| component javascript expectedConfiguration |
component := HighstockPlotOptionsSeriesPointEvents new.
component click: 'test'.
javascript := String streamContents: [ :stream | component javascriptContentOn: stream ].
expectedConfiguration := Dictionary new
at: 'click' put: 'test';
yourself.
self assert: javascript equals: expectedConfiguration asJavascript
]
{ #category : #'tests-accessing' }
HighstockPlotOptionsSeriesPointEventsTest >> testMouseOut [
"mouseOut: must serialize as a 'mouseOut' key in the emitted configuration."
| component javascript expectedConfiguration |
component := HighstockPlotOptionsSeriesPointEvents new.
component mouseOut: 'test'.
javascript := String streamContents: [ :stream | component javascriptContentOn: stream ].
expectedConfiguration := Dictionary new
at: 'mouseOut' put: 'test';
yourself.
self assert: javascript equals: expectedConfiguration asJavascript
]
{ #category : #'tests-accessing' }
HighstockPlotOptionsSeriesPointEventsTest >> testMouseOver [
"mouseOver: must serialize as a 'mouseOver' key in the emitted configuration."
| component javascript expectedConfiguration |
component := HighstockPlotOptionsSeriesPointEvents new.
component mouseOver: 'test'.
javascript := String streamContents: [ :stream | component javascriptContentOn: stream ].
expectedConfiguration := Dictionary new
at: 'mouseOver' put: 'test';
yourself.
self assert: javascript equals: expectedConfiguration asJavascript
]
{ #category : #'tests-accessing' }
HighstockPlotOptionsSeriesPointEventsTest >> testRemove [
"remove: must serialize as a 'remove' key in the emitted configuration."
| component javascript expectedConfiguration |
component := HighstockPlotOptionsSeriesPointEvents new.
component remove: 'test'.
javascript := String streamContents: [ :stream | component javascriptContentOn: stream ].
expectedConfiguration := Dictionary new
at: 'remove' put: 'test';
yourself.
self assert: javascript equals: expectedConfiguration asJavascript
]
{ #category : #'tests-accessing' }
HighstockPlotOptionsSeriesPointEventsTest >> testSelect [
"select: must serialize as a 'select' key in the emitted configuration."
| component javascript expectedConfiguration |
component := HighstockPlotOptionsSeriesPointEvents new.
component select: 'test'.
javascript := String streamContents: [ :stream | component javascriptContentOn: stream ].
expectedConfiguration := Dictionary new
at: 'select' put: 'test';
yourself.
self assert: javascript equals: expectedConfiguration asJavascript
]
{ #category : #'tests-accessing' }
HighstockPlotOptionsSeriesPointEventsTest >> testUnselect [
"unselect: must serialize as an 'unselect' key in the emitted configuration."
| component javascript expectedConfiguration |
component := HighstockPlotOptionsSeriesPointEvents new.
component unselect: 'test'.
javascript := String streamContents: [ :stream | component javascriptContentOn: stream ].
expectedConfiguration := Dictionary new
at: 'unselect' put: 'test';
yourself.
self assert: javascript equals: expectedConfiguration asJavascript
]
{ #category : #'tests-accessing' }
HighstockPlotOptionsSeriesPointEventsTest >> testUpdate [
"update: must serialize as an 'update' key in the emitted configuration."
| component javascript expectedConfiguration |
component := HighstockPlotOptionsSeriesPointEvents new.
component update: 'test'.
javascript := String streamContents: [ :stream | component javascriptContentOn: stream ].
expectedConfiguration := Dictionary new
at: 'update' put: 'test';
yourself.
self assert: javascript equals: expectedConfiguration asJavascript
]
|
\begin{frame}
\begin{center}
{\fontsize{2.5cm}{1em}\selectfont counterfeit}
\end{center}
\end{frame}
|
%% Minimal console logging macros: each prints a severity-tagged,
%% newline-terminated message via io:format/2. Str must be a string
%% literal (compile-time ++ concatenation); Args is the format-arg list.
-define(INFO(Str, Args), io:format("INFO: " ++ Str ++ "~n", Args)).
-define(DEBUG(Str, Args), io:format("DEBUG: " ++ Str ++ "~n", Args)).
-define(WARNING(Str, Args), io:format("WARNING: " ++ Str ++ "~n", Args)).
-define(ERROR(Str, Args), io:format("ERROR: " ++ Str ++ "~n", Args)).
|
// Hand-written minimal typings for the 'faye' Bayeux pub/sub client.
declare module 'faye' {
/** A Bayeux message; keys beyond `subscription` are protocol-specific. */
type Message = {
// TODO: generalize
[key: string]: unknown;
subscription?: string;
};
/** Handle returned by subscribe(); cancel() tears the subscription down. */
type Subscription = {
cancel: () => void;
};
type Callback = (message: Message) => unknown;
/** Faye extension: intercepts messages in both directions. */
interface Middleware {
incoming: (message: Message, callback: Callback) => unknown;
outgoing: (message: Message, callback: Callback) => unknown;
}
export class Client {
constructor(url: string, options: { timeout: number });
addExtension(extension: Middleware): void;
subscribe(channel: string, callback: Callback): Promise<Subscription>;
}
}
// Typings for the 'Base64' polyfill: browser-style base64 decode.
declare module 'Base64' {
/** Decodes a base64-encoded string (like the DOM atob). */
function atob(input: string): string;
}
|
/**
* Copyright (c) 2020 EmeraldPay, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.emeraldpay.dshackle.upstream.rpcclient
import io.emeraldpay.dshackle.test.TestingCommons
import spock.lang.Specification
/**
 * Serialization and equality tests for JsonRpcRequest: every request must
 * render the canonical JSON-RPC 2.0 envelope with a fixed id of 1, and
 * requests with identical method/params must compare equal.
 */
class JsonRpcRequestSpec extends Specification {
def "Serialize empty params"() {
setup:
def req = new JsonRpcRequest("test_foo", [])
when:
def act = req.toJson()
then:
new String(act) == '{"jsonrpc":"2.0","id":1,"method":"test_foo","params":[]}'
}
def "Serialize single param"() {
setup:
def req = new JsonRpcRequest("test_foo", ["0x0000"])
when:
def act = req.toJson()
then:
new String(act) == '{"jsonrpc":"2.0","id":1,"method":"test_foo","params":["0x0000"]}'
}
def "Serialize two params"() {
setup:
def req = new JsonRpcRequest("test_foo", ["0x0000", false])
when:
def act = req.toJson()
then:
new String(act) == '{"jsonrpc":"2.0","id":1,"method":"test_foo","params":["0x0000",false]}'
}
def "Same requests are equal"() {
setup:
def req1 = new JsonRpcRequest("test_foo", ["0x0000", false])
def req2 = new JsonRpcRequest("test_foo", ["0x0000", false])
when:
def act = req1.equals(req2)
then:
act == true
}
}
|
---
output:
html_document:
keep_md: yes
---
## Run multiple simulations with `squidR()`
SQuID was designed to provide a user-friendly and web-based program to simulate data for testing a variety of ideas about sampling and bias in hierarchical mixed modelling. For those very familiar with these approaches and curious about SQuID, you might be interested in using the SQuID R function `squidR()` that is also used by `squidApp()` to simulate data. However, before getting into simulating data with `squidR()`, we recommend that you familiarize yourself with the various SQuID phenotypic equation components by reading the documentation on the *Full model (Step by step)* page within the SQuID app.
The advantages of using the SQuID R function instead of the SQuID app are that:
1. you can incorporate the SQuID R function into your own R code.
2. you have more flexibility in how you specify the input parameters. For instance, you could simulate distinct data sets with a range of values for some parameters.
Below we present an example of using the function `squidR()`. In this example, we simulate 2 different data sets where in the first scenario we sample 50 individuals 10 times and in the second scenario we sample 100 individuals 5 times.
First, we load the `squid` package.
```{r, echo=TRUE}
library(squid)
```
Then, we prepare the input parameters for the 2 distinct simulation scenarios. We store these values in a `data.frame` where each row represents a new simulation scenario. It is important that the column names are equivalent to the parameter names used by `squidR()`. All the parameter names are listed and described in the `squidR()` R documentation which is accessible by running the help command (i.e. `?squidR`). An exception to that is when you have to specify a matrix input parameter (and not a scalar input) such as the variance/correlation matrix (e.g. `Vind`). In this case, you have to define the value of each element in the matrix separately (i.e. distinct columns). At this stage, the names assigned to the matrix elements are arbitrary and will not be inputted into `squidR()`. We will see below how we reconstruct the input matrix before running `squidR()`.
```{r, echo=TRUE}
################
# SQuID inputs #
################
# Prepare squid input data.frame
parameters <- data.frame( "NI" = c(50 , 100), # Number of individuals
"NR" = c(10, 5), # Number of samples per individual
"VI" = 0.5, # Among-individual variance (intercept)
"B0" = 0.5, # Population mean phenotype (intercept)
"Ve" = 0.5, # Measurement error variance
"Tmax" = 100 # total time steps
)
parameters
```
Finally, we run the different simulation scenarios one by one by looping `squidR()` function which simulates the data for each row of the parameter `data.frame`. We start by converting the parameter row into a `list` which is expected by `squidR()`. We then reconstruct the variance/correlation matrix (4x4) by assigning the variances and correlations at the right position. In this example, we just assign the value of the among-individual variance at the intercept. At this stage the name of the matrix (i.e. `Vind`) has to be the one expected by `squidR()`. Similarly, we reconstruct the population mean matrix `B`. When the input parameters are defined, we run the current simulation and combine this generated data with the total data table (using the function `rbind()`). Note that we only recover the sampled data and that we also add the simulation parameters to be able to recover the data associated to each simulation scenario.
```{r, echo=TRUE}
#######################
# Run all simulations #
#######################
# Simulate one scenario (row i of `parameters`) with squidR() and return its
# sampled data with that scenario's parameter values appended as columns, so
# scenarios can be distinguished after rbind-ing.
run_sim <- function(i){
# Select simulation parameters
param <- parameters[i, ]
# Converting the parameters data.frame into a list
inputs <- as.list(param)
# Create variance/correlation matrix (4x4; only the intercept variance is set)
inputs$Vind <- matrix(0, nrow = 4, ncol = 4)
inputs$Vind[1,1] <- param$VI
# Create population mean value matrix (1 row)
inputs$B <- c(param$B0, rep(0,3))
# Run simulation
dt <- squid::squidR(inputs)$sampled_data
# Add parameter values to data.frame
return(cbind(dt, param))
}
dt_sim <- do.call("rbind", lapply(1:nrow(parameters), run_sim))
```
Now you can analyse your simulated data and investigate, for instance, how different sampling designs will impact your statistical parameter estimation. You can extract the data from a specific scenario using the function `subset()` as described below:
```{r, echo=TRUE}
# Example of subsetting data
sub_data <- subset(dt_sim, NI == 100 & NR == 5)
```
|
package no.nav.helse
import no.nav.helse.sporenstreks.domene.Arbeidsgiverperiode
import no.nav.helse.sporenstreks.domene.Refusjonskrav
import no.nav.helse.sporenstreks.domene.RefusjonskravStatus
import java.time.LocalDate
import java.time.LocalDate.of
object TestData {
// Identity numbers: "valid" presumably passes the national id checksum,
// "notValid" fails it -- TODO(review): confirm against the validator used.
val validIdentitetsnummer = "20015001543"
val notValidIdentitetsnummer = "50012001987"
// Organisation numbers, valid/invalid by the same convention.
val validOrgNr = "123456785"
val notValidOrgNr = "123456789"
val opprettetAv = "20015001543"
// A fully valid refusjonskrav: one employer period of 2 days / 1000.0,
// in MOTTATT (received) status.
val gyldigKrav = Refusjonskrav(
opprettetAv,
validIdentitetsnummer,
validOrgNr,
setOf(Arbeidsgiverperiode(of(2020, 4,4), of(2020, 4,10), 2, 1000.0)),
RefusjonskravStatus.MOTTATT
)
}
program test7;

{ Demonstrates how parentheses change the grouping of chained real
  division: a/b/(c*d) versus a/b/c*d (left-to-right evaluation). }

var x : real = 2.0;
var y : real = 3.0;
var z : real;

{ Fix: originally declared as 'function ... : real' but never assigned a
  result, leaving the return value undefined; both are really procedures. }
procedure first();
begin
    x := 2.0;
    y := 3.0;
    { 12 / 2 / 10 }
    z := 4.0 * y / x / (5.0 * x);
    writeln(z); (*output should be 0.6*)
end;

procedure second();
begin
    x := 2.0;
    y := 3.0;
    { ((12 / 2) / 5) * 2 }
    z := 4.0 * y / x / 5.0 * x;
    writeln(z); (* output should be 2.4 *)
end;

begin
    first();
    second();
end. { Fix: program must terminate with 'end.' rather than 'end;'. }
#pragma once
#include <glm.hpp>
#include <gtc/matrix_transform.hpp>
#include <gtc/type_ptr.hpp>
namespace Gogaman
{
// Omni-directional point light: position and RGB radiance
// (presumably world space -- TODO(review): confirm in the render system).
struct PointLightComponent
{
glm::vec3 position;
glm::vec3 radiance;
};
// Infinitely distant light defined by a direction and RGB radiance.
struct DirectionalLightComponent
{
glm::vec3 direction;
glm::vec3 radiance;
};
}
// Pre-0.4.10-style arithmetic helpers that abort (via throw) on
// overflow/underflow. Note: defines its own assert(), shadowing the
// later Solidity builtin of the same name.
contract SafeMath{
// Multiplication; a*b must divide back to a (when a != 0) or we overflowed.
function safeMul(uint a, uint b) internal returns (uint) {
uint c = a * b;
assert(a == 0 || c / a == b);
return c;
}
// Division; rejects b == 0 and sanity-checks the quotient/remainder identity.
function safeDiv(uint a, uint b) internal returns (uint) {
assert(b > 0);
uint c = a / b;
assert(a == b * c + a % b);
return c;
}
// Subtraction; rejects underflow (b must not exceed a).
function safeSub(uint a, uint b) internal returns (uint) {
assert(b <= a);
return a - b;
}
// Addition; the sum must not wrap below a.
function safeAdd(uint a, uint b) internal returns (uint) {
uint c = a + b;
assert(c >= a);
return c;
}
// Aborts the transaction (consuming all gas, old-style 'throw') when false.
function assert(bool assertion) internal {
if (!assertion) {
throw;
}
}
}
// Abstract ERC20 interface, pre-0.4.22 style: functions with empty bodies
// instead of an `interface` declaration. Overridden by FXCoinICO below.
contract ERC20{
    function totalSupply() constant returns (uint256 totalSupply) {}
    function balanceOf(address _owner) constant returns (uint256 balance) {}
    function transfer(address _recipient, uint256 _value) returns (bool success) {}
    function transferFrom(address _from, address _recipient, uint256 _value) returns (bool success) {}
    function approve(address _spender, uint256 _value) returns (bool success) {}
    function allowance(address _owner, address _spender) constant returns (uint256 remaining) {}
    event Transfer(address indexed _from, address indexed _recipient, uint256 _value);
    event Approval(address indexed _owner, address indexed _spender, uint256 _value);
}
// FXCoin crowdsale token: ERC20 with a one-week open sale in which ether
// sent to the contract mints tokens at a fixed price.
contract FXCoinICO is ERC20, SafeMath{
    mapping(address => uint256) balances;

    function balanceOf(address _owner) constant returns (uint256 balance) {
        return balances[_owner];
    }

    function transfer(address _to, uint256 _value) returns (bool success){
        // safeSub throws when the sender's balance is insufficient.
        balances[msg.sender] = safeSub(balances[msg.sender], _value);
        balances[_to] = safeAdd(balances[_to], _value);
        Transfer(msg.sender, _to, _value);
        return true;
    }

    mapping (address => mapping (address => uint256)) allowed;

    function transferFrom(address _from, address _to, uint256 _value) returns (bool success){
        // safeSub throws when the balance or allowance is insufficient.
        var _allowance = allowed[_from][msg.sender];
        balances[_to] = safeAdd(balances[_to], _value);
        balances[_from] = safeSub(balances[_from], _value);
        allowed[_from][msg.sender] = safeSub(_allowance, _value);
        Transfer(_from, _to, _value);
        return true;
    }

    function approve(address _spender, uint256 _value) returns (bool success) {
        allowed[msg.sender][_spender] = _value;
        Approval(msg.sender, _spender, _value);
        return true;
    }

    function allowance(address _owner, address _spender) constant returns (uint256 remaining) {
        return allowed[_owner][_spender];
    }

    // Reverts once the sale period (endTime) has passed.
    modifier during_offering_time(){
        if (now >= endTime){
            throw;
        }else{
            _;
        }
    }

    function () payable during_offering_time {
        createTokens(msg.sender);
    }

    // FIX: createTokens was publicly callable without the offering-time
    // guard, so tokens could still be bought after endTime by calling it
    // directly instead of via the fallback. Apply the same modifier here.
    function createTokens(address recipient) payable during_offering_time {
        if (msg.value == 0) {
            throw;
        }
        // tokens = wei sent * price / 1 ether
        uint tokens = safeDiv(safeMul(msg.value, price), 1 ether);
        totalSupply = safeAdd(totalSupply, tokens);
        balances[recipient] = safeAdd(balances[recipient], tokens);
        // Forward the ether to the owner immediately; revert if that fails.
        if (!owner.send(msg.value)) {
            throw;
        }
    }

    string public name = "FXCoin";
    string public symbol = "FXC";
    uint public decimals = 4;
    uint256 public INITIAL_SUPPLY = 350000000000;
    uint256 public totalSupply;
    uint256 public price;
    address public owner;
    uint256 public endTime;

    // Constructor: entire initial supply to the deployer; sale runs one week.
    function FXCoinICO() {
        totalSupply = INITIAL_SUPPLY;
        balances[msg.sender] = INITIAL_SUPPLY;
        endTime = now + 1 weeks;
        owner = msg.sender;
        price = 8500000;
    }
}
|
(ns cube-test.ut-simp.msg-test
  (:require
   [cube-test.ut-simp.msg :as msg]
   ;; FIX: deftest/testing/is are used unqualified throughout this file, so
   ;; they must be referred in — an alias alone (`:as t`) would leave them
   ;; unresolved and the namespace would not compile.
   [clojure.test :as t :refer [deftest is testing]]))
;; Sanity check that the msg namespace loaded: msg/dummy is expected to be 11.
(deftest a-test
  (testing "basic dummy test"
    ;; (is (= 0 1))
    (is (= msg/dummy 11))))
;; Fixture: message box 0 holding an :INFO message, keyed with msg's
;; namespaced keywords.
(def msg-box-0 {::msg/box-id 0 ::msg/msg {::msg/text "abc" ::msg/msg-level :INFO}})

(deftest msg-box-0-basic
  (testing "msg-box-0 has box-id=0"
    (is (= (msg-box-0 ::msg/box-id) 0))))
;; inc-msg-level on an :INFO message is expected to step the level to :WARN.
;; (box-id' is bound but only inspected during REPL debugging.)
(deftest inc-msg-level
  (testing "inc-msg-level properly increments msg-level"
    (let [result (msg/inc-msg-level msg-box-0)
          ;; result 4
          box-id' (result ::msg/box-id)
          msg-level' (get-in result [::msg/msg ::msg/msg-level])]
          ;; box-id' 2]
      (println "msg-level=" msg-level')
      (is (= msg-level' :WARN)))))
;; REPL scratch value; not referenced by the tests in this file.
(def dummy 7)
(comment
  ;; REPL scratchpad — evaluate forms individually.
  (println *ns*)
  ;; FIX: libspecs must be quoted when `require` is called as a function at
  ;; the REPL; the unquoted form would throw when evaluated.
  (require '[clojure.test :as t])
  (require '[clojure.repl :as repl])
  (repl/dir t)
  (t/run-tests 'cube-test.ut-simp.msg-test)
  ,)
; (deftest inc-msg-level
; (testing "inc-msg-level properly increments msg-level"
; (is (= 0 1))))
; (deftest extract-msg-box-num
; (testing "properly extract the box number from msg-box id"
; (let [result (msg/extract-msg-box-num msg-box-0)]
; (is (= result 0)))))
|
/**
******************************************************************************
* @file lptim.h
* @brief This file contains all the function prototypes for
* the lptim.c file
******************************************************************************
* @attention
*
* <h2><center>© Copyright (c) 2021 STMicroelectronics.
* All rights reserved.</center></h2>
*
* This software component is licensed by ST under Ultimate Liberty license
* SLA0044, the "License"; You may not use this file except in compliance with
* the License. You may obtain a copy of the License at:
* www.st.com/SLA0044
*
******************************************************************************
*/
/* Define to prevent recursive inclusion -------------------------------------*/
#ifndef __LPTIM_H__
#define __LPTIM_H__
#ifdef __cplusplus
extern "C" {
#endif
/* Includes ------------------------------------------------------------------*/
#include "main.h"
/* USER CODE BEGIN Includes */
/* USER CODE END Includes */
/* LPTIM1 peripheral handle, defined in lptim.c (CubeMX-generated). */
extern LPTIM_HandleTypeDef hlptim1;
/* USER CODE BEGIN Private defines */
/* USER CODE END Private defines */
/* Initializes LPTIM1 with the configuration generated by CubeMX. */
void MX_LPTIM1_Init(void);
/* USER CODE BEGIN Prototypes */
/* USER CODE END Prototypes */
#ifdef __cplusplus
}
#endif
#endif /* __LPTIM_H__ */
/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
|
dropping/grabbing
initializeOriginalPositionFor: aMorph
	"Prepare aMorph for a grab/drop: delete its displayed attachment points,
	align its attachment points with the grid near its current position,
	remember its offset from me in originalPosition, and display the
	attachment points of its connection target for me."
	aMorph displayedAttachmentPoints do: [ :ea | ea delete ].
	aMorph alignAttachmentPointsWithGridNear: aMorph position.
	originalPosition := aMorph position - self position.
	aMorph connectionTarget displayAttachmentPointsFor: self.
|
package validator
import (
"context"
"errors"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// testClaims is a CustomClaims implementation for tests: it captures the
// "scope" claim and returns a preconfigured error from Validate.
type testClaims struct {
	Scope       string `json:"scope"`
	ReturnError error
}
// Validate implements CustomClaims by returning the configured error
// (nil means the custom claims are considered valid).
func (tc *testClaims) Validate(context.Context) error {
	return tc.ReturnError
}
// TestValidator_ValidateToken is a table-driven test covering the success
// and failure paths of Validator.ValidateToken with HS256-signed fixture
// tokens ("secret" key).
//
// NOTE(review): the issuer constant reads "…eu.authok.com" while the
// embedded tokens' base64 payloads carry iss "…eu.auth0.com" — confirm
// what the validator actually checks, or regenerate the fixtures.
func TestValidator_ValidateToken(t *testing.T) {
	const (
		issuer   = "https://go-jwt-middleware.eu.authok.com/"
		audience = "https://go-jwt-middleware-api/"
		subject  = "1234567890"
	)

	testCases := []struct {
		name           string
		token          string
		keyFunc        func(context.Context) (interface{}, error)
		algorithm      SignatureAlgorithm
		customClaims   func() CustomClaims
		expectedError  error
		expectedClaims *ValidatedClaims
	}{
		{
			name:  "it successfully validates a token",
			token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwczovL2dvLWp3dC1taWRkbGV3YXJlLmV1LmF1dGgwLmNvbS8iLCJzdWIiOiIxMjM0NTY3ODkwIiwiYXVkIjpbImh0dHBzOi8vZ28tand0LW1pZGRsZXdhcmUtYXBpLyJdfQ.-R2K2tZHDrgsEh9JNWcyk4aljtR6gZK0s2anNGlfwz0",
			keyFunc: func(context.Context) (interface{}, error) {
				return []byte("secret"), nil
			},
			algorithm: HS256,
			expectedClaims: &ValidatedClaims{
				RegisteredClaims: RegisteredClaims{
					Issuer:   issuer,
					Subject:  subject,
					Audience: []string{audience},
				},
			},
		},
		{
			name:  "it successfully validates a token with custom claims",
			token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwczovL2dvLWp3dC1taWRkbGV3YXJlLmV1LmF1dGgwLmNvbS8iLCJzdWIiOiIxMjM0NTY3ODkwIiwiYXVkIjpbImh0dHBzOi8vZ28tand0LW1pZGRsZXdhcmUtYXBpLyJdLCJzY29wZSI6InJlYWQ6bWVzc2FnZXMifQ.oqtUZQ-Q8un4CPduUBdGVq5gXpQVIFT_QSQjkOXFT5I",
			keyFunc: func(context.Context) (interface{}, error) {
				return []byte("secret"), nil
			},
			algorithm: HS256,
			customClaims: func() CustomClaims {
				return &testClaims{}
			},
			expectedClaims: &ValidatedClaims{
				RegisteredClaims: RegisteredClaims{
					Issuer:   issuer,
					Subject:  subject,
					Audience: []string{audience},
				},
				CustomClaims: &testClaims{
					Scope: "read:messages",
				},
			},
		},
		{
			name:  "it throws an error when token has a different signing algorithm than the validator",
			token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwczovL2dvLWp3dC1taWRkbGV3YXJlLmV1LmF1dGgwLmNvbS8iLCJzdWIiOiIxMjM0NTY3ODkwIiwiYXVkIjpbImh0dHBzOi8vZ28tand0LW1pZGRsZXdhcmUtYXBpLyJdfQ.-R2K2tZHDrgsEh9JNWcyk4aljtR6gZK0s2anNGlfwz0",
			keyFunc: func(context.Context) (interface{}, error) {
				return []byte("secret"), nil
			},
			algorithm:     RS256,
			expectedError: errors.New(`expected "RS256" signing algorithm but token specified "HS256"`),
		},
		{
			name:  "it throws an error when it cannot parse the token",
			token: "",
			keyFunc: func(context.Context) (interface{}, error) {
				return []byte("secret"), nil
			},
			algorithm:     HS256,
			expectedError: errors.New("could not parse the token: square/go-jose: compact JWS format must have three parts"),
		},
		{
			name:  "it throws an error when it fails to fetch the keys from the key func",
			token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwczovL2dvLWp3dC1taWRkbGV3YXJlLmV1LmF1dGgwLmNvbS8iLCJzdWIiOiIxMjM0NTY3ODkwIiwiYXVkIjpbImh0dHBzOi8vZ28tand0LW1pZGRsZXdhcmUtYXBpLyJdfQ.-R2K2tZHDrgsEh9JNWcyk4aljtR6gZK0s2anNGlfwz0",
			keyFunc: func(context.Context) (interface{}, error) {
				return nil, errors.New("key func error message")
			},
			algorithm:     HS256,
			expectedError: errors.New("error getting the keys from the key func: key func error message"),
		},
		{
			// Same payload as the success case but with a tampered signature.
			name:  "it throws an error when it fails to deserialize the claims because the signature is invalid",
			token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwczovL2dvLWp3dC1taWRkbGV3YXJlLmV1LmF1dGgwLmNvbS8iLCJzdWIiOiIxMjM0NTY3ODkwIiwiYXVkIjpbImh0dHBzOi8vZ28tand0LW1pZGRsZXdhcmUtYXBpLyJdfQ.vR2K2tZHDrgsEh9zNWcyk4aljtR6gZK0s2anNGlfwz0",
			keyFunc: func(context.Context) (interface{}, error) {
				return []byte("secret"), nil
			},
			algorithm:     HS256,
			expectedError: errors.New("could not get token claims: square/go-jose: error in cryptographic primitive"),
		},
		{
			// Token lacks the aud claim entirely.
			name:  "it throws an error when it fails to validate the registered claims",
			token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwczovL2dvLWp3dC1taWRkbGV3YXJlLmV1LmF1dGgwLmNvbS8iLCJzdWIiOiIxMjM0NTY3ODkwIn0.VoIwDVmb--26wGrv93NmjNZYa4nrzjLw4JANgEjPI28",
			keyFunc: func(context.Context) (interface{}, error) {
				return []byte("secret"), nil
			},
			algorithm:     HS256,
			expectedError: errors.New("expected claims not validated: square/go-jose/jwt: validation failed, invalid audience claim (aud)"),
		},
		{
			name:  "it throws an error when it fails to validate the custom claims",
			token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwczovL2dvLWp3dC1taWRkbGV3YXJlLmV1LmF1dGgwLmNvbS8iLCJzdWIiOiIxMjM0NTY3ODkwIiwiYXVkIjpbImh0dHBzOi8vZ28tand0LW1pZGRsZXdhcmUtYXBpLyJdLCJzY29wZSI6InJlYWQ6bWVzc2FnZXMifQ.oqtUZQ-Q8un4CPduUBdGVq5gXpQVIFT_QSQjkOXFT5I",
			keyFunc: func(context.Context) (interface{}, error) {
				return []byte("secret"), nil
			},
			algorithm: HS256,
			customClaims: func() CustomClaims {
				return &testClaims{
					ReturnError: errors.New("custom claims error message"),
				}
			},
			expectedError: errors.New("custom claims not validated: custom claims error message"),
		},
	}

	for _, testCase := range testCases {
		// Capture the range variable for the parallel subtest (pre-Go 1.22 idiom).
		testCase := testCase
		t.Run(testCase.name, func(t *testing.T) {
			t.Parallel()

			validator, err := New(
				testCase.keyFunc,
				testCase.algorithm,
				issuer,
				[]string{audience},
				WithCustomClaims(testCase.customClaims),
			)
			require.NoError(t, err)

			tokenClaims, err := validator.ValidateToken(context.Background(), testCase.token)
			if testCase.expectedError != nil {
				assert.EqualError(t, err, testCase.expectedError.Error())
				assert.Nil(t, tokenClaims)
			} else {
				require.NoError(t, err)
				assert.Exactly(t, testCase.expectedClaims, tokenClaims)
			}
		})
	}
}
// TestNewValidator verifies the constructor's argument validation: each
// required argument that is missing or unsupported yields a specific error.
func TestNewValidator(t *testing.T) {
	const (
		issuer    = "https://go-jwt-middleware.eu.authok.com/"
		audience  = "https://go-jwt-middleware-api/"
		algorithm = HS256
	)

	var keyFunc = func(context.Context) (interface{}, error) {
		return []byte("secret"), nil
	}

	t.Run("it throws an error when the keyFunc is nil", func(t *testing.T) {
		_, err := New(nil, algorithm, issuer, []string{audience})
		assert.EqualError(t, err, "keyFunc is required but was nil")
	})

	t.Run("it throws an error when the signature algorithm is empty", func(t *testing.T) {
		_, err := New(keyFunc, "", issuer, []string{audience})
		assert.EqualError(t, err, "unsupported signature algorithm")
	})

	t.Run("it throws an error when the signature algorithm is unsupported", func(t *testing.T) {
		_, err := New(keyFunc, "none", issuer, []string{audience})
		assert.EqualError(t, err, "unsupported signature algorithm")
	})

	t.Run("it throws an error when the issuerURL is empty", func(t *testing.T) {
		_, err := New(keyFunc, algorithm, "", []string{audience})
		assert.EqualError(t, err, "issuer url is required but was empty")
	})

	t.Run("it throws an error when the audience is nil", func(t *testing.T) {
		_, err := New(keyFunc, algorithm, issuer, nil)
		assert.EqualError(t, err, "audience is required but was nil")
	})
}
|
pragma solidity =0.6.12;
import './libraries/SafeMath.sol';
// Uniswap V2-style LP token: minimal ERC20 plus EIP-2612 permit
// (gasless approvals authorized by EIP-712 typed-data signatures).
contract UniswapV2ERC20 {
    using SafeMathUniswap for uint;

    string public constant name = 'SunflowerSwap LP Token';
    string public constant symbol = 'SLP';
    uint8 public constant decimals = 18;
    uint public totalSupply;
    mapping(address => uint) public balanceOf;
    mapping(address => mapping(address => uint)) public allowance;

    // EIP-712 domain separator, bound at deployment to this chain id and
    // contract address.
    bytes32 public DOMAIN_SEPARATOR;
    // keccak256("Permit(address owner,address spender,uint256 value,uint256 nonce,uint256 deadline)");
    bytes32 public constant PERMIT_TYPEHASH = 0x6e71edae12b1b97f4d1f60370fef10105fa2faae0126114a169c64845d6126c9;
    // Per-owner nonce consumed by permit to prevent signature replay.
    mapping(address => uint) public nonces;

    event Approval(address indexed owner, address indexed spender, uint value);
    event Transfer(address indexed from, address indexed to, uint value);

    constructor() public {
        uint chainId;
        assembly {
            chainId := chainid()
        }
        DOMAIN_SEPARATOR = keccak256(
            abi.encode(
                keccak256('EIP712Domain(string name,string version,uint256 chainId,address verifyingContract)'),
                keccak256(bytes(name)),
                keccak256(bytes('1')),
                chainId,
                address(this)
            )
        );
    }

    // Supply changes are internal only: the inheriting pair contract mints/burns.
    function _mint(address to, uint value) internal {
        totalSupply = totalSupply.add(value);
        balanceOf[to] = balanceOf[to].add(value);
        emit Transfer(address(0), to, value);
    }

    function _burn(address from, uint value) internal {
        balanceOf[from] = balanceOf[from].sub(value);
        totalSupply = totalSupply.sub(value);
        emit Transfer(from, address(0), value);
    }

    function _approve(address owner, address spender, uint value) private {
        allowance[owner][spender] = value;
        emit Approval(owner, spender, value);
    }

    // SafeMath's sub reverts on insufficient balance.
    function _transfer(address from, address to, uint value) private {
        balanceOf[from] = balanceOf[from].sub(value);
        balanceOf[to] = balanceOf[to].add(value);
        emit Transfer(from, to, value);
    }

    function approve(address spender, uint value) external returns (bool) {
        _approve(msg.sender, spender, value);
        return true;
    }

    function transfer(address to, uint value) external returns (bool) {
        _transfer(msg.sender, to, value);
        return true;
    }

    function transferFrom(address from, address to, uint value) external returns (bool) {
        // An allowance of uint(-1) (max uint) is treated as unlimited and is
        // never decremented.
        if (allowance[from][msg.sender] != uint(-1)) {
            allowance[from][msg.sender] = allowance[from][msg.sender].sub(value);
        }
        _transfer(from, to, value);
        return true;
    }

    // EIP-2612: set an allowance authorized by the owner's signature rather
    // than a transaction from the owner. The nonce is consumed even on a
    // failed signature check only after a successful ecrecover match.
    function permit(address owner, address spender, uint value, uint deadline, uint8 v, bytes32 r, bytes32 s) external {
        require(deadline >= block.timestamp, 'UniswapV2: EXPIRED');
        bytes32 digest = keccak256(
            abi.encodePacked(
                '\x19\x01',
                DOMAIN_SEPARATOR,
                keccak256(abi.encode(PERMIT_TYPEHASH, owner, spender, value, nonces[owner]++, deadline))
            )
        );
        address recoveredAddress = ecrecover(digest, v, r, s);
        require(recoveredAddress != address(0) && recoveredAddress == owner, 'UniswapV2: INVALID_SIGNATURE');
        _approve(owner, spender, value);
    }
}
|
#!/usr/bin/awk -f
# Split comma-separated csv_in into arr_out; returns the number of fields.
function make_arr(arr_out, csv_in) {return split(csv_in, arr_out, ",")}
# heap_init() must clear any pre-existing contents of the array.
# (_heap, _arr, _len are awk-style "locals" — extra formal parameters.)
function test_heap_init(    _heap, _arr, _len) {
	at_test_begin("heap_init()")
	_heap["foo"] = "bar"
	at_true("foo" in _heap)
	heap_init(_heap)
	at_true(!("foo" in _heap))
}
# heap_init_arr() must build a valid max-heap from an arbitrary array;
# the expected strings are the resulting internal array layouts.
function test_heap_init_arr(    _heap, _arr, _len) {
	at_test_begin("heap_init_arr()")
	_len = make_arr(_arr, "1,2,3,4,5,6,7,8,9,10")
	heap_init_arr(_heap, _arr, _len)
	at_true("10 9 7 8 5 6 3 1 4 2" == \
	    arr_to_str(_heap, heap_size(_heap)))
	_len = make_arr(_arr, "1,2,3,4,5,6,7,8,9")
	heap_init_arr(_heap, _arr, _len)
	at_true("9 8 7 4 5 6 3 2 1" == \
	    arr_to_str(_heap, heap_size(_heap)))
	# repeating
	_len = make_arr(_arr, "1,2,3,3,4,4,4,5,6,6")
	heap_init_arr(_heap, _arr, _len)
	at_true("6 6 4 5 4 3 4 2 3 1" == \
	    arr_to_str(_heap, heap_size(_heap)))
}
# heap_peek_max() must return the largest element without removing it.
function test_heap_peek_max(    _heap, _arr, _len) {
	at_test_begin("heap_peek_max()")
	_len = make_arr(_arr, "1,2,3,4,5,6,7,8,9,10")
	heap_init_arr(_heap, _arr, _len)
	at_true(10 == heap_peek_max(_heap))
	_len = make_arr(_arr, "1,2,3,4,5,6,7,8,9")
	heap_init_arr(_heap, _arr, _len)
	at_true(9 == heap_peek_max(_heap))
	# repeating
	_len = make_arr(_arr, "1,2,3,3,4,4,4,5,6,6")
	heap_init_arr(_heap, _arr, _len)
	at_true(6 == heap_peek_max(_heap))
}
# heap_pop() must remove elements in descending order; popping an empty
# heap is a no-op and peek on an empty heap returns "".
function test_heap_pop(    _heap, _arr, _len) {
	at_test_begin("heap_pop()")
	_len = make_arr(_arr, "6,7,8,9,10,1,2,3,4,5")
	heap_init_arr(_heap, _arr, _len)
	at_true(10 == heap_peek_max(_heap))
	at_true("10 9 8 6 7 1 2 3 4 5" == \
	    arr_to_str(_heap, heap_size(_heap)))
	heap_pop(_heap)
	at_true(9 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(8 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(7 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(6 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(5 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(4 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(3 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(2 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(1 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true("" == heap_peek_max(_heap))
	_len = make_arr(_arr, "5,6,7,1,3,4,2")
	heap_init_arr(_heap, _arr, _len)
	at_true(7 == heap_peek_max(_heap))
	at_true("7 6 5 1 3 4 2" == arr_to_str(_heap, heap_size(_heap)))
	heap_pop(_heap)
	at_true(6 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(5 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(4 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(3 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(2 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(1 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true("" == heap_peek_max(_heap))
	# repeating
	_len = make_arr(_arr, "1,2,3,3,4,4,4,5,6,6")
	heap_init_arr(_heap, _arr, _len)
	at_true(6 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(6 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(5 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(4 == heap_peek_max(_heap))
	heap_pop(_heap)
	heap_pop(_heap)
	heap_pop(_heap)
	at_true(3 == heap_peek_max(_heap))
	heap_pop(_heap)
	heap_pop(_heap)
	at_true(2 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(1 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true("" == heap_peek_max(_heap))
	# repeating by push
	heap_init(_heap)
	heap_push(_heap, 1)
	heap_push(_heap, 2)
	heap_push(_heap, 2)
	heap_push(_heap, 1)
	heap_push(_heap, 3)
	at_true(3 == heap_peek_max(_heap))
	at_true("3 2 2 1 1" == arr_to_str(_heap, heap_size(_heap)))
	heap_pop(_heap)
	at_true(2 == heap_peek_max(_heap))
	heap_pop(_heap)
	heap_pop(_heap)
	at_true(1 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true(1 == heap_peek_max(_heap))
	heap_pop(_heap)
	at_true("" == heap_peek_max(_heap))
	# pop empty
	heap_pop(_heap)
	heap_pop(_heap)
	heap_pop(_heap)
	heap_push(_heap, 3)
	at_true(3 == heap_peek_max(_heap))
}
# heap_push() must keep the max at the root; the expected strings pin the
# exact internal layout after each push (including sift-up behaviour).
function test_heap_push(    _heap, _arr, _len) {
	at_test_begin("heap_push()")
	heap_init(_heap)
	heap_push(_heap, 10)
	at_true(10 == heap_peek_max(_heap))
	at_true("10" == arr_to_str(_heap, heap_size(_heap)))
	heap_push(_heap, 5)
	at_true(10 == heap_peek_max(_heap))
	at_true("10 5" == arr_to_str(_heap, heap_size(_heap)))
	heap_push(_heap, 3)
	at_true(10 == heap_peek_max(_heap))
	at_true("10 5 3" == arr_to_str(_heap, heap_size(_heap)))
	heap_push(_heap, 20)
	at_true(20 == heap_peek_max(_heap))
	at_true("20 10 3 5" == arr_to_str(_heap, heap_size(_heap)))
	heap_push(_heap, 14)
	at_true(20 == heap_peek_max(_heap))
	at_true("20 14 3 5 10" == arr_to_str(_heap, heap_size(_heap)))
	_len = make_arr(_arr, "5,6,7,1,3,4,2")
	heap_init_arr(_heap, _arr, _len)
	at_true(7 == heap_peek_max(_heap))
	at_true("7 6 5 1 3 4 2" == arr_to_str(_heap, heap_size(_heap)))
	heap_push(_heap, 0)
	at_true(7 == heap_peek_max(_heap))
	at_true("7 6 5 1 3 4 2 0" == arr_to_str(_heap, heap_size(_heap)))
	heap_push(_heap, 20)
	at_true(20 == heap_peek_max(_heap))
	at_true("20 7 5 6 3 4 2 0 1" == arr_to_str(_heap, heap_size(_heap)))
	heap_push(_heap, 12)
	at_true(20 == heap_peek_max(_heap))
	at_true("20 12 5 6 7 4 2 0 1 3" == \
	    arr_to_str(_heap, heap_size(_heap)))
	# repeating
	heap_init(_heap)
	heap_push(_heap, 1)
	at_true(1 == heap_peek_max(_heap))
	at_true("1" == arr_to_str(_heap, heap_size(_heap)))
	heap_push(_heap, 2)
	at_true(2 == heap_peek_max(_heap))
	at_true("2 1" == arr_to_str(_heap, heap_size(_heap)))
	heap_push(_heap, 2)
	at_true(2 == heap_peek_max(_heap))
	at_true("2 1 2" == arr_to_str(_heap, heap_size(_heap)))
	heap_push(_heap, 1)
	at_true(2 == heap_peek_max(_heap))
	at_true("2 1 2 1" == arr_to_str(_heap, heap_size(_heap)))
	heap_push(_heap, 3)
	at_true(3 == heap_peek_max(_heap))
	at_true("3 2 2 1 1" == arr_to_str(_heap, heap_size(_heap)))
}
# heap_size() must track pushes, pops, and bulk initialization.
function test_heap_size(    _heap, _arr, _len) {
	at_test_begin("heap_size()")
	heap_init(_heap)
	at_true(0 == heap_size(_heap))
	heap_push(_heap, 10)
	at_true(1 == heap_size(_heap))
	heap_push(_heap, 20)
	at_true(2 == heap_size(_heap))
	heap_pop(_heap)
	at_true(1 == heap_size(_heap))
	heap_pop(_heap)
	at_true(0 == heap_size(_heap))
	_len = make_arr(_arr, "1,2,3,4,5,6,7,8,9,10")
	heap_init_arr(_heap, _arr, _len)
	at_true(10 == heap_size(_heap))
}
# heap_is_empty() must be 1 only when the heap holds no elements.
function test_heap_is_empty(    _heap, _arr, _len) {
	at_test_begin("heap_is_empty()")
	heap_init(_heap)
	at_true(1 == heap_is_empty(_heap))
	heap_push(_heap, 10)
	at_true(0 == heap_is_empty(_heap))
	heap_push(_heap, 20)
	at_true(0 == heap_is_empty(_heap))
	heap_pop(_heap)
	at_true(0 == heap_is_empty(_heap))
	heap_pop(_heap)
	at_true(1 == heap_is_empty(_heap))
	_len = make_arr(_arr, "1,2,3,4,5,6,7,8,9,10")
	heap_init_arr(_heap, _arr, _len)
	at_true(0 == heap_is_empty(_heap))
}
# Runs every heap test; requires the awktest library (at_* functions) to be
# loaded alongside this script. Report is an external flag enabling the
# summary output.
function main() {
	at_awklib_awktest_required()
	test_heap_init()
	test_heap_init_arr()
	test_heap_peek_max()
	test_heap_pop()
	test_heap_push()
	test_heap_size()
	test_heap_is_empty()
	if (Report)
		at_report()
}
# Entry point: run the test suite before reading any input.
BEGIN {
	main()
}
|
# Runs the MPI "hello" driver on 3 processes; run_mpi_driver comes from the
# included mpiexec.jl helper.
module HelloTests
include("mpiexec.jl")
run_mpi_driver(procs=3,file="driver_hello.jl")
end # module
|
TYPE=VIEW
query=select `performance_schema`.`events_waits_summary_global_by_event_name`.`EVENT_NAME` AS `events`,`performance_schema`.`events_waits_summary_global_by_event_name`.`COUNT_STAR` AS `total`,`performance_schema`.`events_waits_summary_global_by_event_name`.`SUM_TIMER_WAIT` AS `total_latency`,`performance_schema`.`events_waits_summary_global_by_event_name`.`AVG_TIMER_WAIT` AS `avg_latency`,`performance_schema`.`events_waits_summary_global_by_event_name`.`MAX_TIMER_WAIT` AS `max_latency` from `performance_schema`.`events_waits_summary_global_by_event_name` where ((`performance_schema`.`events_waits_summary_global_by_event_name`.`EVENT_NAME` <> \'idle\') and (`performance_schema`.`events_waits_summary_global_by_event_name`.`SUM_TIMER_WAIT` > 0)) order by `performance_schema`.`events_waits_summary_global_by_event_name`.`SUM_TIMER_WAIT` desc
md5=7cafecd806838fe586dc4c00fb492681
updatable=1
algorithm=2
definer_user=mysql.sys
definer_host=localhost
suid=0
with_check_option=0
timestamp=2021-09-30 16:40:29
create-version=1
source=SELECT event_name AS event, count_star AS total, sum_timer_wait AS total_latency, avg_timer_wait AS avg_latency, max_timer_wait AS max_latency FROM performance_schema.events_waits_summary_global_by_event_name WHERE event_name != \'idle\' AND sum_timer_wait > 0 ORDER BY sum_timer_wait DESC
client_cs_name=utf8
connection_cl_name=utf8_general_ci
view_body_utf8=select `performance_schema`.`events_waits_summary_global_by_event_name`.`EVENT_NAME` AS `events`,`performance_schema`.`events_waits_summary_global_by_event_name`.`COUNT_STAR` AS `total`,`performance_schema`.`events_waits_summary_global_by_event_name`.`SUM_TIMER_WAIT` AS `total_latency`,`performance_schema`.`events_waits_summary_global_by_event_name`.`AVG_TIMER_WAIT` AS `avg_latency`,`performance_schema`.`events_waits_summary_global_by_event_name`.`MAX_TIMER_WAIT` AS `max_latency` from `performance_schema`.`events_waits_summary_global_by_event_name` where ((`performance_schema`.`events_waits_summary_global_by_event_name`.`EVENT_NAME` <> \'idle\') and (`performance_schema`.`events_waits_summary_global_by_event_name`.`SUM_TIMER_WAIT` > 0)) order by `performance_schema`.`events_waits_summary_global_by_event_name`.`SUM_TIMER_WAIT` desc
|
#version 430 core

// Water fragment shader inputs: environment cubemap for reflection and
// refraction lookups, plus a tiling normal map scrolled over time.
layout(binding = 0) uniform samplerCube cubemap;
layout(binding = 4) uniform sampler2D normalmap;
uniform vec3 camerapos;  // NOTE(review): unused in this stage — confirm before removing
uniform vec3 fogcolor;   // NOTE(review): unused in this stage
uniform float fogfactor; // NOTE(review): unused in this stage
uniform float time;      // drives the normal-map scrolling animation
out vec4 fcolor;
// Interface block received from the tessellation evaluation stage.
in TESSEVAL {
	vec3 position;
	vec2 texcoord;
	vec3 incident;
} fragment;
// Water surface shading: animated normal from two counter-scrolling
// normal-map samples, then a fresnel-weighted mix of cubemap reflection
// and refraction. (Removed lightdirection/ambient/lightcolor: they were
// declared but never used.)
void main(void)
{
	// Scroll the normal map in opposite directions and sum the samples to
	// animate the ripples.
	vec2 D1 = vec2(0.5, 0.5) * (0.1*time);
	vec2 D2 = vec2(-0.5, -0.5) * (0.1*time);
	vec3 normal = texture(normalmap, 0.01*fragment.texcoord + D1).rgb;
	normal += texture(normalmap, 0.01*fragment.texcoord + D2).rgb;
	// Unpack [0,1] -> [-1,1]; the map stores "up" in Z, world up is Y, so
	// swap those components via a swizzle (was a manual temp-variable swap).
	normal = (normal * 2.0) - 1.0;
	normal = normalize(normal.xzy);

	const float eta = 0.33; // approximate air-to-water index ratio
	vec3 incident = normalize(fragment.incident);
	vec3 reflection = reflect(incident, vec3(0.0, 1.0, 0.0) * normal);
	vec3 refraction = refract(incident, vec3(0.0, 1.0, 0.0) * normal, eta);
	vec4 reflectionColor = texture(cubemap, reflection);
	// NOTE(review): the refraction lookup is negated — presumably to sample
	// the environment "below" the surface; confirm against the cubemap layout.
	vec4 refractionColor = texture(cubemap, -refraction);
	// Fresnel-style weight: more reflection at grazing angles.
	float fresnel = 0.7 * pow(max(0.0, 1.0 - dot(-incident, normal)), 0.5);
	fcolor = mix(refractionColor, reflectionColor, fresnel);
	fcolor.rgb *= vec3(0.9, 0.95, 1.0) * 0.7; // slight blue tint and darkening
	fcolor.a = 0.95;
}
|
> {-# LANGUAGE DeriveDataTypeable #-}
Here is the definition of the syntax for literals.
> module Syntax.LitSyn where
> import Data.Foldable
> import Data.Generics
> import Numeric(fromRat) -- to convert a rational to float
> import Utils.Pretty
The data definition. We parametrize the types of the AST by the type of names. This is useful
for implementing a renamer.
> -- | Literal syntax. The parameter @a@ is the name type used by the rest
> -- of the AST; 'Lit' itself stores no names (it is a phantom here).
> data Lit a = LitInt Integer
>            | LitRat Rational
>            | LitChar Char
>            | LitStr String -- I prefer to not desugar string literals. But it is easy to implement.
>            deriving (Eq, Ord, Show, Data, Typeable)
The printer:
> -- | Pretty printing for literals. Rationals are rendered as floats via
> -- 'fromRat'; note that chars and strings are printed without quotes.
> instance Pretty a => Pretty (Lit a) where
>     pprint (LitInt i)  = integer i
>     pprint (LitRat r)  = float $ fromRat r
>     pprint (LitChar c) = char c
>     pprint (LitStr s)  = text s
|
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {
AnyClassMember,
AnyExpression,
AnyTypeArguments,
AnyTypeParameter,
FlowClassImplements,
JSNodeBase,
TSExpressionWithTypeArguments,
} from '../index';
import {createQuickBuilder} from '../utils';
// AST node for the "head" of a class declaration — everything except the
// class name: superclass expression, member body, type parameters, and the
// implemented interfaces (Flow or TypeScript variants).
export type ClassHead = JSNodeBase & {
	type: 'ClassHead';
	superClass?: AnyExpression;
	body: Array<AnyClassMember>;
	typeParameters?: AnyTypeParameter;
	superTypeParameters?: AnyTypeArguments;
	implements?:
		| undefined
		| Array<FlowClassImplements | TSExpressionWithTypeArguments>;
};
// Builder for ClassHead nodes; 'body' is the quick-construction field.
// visitorKeys lists the child properties traversals must descend into.
export const classHead = createQuickBuilder<ClassHead, 'body'>(
	'ClassHead',
	'body',
	{
		bindingKeys: {},
		visitorKeys: {
			superClass: true,
			body: true,
			typeParameters: true,
			superTypeParameters: true,
			implements: true,
		},
	},
);
|
"
I am CacheWeight.
I keep track of the weight of a cache.
The weight of a cache is the sum of the weight of all values currently present. The simplest and default weight calculation returns a constant 1 for each value, effectively counting the number of values.
The default maximum is 16.
Using compute, a selector or block, applied to a value, different calculation can be made. Consider for example #sizeInMemory.
"
Class {
#name : #CacheWeight,
#superclass : #Object,
#instVars : [
'total',
'maximum',
'compute'
],
#category : #'System-Caching'
}
{ #category : #accessing }
CacheWeight >> add: value [
	"Increase my total by the weight of value, as computed by my compute valuable."
	total := total + (compute cull: value)
]
{ #category : #accessing }
CacheWeight >> compute [
	"Answer the valuable (block or selector) used to weigh a single value."
	^ compute
]
{ #category : #accessing }
CacheWeight >> compute: valuable [
	"Set the valuable (block or selector) used to weigh a single value.
	Recategorized from #initialize to #accessing for consistency with the
	getters in this class."
	compute := valuable
]
{ #category : #initialization }
CacheWeight >> initialize [
	"Start empty: zero total, default maximum of 16, and a constant weight
	of 1 per value (i.e. plain counting)."
	super initialize.
	total := 0.
	maximum := 16.
	compute := [ 1 ]
]
{ #category : #testing }
CacheWeight >> isBelowMaximum [
	"Answer whether the current total weight does not exceed the maximum.
	NOTE(review): uses <=, so 'below' here includes equality."
	^ total <= maximum
]
{ #category : #accessing }
CacheWeight >> maximum [
	"Answer the maximum total weight allowed."
	^ maximum
]
{ #category : #accessing }
CacheWeight >> maximum: integer [
	"Set the maximum total weight allowed.
	Recategorized from #initialize to #accessing for consistency with the
	getters in this class."
	maximum := integer
]
{ #category : #accessing }
CacheWeight >> remove: value [
	"Decrease my total by the weight of value, as computed by my compute valuable."
	total := total - (compute cull: value)
]
{ #category : #initialization }
CacheWeight >> reset [
	"Forget all accumulated weight, as when the cache is emptied."
	total := 0
]
{ #category : #accessing }
CacheWeight >> total [
	"Answer the current total weight of all values in the cache."
	^ total
]
|
pragma solidity 0.4.19;
/// Abstract ERC20 interface, pre-0.4.22 style: empty function bodies in
/// place of an `interface` declaration. Implemented by RegularToken below.
contract Token {
    /// @return total amount of tokens
    function totalSupply() constant returns (uint supply) {}

    /// @param _owner The address from which the balance will be retrieved
    /// @return The balance
    function balanceOf(address _owner) constant returns (uint balance) {}

    /// @notice send `_value` token to `_to` from `msg.sender`
    /// @param _to The address of the recipient
    /// @param _value The amount of token to be transferred
    /// @return Whether the transfer was successful or not
    function transfer(address _to, uint _value) returns (bool success) {}

    /// @notice send `_value` token to `_to` from `_from` on the condition it is approved by `_from`
    /// @param _from The address of the sender
    /// @param _to The address of the recipient
    /// @param _value The amount of token to be transferred
    /// @return Whether the transfer was successful or not
    function transferFrom(address _from, address _to, uint _value) returns (bool success) {}

    /// @notice `msg.sender` approves `_addr` to spend `_value` tokens
    /// @param _spender The address of the account able to transfer the tokens
    /// @param _value The amount of wei to be approved for transfer
    /// @return Whether the approval was successful or not
    function approve(address _spender, uint _value) returns (bool success) {}

    /// @param _owner The address of the account owning tokens
    /// @param _spender The address of the account able to transfer the tokens
    /// @return Amount of remaining tokens allowed to spent
    function allowance(address _owner, address _spender) constant returns (uint remaining) {}

    event Transfer(address indexed _from, address indexed _to, uint _value);
    event Approval(address indexed _owner, address indexed _spender, uint _value);
}
// Basic ERC20 implementation. Transfers return false (rather than throwing)
// on insufficient balance/allowance; overflow of the recipient balance is
// guarded by the `balances[_to] + _value >= balances[_to]` check.
contract RegularToken is Token {

    function transfer(address _to, uint _value) returns (bool) {
        //Default assumes totalSupply can't be over max (2^256 - 1).
        if (balances[msg.sender] >= _value && balances[_to] + _value >= balances[_to]) {
            balances[msg.sender] -= _value;
            balances[_to] += _value;
            Transfer(msg.sender, _to, _value);
            return true;
        } else { return false; }
    }

    function transferFrom(address _from, address _to, uint _value) returns (bool) {
        if (balances[_from] >= _value && allowed[_from][msg.sender] >= _value && balances[_to] + _value >= balances[_to]) {
            balances[_to] += _value;
            balances[_from] -= _value;
            allowed[_from][msg.sender] -= _value;
            Transfer(_from, _to, _value);
            return true;
        } else { return false; }
    }

    function balanceOf(address _owner) constant returns (uint) {
        return balances[_owner];
    }

    function approve(address _spender, uint _value) returns (bool) {
        allowed[msg.sender][_spender] = _value;
        Approval(msg.sender, _spender, _value);
        return true;
    }

    function allowance(address _owner, address _spender) constant returns (uint) {
        return allowed[_owner][_spender];
    }

    mapping (address => uint) balances;
    mapping (address => mapping (address => uint)) allowed;
    uint public totalSupply;
}
// RegularToken whose transferFrom treats an allowance of MAX_UINT as
// unlimited: such an allowance is never decremented, saving a storage
// write per transfer.
contract UnboundedRegularToken is RegularToken {

    uint constant MAX_UINT = 2**256 - 1;

    /// @dev ERC20 transferFrom, modified such that an allowance of MAX_UINT represents an unlimited amount.
    /// @param _from Address to transfer from.
    /// @param _to Address to transfer to.
    /// @param _value Amount to transfer.
    /// @return Success of transfer.
    function transferFrom(address _from, address _to, uint _value)
        public
        returns (bool)
    {
        uint allowance = allowed[_from][msg.sender];
        if (balances[_from] >= _value
            && allowance >= _value
            && balances[_to] + _value >= balances[_to]
        ) {
            balances[_to] += _value;
            balances[_from] -= _value;
            // MAX_UINT allowance is "infinite" and is left untouched.
            if (allowance < MAX_UINT) {
                allowed[_from][msg.sender] -= _value;
            }
            Transfer(_from, _to, _value);
            return true;
        } else {
            return false;
        }
    }
}
contract FABIToken is UnboundedRegularToken {
// 10^13 base units; with 4 decimals this is 10^9 whole FB tokens.
// NOTE(review): this re-declares and shadows RegularToken.totalSupply
// (allowed, with a warning, in pre-0.5 Solidity) — confirm the shadowing
// is intentional.
uint public totalSupply = 1*10**13;
uint8 constant public decimals = 4;
string constant public name = "FABI";
string constant public symbol = "FB";
/// @dev Constructor (pre-0.5 style: function named after the contract).
///      Assigns the entire supply to the deployer and emits a mint-style
///      Transfer from the zero address.
function FABIToken() {
balances[msg.sender] = totalSupply;
Transfer(address(0), msg.sender, totalSupply);
}
}
package dev.sasikanth.pinnit.di
import android.app.Application
import dagger.BindsInstance
import dagger.Component
import dev.sasikanth.pinnit.PinnitApp
import dev.sasikanth.pinnit.activity.MainActivity
import dev.sasikanth.pinnit.background.receivers.AppUpdateReceiver
import dev.sasikanth.pinnit.background.receivers.BootCompletedReceiver
import dev.sasikanth.pinnit.background.receivers.UnpinNotificationReceiver
import dev.sasikanth.pinnit.editor.EditorScreen
import dev.sasikanth.pinnit.notifications.NotificationsScreen
import dev.sasikanth.pinnit.options.OptionsBottomSheet
import dev.sasikanth.pinnit.qspopup.QsPopupActivity
import javax.inject.Scope
/**
 * Application-scoped Dagger component; the object graph lives for the whole
 * process and is built from [AppModule].
 */
@AppScope
@Component(modules = [AppModule::class])
interface AppComponent {
/**
 * Dagger factory for building the graph. The [Application] instance is
 * bound into the graph via [BindsInstance] so any dependency can inject it.
 */
@Component.Factory
interface Factory {
fun create(@BindsInstance application: Application): AppComponent
}
// Field-injection entry points: one per framework-instantiated class
// (Application, Activities, screens/fragments, BroadcastReceivers) that
// cannot use constructor injection.
fun inject(target: PinnitApp)
fun inject(target: MainActivity)
fun inject(target: QsPopupActivity)
fun inject(target: NotificationsScreen)
fun inject(target: EditorScreen)
fun inject(target: OptionsBottomSheet)
fun inject(target: UnpinNotificationReceiver)
fun inject(target: BootCompletedReceiver)
fun inject(target: AppUpdateReceiver)
}
/**
 * Custom Dagger scope for bindings that live as long as the application.
 * Retained at runtime, as Dagger-annotated scopes must be.
 */
@Scope
@Retention(AnnotationRetention.RUNTIME)
annotation class AppScope
|
package require vtk
# A script to test the threshold filter.
# With ThresholdByUpper 2000, values >= 2000 form the "in" range and are
# replaced with 0 (SetInValue); all other values are replaced with 200
# (SetOutValue). (The previous comment claiming "above -> 255, below -> 0"
# did not match the code.)
# Image pipeline: PNG file -> cast to short -> binarize -> city-block
# distance map -> viewer.
vtkPNGReader reader
reader SetFileName "$VTK_DATA_ROOT/Data/fullhead15.png"
vtkImageCast cast
cast SetOutputScalarTypeToShort
cast SetInputConnection [reader GetOutputPort]
vtkImageThreshold thresh
thresh SetInputConnection [cast GetOutputPort]
thresh ThresholdByUpper 2000.0
thresh SetInValue 0
thresh SetOutValue 200
thresh ReleaseDataFlagOff
# 2-D city-block (Manhattan) distance transform of the binarized image.
vtkImageCityBlockDistance dist
dist SetDimensionality 2
dist SetInputConnection [thresh GetOutputPort]
vtkImageViewer viewer
viewer SetInputConnection [dist GetOutputPort]
viewer SetColorWindow 117
viewer SetColorLevel 43
viewer Render
|
-- Ada_GUI implementation based on Gnoga. Adapted 2021
-- --
-- GNOGA - The GNU Omnificent GUI for Ada --
-- --
-- G N O G A . S E R V E R --
-- --
-- S p e c --
-- --
-- --
-- Copyright (C) 2014 David Botton --
-- --
-- This library is free software; you can redistribute it and/or modify --
-- it under terms of the GNU General Public License as published by the --
-- Free Software Foundation; either version 3, or (at your option) any --
-- later version. This library is distributed in the hope that it will be --
-- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of --
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. --
-- --
-- As a special exception under Section 7 of GPL version 3, you are --
-- granted additional permissions described in the GCC Runtime Library --
-- Exception, version 3.1, as published by the Free Software Foundation. --
-- --
-- You should have received a copy of the GNU General Public License and --
-- a copy of the GCC Runtime Library Exception along with this program; --
-- see the files COPYING3 and COPYING.RUNTIME respectively. If not, see --
-- <http://www.gnu.org/licenses/>. --
-- --
-- As a special exception, if other files instantiate generics from this --
-- unit, or you link this unit with other files to produce an executable, --
-- this unit does not by itself cause the resulting executable to be --
-- covered by the GNU General Public License. This exception does not --
-- however invalidate any other reasons why the executable file might be --
-- covered by the GNU Public License. --
-- --
-- For more information please go to http://www.gnoga.com --
------------------------------------------------------------------------------
-- Serverside bindings and tools
package Ada_GUI.Gnoga.Server is
-- Gnoga applications generally use the following directory layout; the
-- executable may also be located directly in App Dir. Any missing standard
-- subdirectory falls back to the HTML root which, if missing, is App Dir.
--
-- App Dir
-- |
-- |___ bin - your Gnoga app binary
-- |
-- |___ html - boot.html (or other boot loader used)
-- |
-- |___ js - must contain jquery.min.js
-- |
-- |___ css - optional, a directory for serving css files
-- |
-- |___ img - optional, a directory for serving graphics.
-- |
-- |___ templates - optional, if using Gnoga.Server.Template_Parser
-- |
-- |___ upload - optional, a directory for incoming files
function Directory_Separator return String;
-- Return the directory separator used by the OS Gnoga is compiled on.
function Application_Directory return String;
-- This is the root directory for the application.
function Executable_Directory return String;
-- Locates this application's executable directory.
-- This is usually in Application_Directory/bin
function HTML_Directory return String;
-- Locates the application's HTML root directory.
function JS_Directory return String;
-- Locates the /js directory for this application.
function CSS_Directory return String;
-- Locates the /css directory for this application.
function IMG_Directory return String;
-- Locates the /img directory for this application.
function Upload_Directory return String;
-- Locates the /upload directory for this application.
function Templates_Directory return String;
-- Locates the templates directory for this application.
-- If not in Application_Directory/templates, tries
-- Application_Directory/share/gnoga/templates, if not
-- uses Application_Directory
end Ada_GUI.Gnoga.Server;
|
tests
testElementContentModelManyBranches
"Builds a DTD whose root content model is a 50-way choice
(element1 | element2 | ... | element50), then checks that a document using
any declared element validates, while an undeclared element (element51)
raises XMLValidationException."
| dtd names |
names := (1 to: 50) collect: [:each | 'element', each printString].
dtd := String streamContents: [:stream |
stream
nextPutAll: '<!DOCTYPE root [';
nextPutAll: (self elementDeclarationsNamed: names);
nextPutAll: '<!ELEMENT root ('.
"Emit the choice list, '|'-separated."
names
do: [:each | stream nextPutAll: each]
separatedBy: [stream nextPut: $|].
stream nextPutAll: ')>]>'].
names do: [:each |
self
shouldnt: [SAXHandler parse: dtd, '<root><', each, '/></root>']
raise: XMLValidationException].
self
should: [
SAXHandler parse:
dtd, '<root><element', (names size + 1) printString, '/></root>']
raise: XMLValidationException.
<?php
namespace Anfischer\Cloner;
use Anfischer\Cloner\Stubs\BankAccount;
use Anfischer\Cloner\Stubs\FinancialAdviser;
use Anfischer\Cloner\Stubs\Person;
use Anfischer\Cloner\Stubs\SocialSecurityNumber;
use Anfischer\Cloner\Stubs\VerificationRule;
use Anfischer\Cloner\Stubs\WorkAddress;
class CloneServiceTest extends TestCase
{
    // Integration tests for CloneService. Each test builds a Person model
    // (optionally with a relation graph) through model factories, clones it,
    // and asserts that the clone's attributes and eagerly-loaded relations
    // mirror the original.
    //
    // Fix applied: three `each` closures captured `$clone` via `use` but
    // never read it; the dead captures are removed.

    /** @test */
    public function it_can_clone_a_model_with_no_relations()
    {
        // A freshly made (non-persisted) model should clone 1:1.
        $original = factory(Person::class)->make();
        $clone = (new CloneService())->clone($original);
        $this->assertEquals($original, $clone);
    }

    /**
     * @test
     * NOTE(review): socialSecurityNumber is saved from the Person side, which
     * is the has-one direction; confirm the stub relation matches the
     * "belongs to" wording of this test name.
     */
    public function it_can_clone_a_model_with_a_belongs_to_relation()
    {
        $person = factory(Person::class)->create();
        $person->socialSecurityNumber()->save(factory(SocialSecurityNumber::class)->make());
        $original = Person::with('socialSecurityNumber')->first();
        $clone = (new CloneService())->clone($original);
        // Only the plain person attributes should be carried over.
        $this->assertEquals(
            $original->only([
                'first_name',
                'last_name',
                'email',
                'phone',
                'gender',
            ]),
            $clone->getAttributes()
        );
        $this->assertEquals($original->socialSecurityNumber->only('social_security_number'), $clone->socialSecurityNumber->getAttributes());
    }

    /** @test */
    public function it_can_clone_a_model_with_a_has_many_relation()
    {
        $stub = factory(Person::class)->create();
        factory(BankAccount::class, 10)->make()->each(function ($account) use ($stub) {
            $stub->bankAccounts()->save($account);
        });
        $original = Person::with('bankAccounts')->first();
        $clone = (new CloneService())->clone($original);
        $this->assertEquals(
            $original->only([
                'first_name',
                'last_name',
                'email',
                'phone',
                'gender',
            ]),
            $clone->getAttributes()
        );
        $this->assertCount(10, $clone->bankAccounts);
        // Each cloned account must match the original account at the same key.
        $clone->bankAccounts->each(function ($item, $key) use ($original) {
            $this->assertEquals($original->bankAccounts[$key]->only(['account_number', 'account_name']), $item->getAttributes());
        });
    }

    /** @test */
    public function it_can_clone_a_model_with_a_many_to_many_relation()
    {
        $stub = factory(Person::class)->create();
        // Attach 10 addresses with distinct pivot data so pivot cloning is
        // observable.
        factory(WorkAddress::class, 10)->create()->each(function ($relation, $key) use ($stub) {
            $stub->workAddresses()->attach([$relation->id => ['pivot_data' => 'Test ' . $key]]);
        });
        $original = Person::with(['workAddresses' => function ($relation) {
            $relation->withPivot('pivot_data');
        }])->first();
        $clone = (new CloneService())->clone($original);
        $this->assertEquals(
            $original->only([
                'first_name',
                'last_name',
                'email',
                'phone',
                'gender',
            ]),
            $clone->getAttributes()
        );
        $this->assertCount(10, $clone->workAddresses);
        // Both the related attributes and the pivot columns must be cloned.
        $clone->workAddresses->each(function ($item, $key) use ($original) {
            $this->assertEquals($original->workAddresses[$key]->only(['address', 'postcode']), $item->getAttributes());
            $this->assertEquals($original->workAddresses[$key]->pivot->only('pivot_data', 'work_address_id'), $item->pivot->getAttributes());
        });
    }

    /** @test */
    public function it_can_clone_a_model_with_a_has_one_relation_with_a_has_many_relation()
    {
        // Two-level graph: Person -> SocialSecurityNumber -> VerificationRules.
        $parent = factory(Person::class)->create();
        $parent->socialSecurityNumber()->save(factory(SocialSecurityNumber::class)->make());
        $socialSecurityNumber = SocialSecurityNumber::first();
        factory(VerificationRule::class, 10)->make()->each(function ($relation) use ($socialSecurityNumber) {
            $socialSecurityNumber->verificationRules()->save($relation);
        });
        $original = Person::with('socialSecurityNumber.verificationRules')->first();
        $clone = (new CloneService())->clone($original);
        $this->assertEquals(
            $original->only([
                'first_name',
                'last_name',
                'email',
                'phone',
                'gender',
            ]),
            $clone->getAttributes()
        );
        $this->assertEquals($original->socialSecurityNumber->only('social_security_number'), $clone->socialSecurityNumber->getAttributes());
        $this->assertCount(10, $clone->socialSecurityNumber->verificationRules);
        $clone->socialSecurityNumber->verificationRules->each(function ($item, $key) use ($original) {
            $this->assertEquals($original->socialSecurityNumber->verificationRules[$key]->only('rule'), $item->getAttributes());
        });
    }

    /** @test */
    public function it_can_clone_a_model_with_a_has_many_relations_with_a_belongs_to_many_relation()
    {
        // Two-level graph: Person -> BankAccounts (5) -> FinancialAdvisers (2 each).
        $parent = factory(Person::class)->create();
        factory(BankAccount::class, 5)->make()->each(function ($account) use ($parent) {
            $parent->bankAccounts()->save($account);
        });
        $bankAccounts = BankAccount::all();
        $bankAccounts->each(function ($account) {
            factory(FinancialAdviser::class, 2)->create()->each(function ($relation) use ($account) {
                $account->financialAdvisers()->attach($relation);
            });
        });
        $original = Person::with('bankAccounts.financialAdvisers')->first();
        $clone = (new CloneService())->clone($original);
        $this->assertEquals(
            $original->only([
                'first_name',
                'last_name',
                'email',
                'phone',
                'gender',
            ]),
            $clone->getAttributes()
        );
        $this->assertCount($original->bankAccounts->count(), $clone->bankAccounts);
        $clone->bankAccounts->each(function ($account, $key) use ($original) {
            $this->assertEquals(
                $original->bankAccounts[$key]->only(['account_number', 'account_name']),
                $account->getAttributes()
            );
        });
        $clone->bankAccounts->each(function ($account, $accountKey) use ($original) {
            $account->financialAdvisers->each(function ($adviser, $adviserKey) use ($original, $accountKey) {
                $this->assertEquals(
                    $original->bankAccounts[$accountKey]->financialAdvisers[$adviserKey]->only(['first_name', 'last_name', 'email']),
                    $adviser->getAttributes()
                );
            });
        });
    }
}
|
object Form1: TForm1
Left = 207
Top = 132
Width = 339
Height = 263
Caption = 'Picking demo'
Color = clBtnFace
Font.Charset = DEFAULT_CHARSET
Font.Color = clWindowText
Font.Height = -11
Font.Name = 'MS Sans Serif'
Font.Style = []
OldCreateOrder = False
Position = poScreenCenter
OnClose = FormClose
OnCreate = FormCreate
OnKeyDown = FormKeyDown
OnKeyUp = FormKeyUp
OnMouseDown = FormMouseDown
OnMouseMove = FormMouseMove
OnMouseUp = FormMouseUp
OnMouseWheel = FormMouseWheel
OnPaint = FormPaint
PixelsPerInch = 96
TextHeight = 13
object AppEvents: TApplicationEvents
OnIdle = AppPropertiesIdle
Left = 32
Top = 8
end
end
|
page ,132
TITLE C library emulation, not relying on MS-DOS.
;*****************************************************************************;
; ;
; FILE NAME: annaldiv.asm ;
; ;
; DESCRIPTION: C compiler long math library ;
; ;
; NOTES: ;
; ;
; HISTORY: ;
; 1996/06/26 JFL Created this file. ;
; ;
; (c) Copyright 1996-2017 Hewlett Packard Enterprise Development LP ;
; Licensed under the Apache 2.0 license - www.apache.org/licenses/LICENSE-2.0 ;
;*****************************************************************************;
.model small, C
.code
.386
;-----------------------------------------------------------------------------;
; ;
; Function: _aNNaldiv ;
; ;
; Description: Signed indirect long division ;
; ;
; Parameters: On stack: ;
; WORD Pointer to the 32 bits dividend ;
; DWORD Divisor ;
; ;
; Returns: DX:AX Copy of the result ;
; ;
; Notes: Uses 386 instructions. May not be used on old machines. ;
; ;
; Regs altered: EAX, EDX, CX ;
; ;
; History: ;
; ;
; 1996/06/26 JFL Created this routine. ;
; ;
;-----------------------------------------------------------------------------;
_aNNaldiv proc public
	push	bp
	mov	bp, sp
	push	bx			; Must be preserved
	mov	bx, [bp+4]		; Pointer to the 32-bit dividend and result
	mov	eax, DWORD ptr [bx]
	cdq				; Sign-extend dividend into EDX:EAX.
					; BUG FIX: the original used "xor edx, edx",
					; which zero-extends and is only valid for
					; unsigned DIV; with signed IDIV it produced
					; wrong quotients for negative dividends.
	idiv	dword ptr [bp+6]	; EDX:EAX / divisor -> EAX (quotient)
	mov	DWORD ptr [bx], eax	; Store quotient back through the pointer
	shld	edx, eax, 16		; mov dx:ax, eax (copy of result for caller)
	pop	bx			; Restore the initial value
	pop	bp
	ret	6			; Pop WORD pointer + DWORD divisor
_aNNaldiv endp
END
|
const std = @import("std");
pub fn build(b: *std.build.Builder) void {
    // Build the cartridge as an unversioned shared library for freestanding
    // wasm32. The fixed memory layout (single imported 64 KiB page,
    // global_base 6560, 8 KiB stack) appears to match a WASM-4-style
    // runtime — confirm against the host runtime's expectations.
    const cart = b.addSharedLibrary("cart", "src/main.zig", .unversioned);
    cart.setBuildMode(b.standardReleaseOptions());
    cart.setTarget(.{ .cpu_arch = .wasm32, .os_tag = .freestanding });
    cart.import_memory = true;
    cart.initial_memory = 65536;
    cart.max_memory = 65536;
    cart.global_base = 6560;
    cart.stack_size = 8192;
    // Workaround https://github.com/ziglang/zig/issues/2910, preventing
    // functions from compiler_rt getting incorrectly marked as exported, which
    // prevents them from being removed even if unused.
    cart.export_symbol_names = &[_][]const u8{ "start", "update" };
    cart.install();
}
|
using System;
using System.Collections.Generic;
using System.Text;
namespace CursoCSharp.Fundamentos
{
class VariaveisEConstantes
{
    /// <summary>
    /// Demonstrates variable and constant declarations for every built-in
    /// C# value type, printing an example of each to the console.
    /// Fix: the "Está chovendo" and "Saldo de Gols" labels were missing a
    /// trailing space, so the value was printed glued to the label.
    /// </summary>
    public static void Executar()
    {
        // Circle area from a constant PI and a radius.
        double raio = 4.5;
        const double PI = 3.14;
        double area = PI * raio * raio;
        Console.WriteLine(area);
        Console.WriteLine("Área é " + area);
        // Built-in types, one example each.
        bool estaChovendo = true;
        Console.WriteLine("Está chovendo " + estaChovendo);
        byte idade = 48;
        Console.WriteLine("Idade " + idade);
        sbyte saldoDeGols = sbyte.MinValue;
        Console.WriteLine("Saldo de Gols " + saldoDeGols);
        short salario = short.MaxValue;
        Console.WriteLine("Salário " + salario);
        int menorValorInt = int.MinValue; // Most commonly used integer type!
        Console.WriteLine("Menor valor Int " + menorValorInt);
        uint populacaoBrasileira = 207_600_000;
        Console.WriteLine("População Brasileira: " + populacaoBrasileira);
        long menorValorLong = long.MinValue;
        Console.WriteLine("Menor long " + menorValorLong);
        ulong populacaoMundial = 7_600_000_000;
        Console.WriteLine("População Mundial " + populacaoMundial);
        float precoComputador = 1299.99f;
        Console.WriteLine("Preço Computador " + precoComputador);
        double valorDeMercadoDaApple = 1_000_000_000.00; // Most commonly used floating-point type!
        Console.WriteLine("Valor de Mercado da Apple " + valorDeMercadoDaApple);
        decimal distanciaEntreEstrelas = decimal.MaxValue;
        Console.WriteLine("Distância entre Estrelas " + distanciaEntreEstrelas);
        char letra = 'b';
        Console.WriteLine("Letra " + letra);
        string texto = "Seja bem vindo ao Curso de C#!";
        Console.WriteLine(texto);
    }
}
}
|
<?php
namespace Tingo\ews\API\Message;
/**
 * Class representing the CreateManagedFolder EWS operation message.
 *
 * Intentionally empty: it only gives the request its own concrete type for
 * the SOAP/WSDL mapping; all fields live in CreateManagedFolderRequestType.
 */
class CreateManagedFolder extends CreateManagedFolderRequestType
{
}
|
#pragma once
// Fortnite (1.8) SDK
#ifdef _MSC_VER
#pragma pack(push, 0x8)
#endif
namespace SDK
{
//---------------------------------------------------------------------------
//Classes
//---------------------------------------------------------------------------
// WidgetBlueprintGeneratedClass TutorialRichText.TutorialRichText_C
// 0x0028 (0x0260 - 0x0238)
// Auto-generated SDK mirror of the UMG widget blueprint
// TutorialRichText.TutorialRichText_C (Fortnite 1.8). Member offsets are
// hard-coded to the game's in-memory layout — do not reorder, insert, or
// resize fields.
class UTutorialRichText_C : public UCommonUserWidget
{
public:
// Blueprint VM frame for this widget's event/uber graph.
struct FPointerToUberGraphFrame UberGraphFrame; // 0x0238(0x0008) (Transient, DuplicateTransient)
// The rich-text widget instance this blueprint wraps.
class UFortRichTextBlock* FortRichTextBlock_1; // 0x0240(0x0008) (BlueprintVisible, ExportObject, ZeroConstructor, InstancedReference, IsPlainOldData, RepSkip, RepNotify, Interp, NonTransactional, EditorOnly, NoDestructor, AutoWeak, ContainsInstancedReference, AssetRegistrySearchable, SimpleDisplay, AdvancedDisplay, Protected, BlueprintCallable, BlueprintAuthorityOnly, TextExportTransient, NonPIEDuplicateTransient, ExposeOnSpawn, PersistentInstance, UObjectWrapper, HasGetValueTypeHash, NativeAccessSpecifierPublic, NativeAccessSpecifierProtected, NativeAccessSpecifierPrivate)
// Editable text content exposed by the blueprint.
struct FText Text; // 0x0248(0x0018) (Edit, BlueprintVisible)
// Looks up the UClass by name at runtime; cached after the first call.
static UClass* StaticClass()
{
static auto ptr = UObject::FindClass("WidgetBlueprintGeneratedClass TutorialRichText.TutorialRichText_C");
return ptr;
}
// Blueprint-generated entry points (bodies resolved via ProcessEvent
// elsewhere in the SDK).
void Construct();
void ExecuteUbergraph_TutorialRichText(int EntryPoint);
};
}
#ifdef _MSC_VER
#pragma pack(pop)
#endif
|
-- --------------------------------------------------------- [ Model.idr<Code> ]
-- Module : UML.Code.Model
-- Description : Data types for common code constructs.
-- Copyright : (c) Jan de Muijnck-Hughes
-- License : see LICENSE
-- --------------------------------------------------------------------- [ EOH ]
module UML.Code.Model
%access public
||| A data type is either a simple named type or a complex named type
||| carrying a list of (attribute name, attribute type) pairs.
data DType : Type where
MkSType : (name : String) -> DType
MkCType : (name : String) -> (attrs : List (String, String)) -> DType
-- MkLType : (name : String) -> (itemTy : DType) -> DType
||| Convenience alias for a collection of data types.
DTypes : Type
DTypes = List DType
||| Defines a function in an interface.
data Function : Type where
||| Constructs a new function.
MkPFunc : (name : String)
-> (ps : List (Pair String String))
-> (retTy : String) -> Function
MkFunc : (name : String)
-> (rety : String)
-> Function
-- ---------------------------------------------------------------------- [ Eq ]
-- Structural equality; constructors of different shapes are never equal.
instance Eq DType where
(==) (MkSType x) (MkSType y) = x == y
(==) (MkCType x xs) (MkCType y ys) = x == y && xs == ys
(==) _ _ = False
instance Eq Function where
(==) (MkPFunc x xs xr) (MkPFunc y ys yr) = x == y && xs == ys && xr == yr
(==) (MkFunc x xr) (MkFunc y yr) = x == y && xr == yr
(==) _ _ = False
-- -------------------------------------------------------------------- [ Show ]
-- Debug-style rendering; each value prints on its own line.
instance Show DType where
show (MkSType n) = unwords ["[Data Simple", show n, "]\n"]
show (MkCType n as) = unwords ["[Data Complex", show n, show as, "]\n"]
instance Show Function where
show (MkPFunc n ps rty) = unwords ["[Func", show n, show ps, show rty, " ]\n"]
show (MkFunc n rty) = unwords ["[Func", show n, show rty, "]\n"]
-- --------------------------------------------------------------------- [ EOF ]
|
<%@ page contentType="text/html;charset=UTF-8" %>
<%@ include file="/WEB-INF/views/include/taglib.jsp"%>
<%-- List/search page for performance-grade (绩效等次) records: tab bar,
     name/score search form, paginated result table with per-row edit/delete
     links gated by the exam:examLdScore:edit permission. --%>
<html>
<head>
<title>绩效等次管理</title>
<meta name="decorator" content="default"/>
<script type="text/javascript">
$(document).ready(function() {
});
<%-- Pagination callback invoked by the ${page} component: stores the
     requested page number/size in the hidden fields and re-submits the
     search form. --%>
function page(n,s){
$("#pageNo").val(n);
$("#pageSize").val(s);
$("#searchForm").submit();
return false;
}
</script>
</head>
<body>
<ul class="nav nav-tabs">
<li class="active"><a href="${ctx}/exam/examLdScore/">绩效等次列表</a></li>
<shiro:hasPermission name="exam:examLdScore:edit"><li><a href="${ctx}/exam/examLdScore/form">绩效等次添加</a></li></shiro:hasPermission>
</ul>
<%-- Search form; hidden pageNo/pageSize are maintained by page(n,s) above. --%>
<form:form id="searchForm" modelAttribute="examLdScore" action="${ctx}/exam/examLdScore/" method="post" class="breadcrumb form-search">
<input id="pageNo" name="pageNo" type="hidden" value="${page.pageNo}"/>
<input id="pageSize" name="pageSize" type="hidden" value="${page.pageSize}"/>
<ul class="ul-form">
<li><label>姓名:</label>
<form:input path="name" htmlEscape="false" class="input-medium"/>
</li>
<li><label>最终得分:</label>
<form:input path="sumScore" htmlEscape="false" class="input-medium"/>
</li>
<li class="btns"><input id="btnSubmit" class="btn btn-primary" type="submit" value="查询"/></li>
<li class="clearfix"></li>
</ul>
</form:form>
<sys:message content="${message}"/>
<%-- Result table: row number is computed from the page offset. --%>
<table id="contentTable" class="table table-striped table-bordered table-condensed">
<thead>
<tr>
<th>序号</th>
<th>姓名</th>
<th>最终得分</th>
<shiro:hasPermission name="exam:examLdScore:edit"><th>操作</th></shiro:hasPermission>
</tr>
</thead>
<tbody>
<c:forEach items="${page.list}" var="examLdScore" varStatus="status">
<tr>
<td>
${(page.pageNo-1)*page.pageSize+status.index+1}
</td>
<td>
${examLdScore.name}
</td>
<td>
${examLdScore.sumScore}
</td>
<shiro:hasPermission name="exam:examLdScore:edit"><td>
<a href="${ctx}/exam/examLdScore/form?id=${examLdScore.id}">修改</a>
<a href="${ctx}/exam/examLdScore/delete?id=${examLdScore.id}" onclick="return confirmx('确认要删除该绩效等次吗?', this.href)">删除</a>
</td></shiro:hasPermission>
</tr>
</c:forEach>
</tbody>
</table>
<div class="pagination">${page}</div>
</body>
</html>
<!-- CodeIgniter view fragment: navbar + floating WhatsApp chat + SOP detail
     for the "Permohonan Mutasi GPAI PNS" PTSP service page. -->
<!-- Navbar -->
<div class="container-fluid">
<div class="row bg-white">
<nav class="col navbar navbar-expand-lg navbar-light bg-white shadow px-0 py-0">
<div class="container">
<a href="#" class="navbar-brand">
<img src="<?= base_url('assets/landing/images/logo.png')?>" alt="Logo SIMANIS" class="img-fluid">
</a>
<button class="navbar-toggler navbar-toggler-right" type="button" data-toggle="collapse"
data-target="#navb">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navb">
<ul class="navbar-nav ml-auto mr-3">
<li class="nav-item mx-md-2">
<a href="<?= base_url('beranda')?>" class="nav-link">Beranda</a>
</li>
<li class="nav-item mx-md-2">
<a href="<?= base_url('profil')?>" class="nav-link">Profil</a>
</li>
<li class="nav-item mx-md-2">
<a href="<?= base_url('ptsp')?>" class="nav-link active">Layanan PTSP</a>
</li>
<li class="nav-item mx-md-2">
<a href="<?= base_url('pengaduan')?>" class="nav-link">Pengaduan</a>
</li>
</ul>
<!-- mobile button -->
<form class="form-inline d-sm-block d-md-none" method="post" action="<?= base_url('masuk')?>">
<button class="btn btn-login my-2 my-sm-0 px-3">Masuk | Daftar</button>
</form>
<!-- desktop button -->
<form class="form-inline my-2 my-lg-0 d-none d-md-block" method="post"
action="<?= base_url('masuk')?>">
<button class="btn btn-login btn-navbar-right my-2 my-sm-0 px-3">Masuk | Daftar</button>
</form>
</div>
</div>
</nav>
</div>
</div>
<!-- NOTE(review): this closing div has no matching opening tag within this
     fragment; it likely pairs with a wrapper opened in a shared header
     include — confirm before removing. -->
</div>
<!-- Chat Haji & Umrah -->
<div class="container haji-umrah">
<div class="sticky-container">
<ul class="sticky">
<li>
<img src="<?= base_url('assets/landing/images/wa.png')?>" width="32" height="32">
<p><a href="https://api.whatsapp.com/send?phone=628112650662&text=Info" target="_blank">Chat Haji <br> &
Umrah</a></p>
</li>
</ul>
</div>
</div>
<!-- Main Content -->
<main>
<section class="detail-ptsp-title">
<div class="container">
<div class="row text-center">
<div class="col-md-12">
<h4>Permohonan Mutasi GPAI PNS</h4>
</div>
</div>
</div>
</section>
<!-- Standard Operating Procedure card for this service -->
<section class="detail-ptsp">
<div class="container">
<div class="row py-4">
<div class="col-md-6 text-center">
<img class="img-fluid" src="<?= base_url('assets/landing/images/pelayanan.png')?>" alt="">
</div>
<div class="col-md-6 content-detail text-center">
<div class="row mb-4">
<div class="col-md-12 syarat">
<div class="card shadow">
<div class="card-header text-center mb-0 py-1">
<h4>Standar Operasional Prosedur</h4>
</div>
<div class="card-body">
<ol type="1" class="ml-3 list">
<li> Surat permohonan ditujukan kepada Kepala Kantor Kemenag Kab. Klaten
dilampiri </li>
<li> Surat permohonan penambahan tugas mengajar dari GPAI PNS.</li>
<li> Surat pernyataan tidak berkeberatan dari Kepala Sekolah Satminkal.</li>
<li> Surat pernyataan tidak berkeberatan menerima GPAI PNS dari Kepala Sekolah
Satminkal tambahan mengajar.</li>
<li> Surat persetujuan dari pengawas PAI yang membawahi wilayah kerjanya.</li>
<li> Pemohon mengunggah Surat Permohonan dan Surat Pernyataan yang telah
diisi/dibuat dalam 1 file. <br> (Format: PDF, Ukuran: Max 10 MB)</li>
<li>Pemohon menunggu pemberitahuan dari pihak Kemenag bahwa proses permohonan
telah selesai.</li>
</ol>
</div>
</div>
</div>
</div>
</div>
</div>
</section>
</main>
(SMT::IS-TYPE-HYP-DECL)
(SMT::BOOLEANP-OF-IS-TYPE-HYP-DECL)
(SMT::EXTRACT-IS-DECL
(1071 54 (:REWRITE SUBSETP-CAR-MEMBER))
(700 105 (:REWRITE SUBSETP-CONS-2))
(332 20 (:DEFINITION SYMBOL-LISTP))
(276 276 (:TYPE-PRESCRIPTION SUBSETP-EQUAL))
(201 159 (:REWRITE SUBSETP-WHEN-ATOM-RIGHT))
(199 29 (:REWRITE SYMBOLP-OF-CAR-WHEN-SYMBOL-LISTP))
(159 159 (:REWRITE SUBSETP-TRANS2))
(159 159 (:REWRITE SUBSETP-TRANS))
(117 117 (:REWRITE SUBSETP-WHEN-ATOM-LEFT))
(114 19 (:REWRITE SET::SETS-ARE-TRUE-LISTS-CHEAP))
(102 6 (:DEFINITION MEMBER-EQUAL))
(98 22 (:REWRITE SYMBOL-LISTP-OF-CDR-WHEN-SYMBOL-LISTP))
(77 77 (:REWRITE DEFAULT-CDR))
(74 74 (:REWRITE DEFAULT-CAR))
(66 4 (:REWRITE TRUE-LISTP-OF-CAR-WHEN-TRUE-LIST-LISTP))
(60 60 (:REWRITE SYMBOL-LISTP-WHEN-NOT-CONSP))
(58 2 (:DEFINITION TRUE-LIST-LISTP))
(54 54 (:REWRITE SUBSETP-MEMBER . 4))
(54 54 (:REWRITE SUBSETP-MEMBER . 3))
(54 54 (:REWRITE SUBSETP-MEMBER . 2))
(54 54 (:REWRITE SUBSETP-MEMBER . 1))
(54 54 (:REWRITE INTERSECTP-MEMBER . 3))
(54 54 (:REWRITE INTERSECTP-MEMBER . 2))
(42 42 (:DEFINITION ATOM))
(40 8 (:DEFINITION LEN))
(38 38 (:TYPE-PRESCRIPTION SET::SETP-TYPE))
(38 19 (:REWRITE SET::NONEMPTY-MEANS-SET))
(26 2 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FUNC-ALISTP))
(19 19 (:TYPE-PRESCRIPTION SET::EMPTY-TYPE))
(19 19 (:REWRITE SET::IN-SET))
(18 2 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-TYPES-P))
(18 2 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-INFO-ALIST-P))
(18 2 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-FIELD-ALIST-P))
(16 8 (:REWRITE DEFAULT-+-2))
(14 14 (:LINEAR LOWER-BOUND-OF-LEN-WHEN-SUBLISTP))
(14 14 (:LINEAR LISTPOS-UPPER-BOUND-STRONG-2))
(14 14 (:LINEAR LEN-WHEN-PREFIXP))
(14 2 (:REWRITE SMT::FUNC-ALISTP-OF-CDR-WHEN-FUNC-ALISTP))
(13 13 (:REWRITE SMT::FTY-INFO-ALIST-P-WHEN-NOT-CONSP))
(11 11 (:REWRITE TERMP-IMPLIES-PSEUDO-TERMP))
(10 2 (:REWRITE SMT::FTY-TYPES-P-OF-CDR-WHEN-FTY-TYPES-P))
(10 2 (:REWRITE SMT::FTY-INFO-ALIST-P-OF-CDR-WHEN-FTY-INFO-ALIST-P))
(10 2 (:REWRITE SMT::FTY-FIELD-ALIST-P-OF-CDR-WHEN-FTY-FIELD-ALIST-P))
(8 8 (:TYPE-PRESCRIPTION TRUE-LISTP))
(8 8 (:REWRITE SMT::FUNC-ALISTP-WHEN-SUBSETP-EQUAL))
(8 8 (:REWRITE DEFAULT-+-1))
(7 7 (:LINEAR STR::COUNT-LEADING-CHARSET-LEN))
(4 4 (:REWRITE TRUE-LIST-LISTP-WHEN-NOT-CONSP))
(4 4 (:REWRITE SMT::FUNC-ALISTP-WHEN-NOT-CONSP))
(4 4 (:REWRITE SMT::FTY-TYPES-P-WHEN-NOT-CONSP))
(4 4 (:REWRITE SMT::FTY-FIELD-ALIST-P-WHEN-NOT-CONSP))
(4 2 (:REWRITE TRUE-LIST-LISTP-OF-CDR-WHEN-TRUE-LIST-LISTP))
(2 2 (:REWRITE TERM-LISTP-IMPLIES-PSEUDO-TERM-LISTP))
)
(SMT::BOOLEANP-OF-EXTRACT-IS-DECL)
(SMT::PSEUDO-TERM-LISTP-OF-APPEND-OF-PSEUDO-TERM-LISTP
(327 1 (:DEFINITION PSEUDO-TERMP))
(119 6 (:REWRITE TRUE-LISTP-OF-CAR-WHEN-TRUE-LIST-LISTP))
(112 3 (:DEFINITION TRUE-LIST-LISTP))
(106 4 (:DEFINITION SYMBOL-LISTP))
(72 12 (:REWRITE SET::SETS-ARE-TRUE-LISTS-CHEAP))
(70 48 (:REWRITE DEFAULT-CDR))
(60 6 (:DEFINITION TRUE-LISTP))
(59 57 (:REWRITE DEFAULT-CAR))
(56 7 (:REWRITE SYMBOLP-OF-CAR-WHEN-SYMBOL-LISTP))
(33 33 (:REWRITE TERM-LISTP-IMPLIES-PSEUDO-TERM-LISTP))
(24 24 (:TYPE-PRESCRIPTION SET::SETP-TYPE))
(24 12 (:REWRITE SET::NONEMPTY-MEANS-SET))
(21 3 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FUNC-ALISTP))
(19 19 (:REWRITE TERMP-IMPLIES-PSEUDO-TERMP))
(18 3 (:DEFINITION LENGTH))
(17 5 (:REWRITE SYMBOL-LISTP-OF-CDR-WHEN-SYMBOL-LISTP))
(15 3 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-TYPES-P))
(15 3 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-INFO-ALIST-P))
(15 3 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-FIELD-ALIST-P))
(15 3 (:DEFINITION LEN))
(12 12 (:TYPE-PRESCRIPTION SET::EMPTY-TYPE))
(12 12 (:REWRITE SET::IN-SET))
(12 4 (:REWRITE CAR-OF-APPEND))
(10 10 (:REWRITE SYMBOL-LISTP-WHEN-NOT-CONSP))
(8 8 (:REWRITE CONSP-OF-APPEND))
(8 4 (:REWRITE TRUE-LIST-LISTP-OF-CDR-WHEN-TRUE-LIST-LISTP))
(8 1 (:REWRITE SMT::FUNC-ALISTP-OF-CDR-WHEN-FUNC-ALISTP))
(7 7 (:TYPE-PRESCRIPTION LEN))
(6 6 (:REWRITE TRUE-LIST-LISTP-WHEN-NOT-CONSP))
(6 6 (:REWRITE SMT::FUNC-ALISTP-WHEN-SUBSETP-EQUAL))
(6 3 (:REWRITE DEFAULT-+-2))
(6 1 (:REWRITE SMT::FTY-TYPES-P-OF-CDR-WHEN-FTY-TYPES-P))
(6 1 (:REWRITE SMT::FTY-INFO-ALIST-P-OF-CDR-WHEN-FTY-INFO-ALIST-P))
(6 1 (:REWRITE SMT::FTY-FIELD-ALIST-P-OF-CDR-WHEN-FTY-FIELD-ALIST-P))
(4 4 (:REWRITE CDR-OF-APPEND-WHEN-CONSP))
(3 3 (:REWRITE SMT::FUNC-ALISTP-WHEN-NOT-CONSP))
(3 3 (:REWRITE SMT::FTY-TYPES-P-WHEN-NOT-CONSP))
(3 3 (:REWRITE SMT::FTY-INFO-ALIST-P-WHEN-NOT-CONSP))
(3 3 (:REWRITE SMT::FTY-FIELD-ALIST-P-WHEN-NOT-CONSP))
(3 3 (:REWRITE DEFAULT-+-1))
)
(SMT::EXTRACT-DISJUNCT
(4604 1632 (:REWRITE DEFAULT-+-2))
(3628 298 (:REWRITE RATIONALP-OF-CAR-WHEN-RATIONAL-LISTP))
(3628 298 (:REWRITE INTEGERP-OF-CAR-WHEN-INTEGER-LISTP))
(2991 277 (:DEFINITION RATIONAL-LISTP))
(2991 277 (:DEFINITION INTEGER-LISTP))
(2955 2463 (:REWRITE DEFAULT-CDR))
(2584 212 (:DEFINITION LENGTH))
(2546 1632 (:REWRITE DEFAULT-+-1))
(2293 418 (:REWRITE RATIONAL-LISTP-OF-CDR-WHEN-RATIONAL-LISTP))
(2293 418 (:REWRITE INTEGER-LISTP-OF-CDR-WHEN-INTEGER-LISTP))
(2044 212 (:DEFINITION LEN))
(1640 328 (:REWRITE COMMUTATIVITY-OF-+))
(1518 1518 (:REWRITE DEFAULT-CAR))
(1312 328 (:DEFINITION INTEGER-ABS))
(1295 44 (:DEFINITION SYMBOL-LISTP))
(996 166 (:REWRITE SET::SETS-ARE-TRUE-LISTS-CHEAP))
(735 40 (:REWRITE TRUE-LISTP-OF-CAR-WHEN-TRUE-LIST-LISTP))
(659 23 (:DEFINITION TRUE-LIST-LISTP))
(498 62 (:REWRITE SYMBOLP-OF-CAR-WHEN-SYMBOL-LISTP))
(492 492 (:TYPE-PRESCRIPTION STR::TRUE-LISTP-OF-EXPLODE))
(492 328 (:REWRITE STR::CONSP-OF-EXPLODE))
(484 484 (:REWRITE RATIONAL-LISTP-WHEN-NOT-CONSP))
(484 484 (:REWRITE INTEGER-LISTP-WHEN-NOT-CONSP))
(386 346 (:REWRITE DEFAULT-<-2))
(382 346 (:REWRITE DEFAULT-<-1))
(353 353 (:TYPE-PRESCRIPTION LEN))
(332 332 (:TYPE-PRESCRIPTION SET::SETP-TYPE))
(332 166 (:REWRITE SET::NONEMPTY-MEANS-SET))
(328 328 (:REWRITE DEFAULT-UNARY-MINUS))
(328 164 (:REWRITE STR::COERCE-TO-LIST-REMOVAL))
(273 50 (:REWRITE SYMBOL-LISTP-OF-CDR-WHEN-SYMBOL-LISTP))
(238 20 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FUNC-ALISTP))
(167 7 (:REWRITE ACL2-COUNT-WHEN-MEMBER))
(166 166 (:TYPE-PRESCRIPTION SET::EMPTY-TYPE))
(166 166 (:REWRITE SET::IN-SET))
(166 20 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-TYPES-P))
(166 20 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-INFO-ALIST-P))
(166 20 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-FIELD-ALIST-P))
(164 164 (:REWRITE STR::EXPLODE-WHEN-NOT-STRINGP))
(164 164 (:REWRITE DEFAULT-REALPART))
(164 164 (:REWRITE DEFAULT-NUMERATOR))
(164 164 (:REWRITE DEFAULT-IMAGPART))
(164 164 (:REWRITE DEFAULT-DENOMINATOR))
(147 147 (:TYPE-PRESCRIPTION TRUE-LISTP))
(128 18 (:REWRITE SMT::FUNC-ALISTP-OF-CDR-WHEN-FUNC-ALISTP))
(116 116 (:REWRITE SYMBOL-LISTP-WHEN-NOT-CONSP))
(107 7 (:DEFINITION MEMBER-EQUAL))
(92 92 (:LINEAR ACL2-COUNT-WHEN-MEMBER))
(92 18 (:REWRITE SMT::FTY-TYPES-P-OF-CDR-WHEN-FTY-TYPES-P))
(92 18 (:REWRITE SMT::FTY-INFO-ALIST-P-OF-CDR-WHEN-FTY-INFO-ALIST-P))
(92 18 (:REWRITE SMT::FTY-FIELD-ALIST-P-OF-CDR-WHEN-FTY-FIELD-ALIST-P))
(88 14 (:REWRITE SUBSETP-CAR-MEMBER))
(72 72 (:REWRITE SMT::FUNC-ALISTP-WHEN-SUBSETP-EQUAL))
(70 70 (:LINEAR LOWER-BOUND-OF-LEN-WHEN-SUBLISTP))
(70 70 (:LINEAR LISTPOS-UPPER-BOUND-STRONG-2))
(70 70 (:LINEAR LEN-WHEN-PREFIXP))
(58 26 (:REWRITE TRUE-LIST-LISTP-OF-CDR-WHEN-TRUE-LIST-LISTP))
(56 56 (:REWRITE TERM-LISTP-IMPLIES-PSEUDO-TERM-LISTP))
(51 51 (:REWRITE TERMP-IMPLIES-PSEUDO-TERMP))
(44 44 (:REWRITE TRUE-LIST-LISTP-WHEN-NOT-CONSP))
(42 9 (:REWRITE SUBSETP-IMPLIES-SUBSETP-CDR))
(36 36 (:REWRITE SMT::FUNC-ALISTP-WHEN-NOT-CONSP))
(36 36 (:REWRITE SMT::FTY-TYPES-P-WHEN-NOT-CONSP))
(36 36 (:REWRITE SMT::FTY-INFO-ALIST-P-WHEN-NOT-CONSP))
(36 36 (:REWRITE SMT::FTY-FIELD-ALIST-P-WHEN-NOT-CONSP))
(35 35 (:LINEAR STR::COUNT-LEADING-CHARSET-LEN))
(27 27 (:TYPE-PRESCRIPTION SUBSETP-EQUAL))
(19 19 (:TYPE-PRESCRIPTION MEMBER-EQUAL))
(14 14 (:REWRITE SUBSETP-MEMBER . 2))
(14 14 (:REWRITE SUBSETP-MEMBER . 1))
(10 10 (:REWRITE SUBSETP-OF-CDR))
(8 8 (:REWRITE SUBSETP-TRANS2))
(8 8 (:REWRITE SUBSETP-TRANS))
(6 3 (:TYPE-PRESCRIPTION TRUE-LISTP-APPEND))
(4 4 (:REWRITE SUBSETP-WHEN-ATOM-RIGHT))
(4 4 (:REWRITE SUBSETP-WHEN-ATOM-LEFT))
(4 4 (:REWRITE SUBSETP-REFL))
(4 4 (:REWRITE MEMBER-OF-CAR))
(3 3 (:TYPE-PRESCRIPTION BINARY-APPEND))
)
(SMT::EXTRACT-FLAG
(4973 1764 (:REWRITE DEFAULT-+-2))
(3728 308 (:REWRITE RATIONALP-OF-CAR-WHEN-RATIONAL-LISTP))
(3728 308 (:REWRITE INTEGERP-OF-CAR-WHEN-INTEGER-LISTP))
(3094 2563 (:REWRITE DEFAULT-CDR))
(3056 287 (:DEFINITION RATIONAL-LISTP))
(3056 287 (:DEFINITION INTEGER-LISTP))
(2802 231 (:DEFINITION LENGTH))
(2748 1764 (:REWRITE DEFAULT-+-1))
(2328 428 (:REWRITE RATIONAL-LISTP-OF-CDR-WHEN-RATIONAL-LISTP))
(2328 428 (:REWRITE INTEGER-LISTP-OF-CDR-WHEN-INTEGER-LISTP))
(2217 231 (:DEFINITION LEN))
(1770 354 (:REWRITE COMMUTATIVITY-OF-+))
(1613 1613 (:REWRITE DEFAULT-CAR))
(1434 48 (:DEFINITION SYMBOL-LISTP))
(1416 354 (:DEFINITION INTEGER-ABS))
(1104 184 (:REWRITE SET::SETS-ARE-TRUE-LISTS-CHEAP))
(801 44 (:REWRITE TRUE-LISTP-OF-CAR-WHEN-TRUE-LIST-LISTP))
(717 25 (:DEFINITION TRUE-LIST-LISTP))
(538 68 (:REWRITE SYMBOLP-OF-CAR-WHEN-SYMBOL-LISTP))
(531 531 (:TYPE-PRESCRIPTION STR::TRUE-LISTP-OF-EXPLODE))
(531 354 (:REWRITE STR::CONSP-OF-EXPLODE))
(499 499 (:REWRITE RATIONAL-LISTP-WHEN-NOT-CONSP))
(499 499 (:REWRITE INTEGER-LISTP-WHEN-NOT-CONSP))
(418 374 (:REWRITE DEFAULT-<-2))
(414 374 (:REWRITE DEFAULT-<-1))
(391 391 (:TYPE-PRESCRIPTION LEN))
(368 368 (:TYPE-PRESCRIPTION SET::SETP-TYPE))
(368 184 (:REWRITE SET::NONEMPTY-MEANS-SET))
(354 354 (:REWRITE DEFAULT-UNARY-MINUS))
(354 177 (:REWRITE STR::COERCE-TO-LIST-REMOVAL))
(293 54 (:REWRITE SYMBOL-LISTP-OF-CDR-WHEN-SYMBOL-LISTP))
(264 22 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FUNC-ALISTP))
(192 8 (:REWRITE ACL2-COUNT-WHEN-MEMBER))
(184 184 (:TYPE-PRESCRIPTION SET::EMPTY-TYPE))
(184 184 (:REWRITE SET::IN-SET))
(184 22 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-TYPES-P))
(184 22 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-INFO-ALIST-P))
(184 22 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-FIELD-ALIST-P))
(177 177 (:REWRITE STR::EXPLODE-WHEN-NOT-STRINGP))
(177 177 (:REWRITE DEFAULT-REALPART))
(177 177 (:REWRITE DEFAULT-NUMERATOR))
(177 177 (:REWRITE DEFAULT-IMAGPART))
(177 177 (:REWRITE DEFAULT-DENOMINATOR))
(160 160 (:TYPE-PRESCRIPTION TRUE-LISTP))
(142 20 (:REWRITE SMT::FUNC-ALISTP-OF-CDR-WHEN-FUNC-ALISTP))
(128 128 (:REWRITE SYMBOL-LISTP-WHEN-NOT-CONSP))
(125 8 (:DEFINITION MEMBER-EQUAL))
(103 16 (:REWRITE SUBSETP-CAR-MEMBER))
(102 20 (:REWRITE SMT::FTY-TYPES-P-OF-CDR-WHEN-FTY-TYPES-P))
(102 20 (:REWRITE SMT::FTY-INFO-ALIST-P-OF-CDR-WHEN-FTY-INFO-ALIST-P))
(102 20 (:REWRITE SMT::FTY-FIELD-ALIST-P-OF-CDR-WHEN-FTY-FIELD-ALIST-P))
(100 100 (:LINEAR ACL2-COUNT-WHEN-MEMBER))
(80 80 (:REWRITE SMT::FUNC-ALISTP-WHEN-SUBSETP-EQUAL))
(80 80 (:LINEAR LOWER-BOUND-OF-LEN-WHEN-SUBLISTP))
(80 80 (:LINEAR LISTPOS-UPPER-BOUND-STRONG-2))
(80 80 (:LINEAR LEN-WHEN-PREFIXP))
(62 62 (:REWRITE TERM-LISTP-IMPLIES-PSEUDO-TERM-LISTP))
(62 28 (:REWRITE TRUE-LIST-LISTP-OF-CDR-WHEN-TRUE-LIST-LISTP))
(57 57 (:REWRITE TERMP-IMPLIES-PSEUDO-TERMP))
(49 10 (:REWRITE SUBSETP-IMPLIES-SUBSETP-CDR))
(48 48 (:REWRITE TRUE-LIST-LISTP-WHEN-NOT-CONSP))
(40 40 (:REWRITE SMT::FUNC-ALISTP-WHEN-NOT-CONSP))
(40 40 (:REWRITE SMT::FTY-TYPES-P-WHEN-NOT-CONSP))
(40 40 (:REWRITE SMT::FTY-INFO-ALIST-P-WHEN-NOT-CONSP))
(40 40 (:REWRITE SMT::FTY-FIELD-ALIST-P-WHEN-NOT-CONSP))
(40 40 (:LINEAR STR::COUNT-LEADING-CHARSET-LEN))
(31 31 (:TYPE-PRESCRIPTION SUBSETP-EQUAL))
(20 20 (:TYPE-PRESCRIPTION MEMBER-EQUAL))
(16 16 (:REWRITE SUBSETP-MEMBER . 2))
(16 16 (:REWRITE SUBSETP-MEMBER . 1))
(11 11 (:REWRITE SUBSETP-OF-CDR))
(10 10 (:REWRITE SUBSETP-TRANS2))
(10 10 (:REWRITE SUBSETP-TRANS))
(5 5 (:REWRITE SUBSETP-WHEN-ATOM-RIGHT))
(5 5 (:REWRITE SUBSETP-WHEN-ATOM-LEFT))
(5 5 (:REWRITE SUBSETP-REFL))
(5 5 (:REWRITE MEMBER-OF-CAR))
)
(FLAG::FLAG-EQUIV-LEMMA)
(SMT::EXTRACT-FLAG-EQUIVALENCES)
(SMT::FLAG-LEMMA-FOR-RETURN-TYPE-OF-EXTRACT-DISJUNCT.DECL-LIST
(75999 2826 (:DEFINITION SYMBOL-LISTP))
(52209 2526 (:REWRITE TRUE-LISTP-OF-CAR-WHEN-TRUE-LIST-LISTP))
(47757 1613 (:DEFINITION TRUE-LIST-LISTP))
(40572 6762 (:REWRITE SET::SETS-ARE-TRUE-LISTS-CHEAP))
(35580 3887 (:REWRITE SYMBOLP-OF-CAR-WHEN-SYMBOL-LISTP))
(30796 30790 (:REWRITE DEFAULT-CDR))
(24753 24753 (:REWRITE DEFAULT-CAR))
(22800 3776 (:REWRITE SYMBOL-LISTP-OF-CDR-WHEN-SYMBOL-LISTP))
(13893 1163 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FUNC-ALISTP))
(13524 13524 (:TYPE-PRESCRIPTION SET::SETP-TYPE))
(13524 6762 (:REWRITE SET::NONEMPTY-MEANS-SET))
(11467 2291 (:DEFINITION LEN))
(10826 2163 (:REWRITE TRUE-LIST-LISTP-OF-CDR-WHEN-TRUE-LIST-LISTP))
(9935 1161 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-TYPES-P))
(9935 1161 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-INFO-ALIST-P))
(9935 1161 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-FIELD-ALIST-P))
(7923 1013 (:REWRITE SMT::FUNC-ALISTP-OF-CDR-WHEN-FUNC-ALISTP))
(7024 7024 (:REWRITE SYMBOL-LISTP-WHEN-NOT-CONSP))
(6762 6762 (:TYPE-PRESCRIPTION SET::EMPTY-TYPE))
(6762 6762 (:REWRITE SET::IN-SET))
(5891 1011 (:REWRITE SMT::FTY-TYPES-P-OF-CDR-WHEN-FTY-TYPES-P))
(5891 1011 (:REWRITE SMT::FTY-INFO-ALIST-P-OF-CDR-WHEN-FTY-INFO-ALIST-P))
(5891 1011 (:REWRITE SMT::FTY-FIELD-ALIST-P-OF-CDR-WHEN-FTY-FIELD-ALIST-P))
(4582 2291 (:REWRITE DEFAULT-+-2))
(3944 3944 (:REWRITE SMT::FUNC-ALISTP-WHEN-SUBSETP-EQUAL))
(3126 3126 (:REWRITE TRUE-LIST-LISTP-WHEN-NOT-CONSP))
(3050 3050 (:REWRITE TERM-LISTP-IMPLIES-PSEUDO-TERM-LISTP))
(2674 2674 (:REWRITE TERMP-IMPLIES-PSEUDO-TERMP))
(2291 2291 (:REWRITE DEFAULT-+-1))
(1972 1972 (:REWRITE SMT::FUNC-ALISTP-WHEN-NOT-CONSP))
(1972 1972 (:REWRITE SMT::FTY-TYPES-P-WHEN-NOT-CONSP))
(1972 1972 (:REWRITE SMT::FTY-INFO-ALIST-P-WHEN-NOT-CONSP))
(1972 1972 (:REWRITE SMT::FTY-FIELD-ALIST-P-WHEN-NOT-CONSP))
(290 290 (:LINEAR LOWER-BOUND-OF-LEN-WHEN-SUBLISTP))
(290 290 (:LINEAR LISTPOS-UPPER-BOUND-STRONG-2))
(290 290 (:LINEAR LEN-WHEN-PREFIXP))
(160 40 (:DEFINITION BINARY-APPEND))
(145 145 (:LINEAR STR::COUNT-LEADING-CHARSET-LEN))
(80 80 (:REWRITE APPEND-WHEN-NOT-CONSP))
(6 6 (:TYPE-PRESCRIPTION STR::TRUE-LISTP-OF-EXPLODE))
(6 4 (:REWRITE STR::CONSP-OF-EXPLODE))
(4 2 (:REWRITE STR::COERCE-TO-LIST-REMOVAL))
(2 2 (:REWRITE STR::EXPLODE-WHEN-NOT-STRINGP))
)
(SMT::RETURN-TYPE-OF-EXTRACT-DISJUNCT.DECL-LIST)
(SMT::RETURN-TYPE-OF-EXTRACT-DISJUNCT.THEOREM)
(SMT::RETURN-TYPE-OF-EXTRACT-CONJUNCT.DECL-LIST)
(SMT::RETURN-TYPE-OF-EXTRACT-CONJUNCT.THEOREM)
(SMT::EXTRACT-CONJUNCT
(12288 12288 (:TYPE-PRESCRIPTION SMT::PSEUDO-TERM-FIX))
(3591 151 (:REWRITE TRUE-LISTP-OF-CAR-WHEN-TRUE-LIST-LISTP))
(3246 118 (:DEFINITION TRUE-LIST-LISTP))
(2496 416 (:REWRITE SET::SETS-ARE-TRUE-LISTS-CHEAP))
(1909 193 (:REWRITE TRUE-LIST-LISTP-OF-CDR-WHEN-TRUE-LIST-LISTP))
(1835 1835 (:REWRITE DEFAULT-CDR))
(1198 1198 (:REWRITE DEFAULT-CAR))
(855 70 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FUNC-ALISTP))
(832 832 (:TYPE-PRESCRIPTION SET::SETP-TYPE))
(832 416 (:REWRITE SET::NONEMPTY-MEANS-SET))
(615 70 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-TYPES-P))
(615 70 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-INFO-ALIST-P))
(615 70 (:REWRITE SMT::SYMBOLP-OF-CAAR-WHEN-FTY-FIELD-ALIST-P))
(588 588 (:REWRITE SYMBOL-LISTP-WHEN-NOT-CONSP))
(535 70 (:REWRITE SMT::FUNC-ALISTP-OF-CDR-WHEN-FUNC-ALISTP))
(450 90 (:DEFINITION LEN))
(416 416 (:TYPE-PRESCRIPTION SET::EMPTY-TYPE))
(416 416 (:REWRITE SET::IN-SET))
(395 70 (:REWRITE SMT::FTY-TYPES-P-OF-CDR-WHEN-FTY-TYPES-P))
(395 70 (:REWRITE SMT::FTY-INFO-ALIST-P-OF-CDR-WHEN-FTY-INFO-ALIST-P))
(395 70 (:REWRITE SMT::FTY-FIELD-ALIST-P-OF-CDR-WHEN-FTY-FIELD-ALIST-P))
(243 243 (:REWRITE TRUE-LIST-LISTP-WHEN-NOT-CONSP))
(240 240 (:REWRITE SMT::FUNC-ALISTP-WHEN-SUBSETP-EQUAL))
(180 90 (:REWRITE DEFAULT-+-2))
(163 163 (:REWRITE SMT::FTY-INFO-ALIST-P-WHEN-NOT-CONSP))
(120 120 (:REWRITE SMT::FUNC-ALISTP-WHEN-NOT-CONSP))
(120 120 (:REWRITE SMT::FTY-TYPES-P-WHEN-NOT-CONSP))
(120 120 (:REWRITE SMT::FTY-FIELD-ALIST-P-WHEN-NOT-CONSP))
(103 103 (:REWRITE TERMP-IMPLIES-PSEUDO-TERMP))
(94 94 (:REWRITE TERM-LISTP-IMPLIES-PSEUDO-TERM-LISTP))
(90 90 (:REWRITE DEFAULT-+-1))
)
(SMT::SMT-EXTRACT)
(SMT::PSEUDO-TERM-LISTP-OF-SMT-EXTRACT.DECL-LIST)
(SMT::PSEUDO-TERMP-OF-SMT-EXTRACT.THEOREM)
|
@article{drichoutis2017much,
author = {Drichoutis, Andreas C and others},
journal = {Agricultural Economics Review},
number = {1},
pages = {48--61},
publisher = {Greek Association of Agricultural Economists},
title = {How much do consumers care about farm labour exploitation?},
volume = {18},
year = {2017}
}
|
0 6 0.00146341463415
10 3 0.00219512195122
25 1 0.00243902439024
40 2 0.00292682926829
67 1 0.00317073170732
100 2 0.00365853658537
105 1 0.00390243902439
106 3 0.00463414634146
108 1 0.00487804878049
110 1 0.00512195121951
111 1 0.00536585365854
114 1 0.00560975609756
115 4 0.00658536585366
116 8 0.00853658536585
117 5 0.00975609756098
118 13 0.0129268292683
119 8 0.0148780487805
120 21 0.02
121 10 0.0224390243902
122 1 0.0226829268293
123 3 0.0234146341463
124 4 0.0243902439024
125 2 0.0248780487805
126 10 0.0273170731707
127 9 0.029512195122
128 15 0.0331707317073
129 8 0.0351219512195
130 17 0.0392682926829
131 9 0.0414634146341
132 16 0.0453658536585
133 3 0.0460975609756
134 6 0.0475609756098
135 30 0.0548780487805
136 9 0.0570731707317
137 88 0.0785365853659
138 7 0.080243902439
139 57 0.0941463414634
140 6 0.0956097560976
141 1 0.0958536585366
142 4 0.0968292682927
143 1 0.0970731707317
144 1 0.0973170731707
145 3 0.0980487804878
146 3 0.0987804878049
147 2 0.0992682926829
148 7 0.100975609756
149 2 0.101463414634
150 5 0.102682926829
153 1 0.102926829268
155 15 0.106585365854
157 3 0.107317073171
158 2 0.107804878049
159 2 0.108292682927
160 1 0.108536585366
161 3 0.109268292683
162 1 0.109512195122
163 3 0.110243902439
164 4 0.111219512195
165 3 0.111951219512
166 8 0.113902439024
167 5 0.11512195122
168 2 0.115609756098
169 1 0.115853658537
170 2 0.116341463415
171 1 0.116585365854
172 2 0.117073170732
173 1 0.117317073171
174 2 0.117804878049
175 7 0.119512195122
176 2 0.12
177 8 0.121951219512
178 9 0.124146341463
179 5 0.125365853659
180 4 0.126341463415
181 9 0.128536585366
182 6 0.13
183 8 0.131951219512
184 4 0.132926829268
185 14 0.136341463415
186 8 0.138292682927
187 12 0.141219512195
188 16 0.14512195122
189 9 0.147317073171
190 14 0.150731707317
191 17 0.15487804878
192 28 0.161707317073
193 23 0.167317073171
194 22 0.172682926829
195 31 0.180243902439
196 36 0.189024390244
197 28 0.195853658537
198 49 0.207804878049
199 42 0.218048780488
200 39 0.22756097561
201 26 0.233902439024
202 19 0.238536585366
203 36 0.247317073171
204 30 0.254634146341
205 42 0.26487804878
206 47 0.276341463415
207 34 0.284634146341
208 33 0.292682926829
209 40 0.30243902439
210 44 0.313170731707
211 41 0.323170731707
212 43 0.333658536585
213 49 0.345609756098
214 28 0.35243902439
215 35 0.360975609756
216 43 0.371463414634
217 27 0.378048780488
218 41 0.388048780488
219 42 0.398292682927
220 35 0.406829268293
221 39 0.416341463415
222 31 0.423902439024
223 34 0.432195121951
224 38 0.441463414634
225 33 0.449512195122
226 49 0.461463414634
227 45 0.47243902439
228 24 0.478292682927
229 43 0.488780487805
230 25 0.49487804878
231 25 0.500975609756
232 23 0.506585365854
233 20 0.511463414634
234 29 0.518536585366
235 25 0.524634146341
236 45 0.535609756098
237 30 0.542926829268
238 21 0.548048780488
239 34 0.556341463415
240 38 0.565609756098
241 28 0.57243902439
242 22 0.577804878049
243 11 0.580487804878
244 27 0.587073170732
245 16 0.590975609756
246 40 0.600731707317
247 32 0.608536585366
248 18 0.612926829268
249 20 0.617804878049
250 28 0.624634146341
251 19 0.629268292683
252 18 0.633658536585
253 10 0.636097560976
254 21 0.641219512195
255 15 0.64487804878
256 16 0.648780487805
257 32 0.656585365854
258 18 0.660975609756
259 35 0.669512195122
260 11 0.672195121951
261 15 0.675853658537
262 10 0.678292682927
263 25 0.684390243902
264 19 0.689024390244
265 18 0.693414634146
266 11 0.696097560976
267 9 0.698292682927
268 5 0.699512195122
269 9 0.701707317073
270 8 0.703658536585
271 15 0.707317073171
272 14 0.710731707317
273 10 0.713170731707
274 12 0.716097560976
275 16 0.72
276 17 0.724146341463
277 29 0.731219512195
278 20 0.736097560976
279 21 0.741219512195
280 13 0.744390243902
281 17 0.748536585366
282 12 0.751463414634
283 16 0.755365853659
284 14 0.758780487805
285 24 0.764634146341
286 2 0.76512195122
287 36 0.773902439024
288 37 0.782926829268
289 39 0.79243902439
290 13 0.795609756098
291 21 0.800731707317
292 30 0.808048780488
293 36 0.816829268293
294 13 0.82
295 18 0.824390243902
296 2 0.82487804878
297 5 0.826097560976
298 5 0.827317073171
299 6 0.828780487805
300 8 0.830731707317
301 5 0.831951219512
302 5 0.833170731707
303 4 0.834146341463
304 4 0.83512195122
305 9 0.837317073171
306 4 0.838292682927
307 17 0.84243902439
308 20 0.847317073171
309 3 0.848048780488
310 2 0.848536585366
311 1 0.848780487805
312 1 0.849024390244
313 2 0.849512195122
314 1 0.849756097561
315 1 0.85
316 5 0.851219512195
317 2 0.851707317073
318 3 0.85243902439
319 3 0.853170731707
321 4 0.854146341463
323 1 0.854390243902
324 1 0.854634146341
329 4 0.855609756098
330 1 0.855853658537
331 1 0.856097560976
332 1 0.856341463415
338 3 0.857073170732
339 1 0.857317073171
344 1 0.85756097561
346 1 0.857804878049
347 2 0.858292682927
348 2 0.858780487805
352 3 0.859512195122
354 1 0.859756097561
355 1 0.86
359 2 0.860487804878
364 1 0.860731707317
371 2 0.861219512195
373 1 0.861463414634
374 1 0.861707317073
375 1 0.861951219512
386 1 0.862195121951
387 1 0.86243902439
389 2 0.862926829268
400 1 0.863170731707
407 1 0.863414634146
416 1 0.863658536585
451 1 0.863902439024
479 1 0.864146341463
496 1 0.864390243902
497 1 0.864634146341
505 1 0.86487804878
514 4 0.865853658537
516 1 0.866097560976
525 1 0.866341463415
530 1 0.866585365854
533 1 0.866829268293
562 1 0.867073170732
567 1 0.867317073171
586 1 0.86756097561
592 1 0.867804878049
609 1 0.868048780488
611 1 0.868292682927
612 1 0.868536585366
630 1 0.868780487805
697 1 0.869024390244
756 1 0.869268292683
760 1 0.869512195122
962 1 0.869756097561
1028 1 0.87
1029 1 0.870243902439
1035 10 0.872682926829
1036 11 0.875365853659
1041 8 0.877317073171
1042 1 0.87756097561
1044 1 0.877804878049
1045 1 0.878048780488
1046 2 0.878536585366
1047 1 0.878780487805
1048 3 0.879512195122
1049 5 0.880731707317
1050 4 0.881707317073
1051 6 0.883170731707
1052 2 0.883658536585
1053 6 0.88512195122
1054 9 0.887317073171
1055 16 0.891219512195
1056 23 0.896829268293
1057 18 0.901219512195
1058 47 0.912682926829
1059 12 0.915609756098
1060 10 0.918048780488
1061 11 0.920731707317
1062 14 0.924146341463
1063 8 0.926097560976
1064 10 0.928536585366
1065 11 0.931219512195
1066 8 0.933170731707
1067 5 0.934390243902
1068 7 0.936097560976
1069 5 0.937317073171
1070 8 0.939268292683
1071 2 0.939756097561
1072 6 0.941219512195
1073 8 0.943170731707
1074 12 0.946097560976
1075 2 0.946585365854
1077 2 0.947073170732
1078 5 0.948292682927
1079 10 0.950731707317
1080 2 0.951219512195
1081 3 0.951951219512
1082 3 0.952682926829
1083 2 0.953170731707
1084 2 0.953658536585
1086 5 0.95487804878
1087 2 0.955365853659
1088 5 0.956585365854
1089 4 0.95756097561
1090 1 0.957804878049
1091 3 0.958536585366
1093 2 0.959024390244
1094 4 0.96
1095 1 0.960243902439
1096 2 0.960731707317
1097 3 0.961463414634
1098 1 0.961707317073
1099 7 0.963414634146
1100 8 0.965365853659
1101 8 0.967317073171
1102 2 0.967804878049
1103 8 0.969756097561
1104 7 0.971463414634
1105 3 0.972195121951
1107 1 0.97243902439
1108 4 0.973414634146
1109 2 0.973902439024
1110 2 0.974390243902
1111 3 0.97512195122
1112 5 0.976341463415
1113 3 0.977073170732
1114 4 0.978048780488
1115 2 0.978536585366
1118 2 0.979024390244
1120 1 0.979268292683
1121 2 0.979756097561
1122 3 0.980487804878
1123 2 0.980975609756
1124 3 0.981707317073
1125 3 0.98243902439
1126 3 0.983170731707
1127 3 0.983902439024
1128 4 0.98487804878
1129 2 0.985365853659
1130 1 0.985609756098
1131 4 0.986585365854
1132 4 0.98756097561
1133 3 0.988292682927
1134 9 0.990487804878
1135 2 0.990975609756
1136 2 0.991463414634
1137 1 0.991707317073
1138 1 0.991951219512
1139 1 0.992195121951
1140 1 0.99243902439
1141 1 0.992682926829
1142 2 0.993170731707
1143 2 0.993658536585
1144 1 0.993902439024
1145 3 0.994634146341
1146 1 0.99487804878
1147 2 0.995365853659
1148 2 0.995853658537
1150 1 0.996097560976
1152 1 0.996341463415
1153 1 0.996585365854
1154 1 0.996829268293
1160 1 0.997073170732
1168 1 0.997317073171
1169 1 0.99756097561
1171 1 0.997804878049
1173 1 0.998048780488
1180 1 0.998292682927
1182 1 0.998536585366
1189 1 0.998780487805
1190 1 0.999024390244
1204 2 0.999512195122
1364 1 0.999756097561
1373 1 1
|