repo_name: string (6-101 chars)
path: string (4-300 chars)
text: string (7-1.31M chars)
fletcher91/open-raadsinformatie
ocd_backend/transformers/ggm.py
from lxml import etree

from ocd_backend.log import get_source_logger
from ocd_backend.transformers import BaseTransformer
from ocd_backend.utils.misc import load_object, strip_namespaces

log = get_source_logger('transformer')


class GegevensmagazijnTransformer(BaseTransformer):
    def run(self, *args, **kwargs):
        args = args[0]
        self.source_definition = kwargs['source_definition']
        item = self.deserialize_item(*args)
        return self.transform_item(*args, item=strip_namespaces(item))

    def transform_item(self, raw_item_content_type, raw_item, item,
                       class_name=False):
        if not class_name:
            class_name = item.xpath("local-name()")

        if class_name in self.source_definition['mapping']:
            item_source = self.source_definition['mapping'][class_name]
            item_class = item_source['item']
        else:
            log.info('Skipping %s, does not exist in mapping' % class_name)
            return []

        items = list()
        if 'sub_items' in item_source:
            for key, path in item_source['sub_items'].items():
                for sub_item in item.xpath(path):
                    items += self.transform_item(
                        raw_item_content_type, etree.tostring(sub_item),
                        sub_item, class_name=key)

        item_class = load_object(item_class)
        item = item_class(self.source_definition, raw_item_content_type,
                          raw_item, item, unicode(item_source['doc_type']))

        self.add_resolveable_media_urls(item)

        return [(
            item.get_combined_object_id(),
            item.get_object_id(),
            item.get_combined_index_doc(),
            item.get_index_doc(),
            item.doc_type
        )] + items
wolforest/wolf
lib/dts/src/main/java/study/daydayup/wolf/dts/transformation/DbTransformation.java
package study.daydayup.wolf.dts.transformation;

import lombok.Getter;
import lombok.NonNull;
import study.daydayup.wolf.common.io.db.Row;
import study.daydayup.wolf.common.io.db.Table;
import study.daydayup.wolf.common.util.collection.CollectionUtil;
import study.daydayup.wolf.dts.sink.Sink;

import java.util.ArrayList;
import java.util.List;

/**
 * study.daydayup.wolf.framework.dts.transeformer
 *
 * @author Wingle
 * @since 2020/2/16 6:54 PM
 **/
public class DbTransformation implements Transformation {
    private Sink sink;
    private Statistics statistics;
    @Getter
    private Operator currentOperator;
    private List<Operator> operatorList;

    public static DbTransformation newTask(@NonNull Sink sink) {
        return new DbTransformation(sink);
    }

    private DbTransformation(Sink sink) {
        this.sink = sink;

        statistics = new Statistics();
        statistics.setKeyColumns(this.sink.getKeyColumns());

        operatorList = new ArrayList<>(5);
    }

    public Operator addJob() {
        Operator operator = new Operator(statistics);
        operatorList.add(operator);

        currentOperator = operator;
        return currentOperator;
    }

    public Statistics transform(Table table) {
        return transform(table, false);
    }

    public Statistics transform(Table table, boolean sqlFormat) {
        if (!CollectionUtil.notEmpty(table)) {
            return statistics;
        }

        for (Row row : table) {
            transform(row);
        }

        if (sqlFormat) {
            //return format();
        }

        return statistics;
    }

    public Statistics merge(@NonNull Row row) {
        if (null == row || operatorList.isEmpty()) {
            return statistics;
        }

        for (Operator operator : operatorList) {
            operator.operate(row);
        }

        return statistics;
    }

    public Statistics format() {
        if (operatorList.isEmpty()) {
            return statistics;
        }

        for (Operator operator : operatorList) {
            operator.format();
        }

        return statistics;
    }

    private void transform(Row row) {
        if (null == row || operatorList.isEmpty()) {
            return;
        }

        if (isTransformed(row)) {
            return;
        }

        for (Operator operator : operatorList) {
            operator.operate(row);
        }
    }

    private boolean isTransformed(@NonNull Row row) {
        Long id = (Long) row.get(Table.DEFAULT_ID_COLUMN);
        return sink.isDuplicated(id);
    }
}
depthmind/compiled-spring
depthmind-core/src/main/java/com/depthmind/concurrent/demo14/Container2.java
package com.depthmind.concurrent.demo14;

import java.util.LinkedList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

/**
 *
 * Implemented with Lock and Condition.
 * A Condition describes what to do when a given condition holds.
 * Compared with the previous example, the Condition approach can specify more
 * precisely which threads get woken up.
 *
 */
public class Container2<T> {
    private final LinkedList<T> lists = new LinkedList<>();
    private final int MAX = 10;
    private int count = 0;

    private Lock lock = new ReentrantLock();
    private Condition producer = lock.newCondition();
    private Condition consumer = lock.newCondition();

    public void put(T t) {
        try {
            lock.lock();
            while (lists.size() == MAX) {
                producer.await();
            }
            lists.add(t);
            ++count;
            consumer.signalAll();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            lock.unlock();
        }
    }

    public T get() {
        T t = null;
        try {
            lock.lock();
            while (lists.size() == 0) {
                consumer.await();
            }
            t = lists.removeFirst();
            count--;
            producer.signalAll();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            lock.unlock();
        }
        return t;
    }

    public static void main(String[] args) {
        Container2<String> c = new Container2<>();

        for (int i = 0; i < 100; i++) {
            new Thread(() -> {
                for (int j = 0; j < 5; j++) {
                    System.out.println(c.get());
                }
            }, "c" + i).start();
        }

        try {
            TimeUnit.SECONDS.sleep(1);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }

        for (int i = 0; i < 2; i++) {
            new Thread(() -> {
                for (int j = 0; j < 25; j++) {
                    c.put(Thread.currentThread().getName() + " " + j);
                }
            }, "p" + i).start();
        }
    }
}
tonioshikanlu/tubman-hack
sources/b/l/f/c0/b.java
<filename>sources/b/l/f/c0/b.java package b.l.f.c0; public final class b extends s { /* renamed from: b reason: collision with root package name */ public static final char[] f5591b; public static final char[] c = {'T', 'N', '*', 'E'}; public static final char[] d = {'/', ':', '+', '.'}; /* renamed from: e reason: collision with root package name */ public static final char f5592e; static { char[] cArr = {'A', 'B', 'C', 'D'}; f5591b = cArr; f5592e = cArr[0]; } /* JADX WARNING: Removed duplicated region for block: B:19:0x007a */ /* JADX WARNING: Removed duplicated region for block: B:35:0x00d6 */ /* Code decompiled incorrectly, please refer to instructions dump. */ public boolean[] d(java.lang.String r11) { /* r10 = this; char r0 = f5592e int r1 = r11.length() r2 = 1 r3 = 0 r4 = 2 if (r1 >= r4) goto L_0x0011 java.lang.StringBuilder r1 = new java.lang.StringBuilder r1.<init>() goto L_0x0063 L_0x0011: char r1 = r11.charAt(r3) char r1 = java.lang.Character.toUpperCase(r1) int r4 = r11.length() int r4 = r4 - r2 char r4 = r11.charAt(r4) char r4 = java.lang.Character.toUpperCase(r4) char[] r5 = f5591b boolean r6 = b.l.f.c0.a.i(r5, r1) boolean r5 = b.l.f.c0.a.i(r5, r4) char[] r7 = c boolean r1 = b.l.f.c0.a.i(r7, r1) boolean r4 = b.l.f.c0.a.i(r7, r4) java.lang.String r7 = "Invalid start/end guards: " if (r6 == 0) goto L_0x004b if (r5 == 0) goto L_0x0041 goto L_0x0070 L_0x0041: java.lang.IllegalArgumentException r0 = new java.lang.IllegalArgumentException java.lang.String r11 = r7.concat(r11) r0.<init>(r11) throw r0 L_0x004b: if (r1 == 0) goto L_0x005a if (r4 == 0) goto L_0x0050 goto L_0x0070 L_0x0050: java.lang.IllegalArgumentException r0 = new java.lang.IllegalArgumentException java.lang.String r11 = r7.concat(r11) r0.<init>(r11) throw r0 L_0x005a: if (r5 != 0) goto L_0x0142 if (r4 != 0) goto L_0x0142 java.lang.StringBuilder r1 = new java.lang.StringBuilder r1.<init>() L_0x0063: r1.append(r0) r1.append(r11) r1.append(r0) java.lang.String r11 = r1.toString() L_0x0070: r0 = 20 r1 = r2 L_0x0073: int r4 = r11.length() int r4 = r4 - r2 if (r1 >= r4) goto L_0x00c6 char r4 = r11.charAt(r1) boolean r4 = java.lang.Character.isDigit(r4) if (r4 != 0) goto L_0x00c1 char r4 = r11.charAt(r1) r5 = 45 if (r4 == r5) goto L_0x00c1 char r4 = r11.charAt(r1) r5 = 36 if (r4 != r5) goto L_0x0095 goto L_0x00c1 L_0x0095: char[] r4 = d char r5 = r11.charAt(r1) boolean r4 = b.l.f.c0.a.i(r4, r5) if (r4 == 0) goto L_0x00a4 int r0 = r0 + 10 goto L_0x00c3 L_0x00a4: java.lang.IllegalArgumentException r0 = new java.lang.IllegalArgumentException java.lang.StringBuilder r2 = new java.lang.StringBuilder java.lang.String r3 = "Cannot encode : '" r2.<init>(r3) char r11 = r11.charAt(r1) r2.append(r11) r11 = 39 r2.append(r11) java.lang.String r11 = r2.toString() r0.<init>(r11) throw r0 L_0x00c1: int r0 = r0 + 9 L_0x00c3: int r1 = r1 + 1 goto L_0x0073 L_0x00c6: int r1 = r11.length() int r1 = r1 - r2 int r1 = r1 + r0 boolean[] r0 = new boolean[r1] r1 = r3 r4 = r1 L_0x00d0: int r5 = r11.length() if (r1 >= r5) goto L_0x0141 char r5 = r11.charAt(r1) char r5 = java.lang.Character.toUpperCase(r5) if (r1 == 0) goto L_0x00e7 int r6 = r11.length() int r6 = r6 - r2 if (r1 != r6) goto L_0x0103 L_0x00e7: r6 = 42 if (r5 == r6) goto L_0x0101 r6 = 69 if (r5 == r6) goto L_0x00fe r6 = 78 if (r5 == r6) goto L_0x00fb r6 = 84 if (r5 == r6) goto L_0x00f8 goto L_0x0103 L_0x00f8: r5 = 65 goto L_0x0103 L_0x00fb: r5 = 66 goto L_0x0103 L_0x00fe: r5 = 68 goto L_0x0103 L_0x0101: r5 = 67 L_0x0103: r6 = r3 L_0x0104: char[] r7 = b.l.f.c0.a.d int r8 = r7.length if (r6 
>= r8) goto L_0x0115 char r7 = r7[r6] if (r5 != r7) goto L_0x0112 int[] r5 = b.l.f.c0.a.f5586e r5 = r5[r6] goto L_0x0116 L_0x0112: int r6 = r6 + 1 goto L_0x0104 L_0x0115: r5 = r3 L_0x0116: r7 = r2 r6 = r3 r8 = r6 L_0x0119: r9 = 7 if (r6 >= r9) goto L_0x0133 r0[r4] = r7 int r4 = r4 + 1 int r9 = 6 - r6 int r9 = r5 >> r9 r9 = r9 & r2 if (r9 == 0) goto L_0x012d if (r8 != r2) goto L_0x012a goto L_0x012d L_0x012a: int r8 = r8 + 1 goto L_0x0119 L_0x012d: r7 = r7 ^ 1 int r6 = r6 + 1 r8 = r3 goto L_0x0119 L_0x0133: int r5 = r11.length() int r5 = r5 - r2 if (r1 >= r5) goto L_0x013e r0[r4] = r3 int r4 = r4 + 1 L_0x013e: int r1 = r1 + 1 goto L_0x00d0 L_0x0141: return r0 L_0x0142: java.lang.IllegalArgumentException r0 = new java.lang.IllegalArgumentException java.lang.String r11 = r7.concat(r11) r0.<init>(r11) throw r0 */ throw new UnsupportedOperationException("Method not decompiled: b.l.f.c0.b.d(java.lang.String):boolean[]"); } }
gilramir/instmake
instmakeplugins/report_shell.py
# Copyright (c) 2010 by Cisco Systems, Inc.
"""
Show records that GNU make would have used the shell to run
instead of using fork/exec.
"""

from instmakelib import instmake_log as LOG
import getopt
import sys
import os

description = "Show records that GNU make would have used system() to build"

def usage():
    print description
    print "usage:"
    print "\tshell [OPTIONS]"
    print "\t\t-v Reverse -- show non-system() jobs."
    print "\t\t-r Show rules instead of processes."

# From GNU make 3.79.1, job.c
sh_chars = "#;\"*?[]&|<>(){}$`^"
sh_cmds = [ "cd", "eval", "exec", "exit", "login", "logout", "set",
    "umask", "wait", "while", "for", "case", "if", ":", ".", "break",
    "continue", "export", "read", "readonly", "shift", "times", "trap",
    "switch" ]

def uses_system(rec):
    """Would the command in the record be invoked via system()
    by GNU make? Returns 1 if Yes, 0 if No."""

    # trivial algorithm
    cmd = rec.cmdline
    first_arg = rec.cmdline_args[0]

    # jmake records some actions and labels them as make-internal-function
    # Don't look for special characters in these records.
    if first_arg == "make-internal-function":
        return 0
    else:
        # Check the first argument.
        if first_arg in sh_cmds:
            return 1

        # Check the chars in the command-line for the special chars.
        for c in cmd:
            if c in sh_chars:
                return 1

    return 0

# What to show to the user... procs or rules.
SHOW_PROCS = 0
SHOW_RULES = 1

def report(log_file_names, args):
    # We only accept one log file
    if len(log_file_names) != 1:
        sys.exit("'shell' report uses one log file.")
    else:
        log_file_name = log_file_names[0]

    # Defaults
    show_what = SHOW_PROCS
    opposite = 0

    # Our options
    optstring = "vr"
    longopts = []

    # Process the command-line options.
    try:
        opts, args = getopt.getopt(args, optstring, longopts)
    except getopt.GetoptError:
        usage()
        sys.exit(1)

    for opt, arg in opts:
        if opt == "-v":
            opposite = 1
        elif opt == "-r":
            show_what = SHOW_RULES
        else:
            sys.exit("%s option not handled." % (opt,))

    # Open the log file
    log = LOG.LogFile(log_file_name)

    if show_what == SHOW_PROCS:
        report_procs(log, opposite)
    elif show_what == SHOW_RULES:
        report_rules(log, opposite)
    else:
        sys.exit("Unhandled show_what value.")

def report_procs(log, opposite):
    """Read the log file and report the procs to the user."""
    while 1:
        try:
            rec = log.read_record()
        except EOFError:
            log.close()
            break

        if opposite:
            if not uses_system(rec):
                rec.Print()
        else:
            if uses_system(rec):
                rec.Print()

def report_rules(log, opposite):
    """Read the log file and report the rules to the user."""
    recs = []

    # We'll read the file and store the interested records
    # to the array.
    while 1:
        try:
            rec = log.read_record()
        except EOFError:
            log.close()
            break

        if opposite:
            if not uses_system(rec):
                recs.append(rec)
        else:
            if uses_system(rec):
                recs.append(rec)

    # Now we uniquify the rules mentioned by the recs, and
    # convert makefile filenames to absolute paths based on the
    # rec's CWD if the filename is not already absolute.
    rules = {}

    for rec in recs:
        # No makefile/lineno data? Report that fact to the user.
        if not rec.makefile_filename or not rec.makefile_lineno:
            print "No makefile/lineno info for PID %s:" % (rec.pid,)
            print "\t", rec.cmdline
            print
            continue

        # Make sure the makefile name is absolute.
        if not os.path.isabs(rec.makefile_filename):
            makefile = os.path.join(rec.cwd, rec.makefile_filename)
        else:
            makefile = rec.makefile_filename

        # Save the data to the hashes: rules[makefile][lineno] = None
        linenos = rules.setdefault(makefile, {})
        linenos[rec.makefile_lineno] = None

    # Report to the user, sorted by makefile, then line number.
    makefiles = rules.keys()
    makefiles.sort()

    for makefile in makefiles:
        linenos = rules[makefile].keys()
        linenos.sort()

        for lineno in linenos:
            # Print with a + before the line number so that the
            # line can be copied-and-pasted to a vim or emacs command-line
            # to have the editor open the file exactly at that line number.
            print "%s +%s" % (makefile, lineno)
bpaauwe/pg3
core/lib/config/__tests__/config.test.js
const config = require('../config')

describe('check config object', () => {
  test('config should be defined', () => {
    expect(config).toBeDefined()
  })

  test('config should be an object', () => {
    expect(config).toEqual(expect.any(Object))
  })

  test('shutdown should be false', () => {
    expect(config.shutdown).toBe(false)
  })
})
FranicevicNikola/OOP
FER/Exams-master/2019_20/AutumnExam_2019_20-09-04-IR_3/solved/src/main/java/hr/fer/oop/task1/ChocolateType.java
<gh_stars>0
package hr.fer.oop.task1;

public enum ChocolateType {
    WHITE, MILK, DARK
}
brandongk/segmenter
segmenter/jobs/GridSearchJob.py
from .BaseJob import BaseJob
import argparse
from typing import Dict
import os
import itertools
import sys
import pprint
from importlib.machinery import SourceFileLoader
from importlib import util


class GridSearchJob(BaseJob):
    name = "grid-search"

    @staticmethod
    def arguments(parser) -> None:
        command_parser = parser.add_parser(GridSearchJob.name,
                                           help='Configure a grid search.')
        command_parser.add_argument("--config",
                                    type=str,
                                    help='the configuration file to use.',
                                    required=False)
        BaseJob.arguments(command_parser)

    @staticmethod
    def arguments_to_cli(args) -> str:
        return " ".join([
            args["dataset"], args["config"],
            "--config {}".format(" ".join(args["config"]))
        ])

    def execute_result(self, config):
        for key, var in zip(self.keys, config):
            os.environ[key] = str(var)
        os.environ["SEARCH"] = "True"
        os.system("launch configure %s > /dev/null" % self.args["dataset"])

    def execute(self) -> None:
        from segmenter.helpers.p_tqdm import p_map as mapper

        filename = self.args["config"]
        loader = SourceFileLoader("searchconfig", filename)
        spec = util.spec_from_loader("searchconfig", loader)
        searchconfig = util.module_from_spec(spec)  # type: ignore
        spec.loader.exec_module(searchconfig)  # type: ignore

        self.keys = [k for k in searchconfig.search_space.keys()]
        configs = [
            l for l in itertools.product(*searchconfig.search_space.values())
        ]

        mapper(self.execute_result, configs)
gustavopinto/entente
seeds/mozilla/non262/regress/regress-626436.js
// Any copyright is dedicated to the Public Domain.
// http://creativecommons.org/licenses/publicdomain/
// Contributors: <NAME> <<EMAIL>>, <NAME> <<EMAIL>>

(1 ? 2 : delete(0 ? 0 : {})).x;

reportCompare(0, 0, 'ok');
kcevik/SoftwareProjekt2020
src/main/java/de/fhbielefeld/pmt/login/event/LoginFailedEvent.java
package de.fhbielefeld.pmt.login.event;

import java.util.EventObject;

import de.fhbielefeld.pmt.login.ILoginModel;

public class LoginFailedEvent extends EventObject {

    public LoginFailedEvent(ILoginModel model) {
        super(model);
    }
}
menify/sandbox
aql/benchmark/lib_97/class_8.cpp
#include "class_8.h" #include "class_1.h" #include "class_7.h" #include "class_9.h" #include "class_6.h" #include "class_0.h" #include <lib_3/class_9.h> #include <lib_54/class_3.h> #include <lib_6/class_8.h> #include <lib_64/class_5.h> #include <lib_2/class_0.h> class_8::class_8() {} class_8::~class_8() {}
andreww591/tme-phabrics
host/gtk/gtk-display.h
/* $Id: gtk-display.h,v 1.10 2009/08/28 01:29:47 fredette Exp $ */

/* host/gtk/gtk-display.h - header file for GTK display support: */

/*
 * Copyright (c) 2003 <NAME>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *      This product includes software developed by <NAME>.
 * 4. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _HOST_GTK_GTK_DISPLAY_H
#define _HOST_GTK_GTK_DISPLAY_H

#include <tme/common.h>
_TME_RCSID("$Id: gtk-display.h,v 1.10 2009/08/28 01:29:47 fredette Exp $");

/* includes: */
#include <tme/generic/fb.h>
#include <tme/generic/keyboard.h>
#include <tme/generic/mouse.h>
#include <tme/threads.h>
#include <tme/hash.h>
#ifndef G_ENABLE_DEBUG
#define G_ENABLE_DEBUG (0)
#endif /* !G_ENABLE_DEBUG */
#include <gtk/gtk.h>

/* macros: */

/* the callout flags: */
#define TME_GTK_DISPLAY_CALLOUT_CHECK (0)
#define TME_GTK_DISPLAY_CALLOUT_RUNNING TME_BIT(0)
#define TME_GTK_DISPLAY_CALLOUTS_MASK (-2)
#define TME_GTK_DISPLAY_CALLOUT_KEYBOARD_CTRL TME_BIT(1)
#define TME_GTK_DISPLAY_CALLOUT_MOUSE_CTRL TME_BIT(2)

/* types: */
struct tme_gtk_display;

/* a screen: */
struct tme_gtk_screen {

  /* the next screen: */
  struct tme_gtk_screen *tme_gtk_screen_next;

  /* a backpointer to the display: */
  struct tme_gtk_display *tme_gtk_screen_display;

  /* the framebuffer connection.  unlike many other elements, this is
     *our* side of the framebuffer connection, not the peer's side: */
  struct tme_fb_connection *tme_gtk_screen_fb;

  /* the current scaling.  if this is < 0, the user has not forced a
     given scaling yet: */
  int tme_gtk_screen_fb_scale;

  /* any colorset signature: */
  tme_uint32_t tme_gtk_screen_colorset;

  /* the top-level window: */
  GtkWidget *tme_gtk_screen_window;

  /* the outer vertical packing box: */
  GtkWidget *tme_gtk_screen_vbox0;

  /* various menu item widgets: */
  GtkWidget *tme_gtk_screen_scale_default;
  GtkWidget *tme_gtk_screen_scale_half;

  /* the Gtkframe & cairo_surface for the framebuffer: */
  GtkWidget *tme_gtk_screen_gtkframe;
  cairo_surface_t *tme_gtk_screen_surface;
  GdkDevice *tme_gtk_screen_pointer;

  /* the translation function: */
  int (*tme_gtk_screen_fb_xlat) _TME_P((struct tme_fb_connection *,
                                        struct tme_fb_connection *));

  /* the mouse on label: */
  GtkWidget *tme_gtk_screen_mouse_label;

  /* the status bar, and the context ID: */
  GtkWidget *tme_gtk_screen_mouse_statusbar;
  guint tme_gtk_screen_mouse_statusbar_cid;

  /* if GDK_VoidSymbol, mouse mode is off.  otherwise, mouse mode is
     on, and this is the keyval that will turn mouse mode off: */
  guint tme_gtk_screen_mouse_keyval;

  /* when mouse mode is on, this is the previous events mask for the
     framebuffer event box: */
  GdkEventMask tme_gtk_screen_mouse_events_old;

  /* when mouse mode is on, this is the warp center: */
  gint tme_gtk_screen_mouse_warp_x;
  gint tme_gtk_screen_mouse_warp_y;

  /* when mouse mode is on, the last tme buttons state: */
  unsigned int tme_gtk_screen_mouse_buttons_last;

  /* if nonzero, the screen needs a full redraw: */
  int tme_gtk_screen_full_redraw;
};

/* a GTK bad keysym: */
struct tme_gtk_keysym_bad {

  /* these are kept on a singly linked list: */
  struct tme_gtk_keysym_bad *tme_gtk_keysym_bad_next;

  /* the bad keysym string: */
  char *tme_gtk_keysym_bad_string;

  /* the flags and context used in the lookup: */
  unsigned int tme_keysym_bad_flags;
  unsigned int tme_gtk_keysym_bad_context_length;
  tme_uint8_t *tme_gtk_keysym_bad_context;
};

/* a display: */
struct tme_gtk_display {

  /* backpointer to our element: */
  struct tme_element *tme_gtk_display_element;

  /* our mutex: */
  tme_mutex_t tme_gtk_display_mutex;

  /* our thread: */
  tme_threadid_t tme_gtk_display_thread;

  /* our keyboard connection: */
  struct tme_keyboard_connection *tme_gtk_display_keyboard_connection;

  /* our keyboard buffer: */
  struct tme_keyboard_buffer *tme_gtk_display_keyboard_buffer;

  /* our keysyms hash: */
  tme_hash_t tme_gtk_display_keyboard_keysyms;

  /* the bad keysym records: */
  struct tme_gtk_keysym_bad *tme_gtk_display_keyboard_keysyms_bad;

  /* our keysym to keycode hash: */
  tme_hash_t tme_gtk_display_keyboard_keysym_to_keycode;

  /* the next keysym to allocate for an unknown keysym string: */
  guint tme_gtk_display_keyboard_keysym_alloc_next;

  /* our mouse connection: */
  struct tme_mouse_connection *tme_gtk_display_mouse_connection;

  /* our mouse buffer: */
  struct tme_mouse_buffer *tme_gtk_display_mouse_buffer;

  /* our mouse cursor: */
  GdkCursor *tme_gtk_display_mouse_cursor;

  /* our screens: */
  struct tme_gtk_screen *tme_gtk_display_screens;

  /* the callout flags: */
  unsigned int tme_gtk_display_callout_flags;
};

/* a menu item: */
struct tme_gtk_display_menu_item {

  /* which menu item this is: */
  unsigned int tme_gtk_display_menu_item_which;

  /* where to save the menu item widget: */
  GtkWidget **tme_gtk_display_menu_item_widget;

  /* the string for the menu item label: */
  const char *tme_gtk_display_menu_item_string;
};

/* this generates menu items: */
typedef GCallback (*tme_gtk_display_menu_items_t) _TME_P((void *, struct tme_gtk_display_menu_item *));

/* prototypes: */
struct tme_gtk_screen *_tme_gtk_screen_new _TME_P((struct tme_gtk_display *));
int _tme_gtk_screen_connections_new _TME_P((struct tme_gtk_display *, struct tme_connection **));
void _tme_gtk_keyboard_new _TME_P((struct tme_gtk_display *));
void _tme_gtk_keyboard_attach _TME_P((struct tme_gtk_screen *));
int _tme_gtk_keyboard_connections_new _TME_P((struct tme_gtk_display *, struct tme_connection **));
void _tme_gtk_mouse_new _TME_P((struct tme_gtk_display *));
void _tme_gtk_mouse_mode_off _TME_P((struct tme_gtk_screen *, guint32));
void _tme_gtk_mouse_attach _TME_P((struct tme_gtk_screen *));
int _tme_gtk_mouse_connections_new _TME_P((struct tme_gtk_display *, struct tme_connection **));
int _tme_gtk_screen_update _TME_P((void *disp));
_tme_thret _tme_gtk_screen_th_update _TME_P((struct tme_gtk_display *));
void _tme_gtk_display_callout _TME_P((struct tme_gtk_display *, int));
gint _tme_gtk_display_enter_focus _TME_P((GtkWidget *, GdkEvent *, gpointer));
GtkWidget *_tme_gtk_display_menu_radio _TME_P((void *, tme_gtk_display_menu_items_t));

#endif /* _HOST_GTK_GTK_DISPLAY_H */
qeo/qeo-core
qeo-codegen/qeo-codegen/src/main/java/com/technicolor/qeo/codegen/type/qdm/QDM.java
/*
 * Copyright (c) 2016 - Qeo LLC
 *
 * The source code form of this Qeo Open Source Project component is subject
 * to the terms of the Clear BSD license.
 *
 * You can redistribute it and/or modify it under the terms of the Clear BSD
 * License (http://directory.fsf.org/wiki/License:ClearBSD). See LICENSE file
 * for more details.
 *
 * The Qeo Open Source Project also includes third party Open Source Software.
 * See LICENSE file for more details.
 */

package com.technicolor.qeo.codegen.type.qdm;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Intermediate object that contains the data of the parsed qdm. A QDM object has some other intermediate objects that
 * contain the data of each specific tag of the qdm.
 */
public class QDM
{
    /**
     * String representation of the nonBasic type member.
     */
    public static final String STRING_NON_BASIC = "nonBasic";

    private final String mName;
    private final List<QdmModule> mModules;
    private final List<String> mIncludes;

    /**
     * QDM constructor.
     *
     * @param name The name of the qdm file.
     */
    public QDM(String name)
    {
        mName = name;
        mIncludes = new ArrayList<String>();
        mModules = new ArrayList<QdmModule>();
    }

    /**
     * Add a QDM module.
     *
     * @param module the module.
     */
    public void addModule(QdmModule module)
    {
        mModules.add(module);
    }

    /**
     * Get a list of QDM modules.
     *
     * @return list of modules.
     */
    public List<QdmModule> getModules()
    {
        return Collections.unmodifiableList(mModules);
    }

    /**
     * Get a list of all the include tags of the qdm.
     *
     * @return list with all the includes defined in the qdm
     */
    public List<String> getIncludes()
    {
        return mIncludes;
    }

    /**
     * Get the name of the qdm file.
     *
     * @return The name.
     */
    public String getName()
    {
        return mName;
    }

    /**
     * Validates that the definitions of all the structs are done before we reference them, based on the order of the
     * given qdm.
     *
     * @return true if the order in which the structs are defined is correct, false otherwise.
     */
    public boolean validateStructsOrder()
    {
        Set<String> usedStructs = new HashSet<String>();
        for (int i = 0; i < mModules.size(); i++) {
            // usedStructs.clear();
            QdmModule module = mModules.get(i);
            List<QdmEnum> qdmEnums = module.getEnums();
            for (int k = 0; k < qdmEnums.size(); k++) {
                QdmEnum mEnum = qdmEnums.get(k);
                if (usedStructs.contains(module.getName() + "::" + mEnum.getName())) {
                    // the enum is defined after its reference
                    return false;
                }
            }
            List<QdmStruct> qdmStructs = module.getStructs();
            for (int j = 0; j < qdmStructs.size(); j++) {
                QdmStruct struct = qdmStructs.get(j);
                if (usedStructs.contains(module.getName() + "::" + struct.getName())) {
                    // the struct is defined after its reference
                    return false;
                }
                for (QdmMember member : struct.getMembers()) {
                    if (member.getType().equals(QDM.STRING_NON_BASIC)) {
                        // Add all the nonBasic types to the set
                        if (member.getNonBasicTypeName().contains("::")) {
                            usedStructs.add(member.getNonBasicTypeName());
                        }
                        else {
                            usedStructs.add(module.getName() + "::" + member.getNonBasicTypeName());
                        }
                    }
                }
            }
        }
        return true;
    }
}
FroobWorld/NabSuite
src/main/java/com/froobworld/nabsuite/modules/protect/command/ClaimHorseCommand.java
package com.froobworld.nabsuite.modules.protect.command;

import cloud.commandframework.Command;
import cloud.commandframework.context.CommandContext;
import com.froobworld.nabsuite.command.NabCommand;
import com.froobworld.nabsuite.modules.protect.ProtectModule;
import com.froobworld.nabsuite.modules.protect.horse.HorseManager;
import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.format.NamedTextColor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.AbstractHorse;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;

public class ClaimHorseCommand extends NabCommand {
    private final ProtectModule protectModule;

    public ClaimHorseCommand(ProtectModule protectModule) {
        super(
                "claimhorse",
                "Protect a horse from theft.",
                "nabsuite.command.claimhorse",
                Player.class,
                "protecthorse"
        );
        this.protectModule = protectModule;
    }

    @Override
    public void execute(CommandContext<CommandSender> context) {
        Player player = (Player) context.getSender();
        Entity vehicle = player.getVehicle();
        if (!(vehicle instanceof AbstractHorse)) {
            player.sendMessage(Component.text("You need to be on the horse you wish to protect.", NamedTextColor.RED));
            return;
        }
        HorseManager horseManager = protectModule.getHorseManager();
        if (horseManager.getHorse(vehicle.getUniqueId()) != null) {
            player.sendMessage(Component.text("This horse has already been claimed.", NamedTextColor.RED));
            return;
        }
        horseManager.protectHorse(vehicle.getUniqueId(), player.getUniqueId());
        player.sendMessage(Component.text("Horse claimed.", NamedTextColor.YELLOW));
    }

    @Override
    public Command.Builder<CommandSender> populateBuilder(Command.Builder<CommandSender> builder) {
        return builder;
    }
}
alljoyn/lighting-service_framework
thin_core_library/lamp_service/src/LampState.c
/******************************************************************************
 * Copyright (c) Open Connectivity Foundation (OCF), AllJoyn Open Source
 * Project (AJOSP) Contributors and others.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * All rights reserved. This program and the accompanying materials are
 * made available under the terms of the Apache License, Version 2.0
 * which accompanies this distribution, and is available at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Copyright (c) Open Connectivity Foundation and Contributors to AllSeen
 * Alliance. All rights reserved.
 *
 * Permission to use, copy, modify, and/or distribute this software for
 * any purpose with or without fee is hereby granted, provided that the
 * above copyright notice and this permission notice appear in all
 * copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL
 * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
 * AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
 * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR
 * PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
 * PERFORMANCE OF THIS SOFTWARE.
 ******************************************************************************/

#include <LampState.h>
#include <LampService.h>
#include <OEM_LS_Code.h>

#include <aj_nvram.h>
#include <aj_debug.h>

/**
 * Per-module definition of the current module for debug logging. Must be defined
 * prior to first inclusion of aj_debug.h
 */
#define AJ_MODULE LAMP_STATE

/**
 * Turn on per-module debug printing by setting this variable to non-zero value
 * (usually in debugger).
 */
#ifndef NDEBUG
uint8_t dbgLAMP_STATE = 1;
#endif

/*
 * The state object that represents the current lamp state.
 * This is mirrored in NVRAM and preserved across power cycles.
 * A signal will be sent when this changes if a session is active.
 */
static LampState TheLampState;

static size_t memscpy(void* dest, size_t dstSize, const void* src, size_t copySize)
{
    size_t minSize = dstSize < copySize ? dstSize : copySize;
    memcpy(dest, src, minSize);
    return minSize;
}

LampResponseCode LAMP_MarshalState(LampState* state, AJ_Message* msg)
{
    AJ_InfoPrintf(("%s\n", __func__));
    AJ_Status status = AJ_MarshalArgs(msg, "{sv}", "Hue", "u", state->hue);

    if (status != AJ_OK) {
        return LAMP_ERR_MESSAGE;
    }

    status = AJ_MarshalArgs(msg, "{sv}", "Saturation", "u", state->saturation);
    if (status != AJ_OK) {
        return LAMP_ERR_MESSAGE;
    }

    status = AJ_MarshalArgs(msg, "{sv}", "ColorTemp", "u", state->colorTemp);
    if (status != AJ_OK) {
        return LAMP_ERR_MESSAGE;
    }

    status = AJ_MarshalArgs(msg, "{sv}", "Brightness", "u", state->brightness);
    if (status != AJ_OK) {
        return LAMP_ERR_MESSAGE;
    }

    status = AJ_MarshalArgs(msg, "{sv}", "OnOff", "b", (state->onOff ? TRUE : FALSE));
    if (status != AJ_OK) {
        return LAMP_ERR_MESSAGE;
    }

    return LAMP_OK;
}

LampResponseCode LAMP_UnmarshalState(LampStateContainer* state, AJ_Message* msg)
{
    AJ_Arg array1, struct1;
    AJ_Status status = AJ_UnmarshalContainer(msg, &array1, AJ_ARG_ARRAY);
    LampResponseCode responseCode = LAMP_OK;

    AJ_DumpMsg("LAMP_UnmarshalState", msg, TRUE);

    // initialize
    memset(state, 0, sizeof(LampStateContainer));

    do {
        char* field;
        char* sig;

        status = AJ_UnmarshalContainer(msg, &struct1, AJ_ARG_DICT_ENTRY);
        if (status != AJ_OK) {
            break;
        }

        status = AJ_UnmarshalArgs(msg, "s", &field);
        if (status != AJ_OK) {
            AJ_ErrPrintf(("AJ_UnmarshalArgs: %s\n", AJ_StatusText(status)));
            return LAMP_ERR_MESSAGE;
        }

        // Process the field!
        status = AJ_UnmarshalVariant(msg, (const char**) &sig);
        if (status != AJ_OK) {
            AJ_ErrPrintf(("AJ_UnmarshalVariant: %s\n", AJ_StatusText(status)));
            return LAMP_ERR_MESSAGE;
        }

        if (0 == strcmp(field, "OnOff")) {
            uint32_t onoff;
            status = AJ_UnmarshalArgs(msg, "b", &onoff);
            state->state.onOff = onoff ? TRUE : FALSE;
            state->stateFieldIndicators |= LAMP_STATE_ON_OFF_FIELD_INDICATOR;
        } else if (0 == strcmp(field, "Hue")) {
            status = AJ_UnmarshalArgs(msg, "u", &state->state.hue);
            state->stateFieldIndicators |= LAMP_STATE_HUE_FIELD_INDICATOR;
        } else if (0 == strcmp(field, "Saturation")) {
            status = AJ_UnmarshalArgs(msg, "u", &state->state.saturation);
            state->stateFieldIndicators |= LAMP_STATE_SATURATION_FIELD_INDICATOR;
        } else if (0 == strcmp(field, "ColorTemp")) {
            status = AJ_UnmarshalArgs(msg, "u", &state->state.colorTemp);
            state->stateFieldIndicators |= LAMP_STATE_COLOR_TEMP_FIELD_INDICATOR;
        } else if (0 == strcmp(field, "Brightness")) {
            status = AJ_UnmarshalArgs(msg, "u", &state->state.brightness);
            state->stateFieldIndicators |= LAMP_STATE_BRIGHTNESS_FIELD_INDICATOR;
        } else {
            AJ_ErrPrintf(("Unknown field: %s\n", field));
            responseCode = LAMP_ERR_MESSAGE;
            AJ_SkipArg(msg);
        }

        status = AJ_UnmarshalCloseContainer(msg, &struct1);
        // if field invalid, throw the whole thing out and return the error
    } while (status == AJ_OK && responseCode == LAMP_OK);

    AJ_UnmarshalCloseContainer(msg, &array1);

    return responseCode;
}

#define LAMP_STATE_FD AJ_NVRAM_ID_FOR_APPS + 1

void LAMP_InitializeState(void)
{
    AJ_NV_DATASET* id = AJ_NVRAM_Open(LAMP_STATE_FD, "r", 0);
    if (id != NULL) {
        AJ_NVRAM_Read(&TheLampState, sizeof(LampState), id);
        AJ_NVRAM_Close(id);
    } else {
        AJ_NV_DATASET* id = AJ_NVRAM_Open(LAMP_STATE_FD, "w", sizeof(LampState));
        OEM_LS_SetFactoryState(&TheLampState);
        if (id != NULL) {
            AJ_NVRAM_Write(&TheLampState, sizeof(LampState), id);
            AJ_NVRAM_Close(id);
        }
    }
}

void LAMP_GetState(LampState* state)
{
    memscpy((void*) (state), sizeof(LampState), (const void*) (&TheLampState), sizeof(LampState));
}

void LAMP_SetState(const LampState* state)
{
    AJ_InfoPrintf(("\n%s\n", __func__));
    int32_t diff = memcmp(state, &TheLampState, sizeof(LampState));

    if (diff) {
        AJ_InfoPrintf(("\n%s: Calling into NVRAM\n", __func__));
        AJ_NV_DATASET* id = AJ_NVRAM_Open(LAMP_STATE_FD, "w", sizeof(LampState));
        memscpy((void*) (&TheLampState), sizeof(LampState), (const void*) (state), sizeof(LampState));
        if (id != NULL) {
            AJ_NVRAM_Write(&TheLampState, sizeof(LampState), id);
            AJ_NVRAM_Close(id);
        }
    }

    // this will cause the signal org.allseen.LSF.LampService.LampStateChanged
    // to be sent if there is a current session.
    LAMP_SendStateChangedSignal();
}

void LAMP_ClearState(void)
{
    memset(&TheLampState, 0, sizeof(LampState));
    AJ_NVRAM_Delete(LAMP_STATE_FD);
}
wesleyegberto/javaee_projects
jaxrs-specification/src/main/java/com/github/wesleyegberto/jaxrsspecificationtest/config/Audited.java
package com.github.wesleyegberto.jaxrsspecificationtest.config;

import javax.ws.rs.NameBinding;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

@Target({ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@NameBinding
public @interface Audited {
}
jackstack24/gitaly-mirror
proto/go/gitalypb/remote.pb.go
<gh_stars>1-10 // Code generated by protoc-gen-go. DO NOT EDIT. // source: remote.proto package gitalypb import ( context "context" fmt "fmt" proto "github.com/golang/protobuf/proto" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" math "math" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type AddRemoteRequest struct { Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"` Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` Url string `protobuf:"bytes,3,opt,name=url,proto3" json:"url,omitempty"` // If any, the remote is configured as a mirror with those mappings MirrorRefmaps []string `protobuf:"bytes,5,rep,name=mirror_refmaps,json=mirrorRefmaps,proto3" json:"mirror_refmaps,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *AddRemoteRequest) Reset() { *m = AddRemoteRequest{} } func (m *AddRemoteRequest) String() string { return proto.CompactTextString(m) } func (*AddRemoteRequest) ProtoMessage() {} func (*AddRemoteRequest) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{0} } func (m *AddRemoteRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_AddRemoteRequest.Unmarshal(m, b) } func (m *AddRemoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_AddRemoteRequest.Marshal(b, m, deterministic) } func (m *AddRemoteRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_AddRemoteRequest.Merge(m, src) } func (m *AddRemoteRequest) XXX_Size() int { return xxx_messageInfo_AddRemoteRequest.Size(m) } func (m *AddRemoteRequest) XXX_DiscardUnknown() { xxx_messageInfo_AddRemoteRequest.DiscardUnknown(m) } var xxx_messageInfo_AddRemoteRequest proto.InternalMessageInfo func (m *AddRemoteRequest) GetRepository() *Repository { if m != nil { return m.Repository } return nil } func (m *AddRemoteRequest) GetName() string { if m != nil { return m.Name } return "" } func (m *AddRemoteRequest) GetUrl() string { if m != nil { return m.Url } return "" } func (m *AddRemoteRequest) GetMirrorRefmaps() []string { if m != nil { return m.MirrorRefmaps } return nil } type AddRemoteResponse struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *AddRemoteResponse) Reset() { *m = AddRemoteResponse{} } func (m *AddRemoteResponse) String() string { return proto.CompactTextString(m) } func (*AddRemoteResponse) ProtoMessage() {} func (*AddRemoteResponse) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{1} } func (m *AddRemoteResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_AddRemoteResponse.Unmarshal(m, b) } func (m *AddRemoteResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_AddRemoteResponse.Marshal(b, m, deterministic) } func (m *AddRemoteResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_AddRemoteResponse.Merge(m, src) } func (m *AddRemoteResponse) XXX_Size() int { return 
xxx_messageInfo_AddRemoteResponse.Size(m) } func (m *AddRemoteResponse) XXX_DiscardUnknown() { xxx_messageInfo_AddRemoteResponse.DiscardUnknown(m) } var xxx_messageInfo_AddRemoteResponse proto.InternalMessageInfo type RemoveRemoteRequest struct { Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"` Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *RemoveRemoteRequest) Reset() { *m = RemoveRemoteRequest{} } func (m *RemoveRemoteRequest) String() string { return proto.CompactTextString(m) } func (*RemoveRemoteRequest) ProtoMessage() {} func (*RemoveRemoteRequest) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{2} } func (m *RemoveRemoteRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RemoveRemoteRequest.Unmarshal(m, b) } func (m *RemoveRemoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_RemoveRemoteRequest.Marshal(b, m, deterministic) } func (m *RemoveRemoteRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_RemoveRemoteRequest.Merge(m, src) } func (m *RemoveRemoteRequest) XXX_Size() int { return xxx_messageInfo_RemoveRemoteRequest.Size(m) } func (m *RemoveRemoteRequest) XXX_DiscardUnknown() { xxx_messageInfo_RemoveRemoteRequest.DiscardUnknown(m) } var xxx_messageInfo_RemoveRemoteRequest proto.InternalMessageInfo func (m *RemoveRemoteRequest) GetRepository() *Repository { if m != nil { return m.Repository } return nil } func (m *RemoveRemoteRequest) GetName() string { if m != nil { return m.Name } return "" } type RemoveRemoteResponse struct { Result bool `protobuf:"varint,1,opt,name=result,proto3" json:"result,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *RemoveRemoteResponse) Reset() { *m = RemoveRemoteResponse{} } func (m *RemoveRemoteResponse) String() string { return proto.CompactTextString(m) } func (*RemoveRemoteResponse) ProtoMessage() {} func (*RemoveRemoteResponse) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{3} } func (m *RemoveRemoteResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RemoveRemoteResponse.Unmarshal(m, b) } func (m *RemoveRemoteResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_RemoveRemoteResponse.Marshal(b, m, deterministic) } func (m *RemoveRemoteResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_RemoveRemoteResponse.Merge(m, src) } func (m *RemoveRemoteResponse) XXX_Size() int { return xxx_messageInfo_RemoveRemoteResponse.Size(m) } func (m *RemoveRemoteResponse) XXX_DiscardUnknown() { xxx_messageInfo_RemoveRemoteResponse.DiscardUnknown(m) } var xxx_messageInfo_RemoveRemoteResponse proto.InternalMessageInfo func (m *RemoveRemoteResponse) GetResult() bool { if m != nil { return m.Result } return false } type FetchInternalRemoteRequest struct { Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"` RemoteRepository *Repository `protobuf:"bytes,2,opt,name=remote_repository,json=remoteRepository,proto3" json:"remote_repository,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *FetchInternalRemoteRequest) Reset() { *m = FetchInternalRemoteRequest{} } func (m *FetchInternalRemoteRequest) 
String() string { return proto.CompactTextString(m) } func (*FetchInternalRemoteRequest) ProtoMessage() {} func (*FetchInternalRemoteRequest) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{4} } func (m *FetchInternalRemoteRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FetchInternalRemoteRequest.Unmarshal(m, b) } func (m *FetchInternalRemoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FetchInternalRemoteRequest.Marshal(b, m, deterministic) } func (m *FetchInternalRemoteRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_FetchInternalRemoteRequest.Merge(m, src) } func (m *FetchInternalRemoteRequest) XXX_Size() int { return xxx_messageInfo_FetchInternalRemoteRequest.Size(m) } func (m *FetchInternalRemoteRequest) XXX_DiscardUnknown() { xxx_messageInfo_FetchInternalRemoteRequest.DiscardUnknown(m) } var xxx_messageInfo_FetchInternalRemoteRequest proto.InternalMessageInfo func (m *FetchInternalRemoteRequest) GetRepository() *Repository { if m != nil { return m.Repository } return nil } func (m *FetchInternalRemoteRequest) GetRemoteRepository() *Repository { if m != nil { return m.RemoteRepository } return nil } type FetchInternalRemoteResponse struct { Result bool `protobuf:"varint,1,opt,name=result,proto3" json:"result,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *FetchInternalRemoteResponse) Reset() { *m = FetchInternalRemoteResponse{} } func (m *FetchInternalRemoteResponse) String() string { return proto.CompactTextString(m) } func (*FetchInternalRemoteResponse) ProtoMessage() {} func (*FetchInternalRemoteResponse) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{5} } func (m *FetchInternalRemoteResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FetchInternalRemoteResponse.Unmarshal(m, b) } func (m *FetchInternalRemoteResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FetchInternalRemoteResponse.Marshal(b, m, deterministic) } func (m *FetchInternalRemoteResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_FetchInternalRemoteResponse.Merge(m, src) } func (m *FetchInternalRemoteResponse) XXX_Size() int { return xxx_messageInfo_FetchInternalRemoteResponse.Size(m) } func (m *FetchInternalRemoteResponse) XXX_DiscardUnknown() { xxx_messageInfo_FetchInternalRemoteResponse.DiscardUnknown(m) } var xxx_messageInfo_FetchInternalRemoteResponse proto.InternalMessageInfo func (m *FetchInternalRemoteResponse) GetResult() bool { if m != nil { return m.Result } return false } type UpdateRemoteMirrorRequest struct { Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"` RefName string `protobuf:"bytes,2,opt,name=ref_name,json=refName,proto3" json:"ref_name,omitempty"` OnlyBranchesMatching [][]byte `protobuf:"bytes,3,rep,name=only_branches_matching,json=onlyBranchesMatching,proto3" json:"only_branches_matching,omitempty"` SshKey string `protobuf:"bytes,4,opt,name=ssh_key,json=sshKey,proto3" json:"ssh_key,omitempty"` KnownHosts string `protobuf:"bytes,5,opt,name=known_hosts,json=knownHosts,proto3" json:"known_hosts,omitempty"` KeepDivergentRefs bool `protobuf:"varint,6,opt,name=keep_divergent_refs,json=keepDivergentRefs,proto3" json:"keep_divergent_refs,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m 
*UpdateRemoteMirrorRequest) Reset() { *m = UpdateRemoteMirrorRequest{} } func (m *UpdateRemoteMirrorRequest) String() string { return proto.CompactTextString(m) } func (*UpdateRemoteMirrorRequest) ProtoMessage() {} func (*UpdateRemoteMirrorRequest) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{6} } func (m *UpdateRemoteMirrorRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UpdateRemoteMirrorRequest.Unmarshal(m, b) } func (m *UpdateRemoteMirrorRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UpdateRemoteMirrorRequest.Marshal(b, m, deterministic) } func (m *UpdateRemoteMirrorRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_UpdateRemoteMirrorRequest.Merge(m, src) } func (m *UpdateRemoteMirrorRequest) XXX_Size() int { return xxx_messageInfo_UpdateRemoteMirrorRequest.Size(m) } func (m *UpdateRemoteMirrorRequest) XXX_DiscardUnknown() { xxx_messageInfo_UpdateRemoteMirrorRequest.DiscardUnknown(m) } var xxx_messageInfo_UpdateRemoteMirrorRequest proto.InternalMessageInfo func (m *UpdateRemoteMirrorRequest) GetRepository() *Repository { if m != nil { return m.Repository } return nil } func (m *UpdateRemoteMirrorRequest) GetRefName() string { if m != nil { return m.RefName } return "" } func (m *UpdateRemoteMirrorRequest) GetOnlyBranchesMatching() [][]byte { if m != nil { return m.OnlyBranchesMatching } return nil } func (m *UpdateRemoteMirrorRequest) GetSshKey() string { if m != nil { return m.SshKey } return "" } func (m *UpdateRemoteMirrorRequest) GetKnownHosts() string { if m != nil { return m.KnownHosts } return "" } func (m *UpdateRemoteMirrorRequest) GetKeepDivergentRefs() bool { if m != nil { return m.KeepDivergentRefs } return false } type UpdateRemoteMirrorResponse struct { DivergentRefs [][]byte `protobuf:"bytes,1,rep,name=divergent_refs,json=divergentRefs,proto3" json:"divergent_refs,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *UpdateRemoteMirrorResponse) Reset() { *m = UpdateRemoteMirrorResponse{} } func (m *UpdateRemoteMirrorResponse) String() string { return proto.CompactTextString(m) } func (*UpdateRemoteMirrorResponse) ProtoMessage() {} func (*UpdateRemoteMirrorResponse) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{7} } func (m *UpdateRemoteMirrorResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UpdateRemoteMirrorResponse.Unmarshal(m, b) } func (m *UpdateRemoteMirrorResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UpdateRemoteMirrorResponse.Marshal(b, m, deterministic) } func (m *UpdateRemoteMirrorResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_UpdateRemoteMirrorResponse.Merge(m, src) } func (m *UpdateRemoteMirrorResponse) XXX_Size() int { return xxx_messageInfo_UpdateRemoteMirrorResponse.Size(m) } func (m *UpdateRemoteMirrorResponse) XXX_DiscardUnknown() { xxx_messageInfo_UpdateRemoteMirrorResponse.DiscardUnknown(m) } var xxx_messageInfo_UpdateRemoteMirrorResponse proto.InternalMessageInfo func (m *UpdateRemoteMirrorResponse) GetDivergentRefs() [][]byte { if m != nil { return m.DivergentRefs } return nil } type FindRemoteRepositoryRequest struct { Remote string `protobuf:"bytes,1,opt,name=remote,proto3" json:"remote,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *FindRemoteRepositoryRequest) Reset() { *m = 
FindRemoteRepositoryRequest{} } func (m *FindRemoteRepositoryRequest) String() string { return proto.CompactTextString(m) } func (*FindRemoteRepositoryRequest) ProtoMessage() {} func (*FindRemoteRepositoryRequest) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{8} } func (m *FindRemoteRepositoryRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FindRemoteRepositoryRequest.Unmarshal(m, b) } func (m *FindRemoteRepositoryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FindRemoteRepositoryRequest.Marshal(b, m, deterministic) } func (m *FindRemoteRepositoryRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_FindRemoteRepositoryRequest.Merge(m, src) } func (m *FindRemoteRepositoryRequest) XXX_Size() int { return xxx_messageInfo_FindRemoteRepositoryRequest.Size(m) } func (m *FindRemoteRepositoryRequest) XXX_DiscardUnknown() { xxx_messageInfo_FindRemoteRepositoryRequest.DiscardUnknown(m) } var xxx_messageInfo_FindRemoteRepositoryRequest proto.InternalMessageInfo func (m *FindRemoteRepositoryRequest) GetRemote() string { if m != nil { return m.Remote } return "" } // This migth throw a GRPC Unavailable code, to signal the request failure // is transient. type FindRemoteRepositoryResponse struct { Exists bool `protobuf:"varint,1,opt,name=exists,proto3" json:"exists,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *FindRemoteRepositoryResponse) Reset() { *m = FindRemoteRepositoryResponse{} } func (m *FindRemoteRepositoryResponse) String() string { return proto.CompactTextString(m) } func (*FindRemoteRepositoryResponse) ProtoMessage() {} func (*FindRemoteRepositoryResponse) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{9} } func (m *FindRemoteRepositoryResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FindRemoteRepositoryResponse.Unmarshal(m, b) } func (m *FindRemoteRepositoryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FindRemoteRepositoryResponse.Marshal(b, m, deterministic) } func (m *FindRemoteRepositoryResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_FindRemoteRepositoryResponse.Merge(m, src) } func (m *FindRemoteRepositoryResponse) XXX_Size() int { return xxx_messageInfo_FindRemoteRepositoryResponse.Size(m) } func (m *FindRemoteRepositoryResponse) XXX_DiscardUnknown() { xxx_messageInfo_FindRemoteRepositoryResponse.DiscardUnknown(m) } var xxx_messageInfo_FindRemoteRepositoryResponse proto.InternalMessageInfo func (m *FindRemoteRepositoryResponse) GetExists() bool { if m != nil { return m.Exists } return false } type FindRemoteRootRefRequest struct { Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"` Remote string `protobuf:"bytes,2,opt,name=remote,proto3" json:"remote,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *FindRemoteRootRefRequest) Reset() { *m = FindRemoteRootRefRequest{} } func (m *FindRemoteRootRefRequest) String() string { return proto.CompactTextString(m) } func (*FindRemoteRootRefRequest) ProtoMessage() {} func (*FindRemoteRootRefRequest) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{10} } func (m *FindRemoteRootRefRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FindRemoteRootRefRequest.Unmarshal(m, b) } func (m *FindRemoteRootRefRequest) 
XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FindRemoteRootRefRequest.Marshal(b, m, deterministic) } func (m *FindRemoteRootRefRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_FindRemoteRootRefRequest.Merge(m, src) } func (m *FindRemoteRootRefRequest) XXX_Size() int { return xxx_messageInfo_FindRemoteRootRefRequest.Size(m) } func (m *FindRemoteRootRefRequest) XXX_DiscardUnknown() { xxx_messageInfo_FindRemoteRootRefRequest.DiscardUnknown(m) } var xxx_messageInfo_FindRemoteRootRefRequest proto.InternalMessageInfo func (m *FindRemoteRootRefRequest) GetRepository() *Repository { if m != nil { return m.Repository } return nil } func (m *FindRemoteRootRefRequest) GetRemote() string { if m != nil { return m.Remote } return "" } type FindRemoteRootRefResponse struct { Ref string `protobuf:"bytes,1,opt,name=ref,proto3" json:"ref,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *FindRemoteRootRefResponse) Reset() { *m = FindRemoteRootRefResponse{} } func (m *FindRemoteRootRefResponse) String() string { return proto.CompactTextString(m) } func (*FindRemoteRootRefResponse) ProtoMessage() {} func (*FindRemoteRootRefResponse) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{11} } func (m *FindRemoteRootRefResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FindRemoteRootRefResponse.Unmarshal(m, b) } func (m *FindRemoteRootRefResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FindRemoteRootRefResponse.Marshal(b, m, deterministic) } func (m *FindRemoteRootRefResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_FindRemoteRootRefResponse.Merge(m, src) } func (m *FindRemoteRootRefResponse) XXX_Size() int { return xxx_messageInfo_FindRemoteRootRefResponse.Size(m) } func (m *FindRemoteRootRefResponse) XXX_DiscardUnknown() { xxx_messageInfo_FindRemoteRootRefResponse.DiscardUnknown(m) } var xxx_messageInfo_FindRemoteRootRefResponse proto.InternalMessageInfo func (m *FindRemoteRootRefResponse) GetRef() string { if m != nil { return m.Ref } return "" } type ListRemotesRequest struct { Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *ListRemotesRequest) Reset() { *m = ListRemotesRequest{} } func (m *ListRemotesRequest) String() string { return proto.CompactTextString(m) } func (*ListRemotesRequest) ProtoMessage() {} func (*ListRemotesRequest) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{12} } func (m *ListRemotesRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListRemotesRequest.Unmarshal(m, b) } func (m *ListRemotesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_ListRemotesRequest.Marshal(b, m, deterministic) } func (m *ListRemotesRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_ListRemotesRequest.Merge(m, src) } func (m *ListRemotesRequest) XXX_Size() int { return xxx_messageInfo_ListRemotesRequest.Size(m) } func (m *ListRemotesRequest) XXX_DiscardUnknown() { xxx_messageInfo_ListRemotesRequest.DiscardUnknown(m) } var xxx_messageInfo_ListRemotesRequest proto.InternalMessageInfo func (m *ListRemotesRequest) GetRepository() *Repository { if m != nil { return m.Repository } return nil } type ListRemotesResponse struct { Remotes 
[]*ListRemotesResponse_Remote `protobuf:"bytes,1,rep,name=remotes,proto3" json:"remotes,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *ListRemotesResponse) Reset() { *m = ListRemotesResponse{} } func (m *ListRemotesResponse) String() string { return proto.CompactTextString(m) } func (*ListRemotesResponse) ProtoMessage() {} func (*ListRemotesResponse) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{13} } func (m *ListRemotesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListRemotesResponse.Unmarshal(m, b) } func (m *ListRemotesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_ListRemotesResponse.Marshal(b, m, deterministic) } func (m *ListRemotesResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_ListRemotesResponse.Merge(m, src) } func (m *ListRemotesResponse) XXX_Size() int { return xxx_messageInfo_ListRemotesResponse.Size(m) } func (m *ListRemotesResponse) XXX_DiscardUnknown() { xxx_messageInfo_ListRemotesResponse.DiscardUnknown(m) } var xxx_messageInfo_ListRemotesResponse proto.InternalMessageInfo func (m *ListRemotesResponse) GetRemotes() []*ListRemotesResponse_Remote { if m != nil { return m.Remotes } return nil } type ListRemotesResponse_Remote struct { Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` FetchUrl string `protobuf:"bytes,2,opt,name=fetch_url,json=fetchUrl,proto3" json:"fetch_url,omitempty"` PushUrl string `protobuf:"bytes,3,opt,name=push_url,json=pushUrl,proto3" json:"push_url,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *ListRemotesResponse_Remote) Reset() { *m = ListRemotesResponse_Remote{} } func (m *ListRemotesResponse_Remote) String() string { return proto.CompactTextString(m) } func (*ListRemotesResponse_Remote) ProtoMessage() {} func (*ListRemotesResponse_Remote) Descriptor() ([]byte, []int) { return fileDescriptor_eefc82927d57d89b, []int{13, 0} } func (m *ListRemotesResponse_Remote) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListRemotesResponse_Remote.Unmarshal(m, b) } func (m *ListRemotesResponse_Remote) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_ListRemotesResponse_Remote.Marshal(b, m, deterministic) } func (m *ListRemotesResponse_Remote) XXX_Merge(src proto.Message) { xxx_messageInfo_ListRemotesResponse_Remote.Merge(m, src) } func (m *ListRemotesResponse_Remote) XXX_Size() int { return xxx_messageInfo_ListRemotesResponse_Remote.Size(m) } func (m *ListRemotesResponse_Remote) XXX_DiscardUnknown() { xxx_messageInfo_ListRemotesResponse_Remote.DiscardUnknown(m) } var xxx_messageInfo_ListRemotesResponse_Remote proto.InternalMessageInfo func (m *ListRemotesResponse_Remote) GetName() string { if m != nil { return m.Name } return "" } func (m *ListRemotesResponse_Remote) GetFetchUrl() string { if m != nil { return m.FetchUrl } return "" } func (m *ListRemotesResponse_Remote) GetPushUrl() string { if m != nil { return m.PushUrl } return "" } func init() { proto.RegisterType((*AddRemoteRequest)(nil), "gitaly.AddRemoteRequest") proto.RegisterType((*AddRemoteResponse)(nil), "gitaly.AddRemoteResponse") proto.RegisterType((*RemoveRemoteRequest)(nil), "gitaly.RemoveRemoteRequest") proto.RegisterType((*RemoveRemoteResponse)(nil), "gitaly.RemoveRemoteResponse") proto.RegisterType((*FetchInternalRemoteRequest)(nil), 
"gitaly.FetchInternalRemoteRequest") proto.RegisterType((*FetchInternalRemoteResponse)(nil), "gitaly.FetchInternalRemoteResponse") proto.RegisterType((*UpdateRemoteMirrorRequest)(nil), "gitaly.UpdateRemoteMirrorRequest") proto.RegisterType((*UpdateRemoteMirrorResponse)(nil), "gitaly.UpdateRemoteMirrorResponse") proto.RegisterType((*FindRemoteRepositoryRequest)(nil), "gitaly.FindRemoteRepositoryRequest") proto.RegisterType((*FindRemoteRepositoryResponse)(nil), "gitaly.FindRemoteRepositoryResponse") proto.RegisterType((*FindRemoteRootRefRequest)(nil), "gitaly.FindRemoteRootRefRequest") proto.RegisterType((*FindRemoteRootRefResponse)(nil), "gitaly.FindRemoteRootRefResponse") proto.RegisterType((*ListRemotesRequest)(nil), "gitaly.ListRemotesRequest") proto.RegisterType((*ListRemotesResponse)(nil), "gitaly.ListRemotesResponse") proto.RegisterType((*ListRemotesResponse_Remote)(nil), "gitaly.ListRemotesResponse.Remote") } func init() { proto.RegisterFile("remote.proto", fileDescriptor_eefc82927d57d89b) } var fileDescriptor_eefc82927d57d89b = []byte{ // 772 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xcb, 0x6e, 0xd3, 0x4c, 0x14, 0x96, 0x93, 0x34, 0x97, 0x93, 0xb4, 0x4a, 0x26, 0x55, 0x7f, 0xc7, 0xa9, 0xf4, 0xa7, 0x86, 0x4a, 0x59, 0xd0, 0xa4, 0x2a, 0x17, 0xb1, 0x40, 0x42, 0x14, 0x84, 0xca, 0xa5, 0x15, 0x18, 0xba, 0x61, 0x63, 0x9c, 0x64, 0x92, 0x58, 0x71, 0x3c, 0x66, 0x66, 0x52, 0xc8, 0x5b, 0xb0, 0x2b, 0x0b, 0x5e, 0x80, 0x0d, 0x8f, 0xc1, 0x43, 0x75, 0x85, 0xc6, 0x33, 0x4e, 0x9c, 0xd4, 0x09, 0x8b, 0xc2, 0xce, 0x73, 0x2e, 0xdf, 0xf9, 0xce, 0x55, 0x86, 0x12, 0xc5, 0x63, 0xc2, 0x71, 0x2b, 0xa0, 0x84, 0x13, 0x94, 0x1d, 0xb8, 0xdc, 0xf1, 0xa6, 0x06, 0x78, 0xae, 0xcf, 0xa5, 0xcc, 0x28, 0xb1, 0xa1, 0x43, 0x71, 0x4f, 0xbe, 0xcc, 0x9f, 0x1a, 0x94, 0x9f, 0xf4, 0x7a, 0x56, 0xe8, 0x65, 0xe1, 0x4f, 0x13, 0xcc, 0x38, 0x7a, 0x08, 0x40, 0x71, 0x40, 0x98, 0xcb, 0x09, 0x9d, 0xea, 0x5a, 0x43, 0x6b, 0x16, 0x8f, 0x50, 0x4b, 0x62, 0xb5, 0xac, 0x99, 0xe6, 0x38, 0xf3, 0xed, 0xd7, 0x1d, 0xcd, 0x8a, 0xd9, 0x22, 0x04, 0x19, 0xdf, 0x19, 0x63, 0x3d, 0xd5, 0xd0, 0x9a, 0x05, 0x2b, 0xfc, 0x46, 0x65, 0x48, 0x4f, 0xa8, 0xa7, 0xa7, 0x43, 0x91, 0xf8, 0x44, 0xfb, 0xb0, 0x35, 0x76, 0x29, 0x25, 0xd4, 0xa6, 0xb8, 0x3f, 0x76, 0x02, 0xa6, 0x6f, 0x34, 0xd2, 0xcd, 0x82, 0xb5, 0x29, 0xa5, 0x96, 0x14, 0xbe, 0xcc, 0xe4, 0x33, 0xe5, 0x8d, 0x48, 0xa8, 0x4c, 0xcd, 0x2a, 0x54, 0x62, 0x7c, 0x59, 0x40, 0x7c, 0x86, 0xcd, 0x2e, 0x54, 0x85, 0xe4, 0x02, 0xff, 0xc3, 0x3c, 0xcc, 0x16, 0x6c, 0x2f, 0x06, 0x91, 0xc1, 0xd1, 0x0e, 0x64, 0x29, 0x66, 0x13, 0x8f, 0x87, 0x11, 0xf2, 0x96, 0x7a, 0x99, 0x97, 0x1a, 0x18, 0xcf, 0x31, 0xef, 0x0e, 0x5f, 0xf8, 0x1c, 0x53, 0xdf, 0xf1, 0xfe, 0x16, 0xb9, 0xc7, 0x50, 0x91, 0x5d, 0xb6, 0x63, 0x00, 0xa9, 0x55, 0x00, 0x56, 0x99, 0xaa, 0xb8, 0x91, 0xc4, 0xbc, 0x0f, 0xf5, 0x44, 0x62, 0x7f, 0x48, 0xe8, 0x6b, 0x0a, 0x6a, 0xe7, 0x41, 0xcf, 0xe1, 0xaa, 0x02, 0xa7, 0xaa, 0x5b, 0x37, 0xcd, 0xa7, 0x06, 0x79, 0x8a, 0xfb, 0x76, 0xac, 0xe0, 0x39, 0x8a, 0xfb, 0x67, 0x62, 0x76, 0xee, 0xc1, 0x0e, 0xf1, 0xbd, 0xa9, 0xdd, 0xa1, 0x8e, 0xdf, 0x1d, 0x62, 0x66, 0x8f, 0x1d, 0xde, 0x1d, 0xba, 0xfe, 0x40, 0x4f, 0x37, 0xd2, 0xcd, 0x92, 0xb5, 0x2d, 0xb4, 0xc7, 0x4a, 0x79, 0xaa, 0x74, 0xe8, 0x3f, 0xc8, 0x31, 0x36, 0xb4, 0x47, 0x78, 0xaa, 0x67, 0x42, 0xbc, 0x2c, 0x63, 0xc3, 0x57, 0x78, 0x8a, 0xfe, 0x87, 0xe2, 0xc8, 0x27, 0x9f, 0x7d, 0x7b, 0x48, 0x18, 0x17, 0x53, 0x27, 0x94, 0x10, 0x8a, 0x4e, 0x84, 0x04, 0xb5, 0xa0, 0x3a, 0xc2, 0x38, 0xb0, 0x7b, 0xee, 0x05, 0xa6, 0x03, 0xec, 0x73, 0x31, 0x76, 
0x4c, 0xcf, 0x86, 0x75, 0xa8, 0x08, 0xd5, 0xb3, 0x48, 0x63, 0xe1, 0x3e, 0x33, 0x9f, 0x82, 0x91, 0x54, 0x11, 0x55, 0xc8, 0x7d, 0xd8, 0x5a, 0x02, 0xd2, 0x42, 0xd6, 0x9b, 0xbd, 0x05, 0x10, 0xd1, 0x0e, 0xd7, 0x9f, 0xcd, 0xf4, 0xac, 0x71, 0xaa, 0xb0, 0x61, 0x3b, 0x84, 0x2a, 0x2c, 0x6a, 0xc1, 0x52, 0x2f, 0xf3, 0x01, 0xec, 0x26, 0xbb, 0xcd, 0xdb, 0x88, 0xbf, 0xb8, 0x22, 0x4f, 0xd5, 0x46, 0xf9, 0x32, 0x3d, 0xd0, 0x63, 0x7e, 0x84, 0x08, 0x12, 0x37, 0x6f, 0xe2, 0x9c, 0x65, 0x6a, 0x81, 0xe5, 0x01, 0xd4, 0x12, 0xa2, 0x29, 0x8a, 0x65, 0x48, 0x53, 0xdc, 0x57, 0x79, 0x89, 0x4f, 0xf3, 0x0c, 0xd0, 0x6b, 0x97, 0x71, 0x69, 0xce, 0x6e, 0x4c, 0xcb, 0xfc, 0xa1, 0x41, 0x75, 0x01, 0x50, 0x45, 0x7e, 0x04, 0x39, 0x49, 0x50, 0xf6, 0xa4, 0x78, 0x64, 0x46, 0x70, 0x09, 0xd6, 0x2d, 0xc5, 0x3e, 0x72, 0x31, 0xde, 0x43, 0x56, 0x8a, 0x66, 0x87, 0x42, 0x8b, 0x1d, 0xbc, 0x3a, 0x14, 0xfa, 0x62, 0xbd, 0x6c, 0x71, 0xf6, 0x64, 0x35, 0xf2, 0xa1, 0xe0, 0x9c, 0x7a, 0x62, 0xd8, 0x83, 0x09, 0x93, 0x3a, 0x79, 0x12, 0x73, 0xe2, 0x7d, 0x4e, 0xbd, 0xa3, 0xef, 0x1b, 0xb0, 0x29, 0x61, 0xdf, 0x61, 0x7a, 0xe1, 0x76, 0x31, 0x3a, 0x81, 0xc2, 0xec, 0xd8, 0x21, 0x3d, 0x62, 0xb8, 0x7c, 0xaf, 0x8d, 0x5a, 0x82, 0x46, 0x5d, 0xc6, 0xec, 0xd5, 0x65, 0x33, 0x95, 0xd7, 0x50, 0x1f, 0xaa, 0x09, 0x2b, 0x8f, 0x66, 0x59, 0xaf, 0x3e, 0x54, 0xc6, 0xad, 0xb5, 0x36, 0x4b, 0x71, 0xde, 0x42, 0x29, 0x7e, 0x24, 0x51, 0x7d, 0xde, 0xa5, 0x6b, 0xf7, 0xd9, 0xd8, 0x4d, 0x56, 0x2e, 0x41, 0x62, 0x40, 0xd7, 0x77, 0x0c, 0xed, 0x45, 0xbe, 0x2b, 0x2f, 0x92, 0x61, 0xae, 0x33, 0x59, 0x0c, 0xd2, 0xd4, 0xd0, 0x08, 0xb6, 0x93, 0xd6, 0x09, 0xcd, 0xd3, 0x5f, 0xbd, 0xa3, 0xc6, 0xed, 0xf5, 0x46, 0x2a, 0x58, 0xfe, 0xea, 0xb2, 0x99, 0xc9, 0xa7, 0xca, 0x1a, 0xfa, 0x08, 0x95, 0x6b, 0x5b, 0x81, 0x1a, 0x09, 0x20, 0x0b, 0xeb, 0x69, 0xec, 0xad, 0xb1, 0x58, 0x48, 0x28, 0x85, 0xde, 0x40, 0x31, 0x36, 0xc9, 0xc8, 0x48, 0x1c, 0x6f, 0x89, 0x5a, 0x5f, 0x33, 0xfa, 0x11, 0xde, 0xa1, 0x76, 0x7c, 0xf8, 0x41, 0xd8, 0x79, 0x4e, 0xa7, 0xd5, 0x25, 0xe3, 0xb6, 0xfc, 0x3c, 0x20, 0x74, 0xd0, 0x96, 0xde, 0xed, 0xf0, 0x87, 0xa2, 0x3d, 0x20, 0xea, 0x1d, 0x74, 0x3a, 0xd9, 0x50, 0x74, 0xf7, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x49, 0x23, 0x90, 0x30, 0x95, 0x08, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. const _ = grpc.SupportPackageIsVersion4 // RemoteServiceClient is the client API for RemoteService service. // // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
type RemoteServiceClient interface { AddRemote(ctx context.Context, in *AddRemoteRequest, opts ...grpc.CallOption) (*AddRemoteResponse, error) FetchInternalRemote(ctx context.Context, in *FetchInternalRemoteRequest, opts ...grpc.CallOption) (*FetchInternalRemoteResponse, error) RemoveRemote(ctx context.Context, in *RemoveRemoteRequest, opts ...grpc.CallOption) (*RemoveRemoteResponse, error) UpdateRemoteMirror(ctx context.Context, opts ...grpc.CallOption) (RemoteService_UpdateRemoteMirrorClient, error) FindRemoteRepository(ctx context.Context, in *FindRemoteRepositoryRequest, opts ...grpc.CallOption) (*FindRemoteRepositoryResponse, error) FindRemoteRootRef(ctx context.Context, in *FindRemoteRootRefRequest, opts ...grpc.CallOption) (*FindRemoteRootRefResponse, error) ListRemotes(ctx context.Context, in *ListRemotesRequest, opts ...grpc.CallOption) (RemoteService_ListRemotesClient, error) } type remoteServiceClient struct { cc *grpc.ClientConn } func NewRemoteServiceClient(cc *grpc.ClientConn) RemoteServiceClient { return &remoteServiceClient{cc} } func (c *remoteServiceClient) AddRemote(ctx context.Context, in *AddRemoteRequest, opts ...grpc.CallOption) (*AddRemoteResponse, error) { out := new(AddRemoteResponse) err := c.cc.Invoke(ctx, "/gitaly.RemoteService/AddRemote", in, out, opts...) if err != nil { return nil, err } return out, nil } func (c *remoteServiceClient) FetchInternalRemote(ctx context.Context, in *FetchInternalRemoteRequest, opts ...grpc.CallOption) (*FetchInternalRemoteResponse, error) { out := new(FetchInternalRemoteResponse) err := c.cc.Invoke(ctx, "/gitaly.RemoteService/FetchInternalRemote", in, out, opts...) if err != nil { return nil, err } return out, nil } func (c *remoteServiceClient) RemoveRemote(ctx context.Context, in *RemoveRemoteRequest, opts ...grpc.CallOption) (*RemoveRemoteResponse, error) { out := new(RemoveRemoteResponse) err := c.cc.Invoke(ctx, "/gitaly.RemoteService/RemoveRemote", in, out, opts...) if err != nil { return nil, err } return out, nil } func (c *remoteServiceClient) UpdateRemoteMirror(ctx context.Context, opts ...grpc.CallOption) (RemoteService_UpdateRemoteMirrorClient, error) { stream, err := c.cc.NewStream(ctx, &_RemoteService_serviceDesc.Streams[0], "/gitaly.RemoteService/UpdateRemoteMirror", opts...) if err != nil { return nil, err } x := &remoteServiceUpdateRemoteMirrorClient{stream} return x, nil } type RemoteService_UpdateRemoteMirrorClient interface { Send(*UpdateRemoteMirrorRequest) error CloseAndRecv() (*UpdateRemoteMirrorResponse, error) grpc.ClientStream } type remoteServiceUpdateRemoteMirrorClient struct { grpc.ClientStream } func (x *remoteServiceUpdateRemoteMirrorClient) Send(m *UpdateRemoteMirrorRequest) error { return x.ClientStream.SendMsg(m) } func (x *remoteServiceUpdateRemoteMirrorClient) CloseAndRecv() (*UpdateRemoteMirrorResponse, error) { if err := x.ClientStream.CloseSend(); err != nil { return nil, err } m := new(UpdateRemoteMirrorResponse) if err := x.ClientStream.RecvMsg(m); err != nil { return nil, err } return m, nil } func (c *remoteServiceClient) FindRemoteRepository(ctx context.Context, in *FindRemoteRepositoryRequest, opts ...grpc.CallOption) (*FindRemoteRepositoryResponse, error) { out := new(FindRemoteRepositoryResponse) err := c.cc.Invoke(ctx, "/gitaly.RemoteService/FindRemoteRepository", in, out, opts...) 
if err != nil { return nil, err } return out, nil } func (c *remoteServiceClient) FindRemoteRootRef(ctx context.Context, in *FindRemoteRootRefRequest, opts ...grpc.CallOption) (*FindRemoteRootRefResponse, error) { out := new(FindRemoteRootRefResponse) err := c.cc.Invoke(ctx, "/gitaly.RemoteService/FindRemoteRootRef", in, out, opts...) if err != nil { return nil, err } return out, nil } func (c *remoteServiceClient) ListRemotes(ctx context.Context, in *ListRemotesRequest, opts ...grpc.CallOption) (RemoteService_ListRemotesClient, error) { stream, err := c.cc.NewStream(ctx, &_RemoteService_serviceDesc.Streams[1], "/gitaly.RemoteService/ListRemotes", opts...) if err != nil { return nil, err } x := &remoteServiceListRemotesClient{stream} if err := x.ClientStream.SendMsg(in); err != nil { return nil, err } if err := x.ClientStream.CloseSend(); err != nil { return nil, err } return x, nil } type RemoteService_ListRemotesClient interface { Recv() (*ListRemotesResponse, error) grpc.ClientStream } type remoteServiceListRemotesClient struct { grpc.ClientStream } func (x *remoteServiceListRemotesClient) Recv() (*ListRemotesResponse, error) { m := new(ListRemotesResponse) if err := x.ClientStream.RecvMsg(m); err != nil { return nil, err } return m, nil } // RemoteServiceServer is the server API for RemoteService service. type RemoteServiceServer interface { AddRemote(context.Context, *AddRemoteRequest) (*AddRemoteResponse, error) FetchInternalRemote(context.Context, *FetchInternalRemoteRequest) (*FetchInternalRemoteResponse, error) RemoveRemote(context.Context, *RemoveRemoteRequest) (*RemoveRemoteResponse, error) UpdateRemoteMirror(RemoteService_UpdateRemoteMirrorServer) error FindRemoteRepository(context.Context, *FindRemoteRepositoryRequest) (*FindRemoteRepositoryResponse, error) FindRemoteRootRef(context.Context, *FindRemoteRootRefRequest) (*FindRemoteRootRefResponse, error) ListRemotes(*ListRemotesRequest, RemoteService_ListRemotesServer) error } // UnimplementedRemoteServiceServer can be embedded to have forward compatible implementations. 
type UnimplementedRemoteServiceServer struct { } func (*UnimplementedRemoteServiceServer) AddRemote(ctx context.Context, req *AddRemoteRequest) (*AddRemoteResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method AddRemote not implemented") } func (*UnimplementedRemoteServiceServer) FetchInternalRemote(ctx context.Context, req *FetchInternalRemoteRequest) (*FetchInternalRemoteResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method FetchInternalRemote not implemented") } func (*UnimplementedRemoteServiceServer) RemoveRemote(ctx context.Context, req *RemoveRemoteRequest) (*RemoveRemoteResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method RemoveRemote not implemented") } func (*UnimplementedRemoteServiceServer) UpdateRemoteMirror(srv RemoteService_UpdateRemoteMirrorServer) error { return status.Errorf(codes.Unimplemented, "method UpdateRemoteMirror not implemented") } func (*UnimplementedRemoteServiceServer) FindRemoteRepository(ctx context.Context, req *FindRemoteRepositoryRequest) (*FindRemoteRepositoryResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method FindRemoteRepository not implemented") } func (*UnimplementedRemoteServiceServer) FindRemoteRootRef(ctx context.Context, req *FindRemoteRootRefRequest) (*FindRemoteRootRefResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method FindRemoteRootRef not implemented") } func (*UnimplementedRemoteServiceServer) ListRemotes(req *ListRemotesRequest, srv RemoteService_ListRemotesServer) error { return status.Errorf(codes.Unimplemented, "method ListRemotes not implemented") } func RegisterRemoteServiceServer(s *grpc.Server, srv RemoteServiceServer) { s.RegisterService(&_RemoteService_serviceDesc, srv) } func _RemoteService_AddRemote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(AddRemoteRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(RemoteServiceServer).AddRemote(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/gitaly.RemoteService/AddRemote", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(RemoteServiceServer).AddRemote(ctx, req.(*AddRemoteRequest)) } return interceptor(ctx, in, info, handler) } func _RemoteService_FetchInternalRemote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(FetchInternalRemoteRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(RemoteServiceServer).FetchInternalRemote(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/gitaly.RemoteService/FetchInternalRemote", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(RemoteServiceServer).FetchInternalRemote(ctx, req.(*FetchInternalRemoteRequest)) } return interceptor(ctx, in, info, handler) } func _RemoteService_RemoveRemote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(RemoveRemoteRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(RemoteServiceServer).RemoveRemote(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/gitaly.RemoteService/RemoveRemote", } handler := func(ctx context.Context, req interface{}) (interface{}, 
error) { return srv.(RemoteServiceServer).RemoveRemote(ctx, req.(*RemoveRemoteRequest)) } return interceptor(ctx, in, info, handler) } func _RemoteService_UpdateRemoteMirror_Handler(srv interface{}, stream grpc.ServerStream) error { return srv.(RemoteServiceServer).UpdateRemoteMirror(&remoteServiceUpdateRemoteMirrorServer{stream}) } type RemoteService_UpdateRemoteMirrorServer interface { SendAndClose(*UpdateRemoteMirrorResponse) error Recv() (*UpdateRemoteMirrorRequest, error) grpc.ServerStream } type remoteServiceUpdateRemoteMirrorServer struct { grpc.ServerStream } func (x *remoteServiceUpdateRemoteMirrorServer) SendAndClose(m *UpdateRemoteMirrorResponse) error { return x.ServerStream.SendMsg(m) } func (x *remoteServiceUpdateRemoteMirrorServer) Recv() (*UpdateRemoteMirrorRequest, error) { m := new(UpdateRemoteMirrorRequest) if err := x.ServerStream.RecvMsg(m); err != nil { return nil, err } return m, nil } func _RemoteService_FindRemoteRepository_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(FindRemoteRepositoryRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(RemoteServiceServer).FindRemoteRepository(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/gitaly.RemoteService/FindRemoteRepository", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(RemoteServiceServer).FindRemoteRepository(ctx, req.(*FindRemoteRepositoryRequest)) } return interceptor(ctx, in, info, handler) } func _RemoteService_FindRemoteRootRef_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(FindRemoteRootRefRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(RemoteServiceServer).FindRemoteRootRef(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/gitaly.RemoteService/FindRemoteRootRef", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(RemoteServiceServer).FindRemoteRootRef(ctx, req.(*FindRemoteRootRefRequest)) } return interceptor(ctx, in, info, handler) } func _RemoteService_ListRemotes_Handler(srv interface{}, stream grpc.ServerStream) error { m := new(ListRemotesRequest) if err := stream.RecvMsg(m); err != nil { return err } return srv.(RemoteServiceServer).ListRemotes(m, &remoteServiceListRemotesServer{stream}) } type RemoteService_ListRemotesServer interface { Send(*ListRemotesResponse) error grpc.ServerStream } type remoteServiceListRemotesServer struct { grpc.ServerStream } func (x *remoteServiceListRemotesServer) Send(m *ListRemotesResponse) error { return x.ServerStream.SendMsg(m) } var _RemoteService_serviceDesc = grpc.ServiceDesc{ ServiceName: "gitaly.RemoteService", HandlerType: (*RemoteServiceServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "AddRemote", Handler: _RemoteService_AddRemote_Handler, }, { MethodName: "FetchInternalRemote", Handler: _RemoteService_FetchInternalRemote_Handler, }, { MethodName: "RemoveRemote", Handler: _RemoteService_RemoveRemote_Handler, }, { MethodName: "FindRemoteRepository", Handler: _RemoteService_FindRemoteRepository_Handler, }, { MethodName: "FindRemoteRootRef", Handler: _RemoteService_FindRemoteRootRef_Handler, }, }, Streams: []grpc.StreamDesc{ { StreamName: "UpdateRemoteMirror", Handler: _RemoteService_UpdateRemoteMirror_Handler, ClientStreams: true, }, { 
StreamName: "ListRemotes", Handler: _RemoteService_ListRemotes_Handler, ServerStreams: true, }, }, Metadata: "remote.proto", }
1067511899/tornado-learn
codewars/HumanReadableTime.py
'''
Write a function, which takes a non-negative integer (seconds) as input
and returns the time in a human-readable format (HH:MM:SS)

HH = hours, padded to 2 digits, range: 00 - 99
MM = minutes, padded to 2 digits, range: 00 - 59
SS = seconds, padded to 2 digits, range: 00 - 59

The maximum time never exceeds 359999 (99:59:59)

You can find some examples in the test fixtures.
'''
import time


def make_readable(seconds):
    se = seconds % 60
    mi = seconds // 60 % 60
    hr = seconds // 3600
    return "{:02}:{:02}:{:02}".format(hr, mi, se)


if __name__ == '__main__':
    print(time.strftime('%H:%M:%S', time.gmtime(86399)))
    print(make_readable(359999))
ggreen/khem
khem-joelib/src/main/java/joelib2/math/Matrix3D.java
<gh_stars>0 /////////////////////////////////////////////////////////////////////////////// //Filename: $RCSfile: Matrix3D.java,v $ //Purpose: TODO description. //Language: Java //Compiler: JDK 1.4 //Authors: <NAME> //Version: $Revision: 1.4 $ // $Date: 2005/02/17 16:48:35 $ // $Author: wegner $ // //Copyright JOELIB/JOELib2: Dept. Computer Architecture, University of // Tuebingen, Germany, 2001,2002,2003,2004,2005 //Copyright JOELIB/JOELib2: ALTANA PHARMA AG, Konstanz, Germany, // 2003,2004,2005 // //This program is free software; you can redistribute it and/or modify //it under the terms of the GNU General Public License as published by //the Free Software Foundation version 2 of the License. // //This program is distributed in the hope that it will be useful, //but WITHOUT ANY WARRANTY; without even the implied warranty of //MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the //GNU General Public License for more details. /////////////////////////////////////////////////////////////////////////////// package joelib2.math; import joelib2.util.RandomNumber; /** * * @.author wegner * @.license GPL * @.cvsversion $Revision: 1.4 $, $Date: 2005/02/17 16:48:35 $ */ public interface Matrix3D { //~ Methods //////////////////////////////////////////////////////////////// public void setMatrixFrom(double[][] matrix); double determinant(); /** * Description of the Method * * @param c Description of the Parameter * @return Description of the Return Value */ Matrix3D diving(final double c); /** * Description of the Method * * @param alpha Description of the Parameter * @param beta Description of the Parameter * @param gamma Description of the Parameter * @param a Description of the Parameter * @param b Description of the Parameter * @param c Description of the Parameter */ void fillOrth(double alpha, double beta, double gamma, double a, double b, double c); /** * Description of the Method * * @param i Description of the Parameter * @param j Description of the Parameter * @return Description of the Return Value */ double get(int i, int j); /** * Gets the array attribute of the Matrix3x3 object * * @param m Description of the Parameter */ void getArray(double[] m); /** * Description of the Method * * @return Description of the Return Value */ Matrix3D invert(); /** * Description of the Method * * @param v Description of the Parameter * @param m Description of the Parameter * @return Description of the Return Value */ Vector3D mul(final Vector3D v, final Matrix3D m); /** * Description of the Method * * @param m Description of the Parameter * @param v Description of the Parameter * @return Description of the Return Value */ Vector3D mul(final Matrix3D m, final Vector3D v); /** * Description of the Method * * @param rnd Description of the Parameter */ void randomRotation(RandomNumber rnd); /** * Description of the Method * * @param v Description of the Parameter * @param angle Description of the Parameter */ void rotAboutAxisByAngle(final Vector3D v, final double angle); /** * Description of the Method * * @param c Description of the Parameter * @param noatoms Description of the Parameter */ void rotateCoords(double[] c, int noatoms); /** * Description of the Method * * @param i Description of the Parameter * @param j Description of the Parameter * @param v Description of the Parameter */ void set(int i, int j, double v); /** * Description of the Method * * @param phi Description of the Parameter * @param theta Description of the Parameter * @param psi Description of the Parameter */ void 
setupRotMat(double phi, double theta, double psi); /** * Description of the Method * * @return Description of the Return Value */ String toString(); /** * Description of the Method * * @param m Description of the Parameter * @return Description of the Return Value */ String toString(Matrix3D m); } /////////////////////////////////////////////////////////////////////////////// //END OF FILE. ///////////////////////////////////////////////////////////////////////////////
Eruimdas/eruimdas.github.io
vendor/bundle/ruby/2.7.0/gems/tzinfo-data-1.2021.1/lib/tzinfo/data/definitions/Antarctica/Casey.rb
# encoding: UTF-8

# This file contains data derived from the IANA Time Zone Database
# (https://www.iana.org/time-zones).

module TZInfo
  module Data
    module Definitions
      module Antarctica
        module Casey
          include TimezoneDefinition

          timezone 'Antarctica/Casey' do |tz|
            tz.offset :o0, 0, 0, :'-00'
            tz.offset :o1, 28800, 0, :'+08'
            tz.offset :o2, 39600, 0, :'+11'

            tz.transition 1969, 1, :o1, -31536000, 4880445, 2
            tz.transition 2009, 10, :o2, 1255802400
            tz.transition 2010, 3, :o1, 1267714800
            tz.transition 2011, 10, :o2, 1319738400
            tz.transition 2012, 2, :o1, 1329843600
            tz.transition 2016, 10, :o2, 1477065600
            tz.transition 2018, 3, :o1, 1520701200
            tz.transition 2018, 10, :o2, 1538856000
            tz.transition 2019, 3, :o1, 1552752000
            tz.transition 2019, 10, :o2, 1570129200
            tz.transition 2020, 3, :o1, 1583596800
            tz.transition 2020, 10, :o2, 1601740860
          end
        end
      end
    end
  end
end
RipDevil/mirador
src/state/selectors/config.js
import { createSelector } from 'reselect';
import deepmerge from 'deepmerge';
import { miradorSlice } from './utils';
import { getWorkspace } from './getters';

/** */
export function getConfig(state) {
  const slice = miradorSlice(state || {});
  return slice.config || {};
}

/**
 * Extract an exportable version of state using the configuration from the config.
 */
export function getExportableState(state) {
  const exportConfig = getConfig(state).export;

  return Object.entries(exportConfig).reduce(
    (acc, [stem, value]) => {
      if (value === true) {
        acc[stem] = state[stem];
      } else if (value.filter) {
        acc[stem] = Object.entries(state[stem])
          .filter(value.filter)
          .reduce(
            (stemAcc, [k, v]) => {
              stemAcc[k] = v; // eslint-disable-line no-param-reassign
              return stemAcc;
            },
            {},
          );
      }

      return acc;
    },
    {},
  );
}

/**
 * Return languages from config (in state) and indicate which is currently set
 * @param {object} state
 * @return {Array} [ {locale: 'de', label: 'Deutsch', current: true}, ... ]
 */
export const getLanguagesFromConfigWithCurrent = createSelector(
  [getConfig],
  ({ availableLanguages, language }) => Object.keys(availableLanguages).map(key => ({
    current: key === language,
    label: availableLanguages[key],
    locale: key,
  })),
);

export const getShowZoomControlsConfig = createSelector(
  [
    getWorkspace,
    getConfig,
  ],
  (workspace, config) => (
    workspace.showZoomControls === undefined
      ? (config.workspace.showZoomControls)
      : workspace.showZoomControls
  ),
);

export const getTheme = createSelector(
  [getConfig],
  ({ theme, themes, selectedTheme }) => deepmerge(theme, themes[selectedTheme] || {}),
);

export const getThemeIds = createSelector(
  [getConfig],
  ({ themes }) => Object.keys(themes),
);

export const getContainerId = createSelector(
  [getConfig],
  ({ id }) => id,
);

export const getThemeDirection = createSelector(
  [getConfig],
  ({ theme }) => theme.direction || 'ltr',
);

export const getRequestsConfig = createSelector(
  [getConfig],
  ({ requests }) => requests || {},
);
ShimmyShaman/midge
src/modules/ui_elements/stack_container.h
// /* stack_container.h */ // #ifndef stack_container_H // #define stack_container_H // #include "control/mc_controller.h" // #include "core/core_definitions.h" // #include "mc_str.h" // #include "render/render_common.h" // typedef enum mcu_stack_orientation { // MCU_STACK_ORIENTATION_NULL = 0, // MCU_STACK_ORIENTATION_VERTICAL, // MCU_STACK_ORIENTATION_HORIZONTAL, // } mcu_stack_orientation; // typedef struct mcu_stack_container { // mc_node *node; // void *tag; // mcu_stack_orientation orientation; // struct { // float width, height; // } _children_extents; // render_color background_color; // } mcu_stack_container; // int mcu_init_stack_container(mc_node *parent, mcu_stack_container **p_button); // #endif // stack_container_H
souslesens/souslesensGraph
public/toutlesens/js/customizeUI.js
<gh_stars>1-10 /******************************************************************************* * SOUSLESENS LICENSE************************ * * The MIT License (MIT) * * Copyright (c) 2016-2017 <NAME> <EMAIL> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * ******************************************************************************/ var customizeUI = (function () { self = {} self.hideFilters = false; var idsList; // var legendDivWidth=Gparams.rightPanelWidth; self.customInfo = function (obj) { if (queryParams.sinequaCallbackUrl && obj.labelNeo == "norme") { var str = ""; if (obj.neoAttrs && obj.neoAttrs.id_doc) { // str = "<a href='" + queryParams.sinequaCallbackUrl + "?~~ID~~=" + obj.neoAttrs.id_doc + "target='_parent'>search in Sinequa</a>"; str = '<a href="' + //decodeURIComponent(queryParams.sinequaCallbackUrl).replace('~~ID~~',obj.neoAttrs.id_doc)+ (queryParams.sinequaCallbackUrl).replace('~~ID~~', obj.neoAttrs.id_doc) + '" target="_parent">Show in Sinequa</a>'; } return str; } if (queryParams.entityFilterUrl && obj.labelNeo == "ref") { var str = ""; if (obj.neoAttrs && obj.neoAttrs.ref) { // str = "<a href='" + queryParams.sinequaCallbackUrl + "?~~ID~~=" + obj.neoAttrs.id_doc + "target='_parent'>search in Sinequa</a>"; str = '<a href="' + // decodeURIComponent(queryParams.entityFilterUrl).replace('~~ID~~',obj.neoAttrs.ref)+ (queryParams.entityFilterUrl).replace('~~Ref~~', obj.neoAttrs.ref) + '" target="_parent">Search in Sinequa</a>'; } return str; } return ""; } self.init = function () { // if (queryParams.sinequaCallbackUrl) customizeUI.customizationName = "Sinequa"; } self.customize = function () { if (queryParams.ui == "basic") { } var initialQuery = queryParams.initialQuery; if (initialQuery && initialQuery.length > 0) { var idsList = initialQuery.split(","); Gparams.startWithBulkGraphView = false; currentDisplayType = "SIMPLE_FORCE_GRAPH"; toutlesensController.setResponsiveDimensions(0); searchNodes.getWhereClauseFromArray("id", idsList, function (err, result) { toutlesensController.generateGraph(null, {applyFilters: true}, function () { $("#filtersDiv").html(""); $("#graphMessage").html(""); }); }) } else { toutlesensController.setResponsiveDimensions(rightPanelWidth); if (Gparams.startWithBulkGraphView) searchNodes.showBulkGraph(subGraph); } } self.addPlugins = function (plugins, callback) { plugins.forEach(function (plugin) { $("#pluginLi").html(plugin) $("#pluginDiv").load("plugins/" + plugin + "/index.html",function(err,result){ if (callback) callback(); }) }); } /* 
$("#findTabsUl").append(" <li><a href='#" + plugin + "Div'><span id='lang_60'>" + plugin + "</span></a></li>") $("#findTabs").append("<div id='" + plugin + "Div'></div>").ready(function () { $("#" + plugin + "Div").load("plugins/" + plugin + "/index.html") if (callback) callback(); }); })*/ return self; })()
Nmane1612/Nihar-Mane
Python Files/critter3.py
class Critter(object):
    def __init__(self, name):
        self.name = name

    def talk(self):
        print("Hi. I'm", self.name, "\n")


crit1 = Critter("abcd")
print(crit1.name)
crit1.talk()
laplace22/narou
lib/web/helper4web.rb
# -*- coding: utf-8 -*-
#
# Copyright 2013 whiteleaf. All rights reserved.
#

require_relative "../helper"

module Helper
  module_function

  def print_horizontal_rule
    print "<hr>"
  end
end
dacap/loseface
src/captu/linux/spcadecoder.h
#ifndef SPCADECODER_H #define SPCADECODER_H #include "spca5xx.h" #ifdef __cplusplus extern "C" { #endif #define ISHIFT 11 #define IFIX(a) ((long)((a) * (1 << ISHIFT) + .5)) #define IMULT(a, b) (((a) * (b)) >> ISHIFT) #define ITOINT(a) ((a) >> ISHIFT) /* special markers */ #define M_BADHUFF -1 struct in { unsigned char *p; unsigned int bits; int left; int marker; }; /*********************************/ struct dec_hufftbl; struct enc_hufftbl; union hufftblp { struct dec_hufftbl *dhuff; struct enc_hufftbl *ehuff; }; struct scan { int dc; /* old dc value */ union hufftblp hudc; union hufftblp huac; int next; /* when to switch to next scan */ int cid; /* component id */ int hv; /* horiz/vert, copied from comp */ int tq; /* quant tbl, copied from comp */ }; /*********************************/ #define DECBITS 10 /* seems to be the optimum */ struct dec_hufftbl { int maxcode[17]; int valptr[16]; unsigned char vals[256]; unsigned int llvals[1 << DECBITS]; }; /*********************************/ /*********************************/ /*********************************/ struct comp { int cid; int hv; int tq; }; #define MAXCOMP 4 #define ERR_NO_SOI 1 #define ERR_NOT_8BIT 2 #define ERR_HEIGHT_MISMATCH 3 #define ERR_WIDTH_MISMATCH 4 #define ERR_BAD_WIDTH_OR_HEIGHT 5 #define ERR_TOO_MANY_COMPPS 6 #define ERR_ILLEGAL_HV 7 #define ERR_QUANT_TABLE_SELECTOR 8 #define ERR_NOT_YCBCR_221111 9 #define ERR_UNKNOWN_CID_IN_SCAN 10 #define ERR_NOT_SEQUENTIAL_DCT 11 #define ERR_WRONG_MARKER 12 #define ERR_NO_EOI 13 #define ERR_BAD_TABLES 14 #define ERR_DEPTH_MISMATCH 15 int spca50x_outpicture (struct spca5xx_frame *myframe); void init_jpeg_decoder(void); void create_jpeg_from_data (unsigned char* dst, unsigned char * src, int qIndex, int w, int h, unsigned char format, int o_size, int *size, int omit_huffman_table); #ifdef __cplusplus } #endif #endif /* SPCADECODER_H */
berendkleinhaneveld/Registrationshop
ui/widgets/transferfunction/TransferFunctionWidget.py
""" TransferFunctionWidget :Authors: <NAME> """ from ui.widgets.histogram import Histogram from ui.widgets.histogram import HistogramWidget from ui.widgets.transferfunction import TransferFunctionNodeItem from ui.widgets.transferfunction import TransferFunctionItem from ui.widgets.ColorWidget import ColorButton from core.data.DataAnalyzer import DataAnalyzer from PySide.QtGui import QWidget from PySide.QtGui import QGridLayout from PySide.QtGui import QGraphicsLineItem from PySide.QtGui import QPen from PySide.QtGui import QLabel from PySide.QtGui import QLineEdit from PySide.QtGui import QPushButton from PySide.QtGui import QColorDialog from PySide.QtGui import QColor from PySide.QtCore import QLineF from PySide.QtCore import Signal from PySide.QtCore import Slot from ui.widgets import Style class TransferFunctionWidget(QWidget): """ TransferFunctionWidget """ valueChanged = Signal(object) def __init__(self): super(TransferFunctionWidget, self).__init__() self.nodes = [] self.lines = [] self.histogram = Histogram() self.histogram.enabled = False # Create a histogram widget for the background of the transfer function editor self.histogramWidget = HistogramWidget() self.histogramWidget.setHistogram(self.histogram) self.histogramWidget.setAxeMode(bottom=HistogramWidget.AxeClear, left=HistogramWidget.AxeLog) self.histogramWidget.update() self.histogramWidget._histogramItem.delegate = self Style.styleWidgetForTab(self.histogramWidget) # Invisible item that catches mouse events on top of the histogram self.transferfunctionItem = TransferFunctionItem() self.transferfunctionItem.setZValue(250) self.transferfunctionItem.delegate = self self.histogramWidget.addItem(self.transferfunctionItem) # Create a widget for editing the selected node of the transfer function self.nodeItemWidget = NodeItemWidget() self.nodeItemWidget.setEnabled(False) self.nodeItemWidget.nodeUpdated.connect(self.updateNode) self.nodeItemWidget.removePoint.connect(self.removePoint) layout = QGridLayout() layout.setSpacing(0) layout.setContentsMargins(0, 0, 0, 0) layout.addWidget(self.histogramWidget, 0, 0) layout.addWidget(self.nodeItemWidget, 1, 0) self.setLayout(layout) def setImageData(self, imageData): # Clear any previous nodes for node in self.nodes: self.histogramWidget.scene().removeItem(node) self.nodes = [] # Clear any previous lines for line in self.lines: self.histogramWidget.scene().removeItem(line) self.lines = [] bins = DataAnalyzer.histogramForData(imageData, 256) self.histogram.bins = bins self.histogram.enabled = True self.range = imageData.GetScalarRange() # Create and add nodes from the transfer function self.updateNodes() self.valueChanged.emit(self) def resizeEvent(self, event): self.histogramWidget.update() self.updateNodes() def updateNodes(self): for index in range(len(self.transferFunction.points)): point = self.transferFunction.points[index] if index < len(self.nodes): # Just update the node item nodeItem = self.nodes[index] else: # Create a new node item nodeItem = TransferFunctionNodeItem() nodeItem.setSceneBoundsItem(self.histogramWidget._histogramItem) nodeItem.setZValue(300) nodeItem.delegate = self self.histogramWidget.scene().addItem(nodeItem) self.nodes.append(nodeItem) nodeItem.setPosition([(point.value - self.range[0]) / (self.range[1] - self.range[0]), point.opacity]) nodeItem.updateColor(point.color) nodeItem.node = point if nodeItem.isSelected(): self.selectedNode(nodeItem) # Clean up redundant node items if len(self.nodes) > len(self.transferFunction.points): # Remove node items from 
scene for index in range(len(self.transferFunction.points), len(self.nodes)): nodeItem = self.nodes[index] self.histogramWidget.scene().removeItem(nodeItem) # Remove them from the nodes del self.nodes[len(self.transferFunction.points):] assert len(self.nodes) == len(self.transferFunction.points) self.updateLines() def updateLines(self): pen = QPen(QColor.fromHsl(0, 100, 100)) sortedNodes = sorted(self.nodes, key=lambda x: x.pos().x()) for index in range(len(self.nodes)-1): node = sortedNodes[index] nextNode = sortedNodes[index+1] if index < len(self.lines): # Just update the line segment lineItem = self.lines[index] else: # Create a new line segment lineItem = QGraphicsLineItem() lineItem.setZValue(250) lineItem.setPen(pen) self.histogramWidget.scene().addItem(lineItem) self.lines.append(lineItem) line = QLineF(node.pos(), nextNode.pos()) lineItem.setLine(line) # Clean up redundent lines if len(self.lines) >= len(self.nodes): # Remove the redundant line segments from the scene for index in range(len(self.nodes)-1, len(self.lines)): lineItem = self.lines[index] self.histogramWidget.scene().removeItem(lineItem) # Delete the line segments from the list del self.lines[len(self.nodes)-1:] assert len(self.lines) == len(self.nodes) - 1 self.histogramWidget._scene.update() def nodeUpdated(self, node): position = node.getPosition() index = self._indexForNode(node) self.transferFunction.updatePointAtIndex(index, position) self.nodeItemWidget.setNode(self.transferFunction.points[index]) self.updateNodes() self.valueChanged.emit(self) @Slot(object) def updateNode(self, point): index = self._indexForPoint(point) nodeItem = self.nodes[index] nodeItem.updateColor(point.color) self.transferFunction.updateTransferFunction() self.valueChanged.emit(self) def selectedNode(self, nodeItem): self.nodeItemWidget.setNode(nodeItem.node) def addNodeAtCoord(self, coord): self.transferFunction.addPointAtCoord(coord, [1, 1, 1]) self.updateNodes() self.valueChanged.emit(self) def removePoint(self, point): index = self._indexForPoint(point) self.transferFunction.removePointAtIndex(index) self.updateNodes() def unselect(self): for node in self.nodes: if node.isSelected(): node.setSelected(False) self.nodeItemWidget.setNode(None) self.updateNodes() def _indexForPoint(self, point): for index in range(len(self.transferFunction.points)): if point == self.transferFunction.points[index]: return index def _indexForNode(self, node): for index in range(len(self.nodes)): if node == self.nodes[index]: return index class NodeItemWidget(QWidget): nodeUpdated = Signal(object) removePoint = Signal(object) def __init__(self): super(NodeItemWidget, self).__init__() self.node = None self.valueEdit = QLineEdit() self.opacityEdit = QLineEdit() self.colorButton = ColorButton() self.colorButton.setMaximumWidth(100) self.deleteButton = QPushButton("x") layout = QGridLayout() layout.addWidget(QLabel("Value:"), 0, 0) layout.addWidget(QLabel("Opacity:"), 0, 1) layout.addWidget(QLabel("Color:"), 0, 2) layout.addWidget(self.valueEdit, 1, 0) layout.addWidget(self.opacityEdit, 1, 1) layout.addWidget(self.colorButton, 1, 2) layout.addWidget(self.deleteButton, 1, 3) self.colorButton.clicked.connect(self.showColorDialog) self.deleteButton.clicked.connect(self.deleteNode) self.setLayout(layout) def setNode(self, node): self.node = node if not node: self.setEnabled(False) self.valueEdit.setText(" ") self.opacityEdit.setText(" ") self.colorButton.setColor([0.8, 0.8, 0.8]) return self.setEnabled(True) self.valueEdit.setText("%.1f" % self.node.value) 
self.opacityEdit.setText("%.3f" % self.node.opacity) self.colorButton.setColor(self.node.color) def deleteNode(self): self.removePoint.emit(self.node) def showColorDialog(self): color = QColorDialog.getColor() if not color.isValid(): return rgba = list(color.getRgbF()) self.node.color = [rgba[0], rgba[1], rgba[2]] self.colorButton.setColor(self.node.color) self.nodeUpdated.emit(self.node)
chenjianping99/CradCool
src/com/jiubang/goscreenlock/theme/cjpcardcool/switcher/bean/ViewResponAreaInfo.java
<reponame>chenjianping99/CradCool<filename>src/com/jiubang/goscreenlock/theme/cjpcardcool/switcher/bean/ViewResponAreaInfo.java package com.jiubang.goscreenlock.theme.cjpcardcool.switcher.bean; /** * 响应区域 * * @author hezhiyi * @date [2013-5-20] */ public class ViewResponAreaInfo { public static final int VIEW_TYPE_OF_WIDGETLAYOUT = 0; private int mViewType = VIEW_TYPE_OF_WIDGETLAYOUT; private float mLeftAreaX = 0; private float mLeftAreaY = 0; private float mLeftAreaWidth = 0; private float mLeftAreaHeight = 0; private float mRightAreaX = 0; private float mRightAreaY = 0; private float mRightAreaWidth = 0; private float mRightAreaHeight = 0; private float mTopAreaX = 0; private float mTopAreaY = 0; private float mTopAreaWidth = 0; private float mTopAreaHeight = 0; private float mButtomAreaX = 0; private float mButtomAreaY = 0; private float mButtomAreaWidth = 0; private float mButtomAreaHeight = 0; public void setViewType(int type) { mViewType = type; } public int getViewType() { return mViewType; } public void setLeftAreaX(float x) { mLeftAreaX = x; } public float getLeftAreaX() { return mLeftAreaX; } public void setLeftAreaY(float y) { mLeftAreaY = y; } public float getLeftAreaY() { return mLeftAreaY; } public void setLeftAreaWidth(float width) { mLeftAreaWidth = width; } public float getLeftAreaWidth() { return mLeftAreaWidth; } public void setLeftAreaHeight(float height) { mLeftAreaHeight = height; } public float getLeftAreaHeight() { return mLeftAreaHeight; } // public void setRightAreaX(float x) { mRightAreaX = x; } public float getRightAreaX() { return mRightAreaX; } public void setRightAreaY(float y) { mRightAreaY = y; } public float getRightAreaY() { return mRightAreaY; } public void setRightAreaWidth(float width) { mRightAreaWidth = width; } public float getRightAreaWidth() { return mRightAreaWidth; } public void setRightAreaHeight(float height) { mRightAreaHeight = height; } public float getRightAreaHeight() { return mRightAreaHeight; } // public void setTopAreaX(float x) { mTopAreaX = x; } public float getTopAreaX() { return mTopAreaX; } public void setTopAreaY(float y) { mTopAreaY = y; } public float getTopAreaY() { return mTopAreaY; } public void setTopAreaWidth(float width) { mTopAreaWidth = width; } public float getTopAreaWidth() { return mTopAreaWidth; } public void setTopAreaHeight(float height) { mTopAreaHeight = height; } public float getTopAreaHeight() { return mTopAreaHeight; } // public void setButtomAreaX(float x) { mButtomAreaX = x; } public float getButtomAreaX() { return mButtomAreaX; } public void setButtomAreaY(float y) { mButtomAreaY = y; } public float getButtomAreaY() { return mButtomAreaY; } public void setButtomAreaWidth(float width) { mButtomAreaWidth = width; } public float getButtomAreaWidth() { return mButtomAreaWidth; } public void setButtomAreaHeight(float height) { mButtomAreaHeight = height; } public float getButtomAreaHeight() { return mButtomAreaHeight; } private boolean compare(ViewResponAreaInfo info1, ViewResponAreaInfo info2) { if (info1.getButtomAreaHeight() == info2.getButtomAreaHeight() && info1.getButtomAreaWidth() == info2.getButtomAreaWidth() && info1.getButtomAreaX() == info2.getButtomAreaX() && info1.getButtomAreaY() == info2.getButtomAreaY() && info1.getLeftAreaHeight() == info2.getLeftAreaHeight() && info1.getLeftAreaWidth() == info2.getLeftAreaWidth() && info1.getLeftAreaX() == info2.getLeftAreaX() && info1.getLeftAreaY() == info2.getLeftAreaY() && info1.getRightAreaHeight() == info2.getRightAreaHeight() && 
info1.getRightAreaWidth() == info2.getRightAreaWidth() && info1.getRightAreaX() == info2.getRightAreaX() && info1.getRightAreaY() == info2.getRightAreaY() && info1.getTopAreaHeight() == info2.getTopAreaHeight() && info1.getTopAreaWidth() == info2.getTopAreaWidth() && info1.getTopAreaX() == info2.getTopAreaX() && info1.getTopAreaY() == info2.getTopAreaY() && info1.getViewType() == info2.getViewType()) { return true; } return false; } }
smagill/opensphere-desktop
open-sphere-base/core/src/main/java/io/opensphere/core/cache/accessor/TimeSpanAccessor.java
package io.opensphere.core.cache.accessor;

import io.opensphere.core.cache.matcher.IntervalPropertyMatcher;
import io.opensphere.core.cache.matcher.TimeSpanMatcher;
import io.opensphere.core.cache.util.PropertyDescriptor;
import io.opensphere.core.model.time.TimeSpan;

/**
 * Accessor for time span property values.
 *
 * @param <S> The type of object that provides the property values.
 */
public abstract class TimeSpanAccessor<S> extends AbstractIntervalPropertyAccessor<S, TimeSpan>
        implements PersistentPropertyAccessor<S, TimeSpan>
{
    /** The property descriptor. */
    public static final PropertyDescriptor<TimeSpan> PROPERTY_DESCRIPTOR;

    /** The standard name of the time span property. */
    public static final String TIME_PROPERTY_NAME = "time";

    static
    {
        PROPERTY_DESCRIPTOR = new PropertyDescriptor<>(TIME_PROPERTY_NAME, TimeSpan.class);
    }

    /**
     * Construct the time span accessor.
     *
     * @param extent A time span that comprises all of the spans provided by
     *            this accessor.
     */
    public TimeSpanAccessor(TimeSpan extent)
    {
        super(extent);
    }

    @Override
    public IntervalPropertyMatcher<?> createMatcher()
    {
        return new TimeSpanMatcher(TIME_PROPERTY_NAME, getExtent());
    }

    @Override
    public PropertyDescriptor<TimeSpan> getPropertyDescriptor()
    {
        return PROPERTY_DESCRIPTOR;
    }

    @Override
    public String toString()
    {
        return new StringBuilder(128).append(TimeSpanAccessor.class.getSimpleName()).append('[').append(getPropertyDescriptor())
                .append(']').toString();
    }
}
Scandinaf/TicTacToe
src/main/scala/com/tictactoe/service/session/exception/SessionServiceException.scala
package com.tictactoe.service.session.exception

import com.tictactoe.exception.AppException
import cats.syntax.show._
import com.tictactoe.exception.AppException.{ErrorCode, ParameterKey, PrettyMessage}
import com.tictactoe.model.Session.SessionId

sealed trait SessionServiceException extends AppException

object SessionServiceException {

  final case class SessionAlreadyExistsException(id: SessionId) extends SessionServiceException {

    override val prettyMessage: PrettyMessage =
      PrettyMessage(show"A session with this identifier already exists. $id")

    override def parameters: Map[ParameterKey, String] = Map(ParameterKey.SessionId -> id.value)

    override def errorCode: ErrorCode = ErrorCode.internalError
  }
}
VaibhawPandey/interview
src/com/competitiveCodes/hackerrank/basicPractice/ACMICPCTeam.java
package com.competitiveCodes.hackerrank.basicPractice; import java.util.Scanner; public class ACMICPCTeam { //THis solution is correct logically.. private static void solve() { String[] topic = {"10101", "11100", "11010", "00101"}; Scanner scanner = new Scanner(System.in); scanner.nextBigInteger(2); int[] a = new int[topic.length]; for (int i = 0; i < topic.length; i++) { int decimal = Integer.parseInt(topic[i], 2); a[i] = decimal; } int maxBitCount, maxScoreCount; maxBitCount = maxScoreCount = 0; for (int i = 0; i < a.length; i++) { for (int j = i + 1; j < a.length; j++) { int k = a[i] | a[j]; int bitCount = 0; while (k != 0) { k &= (k - 1); bitCount++; } if(bitCount > maxBitCount) { maxBitCount = bitCount; maxScoreCount = 1; } else if(bitCount == maxBitCount) { maxScoreCount++; } } } System.out.println(maxBitCount); System.out.println(maxScoreCount); } /* //TO pass all test case , input should be of type bigInteger public static void main(String[] args) { Scanner scanner = new Scanner(System.in); int numOfPeople = scanner.nextInt(); int numOfTopics = scanner.nextInt(); BigInteger[] familiarity = new BigInteger[numOfPeople]; int maxBitCount = 0; int maxScoreCount = 0; BigInteger score; for(int i=0; i<numOfPeople; i++) { familiarity[i] = scanner.nextBigInteger(2); } for(int i=0; i<familiarity.length-1; i++) { for(int j=i+1; j<=familiarity.length-1; j++) { score = familiarity[i].or(familiarity[j]); int bitCount = score.bitCount(); if(bitCount > maxBitCount) { maxBitCount = bitCount; maxScoreCount = 1; } else if(bitCount == maxBitCount) { maxScoreCount++; } } } System.out.println(maxBitCount); System.out.println(maxScoreCount); } */ public static void main( String[] args ) { solve(); } }
jasond1016/learnAlgorithm
java/leetcode/p0015_3sum/Solution.java
package leetcode.p0015_3sum;

import java.util.*;

public class Solution {
    public static void main(String[] args) {
        Solution solution = new Solution();
        int[] nums = new int[]{-1, 0, 1, 2, -1, -4}; // -4,-1,-1,0,1,2
        Arrays.sort(nums);
        System.out.println(solution.threeSum(nums));
    }

    public List<List<Integer>> threeSum(int[] nums) {
        List<List<Integer>> result = new ArrayList<>();
        Arrays.sort(nums);
        for (int i = 0; i < nums.length; i++) {
            List<List<Integer>> twoSumResult = twoSum(nums, i + 1, -nums[i]);
            for (List<Integer> integers : twoSumResult) {
                integers.add(nums[i]);
            }
            result.addAll(twoSumResult);
            while (i < nums.length - 1 && nums[i] == nums[i + 1]) {
                i++;
            }
        }
        return result;
    }

    private List<List<Integer>> twoSum(int[] nums, int start, int target) {
        int lo = start;
        int hi = nums.length - 1;
        List<List<Integer>> result = new ArrayList<>();
        while (lo < hi) {
            int left = nums[lo];
            int right = nums[hi];
            int sum = left + right;
            if (sum < target) {
                while (lo < hi && nums[lo] == left) {
                    lo++;
                }
            } else if (sum > target) {
                while (lo < hi && nums[hi] == right) {
                    hi--;
                }
            } else {
                List<Integer> lst = new ArrayList<>();
                lst.add(left);
                lst.add(right);
                result.add(lst);
                while (lo < hi && nums[lo] == left) {
                    lo++;
                }
                while (lo < hi && nums[hi] == right) {
                    hi--;
                }
            }
        }
        return result;
    }
}
KarolisCibulskis/ns4kafka
cli/src/main/java/com/michelin/ns4kafka/cli/client/UsernameAndPasswordRequest.java
package com.michelin.ns4kafka.cli.client;

import io.micronaut.core.annotation.Introspected;
import lombok.Builder;
import lombok.Getter;
import lombok.Setter;

@Introspected
@Getter
@Setter
@Builder
public class UsernameAndPasswordRequest {
    private String username;
    private String password;
}
andre15silva/styler
python/experiments/projects/couchbase-couchbase-jvm-core/real_error_dataset/1/13/FlushTest.java
/** * Copyright (C) 2014 Couchbase, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALING * IN THE SOFTWARE. */ package com.couchbase.client.core.cluster; import com.couchbase.client.core.message.ResponseStatus; import com.couchbase.client.core.message.kv.GetRequest; import com.couchbase.client.core.message.kv.GetResponse; import com.couchbase.client.core.message.kv.UpsertRequest; import com.couchbase.client.core.message.kv.UpsertResponse; import com.couchbase.client.core.message.config.FlushRequest; import com.couchbase.client.core.message.config.FlushResponse; import com.couchbase.client.core.util.ClusterDependentTest; import io.netty.buffer.Unpooled; import io.netty.util.CharsetUtil; import org.junit.Test; import rx.Observable; import rx.functions.Func1; import java.util.Arrays; import java.util.List; import static org.junit.Assert.assertEquals; /** * Verifies the functionality of Flush in various scenarios. * * @author <NAME> * @since 1.0 */ public class FlushTest extends ClusterDependentTest { @Test public void shouldFlush() { List<String> keys = Arrays.asList("key1", "key2", "key3"); Observable.from(keys).flatMap(new Func1<String, Observable<UpsertResponse>>() { @Override public Observable<UpsertResponse> call(String key) { return cluster().send(new UpsertRequest(key, Unpooled.copiedBuffer("Content", CharsetUtil.UTF_8), bucket())); } }).toBlocking().last(); Observable<FlushResponse> response = cluster().send(new FlushRequest(bucket(), password())); assertEquals(ResponseStatus.SUCCESS, response.toBlocking().first().status()); List<GetResponse> responses = Observable .from(keys) .flatMap(new Func1<String, Observable<GetResponse>>() { @Override public Observable<GetResponse> call(String key) { return cluster().send(new GetRequest(key, bucket())); } }).toList().toBlocking().single(); assertEquals(keys.size(), responses.size()); for (GetResponse get : responses) { assertEquals(ResponseStatus.NOT_EXISTS, get.status()); } } }
pinkeraw/keter-sql
keter-parser/src/main/java/dev/keter/tree/NaturalJoin.java
package dev.keter.tree; import com.google.common.collect.ImmutableList; import java.util.List; import static com.google.common.base.MoreObjects.toStringHelper; public class NaturalJoin extends JoinCriteria { @Override public boolean equals(Object obj) { if (this == obj) { return true; } return (obj != null) && (getClass() == obj.getClass()); } @Override public int hashCode() { return 0; } @Override public String toString() { return toStringHelper(this).toString(); } @Override public List<Node> getNodes() { return ImmutableList.of(); } }
devCharles/kodemia-api
src/routes/active-campaign/index.js
const Router = require('koa-router') const contacts = require('./contacts') const apply = require('./apply') const companies = require('./companies') const router = new Router({ prefix: '/active-campaign' }) router.use(contacts.routes(), contacts.allowedMethods()) router.use(apply.routes(), apply.allowedMethods()) router.use(companies.routes(), companies.allowedMethods()) module.exports = router
yousefiparsa/filestack-python
filestack/trafarets.py
import trafaret as t CONTENT_DOWNLOAD_SCHEMA = t.Dict({ 'dl': t.Bool(), 'cache': t.Bool() }) CONTENT_DOWNLOAD_SCHEMA.make_optional('*') OVERWRITE_SCHEMA = t.Dict({ 'url': t.String(), 'base64decode': t.Bool() }) OVERWRITE_SCHEMA.make_optional('*') STORE_LOCATION_SCHEMA = t.Enum('S3', 'gcs', 'azure', 'rackspace', 'dropbox') STORE_SCHEMA = t.Dict({ 'filename': t.String(), 'mimetype': t.String(), 'path': t.String(), 'container': t.String(), 'access': t.String(), 'base64decode': t.Bool() }) STORE_SCHEMA.make_optional('*')
nickm01/iNaturalistReactNative
src/providers/contexts.js
// @flow import { createContext } from "react"; const ExploreContext: Object = createContext<Function>( ); const ObsEditContext: Object = createContext<Function>( ); const ObservationContext: Object = createContext<Function>( ); const PhotoGalleryContext: Object = createContext<Function>( ); export { ExploreContext, ObsEditContext, ObservationContext, PhotoGalleryContext };
jibrel/flux
src/model/fluxExtensions/CancerReasonReferenceFix.js
import CancerReasonReference from '../onco/core/CancerReasonReference'; import { FHIRHelper, uuid } from '../json-helper'; /** * This fix class replaces the original onco.core.CancerReasonReference.fromFHIR function * with a version that sets the value correctly as a reference instead of a full object. */ export default class CancerReasonReferenceFix extends CancerReasonReference { static fromFHIR(fhir, fhirType, shrId=uuid(), allEntries=[], mappedResources={}, referencesOut=[], asExtension=false) { if (asExtension && fhir['valueReference']) { const inst = new CancerReasonReferenceFix(); const entryId = fhir['valueReference']['reference']; if (!mappedResources[entryId]) { const referencedEntry = allEntries.find(e => e.fullUrl === entryId); if (referencedEntry) { mappedResources[entryId] = FHIRHelper.createInstanceFromFHIR(null, referencedEntry['resource'], 'undefined', shrId, allEntries, mappedResources, referencesOut); } } if (mappedResources[entryId]) { inst.value = FHIRHelper.createReference(mappedResources[entryId], referencesOut); } // no else in this case since we don't know which of the 2 possible types it is return inst; } else { return super.fromFHIR(fhir, fhirType, shrId, allEntries, mappedResources, referencesOut, asExtension); } } }
pase13voxi/TeaMemory
app/src/test/java/coolpharaoh/tee/speicher/tea/timer/views/new_tea/suggestions/PuerhTeaSuggestionsTest.java
package coolpharaoh.tee.speicher.tea.timer.views.new_tea.suggestions; import android.app.Application; import android.content.res.Resources; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import coolpharaoh.tee.speicher.tea.timer.R; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.when; @RunWith(MockitoJUnitRunner.class) public class PuerhTeaSuggestionsTest { private Suggestions puerhTeaSuggestions; @Mock Application application; @Mock Resources resources; @Before public void setUp() { when(application.getResources()).thenReturn(resources); puerhTeaSuggestions = new PuerhTeaSuggestions(application); } @Test public void getAmountTsSuggestion() { final int[] arrayTs = new int[]{1, 2}; when(resources.getIntArray(R.array.new_tea_suggestions_puerh_tea_amount_ts)).thenReturn(arrayTs); assertThat(puerhTeaSuggestions.getAmountTsSuggestions()).isEqualTo(arrayTs); } @Test public void getAmountGrSuggestion() { final int[] arrayGr = new int[]{1, 2}; when(resources.getIntArray(R.array.new_tea_suggestions_puerh_tea_amount_gr)).thenReturn(arrayGr); assertThat(puerhTeaSuggestions.getAmountGrSuggestions()).isEqualTo(arrayGr); } @Test public void getAmountTbSuggestion() { final int[] arrayTb = new int[]{1, 2}; when(resources.getIntArray(R.array.new_tea_suggestions_puerh_tea_amount_tb)).thenReturn(arrayTb); assertThat(puerhTeaSuggestions.getAmountTbSuggestions()).isEqualTo(arrayTb); } @Test public void getTemperatureCelsiusSuggestion() { final int[] arrayCelsius = new int[]{1, 2}; when(resources.getIntArray(R.array.new_tea_suggestions_puerh_tea_temperature_celsius)).thenReturn(arrayCelsius); assertThat(puerhTeaSuggestions.getTemperatureCelsiusSuggestions()).isEqualTo(arrayCelsius); } @Test public void getTemperatureFahrenheitSuggestion() { final int[] arrayFahrenheit = new int[]{1, 2}; when(resources.getIntArray(R.array.suggestions_puerh_tea_temperature_fahrenheit)).thenReturn(arrayFahrenheit); assertThat(puerhTeaSuggestions.getTemperatureFahrenheitSuggestions()).isEqualTo(arrayFahrenheit); } @Test public void getSteepingTimeSuggestion() { final String[] arrayTime = new String[]{"1:00", "2:30"}; when(resources.getStringArray(R.array.new_tea_suggestions_puerh_tea_time)).thenReturn(arrayTime); assertThat(puerhTeaSuggestions.getTimeSuggestions()).isEqualTo(arrayTime); } }
ark100/multi-os-engine
moe/test/xosrt_tests/src/test/java/org/moe/xosrt/binding/test/uikit/testhelpers/CustomControl.java
package org.moe.xosrt.binding.test.uikit.testhelpers; import org.moe.natj.general.NatJ; import org.moe.natj.general.Pointer; import org.moe.natj.objc.ann.Selector; import ios.coregraphics.struct.CGRect; import ios.uikit.UIColor; import ios.uikit.UIControl; import ios.uikit.UILabel; public class CustomControl extends UIControl { static { NatJ.register(); } @Selector("alloc") public static native CustomControl alloc(); @Selector("init") public native CustomControl init(); public UILabel label; protected CustomControl(Pointer peer) { super(peer); } @Selector("initWithFrame:") public native UIControl initWithFrame(CGRect frame); @Selector("setFrame:") @Override public void setFrame(CGRect frame) { super.setFrame(frame); if (label == null) { label = (UILabel)UILabel.alloc().initWithFrame(frame); label.setText("Default text"); label.setBackgroundColor(UIColor.whiteColor()); label.setTextColor(UIColor.blackColor()); } label.setFrame(frame); } @Selector("setBackgroundColor:") @Override public void setBackgroundColor(UIColor bkColor) { super.setBackgroundColor(bkColor); label.setBackgroundColor(bkColor); } }
LigeiramenteDesidratado/StrangeMachine
StrangeMachine/smSkinnedModel.c
#include "util/bitmask.h" #include "util/common.h" #include "data/array.h" #include "smController.h" #include "smGLTFLoader.h" #include "smInput.h" #include "smMem.h" #include "smRearrangeBones.h" #include "smShader.h" #include "smShaderProgram.h" #include "smSkinnedMesh.h" #include "smTexture.h" #include "smUniform.h" typedef struct { skinned_mesh_s *meshes; struct clip_s **clips; struct skeleton_s *skeleton; mat4 *pose_palette; // TODO: materials? texture_s texture; struct controller_s *fade_controller; unsigned char current_clip, next_clip; } skinned_model_s; void next_animation(skinned_model_s *sample); skinned_model_s *skinned_model_new(void) { skinned_model_s *skinned_model = SM_CALLOC(1, sizeof(skinned_model_s)); SM_ASSERT(skinned_model != NULL); return skinned_model; } // void load_model(skinned_model_s *model, const char *filename); // // bool skinned_model_ctor(skinned_model_s *skinned_model, const char *gltf_path, const char *texture_path) { // // SM_ASSERT(skinned_model != NULL); // // load_model(skinned_model, gltf_path); // // bone_map_s *opt_maps = rearrange_skeleton(skinned_model->skeleton); // for (size_t i = 0; i < SM_ARRAY_SIZE(skinned_model->meshes); ++i) { // rearrange_mesh(&skinned_model->meshes[i], opt_maps); // } // // for (size_t i = 0; i < SM_ARRAY_SIZE(skinned_model->clips); ++i) { // rearrange_clip(skinned_model->clips[i], opt_maps); // } // hmfree(opt_maps); // // skinned_model->texture = texture_new(); // if (!texture_ctor(&skinned_model->texture, texture_path)) { // SM_LOG_ERROR("failed to construct texture"); // return false; // } // // struct controller_s *control = controller_new(); // if (!controller_ctor(control, skinned_model->skeleton)) // return false; // skinned_model->fade_controller = control; // // controller_play(skinned_model->fade_controller, skinned_model->clips[0]); // controller_do(skinned_model->fade_controller, 0.0f); // pose_get_matrix_palette(controller_get_current_pose(skinned_model->fade_controller), &skinned_model->pose_palette); // // skinned_model->next_clip = 0; // skinned_model->current_clip = 0; // // return true; // } bool skinned_model_ctor2(skinned_model_s *skinned_model, const char *gltf_path, const char *texture_path) { SM_ASSERT(skinned_model != NULL); cgltf_data *data = gltf_loader_load_file(gltf_path); skinned_model->meshes = gltf_loader_load_meshes(data); skinned_model->skeleton = gltf_loader_load_skeleton(data); skinned_model->clips = gltf_loader_load_animation_clips(data); /* for (size_t i = 0; i < data->materials_count; i++) { */ /* printf("%s\n", data->materials[i].name); */ /* } */ gltf_loader_free_data(data); bone_map_s *opt_maps = rearrange_skeleton(skinned_model->skeleton); for (size_t i = 0; i < SM_ARRAY_SIZE(skinned_model->meshes); ++i) { rearrange_mesh(&skinned_model->meshes[i], opt_maps); } for (size_t i = 0; i < SM_ARRAY_SIZE(skinned_model->clips); ++i) { rearrange_clip(skinned_model->clips[i], opt_maps); } hmfree(opt_maps); skinned_model->texture = texture_new(); if (!texture_ctor(&skinned_model->texture, texture_path)) { SM_LOG_ERROR("failed to construct texture"); return false; } struct controller_s *control = controller_new(); if (!controller_ctor(control, skinned_model->skeleton)) return false; skinned_model->fade_controller = control; controller_play(skinned_model->fade_controller, skinned_model->clips[0]); controller_do(skinned_model->fade_controller, 0.0f); pose_get_matrix_palette(controller_get_current_pose(skinned_model->fade_controller), 
&skinned_model->pose_palette); skinned_model->next_clip = 0; skinned_model->current_clip = 0; return true; } void skinned_model_dtor(skinned_model_s *skinned_model) { SM_ASSERT(skinned_model != NULL); controller_dtor(skinned_model->fade_controller); SM_ALIGNED_ARRAY_DTOR(skinned_model->pose_palette); skeleton_dtor(skinned_model->skeleton); for (size_t i = 0; i < SM_ARRAY_SIZE(skinned_model->clips); ++i) { clip_dtor(skinned_model->clips[i]); } SM_ARRAY_DTOR(skinned_model->clips); for (size_t i = 0; i < SM_ARRAY_SIZE(skinned_model->meshes); ++i) { skinned_mesh_dtor(&skinned_model->meshes[i]); } SM_ARRAY_DTOR(skinned_model->meshes); SM_FREE(skinned_model); skinned_model = NULL; } void skinned_model_do(skinned_model_s *skinned_model, float dt) { controller_do(skinned_model->fade_controller, dt); if (input_scan_key_lock(sm_key_n)) { next_animation(skinned_model); } if (skinned_model->current_clip != skinned_model->next_clip) { // sample->fade_timer = 3.0; skinned_model->current_clip = skinned_model->next_clip; controller_fade_to(skinned_model->fade_controller, skinned_model->clips[skinned_model->current_clip], 0.5f); } pose_get_matrix_palette(controller_get_current_pose(skinned_model->fade_controller), &skinned_model->pose_palette); mat4 **inverse_bind_pose = skeleton_get_inverse_bind_pose(skinned_model->skeleton); for (size_t i = 0; i < SM_ALIGNED_ARRAY_SIZE(skinned_model->pose_palette); ++i) { glm_mat4_mul(skinned_model->pose_palette[i], (*inverse_bind_pose)[i], skinned_model->pose_palette[i]); } } void skinned_model_draw(skinned_model_s *skinned_model) { SM_ASSERT(skinned_model != NULL); shader_bind(SHADERS[SKINNED_SHADER]); if (skinned_model->pose_palette) { GLuint loc = glGetUniformLocation(SHADERS[SKINNED_SHADER], "animated"); uniform_set_array(loc, skinned_model->pose_palette, (int32_t)SM_ALIGNED_ARRAY_SIZE(skinned_model->pose_palette)); } texture_set(&skinned_model->texture, glGetUniformLocation(SHADERS[SKINNED_SHADER], "tex0"), 0); uint8_t flags = 0; MASK_SET(flags, 1 << skinned_mesh_attr_locs.position); MASK_SET(flags, 1 << skinned_mesh_attr_locs.tex_coord); MASK_SET(flags, 1 << skinned_mesh_attr_locs.normal); MASK_SET(flags, 1 << skinned_mesh_attr_locs.weight); MASK_SET(flags, 1 << skinned_mesh_attr_locs.joint); for (size_t i = 0; i < SM_ARRAY_SIZE(skinned_model->meshes); ++i) { skinned_mesh_bind(&skinned_model->meshes[i], flags); GLuint handle = skinned_model->meshes[i].index_buffer.ebo; size_t num_indices = skinned_model->meshes[i].index_buffer.count; glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, handle); glDrawElements(GL_TRIANGLES, (int32_t)num_indices, GL_UNSIGNED_INT, 0); glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0); skinned_mesh_unbind(&skinned_model->meshes[i], flags); } texture_unset(0); shader_unbind(); } bool skinned_model_set_animation(skinned_model_s *skinned_model, const char *animation) { for (unsigned int i = 0; i < SM_ARRAY_SIZE(skinned_model->clips); ++i) { if (strcmp(clip_get_name(skinned_model->clips[i]), animation) == 0) { skinned_model->next_clip = i; return true; } } return false; } char **skinned_model_get_animations(skinned_model_s *skinned_model) { char **animations = NULL; size_t size = SM_ARRAY_SIZE(skinned_model->clips); SM_ARRAY_NEW(animations, size); for (unsigned int i = 0; i < size; ++i) { animations[i] = strdup(clip_get_name(skinned_model->clips[i])); } return animations; } void next_animation(skinned_model_s *sample) { sample->next_clip++; if (sample->next_clip >= SM_ARRAY_SIZE(sample->clips)) sample->next_clip = 0; } // #include <assimp/cimport.h> // #include 
<assimp/postprocess.h> // #include <assimp/scene.h> // #include <assimp/version.h> // // int get_node_index(const struct aiNode *node, const struct aiBone **all_nodes, uint32_t num_nodes) { // if (node == NULL) { // return -1; // } // // for (size_t i = 0; i < num_nodes; ++i) { // if (strcmp(node->mName.data, all_nodes[i]->mNode->mName.data) == 0) { // return i; // } // } // // return -1; // } // // void load_mesh(skinned_mesh_s *skinned_mesh, const struct aiMesh *mesh) { // // /* skinned_mesh_s skinned_mesh = skinned_mesh_new(); */ // /* if (!skinned_mesh_ctor(&skinned_mesh)) { */ // /* printf("failed to create skinned mesh\n"); */ // /* return skinned_mesh; */ // /* } */ // // if (mesh->mVertices != NULL && mesh->mNumVertices > 0) { // SM_ARRAY_SET_SIZE(skinned_mesh->vertex.positions, mesh->mNumVertices); // memcpy(skinned_mesh->vertex.positions, mesh->mVertices, mesh->mNumVertices * sizeof(float) * 3); // } // // if (mesh->mNormals != NULL && mesh->mNumVertices > 0) { // SM_ARRAY_SET_SIZE(skinned_mesh->vertex.normals, mesh->mNumVertices); // memcpy(skinned_mesh->vertex.normals, mesh->mNormals, mesh->mNumVertices * sizeof(float) * 3); // } // // if (mesh->mTextureCoords[0] != NULL && mesh->mNumVertices > 0) { // SM_ARRAY_SET_SIZE(skinned_mesh->vertex.tex_coords, mesh->mNumVertices); // for (uint32_t uv = 0; uv < mesh->mNumVertices; ++uv) { // skinned_mesh->vertex.tex_coords[uv].v[0] = mesh->mTextureCoords[0][uv].x; // skinned_mesh->vertex.tex_coords[uv].v[1] = mesh->mTextureCoords[0][uv].y; // } // } else { // SM_ARRAY_SET_SIZE(skinned_mesh->vertex.tex_coords, mesh->mNumVertices); // for (uint32_t uv = 0; uv < mesh->mNumVertices; ++uv) { // skinned_mesh->vertex.tex_coords[uv].v[0] = 1.0f; // skinned_mesh->vertex.tex_coords[uv].v[1] = 1.0f; // } // } // // // load indices // if (mesh->mFaces != NULL && mesh->mNumFaces > 0) { // for (uint32_t i = 0; i < mesh->mNumFaces; ++i) { // struct aiFace face = mesh->mFaces[i]; // for (uint32_t j = 0; j < face.mNumIndices; ++j) { // SM_ARRAY_PUSH(skinned_mesh->indices, face.mIndices[j]); // } // } // } // // SM_ARRAY_SET_SIZE(skinned_mesh->weights, mesh->mNumVertices); // memset(skinned_mesh->weights, 0, sizeof(vec4) * mesh->mNumVertices); // // SM_ARRAY_SET_SIZE(skinned_mesh->influences, mesh->mNumVertices); // memset(skinned_mesh->influences, -1, sizeof(ivec4) * mesh->mNumVertices); // // for (uint32_t bone_index = 0; bone_index < mesh->mNumBones; ++bone_index) { // int boneID = -1; // boneID = bone_index; // SM_ASSERT(boneID != -1); // struct aiVertexWeight *weights = mesh->mBones[bone_index]->mWeights; // int num_weights = mesh->mBones[bone_index]->mNumWeights; // for (int weight_index = 0; weight_index < num_weights; ++weight_index) { // int vertex_id = weights[weight_index].mVertexId; // float weight = weights[weight_index].mWeight; // // for (int vi = 0; vi < 4; ++vi) { // if (skinned_mesh->influences[vertex_id].v[vi] == -1) { // skinned_mesh->influences[vertex_id].v[vi] = boneID; // skinned_mesh->weights[vertex_id].v[vi] = weight; // break; // } // } // } // } // } // // void load_skeleton(skinned_model_s *model, const struct aiMesh *mesh) { // // if (model->skeleton != NULL) { // return; // } // // pose_s rest_pose = pose_new(); // pose_resize(&rest_pose, mesh->mNumBones); // // pose_s bind_pose = pose_new(); // pose_resize(&bind_pose, mesh->mNumBones); // // // equivalent to gltf_loader_load_rest_pose // for (uint32_t bone_index = 0; bone_index < mesh->mNumBones; ++bone_index) { // // struct aiBone *b = mesh->mBones[bone_index]; // // // 
Get the bone local transform matrix and convert it to transform_s // struct aiMatrix4x4 mat = b->mNode->mTransformation; // mat4 m = mat4_transpose(mat4_new(mat.a1, mat.a2, mat.a3, mat.a4, mat.b1, mat.b2, mat.b3, mat.b4, mat.c1, mat.c2, // mat.c3, mat.c4, mat.d1, mat.d2, mat.d3, mat.d4)); // transform_s local_transform = transform_mat4_to_transform(m); // // // Get the bone parent index // int parent_index = get_node_index(b->mNode->mParent, (const struct aiBone **)mesh->mBones, mesh->mNumBones); // // pose_set_parent(&rest_pose, bone_index, parent_index); // pose_set_local_transform(&rest_pose, bone_index, local_transform); // pose_set_name(&rest_pose, bone_index, b->mName.data); // } // // size_t num_bones = SM_ARRAY_SIZE(rest_pose.nodes); // transform_s *world_bind_pose = NULL; // SM_ARRAY_SET_SIZE(world_bind_pose, num_bones); // // for (size_t i = 0; i < num_bones; ++i) { // world_bind_pose[i] = pose_get_global_transform(&rest_pose, i); // } // // for (size_t i = 0; i < num_bones; ++i) { // struct aiBone *b = mesh->mBones[i]; // // struct aiMatrix4x4 ai_inverse_bind_pose = b->mOffsetMatrix; // mat4 inverse_bind_pose_mtraix = mat4_transpose( // mat4_new(ai_inverse_bind_pose.a1, ai_inverse_bind_pose.a2, ai_inverse_bind_pose.a3, ai_inverse_bind_pose.a4, // ai_inverse_bind_pose.b1, ai_inverse_bind_pose.b2, ai_inverse_bind_pose.b3, ai_inverse_bind_pose.b4, // ai_inverse_bind_pose.c1, ai_inverse_bind_pose.c2, ai_inverse_bind_pose.c3, ai_inverse_bind_pose.c4, // ai_inverse_bind_pose.d1, ai_inverse_bind_pose.d2, ai_inverse_bind_pose.d3, // ai_inverse_bind_pose.d4)); // // mat4 bind_pose_matrix = mat4_inverse(inverse_bind_pose_mtraix); // transform_s bind_pose_transform = transform_mat4_to_transform(bind_pose_matrix); // // int32_t joint_index = get_node_index(b->mNode, (const struct aiBone **)mesh->mBones, num_bones); // world_bind_pose[joint_index] = bind_pose_transform; // } // pose_copy(&bind_pose, &rest_pose); // // for (size_t i = 0; i < num_bones; ++i) { // transform_s current = world_bind_pose[i]; // int32_t p = pose_get_parent(&bind_pose, i); // // if (p >= 0) { // bring into parent space // transform_s parent = world_bind_pose[p]; // current = transform_combine(transform_inverse(parent), current); // } // // pose_set_local_transform(&bind_pose, i, current); // } // SM_ARRAY_DTOR(world_bind_pose); // // const char **bone_names = NULL; // SM_ARRAY_NEW(bone_names, mesh->mNumBones); // SM_ARRAY_SET_SIZE(bone_names, mesh->mNumBones); // // for (uint32_t bone_index = 0; bone_index < mesh->mNumBones; ++bone_index) { // struct aiBone *b = mesh->mBones[bone_index]; // if (b->mName.length > 0) { // bone_names[bone_index] = strdup(b->mName.data); // printf("bone name: %s\n", bone_names[bone_index]); // } else { // bone_names[bone_index] = strdup("EMPTY NODE"); // printf("WARNING: bone %d has no name\n", bone_index); // } // } // printf("assimp names: %lu\n", SM_ARRAY_SIZE(bone_names)); // // model->skeleton = skeleton_new(); // if (!skeleton_ctor(model->skeleton, &rest_pose, &bind_pose, (const char **)bone_names)) // printf("error building skeleton\n"); // // SM_ARRAY_DTOR(bone_names); // // pose_dtor(&rest_pose); // pose_dtor(&bind_pose); // } // // void process_nodes(skinned_model_s *model, struct aiNode *node, const struct aiScene *scene) { // // process each mesh located at the current node // for (unsigned int i = 0; i < node->mNumMeshes; i++) { // // struct aiMesh *mesh = scene->mMeshes[node->mMeshes[i]]; // printf("mesh bones: %d\n", mesh->mNumBones); // // // the node object only 
contains indices to index the actual objects in the scene. // // the scene contains all the data, node is just to keep stuff organized (like relations between nodes). // skinned_mesh_s skinned_mesh = skinned_mesh_new(); // if (!skinned_mesh_ctor(&skinned_mesh)) { // printf("error building skinned mesh\n"); // } // // // print count of bones // printf("mesh bones: %d\n", mesh->mNumBones); // // load_mesh(&skinned_mesh, mesh); // SM_ARRAY_PUSH(model->meshes, skinned_mesh); // load_skeleton(model, mesh); // } // // after we've processed all of the meshes (if any) we then recursively process each of the children nodes // for (unsigned int i = 0; i < node->mNumChildren; i++) { // process_nodes(model, node->mChildren[i], scene); // } // } // // void load_model(skinned_model_s *model, const char *filename) { // // unsigned int major = aiGetVersionMajor(); // unsigned int minor = aiGetVersionMinor(); // printf("version: %d.%d\n", major, minor); // const struct aiScene *scene = // aiImportFile(filename, aiProcessPreset_TargetRealtime_Fast | aiProcess_PopulateArmatureData); // if (!scene || scene->mFlags & AI_SCENE_FLAGS_INCOMPLETE || !scene->mRootNode) { // printf("ERROR::ASSIMP::%s\n", aiGetErrorString()); // exit(1); // } // // has animations? // printf("animations: %d\n", scene->mNumAnimations); // // process ASSIMP's root node recursively // process_nodes(model, scene->mRootNode, scene); // uint32_t anim_count = scene->mNumAnimations; // // SM_ARRAY_SET_SIZE(model->clips, anim_count); // // for (uint32_t i = 0; i < anim_count; ++i) { // struct aiAnimation *anim = scene->mAnimations[i]; // uint32_t num_channels = anim->mNumChannels; // // model->clips[i] = clip_new(); // if (!clip_ctor(model->clips[i], anim->mName.data)) // exit(1); // // struct clip_s *clip = model->clips[i]; // // clip_set_cap_tracks(clip, num_channels); // for (uint32_t j = 0; j < num_channels; ++j) { // struct aiNodeAnim *channel = anim->mChannels[j]; // int32_t node_index = pose_get_index_by_name(skeleton_get_rest_pose(model->skeleton), channel->mNodeName.data); // // interpolation_e interpolation = CONSTANT_INTERP; // if (channel->mPreState == aiAnimBehaviour_LINEAR) // interpolation = LINEAR_INTERP; // else if (channel->mPreState == aiAnimBehaviour_REPEAT) // interpolation = CUBIC_INTERP; // // transform_track_s *track = clip_get_transform_track_from_joint(clip, node_index); // track_resize_frame(&track->position, channel->mNumPositionKeys); // track_resize_frame(&track->rotation, channel->mNumRotationKeys); // track_resize_frame(&track->scale, channel->mNumScalingKeys); // // for (uint32_t k = 0; k < channel->mNumPositionKeys; ++k) { // struct aiVectorKey *key = &channel->mPositionKeys[k]; // // track->position.interpolation = interpolation; // track_resize_frame_in(&track->position, 3, k); // track_resize_frame_out(&track->position, 3, k); // track_resize_frame_value(&track->position, 3, k); // // frame_s *frame = &track->position.frames[k]; // frame->t = key->mTime / 1000.0f; // frame->value[0] = key->mValue.x; // frame->value[1] = key->mValue.y; // frame->value[2] = key->mValue.z; // // // TODO: fix this // frame->in[0] = 0.0f; // frame->in[1] = 0.0f; // frame->in[2] = 0.0f; // // frame->out[0] = 0.0f; // frame->out[1] = 0.0f; // frame->out[2] = 0.0f; // } // // for (uint32_t k = 0; k < channel->mNumRotationKeys; ++k) { // struct aiQuatKey *key = &channel->mRotationKeys[k]; // // track->rotation.interpolation = interpolation; // // track_resize_frame_in(&track->rotation, 4, k); // 
track_resize_frame_out(&track->rotation, 4, k); // track_resize_frame_value(&track->rotation, 4, k); // // frame_s *frame = &track->rotation.frames[k]; // frame->t = key->mTime / 1000.0f; // frame->value[0] = key->mValue.x; // frame->value[1] = key->mValue.y; // frame->value[2] = key->mValue.z; // frame->value[3] = key->mValue.w; // // // TODO: fix this // frame->in[0] = 0.0f; // frame->in[1] = 0.0f; // frame->in[2] = 0.0f; // frame->in[3] = 0.0f; // // frame->out[0] = 0.0f; // frame->out[1] = 0.0f; // frame->out[2] = 0.0f; // frame->out[3] = 0.0f; // } // // for (uint32_t k = 0; k < channel->mNumScalingKeys; ++k) { // struct aiVectorKey *key = &channel->mScalingKeys[k]; // // track->scale.interpolation = interpolation; // track_resize_frame_in(&track->scale, 3, k); // track_resize_frame_out(&track->scale, 3, k); // track_resize_frame_value(&track->scale, 3, k); // // frame_s *frame = &track->scale.frames[k]; // frame->t = key->mTime / 1000.0f; // frame->value[0] = key->mValue.x; // frame->value[1] = key->mValue.y; // frame->value[2] = key->mValue.z; // // // TODO: fix this // frame->in[0] = 0.0f; // frame->in[1] = 0.0f; // frame->in[2] = 0.0f; // // frame->out[0] = 0.0f; // frame->out[1] = 0.0f; // frame->out[2] = 0.0f; // } // } // clip_recalculate_duration(clip); // } // // for (size_t i = 0; i < SM_ARRAY_SIZE(model->clips); ++i) { // for (size_t j = 0; j < clip_get_size(model->clips[i]); ++j) { // int32_t joint = clip_get_id_at_index(model->clips[i], j); // // transform_track_s *ttrack = clip_get_transform_track_from_joint(model->clips[i], joint); // // track_index_look_up_table(&ttrack->position); // track_index_look_up_table(&ttrack->rotation); // track_index_look_up_table(&ttrack->scale); // } // } // // aiReleaseImport(scene); // }
jandejongh/jqueues
src/test/java/org/javades/jqueues/r5/entity/jq/queue/TestJob2.java
/* * Copyright 2010-2018 <NAME> <<EMAIL>>, TNO. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.javades.jqueues.r5.entity.jq.queue; import java.util.HashMap; import java.util.Map; import org.javades.jqueues.r5.entity.jq.job.SimJob; import org.javades.jqueues.r5.entity.jq.job.visitslogging.DefaultVisitsLoggingSimJob; /** A (alternative) test {@link SimJob}. * * @param <Q> The type of {@link SimQueue}s supported. * * @author <NAME>, TNO * * <p> * Copyright (C) 2005-2017 <NAME>, TNO * * <p> * This file is covered by the LICENSE file in the root of this project. * */ public class TestJob2<Q extends SimQueue> extends DefaultVisitsLoggingSimJob<TestJob2, Q> { private final boolean reported; public final int n; public final double scheduledArrivalTime; public static Map<SimQueue, Double> createRequestedServiceTimeMap (final int n) { if (n <= 0) throw new IllegalArgumentException (); final Map<SimQueue, Double> requestedServiceTimeMap = new HashMap<> (); requestedServiceTimeMap.put (null, (double) n); return requestedServiceTimeMap; } public TestJob2 (boolean reported, int n) { super (null, "TestJob[" + n + "]", (Map<Q, Double>) createRequestedServiceTimeMap (n)); this.reported = reported; this.n = n; this.scheduledArrivalTime = this.n; } }
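The static createRequestedServiceTimeMap helper above encodes the job's requested service time as a single map entry under the null queue key, which the TestJob2 constructor then passes to its superclass. A small sketch of what that mapping looks like when called directly; the wrapper class and the printed lines are illustrations, not part of the jqueues test suite:

```java
import java.util.Map;
import org.javades.jqueues.r5.entity.jq.queue.SimQueue;
import org.javades.jqueues.r5.entity.jq.queue.TestJob2;

public class TestJob2MapSketch {
    public static void main(String[] args) {
        // For n = 4 the helper returns a single entry {null=4.0}; the null key
        // stands for the requested service time at any queue the job visits.
        Map<SimQueue, Double> map = TestJob2.createRequestedServiceTimeMap(4);
        System.out.println(map);           // expected: {null=4.0}
        System.out.println(map.get(null)); // expected: 4.0
    }
}
```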
madeso/hopper
devel/game/gamemessagewindow.h
#ifndef _GAMEMESSAGEWINDOW_ #define _GAMEMESSAGEWINDOW_ #include "dynagui.h" #include "delay.h" /** * CGameMessageWindow : displays a short text for the provided amount of time and then hides itself * * if the display timeout is 0, the window never hides itself * timeout in ms, defaults to 2 secs. */ class CGameMessageWindow { public: CGameMessageWindow( int timeout = 2000 ); ~CGameMessageWindow(); void show(); void hide(); void update(); void setMessage( std::string message ); private: DynaGUI* mGUI; DWindow* mWindow; DText* mMessage; int mTimeOut; CDelay* mHideDelay; }; #endif
abhijeetviswam/libucresolv
include/glibc-string/tst-strfry.c
#include <stdio.h> #include <string.h> int do_test (void) { char str[] = "this is a test"; strfry (str); return 0; } #include <support/test-driver.c>
github-fhl/staff
src/components/layouts/ContentLayout.js
import React from 'react'; import {Link} from 'react-router' import {Breadcrumb} from 'antd' import PropTypes from 'prop-types'; import './ContentLayout.scss' // autoHeight: whether the content area height is fixed to one screen or extends automatically // Main content area layout const ContentLayout = ({header, footer, children, autoHeight = true})=>( <div className="layout-fix"> { header && <div className="layout-fix-header"> {header} </div> } <div className={`layout-fix-content${autoHeight ? '' : ' layout-fix-height'}`}> {children} </div> { footer && <div className="layout-fix-footer"> {footer} </div> } </div>) ContentLayout.propTypes = { autoHeight: PropTypes.bool, header: PropTypes.element, footer: PropTypes.element, children: PropTypes.any.isRequired } export default ContentLayout; // Main content area header export const ContentHeader = ({extra, location,title={},children})=> { const pathSnippets = location.pathname.split('/').filter(i => i && i.toLowerCase()!=='home' ); const extraBreadcrumbItems = pathSnippets.map((_, index) => { const url = `/${pathSnippets.slice(0, index + 1).join('/')}`; let text=title[_] || decodeURI(_); return ( <Breadcrumb.Item key={url}> <Link to={url}> {text} </Link> </Breadcrumb.Item> ); }); const breadcrumbItems = [ ( <Breadcrumb.Item key="home"> <Link to="/">Home</Link> </Breadcrumb.Item> )].concat(extraBreadcrumbItems); return ( <div className="page-header"> <h2 className="pull-left"> <Breadcrumb> {breadcrumbItems} </Breadcrumb> {children} </h2> <div className="pull-right">{extra}</div> </div> ) } ContentHeader.propTypes = { title: PropTypes.object, location: PropTypes.object, extra: PropTypes.element, }
Bugasu/ghidra
Ghidra/Framework/DB/src/main/java/db/BooleanField.java
/* ### * IP: GHIDRA * REVIEWED: YES * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package db; import ghidra.util.exception.AssertException; import java.io.IOException; /** * <code>BooleanField</code> provides a wrapper for boolean data which is read or * written to a Record. */ public class BooleanField extends Field { private byte value; /** * Construct a boolean data field with an initial value of false. */ public BooleanField() { } /** * Construct a boolean data field with an initial value of b. * @param b initial value */ public BooleanField(boolean b) { value = b ? (byte) 1 : (byte) 0; } /* * @see ghidra.framework.store.db.Field#getBooleanValue() */ @Override public boolean getBooleanValue() { return (value == 0) ? false : true; } /* * @see ghidra.framework.store.db.Field#setBooleanValue(boolean) */ @Override public void setBooleanValue(boolean b) { this.value = b ? (byte) 1 : (byte) 0; } /* * @see ghidra.framework.store.db.Field#length() */ @Override int length() { return 1; } /* * @see ghidra.framework.store.db.Field#write(ghidra.framework.store.Buffer, int) */ @Override int write(Buffer buf, int offset) throws IOException { return buf.putByte(offset, value); } /* * @see ghidra.framework.store.db.Field#read(ghidra.framework.store.Buffer, int) */ @Override int read(Buffer buf, int offset) throws IOException { value = buf.getByte(offset); return offset + 1; } /* * @see ghidra.framework.store.db.Field#readLength(ghidra.framework.store.Buffer, int) */ @Override int readLength(Buffer buf, int offset) throws IOException { return 1; } /* * @see ghidra.framework.store.db.Field#getFieldType() */ @Override protected byte getFieldType() { return BOOLEAN_TYPE; } /* * @see java.lang.Object#toString() */ @Override public String toString() { return "BooleanField: " + Boolean.toString(getBooleanValue()); } @Override public String getValueAsString() { return Boolean.toString(getBooleanValue()); } /* * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(Object obj) { if (obj == null || !(obj instanceof BooleanField)) return false; BooleanField otherField = (BooleanField) obj; return otherField.value == value; } /* * @see java.lang.Comparable#compareTo(java.lang.Object) */ @Override public int compareTo(Field o) { BooleanField f = (BooleanField) o; if (value == f.value) return 0; else if (value < f.value) return -1; return 1; } /* * @see ghidra.framework.store.db.Field#newField(ghidra.framework.store.db.Field) */ @Override public Field newField(Field fieldValue) { if (fieldValue.isVariableLength()) throw new AssertException(); return new BooleanField(fieldValue.getLongValue() != 0); } /* * @see ghidra.framework.store.db.Field#newField() */ @Override public Field newField() { return new BooleanField(); } /* * @see ghidra.framework.store.db.Field#getLongValue() */ @Override public long getLongValue() { return value; } /* * @see ghidra.framework.store.db.Field#getBinaryData() */ @Override public byte[] getBinaryData() { return new byte[] { value }; } @Override 
public int hashCode() { // TODO Auto-generated method stub return value; } }
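BooleanField above stores the flag as a single backing byte and defines equality and ordering on that byte, which is what the database layer relies on when comparing field values. A short sketch of that value-level behaviour using only the public methods visible in the file; it assumes the db package is on the classpath, uses a placeholder wrapper class, and deliberately skips the Buffer read/write path since constructing a Buffer is outside this snippet:

```java
import db.BooleanField;

public class BooleanFieldSketch {
    public static void main(String[] args) {
        BooleanField yes = new BooleanField(true);
        BooleanField no = new BooleanField();               // default constructor -> false

        System.out.println(yes.getBooleanValue());          // true
        System.out.println(yes.getLongValue());             // 1, the single backing byte
        System.out.println(yes.compareTo(no));               // 1: true sorts after false
        System.out.println(yes.equals(new BooleanField(true))); // true: equality is by value
        System.out.println(no.getValueAsString());          // "false"
    }
}
```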
rsn8887/mame2003-plus-libretro
src/includes/lemmings.h
VIDEO_START( lemmings ); VIDEO_STOP( lemmings ); VIDEO_EOF( lemmings ); VIDEO_UPDATE( lemmings ); WRITE16_HANDLER( lemmings_pixel_0_w ); WRITE16_HANDLER( lemmings_pixel_1_w ); WRITE16_HANDLER( lemmings_vram_w ); extern data16_t *lemmings_pixel_0_data,*lemmings_pixel_1_data,*lemmings_vram_data,*lemmings_control_data;
maxvonhippel/snake
ns-3-dev/src/lte/model/lte-mac-header.h
/* -*- Mode:C++; c-file-style:"gnu"; indent-tabs-mode:nil; -*- */ /* * Copyright (c) 2010 TELEMATICS LAB, DEE - Politecnico di Bari * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 as * published by the Free Software Foundation; * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * * Author: <NAME> <<EMAIL>> */ #ifndef LTE_MAC_HEADER_H #define LTE_MAC_HEADER_H #include <ns3/header.h> #include <ns3/mac48-address.h> #include <ns3/address-utils.h> namespace ns3 { /** * \ingroup lte * * This class implements the LTE MAC header */ class LteMacHeader : public Header { public: static TypeId GetTypeId (void); virtual TypeId GetInstanceTypeId (void) const; virtual uint32_t GetSerializedSize (void) const; virtual void Serialize (Buffer::Iterator start) const; virtual uint32_t Deserialize (Buffer::Iterator start); virtual void Print (std::ostream &os) const; /** * \brief set the source MAC address * \param source the source mac address */ void SetSource (Mac48Address source); /** * \brief set the destination MAC address * \param destination the destination MAC address */ void SetDestination (Mac48Address destination); /** * \brief get the source MAC address * \return the source MAC address */ Mac48Address GetSource () const; /** * \brief get the destination MAC address * \return the destination MAC address */ Mac48Address GetDestination () const; private: Mac48Address m_source; Mac48Address m_destination; }; } // namespace ns3 #endif /* LTE_MAC_HEADER_H */
siwelo/bitshares-2
libraries/chain/include/graphene/chain/withdraw_permission_object.hpp
/* * Copyright (c) 2015 Cryptonomex, Inc., and contributors. * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * * 1. Any modified source or binaries are used only with the BitShares network. * * 2. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * * 3. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * */ #pragma once #include <graphene/chain/protocol/authority.hpp> #include <graphene/db/generic_index.hpp> namespace graphene { namespace chain { /** * @class withdraw_permission_object * @brief Grants another account authority to withdraw a limited amount of funds per interval * * The primary purpose of this object is to enable recurring payments on the blockchain. An account which wishes to * process a recurring payment may use a @ref withdraw_permission_claim_operation to reference an object of this type * and withdraw up to @ref withdrawal_limit from @ref withdraw_from_account. Only @ref authorized_account may do * this. Any number of withdrawals may be made so long as the total amount withdrawn per period does not exceed the * limit for any given period. */ class withdraw_permission_object : public graphene::db::abstract_object<withdraw_permission_object> { public: static const uint8_t space_id = protocol_ids; static const uint8_t type_id = withdraw_permission_object_type; /// The account authorizing @ref authorized_account to withdraw from it account_id_type withdraw_from_account; /// The account authorized to make withdrawals from @ref withdraw_from_account account_id_type authorized_account; /// The maximum amount which may be withdrawn per period. All withdrawals must be of this asset type asset withdrawal_limit; /// The duration of a withdrawal period in seconds uint32_t withdrawal_period_sec = 0; /// The beginning of the next withdrawal period time_point_sec period_start_time; /// The time at which this withdraw permission expires time_point_sec expiration; /// tracks the total amount share_type claimed_this_period; /// True if the permission may still be claimed for this period; false if it has already been used asset available_this_period( fc::time_point_sec current_time )const { if( current_time >= period_start_time + withdrawal_period_sec ) return withdrawal_limit; return asset( ( withdrawal_limit.amount > claimed_this_period ) ? 
withdrawal_limit.amount - claimed_this_period : 0, withdrawal_limit.asset_id ); } }; struct by_from; struct by_authorized; struct by_expiration; typedef multi_index_container< withdraw_permission_object, indexed_by< ordered_unique< tag<by_id>, member< object, object_id_type, &object::id > >, ordered_non_unique< tag<by_from>, member<withdraw_permission_object, account_id_type, &withdraw_permission_object::withdraw_from_account> >, ordered_non_unique< tag<by_authorized>, member<withdraw_permission_object, account_id_type, &withdraw_permission_object::authorized_account> >, ordered_non_unique< tag<by_expiration>, member<withdraw_permission_object, time_point_sec, &withdraw_permission_object::expiration> > > > withdraw_permission_object_multi_index_type; typedef generic_index<withdraw_permission_object, withdraw_permission_object_multi_index_type> withdraw_permission_index; } } // graphene::chain FC_REFLECT_DERIVED( graphene::chain::withdraw_permission_object, (graphene::db::object), (withdraw_from_account) (authorized_account) (withdrawal_limit) (withdrawal_period_sec) (period_start_time) (expiration) )
EchoThreeLLC/echothree
src/java/com/echothree/model/control/training/server/transfer/TrainingClassAnswerTranslationTransferCache.java
// -------------------------------------------------------------------------------- // Copyright 2002-2022 Echo Three, LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // -------------------------------------------------------------------------------- package com.echothree.model.control.training.server.transfer; import com.echothree.model.control.core.common.transfer.MimeTypeTransfer; import com.echothree.model.control.core.server.control.CoreControl; import com.echothree.model.control.party.common.transfer.LanguageTransfer; import com.echothree.model.control.training.common.transfer.TrainingClassAnswerTransfer; import com.echothree.model.control.training.common.transfer.TrainingClassAnswerTranslationTransfer; import com.echothree.model.control.training.server.control.TrainingControl; import com.echothree.model.data.core.server.entity.MimeType; import com.echothree.model.data.training.server.entity.TrainingClassAnswerTranslation; import com.echothree.model.data.user.server.entity.UserVisit; import com.echothree.util.server.persistence.Session; public class TrainingClassAnswerTranslationTransferCache extends BaseTrainingDescriptionTransferCache<TrainingClassAnswerTranslation, TrainingClassAnswerTranslationTransfer> { CoreControl coreControl = Session.getModelController(CoreControl.class); /** Creates a new instance of TrainingClassAnswerTranslationTransferCache */ public TrainingClassAnswerTranslationTransferCache(UserVisit userVisit, TrainingControl trainingControl) { super(userVisit, trainingControl); } public TrainingClassAnswerTranslationTransfer getTrainingClassAnswerTranslationTransfer(TrainingClassAnswerTranslation trainingClassAnswerTranslation) { TrainingClassAnswerTranslationTransfer trainingClassAnswerTranslationTransfer = get(trainingClassAnswerTranslation); if(trainingClassAnswerTranslationTransfer == null) { LanguageTransfer languageTransfer = partyControl.getLanguageTransfer(userVisit, trainingClassAnswerTranslation.getLanguage()); TrainingClassAnswerTransfer trainingClassAnswerTransfer = trainingControl.getTrainingClassAnswerTransfer(userVisit, trainingClassAnswerTranslation.getTrainingClassAnswer()); MimeTypeTransfer answerMimeTypeTransfer = coreControl.getMimeTypeTransfer(userVisit, trainingClassAnswerTranslation.getAnswerMimeType()); String answer = trainingClassAnswerTranslation.getAnswer(); MimeType selectedMimeType = trainingClassAnswerTranslation.getSelectedMimeType(); MimeTypeTransfer selectedMimeTypeTransfer = selectedMimeType == null? null: coreControl.getMimeTypeTransfer(userVisit, selectedMimeType); String selected = trainingClassAnswerTranslation.getSelected(); trainingClassAnswerTranslationTransfer = new TrainingClassAnswerTranslationTransfer(trainingClassAnswerTransfer, languageTransfer, answerMimeTypeTransfer, answer, selectedMimeTypeTransfer, selected); put(trainingClassAnswerTranslation, trainingClassAnswerTranslationTransfer); } return trainingClassAnswerTranslationTransfer; } }
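The transfer cache above follows a simple get-or-build pattern: look the entity up in the cache, assemble the transfer from the related controllers only on a miss, then store it before returning. A generic sketch of that caching shape, detached from the EchoThree controllers; the class name, the Entity/Transfer type parameters, and the buildTransfer function are placeholders for illustration, not EchoThree APIs:

```java
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

// Minimal get-or-build cache in the spirit of the transfer caches above.
public class TransferCacheSketch<E, T> {
    private final Map<E, T> cache = new HashMap<>();
    private final Function<E, T> buildTransfer; // stands in for the controller lookups

    public TransferCacheSketch(Function<E, T> buildTransfer) {
        this.buildTransfer = buildTransfer;
    }

    public T getTransfer(E entity) {
        T transfer = cache.get(entity);               // "get(...)" in the real cache
        if (transfer == null) {
            transfer = buildTransfer.apply(entity);   // assemble language, answer, mime types, ...
            cache.put(entity, transfer);              // "put(...)" in the real cache
        }
        return transfer;
    }
}
```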
allansrc/fuchsia
build/python/tests/bin/main.py
#!/usr/bin/env python3.8 # Copyright 2021 The Fuchsia Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import lib import sys def main(): lib.f() if __name__ == "__main__": sys.exit(main())
sergeytkachenko/siesta-template
tests/056_is_element_scrolled_out.t.js
StartTest(function (t) { // this test exercises the "elementIsScrolledOut" method t.testExtJS(function (t) { t.it('Determine that element is scrolled out of the view', function (t) { document.body.innerHTML = '<div style="position:absolute; left:100px; border:1px solid #ddd; width:200px; height:200px; overflow:auto">' + '<div style="position:absolute; background:#aaa; left:250px; width:50px; height:50px" id="inner">FOO</div>' + '</div>'; t.ok(t.elementIsScrolledOut(document.getElementById('inner')), 'Correctly determined scrolled out element') }); t.it('Determine that element is not scrolled out of the view', function (t) { document.body.innerHTML = '<div style="position:absolute; left:100px; border:1px solid #ddd; width:200px; height:200px; overflow:auto">' + '<div style="position:absolute; background:#aaa; width:50px; height:50px" id="inner">FOO</div>' + '</div>'; t.notOk(t.elementIsScrolledOut(document.getElementById('inner')), 'Correctly determined scrolled out element') }); t.it('Determine that element is scrolled out of the view', function (t) { document.body.innerHTML = '<div style="position:absolute; left:100px; border:1px solid #ddd; width:200px; height:200px; overflow:auto">' + '<div style="overflow:hidden; position:absolute; background:#aaa; width:50px; height:50px" id="inner">' + '<div style="position:absolute; background:red; left:45px; width:10px; height:10px" id="inner2"></div>' + '</div>' + '</div>'; t.notOk(t.elementIsScrolledOut(document.getElementById('inner'))) t.ok(t.elementIsScrolledOut(document.getElementById('inner2'), [ 9, 9 ]), "Works with offset #1") t.notOk(t.elementIsScrolledOut(document.getElementById('inner2'), [ 1, 1 ]), "Works with offset #2") }); t.it('Determine that element is scrolled out of the view', function (t) { document.body.innerHTML = '<div style="position:absolute; left:100px; border:1px solid #ddd; width:200px; height:200px; overflow:auto">' + '<div style="overflow:hidden; position:absolute; background:#aaa; width:50px; height:50px" id="inner">' + '<div style="position:absolute; background:red; top:45px; width:10px; height:10px" id="inner2"></div>' + '</div>' + '</div>'; t.notOk(t.elementIsScrolledOut(document.getElementById('inner'))) t.ok(t.elementIsScrolledOut(document.getElementById('inner2'), [ 9, 9 ]), "Works with offset #1") t.notOk(t.elementIsScrolledOut(document.getElementById('inner2'), [ 1, 1 ]), "Works with offset #2") }); }); });
qyangge/wechat
node_modules/_flash-store@0.14.5@flash-store/dist/index.js
export { log } from './config'; export { FlashStoreSync, } from './flash-store-sync'; import { FlashStore, } from './flash-store'; export { FlashStore, }; export default FlashStore; //# sourceMappingURL=index.js.map
wangyuqing0424/graviti-python-sdk
graviti/portex/factory.py
#!/usr/bin/env python3 # # Copyright 2022 Graviti. Licensed under MIT License. # """Template factory related classes.""" from collections import OrderedDict from typing import ( Any, Callable, Dict, Generic, List, Mapping, Optional, Set, Tuple, Type, TypeVar, Union, ) import yaml import graviti.portex.ptype as PTYPE from graviti.portex.base import PortexType from graviti.portex.field import Fields from graviti.portex.package import Imports, packages _C = TypeVar("_C", str, float, bool, None) class Dynamic: """The base class of the runtime parameter type analyzer.""" def __call__(self, **_: Any) -> PTYPE.PType: """Get the parameter type. Arguments: _: The input arguments. """ ... class DynamicPortexType(Dynamic): """The runtime parameter type analyzer for portex type.""" def __call__(self, **_: Any) -> PTYPE.PType: """Get the parameter type. Arguments: _: The input arguments. Returns: The ``PortexType``. """ return PTYPE.PortexType class DynamicDictParameter(Dynamic): """The runtime parameter type analyzer for dict values. Arguments: ptype_getter: A callable object that returns the type of the dict. key: The key of the dict value. decl: The full dict. """ def __init__(self, ptype_getter: Dynamic, key: str, decl: Dict[str, Any]): self._ptype_getter = ptype_getter self._key = key self._decl = decl def __call__(self, **kwargs: Any) -> PTYPE.PType: """Get the parameter type. Arguments: kwargs: The input arguments. Returns: The parameter type of the dict value. """ ptype = self._ptype_getter(**kwargs) if ptype in {PTYPE.PortexType, PTYPE.Field}: if self._key == "type": return PTYPE.TypeName if self._key == "name" and ptype == PTYPE.Field: return PTYPE.String name_factory = string_factory_creator(self._decl["type"], PTYPE.TypeName) class_ = packages.builtins[name_factory(**kwargs)] for name, parameter in class_.params.items(): if name == self._key: return parameter.ptype return PTYPE.Any class DynamicListParameter(Dynamic): """The runtime parameter type analyzer for list values. Arguments: ptype_getter: A callable object that returns the type of the list. """ def __init__(self, ptype_getter: Dynamic): self._ptype_getter = ptype_getter def __call__(self, **kwargs: Any) -> PTYPE.PType: """Get the parameter type. Arguments: kwargs: The input arguments. Returns: The parameter type of the list value. """ if self._ptype_getter(**kwargs) == PTYPE.Fields: return PTYPE.Field return PTYPE.Any class Factory: """The base class of the template factory.""" keys: Dict[str, Any] dependences: Set[Type[PortexType]] def __call__(self, **_: Any) -> Any: """Apply the input arguments to the template. Arguments: _: The input arguments. """ ... class BinaryExpression(Factory): """The Portex binary expression parser. Arguments: decl: A string which indicates a portex expression. """ # Why not use typing.OrderedDict here? 
# typing.OrderedDict is supported after python 3.7.2 # typing_extensions.OrderedDict will trigger https://github.com/python/mypy/issues/11528 _OPERATORS: Mapping[str, Callable[[Any, Any], bool]] = OrderedDict( { "==": lambda x, y: x == y, # type: ignore[no-any-return] "!=": lambda x, y: x != y, # type: ignore[no-any-return] ">=": lambda x, y: x >= y, # type: ignore[no-any-return] "<=": lambda x, y: x <= y, # type: ignore[no-any-return] ">": lambda x, y: x > y, # type: ignore[no-any-return] "<": lambda x, y: x < y, # type: ignore[no-any-return] } ) def __init__(self, decl: str) -> None: keys = {} for operator, method in self._OPERATORS.items(): if operator not in decl: continue operands = decl.split(operator) if len(operands) != 2: raise SyntaxError("Binary operator only accept two operands") # TODO: Use "string_factory_creator" in non-string case factories = [string_factory_creator(operand.strip()) for operand in operands] for i, factory in enumerate(factories): if isinstance(factory, ConstantFactory): factories[i] = ConstantFactory(yaml.load(factory(), yaml.Loader)) else: keys.update(factory.keys) self._factories = factories self._method = method self.keys = keys return raise SyntaxError("No operator found in expression") def __call__(self, **kwargs: Any) -> bool: """Apply the input arguments to the expression. Arguments: kwargs: The input arguments. Returns: The bool result of the expression. """ return self._method(*(factory(**kwargs) for factory in self._factories)) class TypeFactory(Factory): """The template factory for portex type. Arguments: decl: A dict which indicates a portex type. """ def __init__(self, decl: Dict[str, Any], imports: Imports) -> None: class_ = imports[decl["type"]] factories = {} keys = {} dependences = {class_} for name, parameter in class_.params.items(): try: value = decl[name] except KeyError as error: if parameter.required: raise KeyError(f"Parameter '{name}' is required") from error continue factory = factory_creator(value, imports, parameter.ptype) factories[name] = factory keys.update(factory.keys) dependences.update(factory.dependences) self._factories = factories self.keys = keys self.dependences = dependences self._class = class_ def __call__(self, **kwargs: Any) -> PortexType: """Apply the input arguments to the type template. Arguments: kwargs: The input arguments. Returns: The applied Portex type. """ type_kwargs = {key: factory(**kwargs) for key, factory in self._factories.items()} return self._class(**type_kwargs) class DynamicTypeFactory(Factory): """The template factory for dynamic Portex type. Arguments: decl: A dict which indicates a dynamic Portex type. """ def __init__(self, decl: Dict[str, Any], imports: Imports) -> None: self._type_parameter = decl["type"][8:] self._decl = decl self._imports = imports self.keys = DictFactory(decl, DynamicPortexType()).keys.copy() self.dependences = set() self.keys[self._type_parameter] = PTYPE.TypeName def __call__(self, **kwargs: Any) -> PortexType: """Apply the input arguments to the dynamic type template. Arguments: kwargs: The input arguments. Returns: The applied Portex type. """ decl = self._decl.copy() decl["type"] = kwargs[self._type_parameter] return TypeFactory(decl, self._imports)(**kwargs) class ConstantFactory(Factory, Generic[_C]): """The template factory for a constant. Arguments: decl: The constant to be created by the factory. 
""" def __init__(self, decl: _C) -> None: self._constant: _C = decl self.dependences = set() self.keys: Dict[str, Any] = {} def __call__(self, **_: Any) -> _C: """Get the constant stored in the factory. Arguments: _: The input arguments. Returns: The constant stored in the factory. """ return self._constant class VariableFactory(Factory): """The template factory for a variable. Arguments: decl: The parameter name of the variable. ptype: The parameter type. """ def __init__(self, decl: str, ptype: PTYPE.PType = PTYPE.Any) -> None: self._key = decl self.dependences = set() self.keys = {decl: ptype} def __call__(self, **kwargs: Any) -> Any: """Apply the input arguments to the variable template. Arguments: kwargs: The input arguments. Returns: The applied variable. """ return kwargs[self._key] class ListFactory(Factory): """The template factory for a list. Arguments: decl: A list template. ptype: The parameter type of the list. """ def __init__(self, decl: List[Any], ptype: PTYPE.PType = PTYPE.Any) -> None: factories = [] dependences = set() keys = {} ptype = DynamicListParameter(ptype) if isinstance(ptype, Dynamic) else ptype for value in decl: factory = factory_creator(value, None, ptype) factories.append(factory) dependences.update(factory.dependences) keys.update(factory.keys) self._factories = factories self.dependences = dependences self.keys = keys def __call__(self, **kwargs: Any) -> List[Any]: """Apply the input arguments to the list template. Arguments: kwargs: The input arguments. Returns: The applied list. """ return list(factory(**kwargs) for factory in self._factories) class DictFactory(Factory): """The template factory for a dict. Arguments: decl: A dict template. ptype: The parameter type of the dict. """ def __init__(self, decl: Dict[str, Any], ptype: PTYPE.PType = PTYPE.Any) -> None: factories = {} dependences = set() keys = {} for key, value in decl.items(): ptype = DynamicDictParameter(ptype, key, decl) if isinstance(ptype, Dynamic) else ptype factory = factory_creator(value, None, ptype) factories[key] = factory dependences.update(factory.dependences) keys.update(factory.keys) self._factories = factories self.dependences = dependences self.keys = keys def __call__(self, **kwargs: Any) -> Dict[str, Any]: """Apply the input arguments to the dict template. Arguments: kwargs: The input arguments. Returns: The applied dict. """ return {key: factory(**kwargs) for key, factory in self._factories.items()} class FieldFactory(Factory): """The template factory for a tuple of name and PortexType. Arguments: decl: A dict which indicates a tuple of name and PortexType. """ def __init__(self, decl: Dict[str, Any], imports: Imports) -> None: self.creator: Callable[..., Tuple[str, PortexType]] item = decl.copy() dependences = set() keys = {} expression = expression_creator(item.pop("exist_if", None)) keys.update(expression.keys) name_factory = string_factory_creator(item.pop("name"), PTYPE.String) type_factory = type_factory_creator(item, imports) dependences.update(type_factory.dependences) keys.update(name_factory.keys) keys.update(type_factory.keys) self._expression = expression self._name_factory = name_factory self._type_factory = type_factory self.dependences = dependences self.keys = keys def __call__(self, **kwargs: Any) -> Optional[Tuple[str, PortexType]]: """Apply the input arguments to the template. Arguments: kwargs: The input arguments. Returns: The applied tuple of name and PortexType. 
""" if not self._expression(**kwargs): return None return self._name_factory(**kwargs), self._type_factory(**kwargs) class FieldsFactory(Factory): """The template factory for a ``Fields``. Arguments: decl: A list which indicates a ``Fields``. """ def __init__(self, decl: List[Dict[str, Any]], imports: Imports) -> None: self._factories = [FieldFactory(item, imports) for item in decl] dependences = set() keys = {} for factory in self._factories: dependences.update(factory.dependences) keys.update(factory.keys) self.dependences = dependences self.keys = keys def __call__(self, **kwargs: Any) -> Fields: """Apply the input arguments to the ``Fields`` template. Arguments: kwargs: The input arguments. Returns: The applied ``Fields``. """ return Fields( filter(bool, (factory(**kwargs) for factory in self._factories)), # type: ignore[misc] ) def type_factory_creator( decl: Dict[str, Any], imports: Imports ) -> Union[TypeFactory, DynamicTypeFactory]: """Check whether the input is dynamic and returns the corresponding type factory. Arguments: decl: A dict which indicates a portex type or a dynamic portex type. imports: The :class:`Imports` instance to specify the import scope of the template. Returns: A ``TypeFactory`` or a ``DynamicTypeFactory`` instance according to the input. """ if decl["type"].startswith("$params."): return DynamicTypeFactory(decl, imports) return TypeFactory(decl, imports) def string_factory_creator( decl: str, ptype: PTYPE.PType = PTYPE.Any ) -> Union[VariableFactory, ConstantFactory[str]]: """Check whether the input string is variable and returns the corresponding factory. Arguments: decl: A string which indicates a constant or a variable. ptype: The parameter type of the string. Returns: A ``VariableFactory`` or a ``ConstantFactory`` instance according to the input. """ if decl.startswith("$params."): return VariableFactory(decl[8:], ptype) return ConstantFactory(decl) def expression_creator(decl: Optional[str]) -> Union[BinaryExpression, ConstantFactory[bool]]: """Check whether the input string is binary expression and returns the corresponding factory. Arguments: decl: A string which indicates a expression. Returns: A ``BinaryExpression`` or a ``ConstantFactory`` instance according to the input. """ if decl is None: return ConstantFactory(True) return BinaryExpression(decl) def factory_creator( decl: Any, imports: Optional[Imports], ptype: PTYPE.PType = PTYPE.Any ) -> Factory: """Check input type and returns the corresponding factory. Arguments: decl: A template which indicates any Portex object. imports: The :class:`Imports` instance to specify the import scope of the template. ptype: The parameter type of the input. Returns: A ``Factory`` instance according the input. """ if isinstance(decl, str) and decl.startswith("$params."): return VariableFactory(decl[8:], ptype) if ptype == PTYPE.PortexType: assert isinstance(decl, dict) assert imports is not None return type_factory_creator(decl, imports) if ptype == PTYPE.Fields: assert isinstance(decl, list) assert imports is not None return FieldsFactory(decl, imports) if isinstance(decl, list): return ListFactory(decl, ptype) if isinstance(decl, dict): return DictFactory(decl, ptype) return ConstantFactory(decl)
carmensp85/appverse-sass
demo/app/components/ui-bootstrap/uib-accordion-all-code.js
$scope.groups = [
  {
    title: 'Dynamic Group Header - 1',
    content: 'Dynamic Group Body - 1'
  },
  {
    title: 'Dynamic Group Header - 2',
    content: 'Dynamic Group Body - 2'
  }
];

$scope.status = {
  isFirstOpen: true,
  isFirstDisabled: false
};

$scope.status2 = {
  isFirstOpen: true,
  isFirstDisabled: false
};
yangfanchuhai/pigeon
pigeon-remoting/src/main/java/com/dianping/pigeon/remoting/invoker/process/AbstractResponseProcessor.java
/** * */ package com.dianping.pigeon.remoting.invoker.process; import org.apache.logging.log4j.Logger; import com.dianping.pigeon.log.LoggerLoader; import com.dianping.pigeon.monitor.Monitor; import com.dianping.pigeon.monitor.MonitorLoader; import com.dianping.pigeon.remoting.common.domain.InvocationResponse; import com.dianping.pigeon.remoting.invoker.Client; import com.dianping.pigeon.remoting.invoker.process.threadpool.ResponseThreadPoolProcessor; /** * @author xiangwu * */ public abstract class AbstractResponseProcessor implements ResponseProcessor { protected static final Logger logger = LoggerLoader.getLogger(ResponseThreadPoolProcessor.class); private static final Monitor monitor = MonitorLoader.getMonitor(); public abstract void doProcessResponse(InvocationResponse response, Client client); @Override public void processResponse(InvocationResponse response, Client client) { try { doProcessResponse(response, client); } catch (Throwable e) { String error = String.format("process response failed:%s, processor stats:%s", response, getProcessorStatistics()); logger.error(error, e); monitor.logError(error, e); } } }
jiayidev/Business
app/src/main/java/com/dev/brian/business/module/recommand/RecommandBodyValue.java
package com.dev.brian.business.module.recommand;

import com.dev.brian.business.module.BaseModel;
import com.dev.brian.sdk.module.monitor.Monitor;
import com.dev.brian.sdk.module.monitor.emevent.EMEvent;

import java.util.ArrayList;

/**
 * Author :BrianDev
 * Email :<EMAIL>
 * Create at:2017/11/16 0016
 * Description: Search entity
 */
public class RecommandBodyValue extends BaseModel {
    public int type;
    public String logo;
    public String title;
    public String info;
    public String price;
    public String text;
    public String site;
    public String from;
    public String zan;
    public ArrayList<String> url;

    // Video-specific fields
    public String thumb;
    public String resource;
    public String resourceID;
    public String adid;
    public ArrayList<Monitor> startMonitor;
    public ArrayList<Monitor> middleMonitor;
    public ArrayList<Monitor> endMonitor;
    public String clickUrl;
    public ArrayList<Monitor> clickMonitor;
    public EMEvent event;
}
JustinKyleJames/irods
unit_tests/src/test_metadata.cpp
#include "catch.hpp" #include "getRodsEnv.h" #include "rodsClient.h" #include "rcConnect.h" #include "connection_pool.hpp" #include "filesystem.hpp" #include "metadata.hpp" #include "irods_at_scope_exit.hpp" #include "dstream.hpp" #include "transport/default_transport.hpp" #include <vector> #include <string> namespace ix = irods::experimental; namespace ixm = ix::metadata; using entity_type = ix::entity::entity_type; TEST_CASE("metadata") { load_client_api_plugins(); rodsEnv env; REQUIRE(getRodsEnv(&env) == 0); const int cp_size = 1; const int cp_refresh_time = 600; irods::connection_pool conn_pool{cp_size, env.rodsHost, env.rodsPort, env.rodsUserName, env.rodsZone, cp_refresh_time}; auto conn = conn_pool.get_connection(); // clang-format off namespace fs = irods::experimental::filesystem; using odstream = irods::experimental::io::odstream; using default_transport = irods::experimental::io::client::default_transport; // clang-format on const auto sandbox = fs::path{env.rodsHome} / "unit_testing_sandbox"; if (!fs::client::exists(conn, sandbox)) { REQUIRE(fs::client::create_collection(conn, sandbox)); } irods::at_scope_exit remove_sandbox{[&conn, &sandbox] { REQUIRE(fs::client::remove_all(conn, sandbox, fs::remove_options::no_trash)); }}; SECTION("collection remove") { auto md = ixm::avu{"a", "v", "u"}; ixm::set(conn, md, entity_type::collection, env.rodsHome); auto res = ixm::get(conn, entity_type::collection, env.rodsHome); REQUIRE(res.size() == 1); ixm::remove(conn, md, entity_type::collection, env.rodsHome); res = ixm::get(conn, entity_type::collection, env.rodsHome); REQUIRE(res.size() == 0); } SECTION("collection set") { auto md = ixm::avu{"a", "v", "u"}; ixm::set(conn, md, entity_type::collection, env.rodsHome); auto res = ixm::get(conn, entity_type::collection, env.rodsHome); REQUIRE(res[0] == md); ixm::remove(conn, md, entity_type::collection, env.rodsHome); } SECTION("collection add") { auto md = ixm::avu{"a", "v", "u"}; ixm::add(conn, md, entity_type::collection, env.rodsHome); auto res = ixm::get(conn, entity_type::collection, env.rodsHome); REQUIRE(res[0] == md); ixm::remove(conn, md, entity_type::collection, env.rodsHome); } SECTION("collection modify") { auto md = ixm::avu{"a", "v", "u"}; auto md2 = ixm::avu{"a2", "v2", "u2"}; ixm::add(conn, md, entity_type::collection, env.rodsHome); auto res = ixm::get(conn, entity_type::collection, env.rodsHome); REQUIRE(res[0] == md); ixm::modify(conn, md, md2, entity_type::collection, env.rodsHome); res = ixm::get(conn, entity_type::collection, env.rodsHome); REQUIRE(res[0] == md2); ixm::remove(conn, md2, entity_type::collection, env.rodsHome); } SECTION("object remove") { const fs::path p = sandbox / "data_object"; { default_transport tp{conn}; odstream{tp, p} << "hello world!"; } auto md = ixm::avu{"a", "v", "u"}; ixm::set(conn, md, entity_type::data_object, p.string()); auto res = ixm::get(conn, entity_type::data_object, p.string()); REQUIRE(res.size() == 1); ixm::remove(conn, md, entity_type::data_object, p.string()); res = ixm::get(conn, entity_type::data_object, p.string()); REQUIRE(res.size() == 0); REQUIRE(fs::client::remove(conn, p, fs::remove_options::no_trash)); } SECTION("object add") { const auto p = sandbox / "data_object"; { default_transport tp{conn}; odstream{tp, p} << "hello world!"; } auto md = ixm::avu{"a", "v", "u"}; ixm::add(conn, md, entity_type::data_object, p); auto res = ixm::get(conn, entity_type::data_object, p); REQUIRE(res.size() == 1); ixm::remove(conn, md, entity_type::data_object, p); 
REQUIRE(fs::client::remove(conn, p, fs::remove_options::no_trash)); } SECTION("object set") { const fs::path p = sandbox / "data_object"; INFO(fmt::format("This is the path: {}", p.string())); { default_transport tp{conn}; odstream{tp, p} << "hello world!"; } REQUIRE(fs::client::exists(conn, p)); auto md = ixm::avu{"a", "v", "u"}; ixm::set(conn, md, entity_type::data_object, p); auto res = ixm::get(conn, entity_type::data_object, p); REQUIRE(res.size() == 1); ixm::remove(conn, md, entity_type::data_object, p); REQUIRE(fs::client::remove(conn, p, fs::remove_options::no_trash)); } SECTION("object modify") { const fs::path p = sandbox / "data_object"; INFO(fmt::format("This is the path: {}", p.string())); { default_transport tp{conn}; odstream{tp, p} << "hello world!"; } REQUIRE(fs::client::exists(conn, p)); auto md = ixm::avu{"a", "v", "u"}; auto md2 = ixm::avu{"a2", "v2", "u2"}; ixm::add(conn, md, entity_type::data_object, p); auto res = ixm::get(conn, entity_type::data_object, p); REQUIRE(res[0] == md); ixm::modify(conn, md, md2, entity_type::data_object, p); res = ixm::get(conn, entity_type::data_object, p); REQUIRE(res[0] == md2); ixm::remove(conn, md2, entity_type::data_object, p); } SECTION("user remove") { auto md = ixm::avu{"a", "v", "u"}; ixm::set(conn, md, entity_type::user, env.rodsUserName); auto res = ixm::get(conn, entity_type::user, env.rodsUserName); REQUIRE(res.size() == 1); ixm::remove(conn, md, entity_type::user, env.rodsUserName); res = ixm::get(conn, entity_type::user, env.rodsUserName); REQUIRE(res.size() == 0); } SECTION("user set") { auto md = ixm::avu{"a", "v", "u"}; ixm::set(conn, md, entity_type::user, env.rodsUserName); auto res = ixm::get(conn, entity_type::user, env.rodsUserName); REQUIRE(res[0] == md); ixm::remove(conn, md, entity_type::user, env.rodsUserName); } SECTION("user add") { auto md = ixm::avu{"a", "v", "u"}; ixm::add(conn, md, entity_type::user, env.rodsUserName); auto res = ixm::get(conn, entity_type::user, env.rodsUserName); REQUIRE(res[0] == md); ixm::remove(conn, md, entity_type::user, env.rodsUserName); } SECTION("user modify") { auto md = ixm::avu{"a", "v", "u"}; auto md2 = ixm::avu{"a2", "v2", "u2"}; ixm::add(conn, md, entity_type::user, env.rodsUserName); auto res = ixm::get(conn, entity_type::user, env.rodsUserName); REQUIRE(res[0] == md); ixm::modify(conn, md, md2, entity_type::user, env.rodsUserName); res = ixm::get(conn, entity_type::user, env.rodsUserName); REQUIRE(res[0] == md2); ixm::remove(conn, md2, entity_type::user, env.rodsUserName); } SECTION("resource remove") { auto md = ixm::avu{"a", "v", "u"}; ixm::set(conn, md, entity_type::resource, "demoResc"); auto res = ixm::get(conn, entity_type::resource, "demoResc"); REQUIRE(res.size() == 1); ixm::remove(conn, md, entity_type::resource, "demoResc"); res = ixm::get(conn, entity_type::resource, "demoResc"); REQUIRE(res.size() == 0); } SECTION("resource set") { auto md = ixm::avu{"a", "v", "u"}; ixm::set(conn, md, entity_type::resource, "demoResc"); auto res = ixm::get(conn, entity_type::resource, "demoResc"); REQUIRE(res[0] == md); ixm::remove(conn, md, entity_type::resource, "demoResc"); } SECTION("resource add") { auto md = ixm::avu{"a", "v", "u"}; ixm::add(conn, md, entity_type::resource, "demoResc"); auto res = ixm::get(conn, entity_type::resource, "demoResc"); REQUIRE(res[0] == md); ixm::remove(conn, md, entity_type::resource, "demoResc"); } SECTION("resource modify") { auto md = ixm::avu{"a", "v", "u"}; auto md2 = ixm::avu{"a2", "v2", "u2"}; ixm::add(conn, md, 
entity_type::resource, "demoResc"); auto res = ixm::get(conn, entity_type::resource, "demoResc"); REQUIRE(res[0] == md); ixm::modify(conn, md, md2, entity_type::resource, "demoResc"); res = ixm::get(conn, entity_type::resource, "demoResc"); REQUIRE(res[0] == md2); ixm::remove(conn, md2, entity_type::resource, "demoResc"); } }
ooskapenaar/mojito
tests/fixtures/gsg5-appConfig/index.js
/*
 * Copyright (c) 2011 Yahoo! Inc. All rights reserved.
 */

// this file provides Manhattan integration
process.chdir(__dirname);

var http = require('http'),
    app = require('./app');

/**
 * @token given by manhattan and used to emit that the app is ready
 */
module.exports = function(config, token) {
    // send the application to Manhattan along with the token
    process.emit("application-ready", token, http.createServer(app));
};
bcvsolutions/czechidm-extras
Realization/backend/idm-extras/src/test/java/eu/bcvsolutions/idm/extras/event/processor/contract/CopyRolesFromCurrentContractProcessorTest.java
<reponame>bcvsolutions/czechidm-extras package eu.bcvsolutions.idm.extras.event.processor.contract; import java.time.LocalDate; import java.util.List; import java.util.UUID; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; import eu.bcvsolutions.idm.core.api.domain.AutomaticRoleAttributeRuleComparison; import eu.bcvsolutions.idm.core.api.domain.AutomaticRoleAttributeRuleType; import eu.bcvsolutions.idm.core.api.dto.IdmAutomaticRoleAttributeDto; import eu.bcvsolutions.idm.core.api.dto.IdmIdentityContractDto; import eu.bcvsolutions.idm.core.api.dto.IdmIdentityDto; import eu.bcvsolutions.idm.core.api.dto.IdmIdentityRoleDto; import eu.bcvsolutions.idm.core.api.dto.IdmRoleDto; import eu.bcvsolutions.idm.core.api.dto.IdmTreeNodeDto; import eu.bcvsolutions.idm.core.api.dto.filter.IdmIdentityRoleFilter; import eu.bcvsolutions.idm.core.api.service.IdmIdentityContractService; import eu.bcvsolutions.idm.core.api.service.IdmIdentityRoleService; import eu.bcvsolutions.idm.core.api.service.IdmIdentityService; import eu.bcvsolutions.idm.core.model.entity.IdmIdentity_; import eu.bcvsolutions.idm.extras.config.domain.ExtrasConfiguration; import eu.bcvsolutions.idm.test.api.AbstractIntegrationTest; public class CopyRolesFromCurrentContractProcessorTest extends AbstractIntegrationTest { @Autowired private IdmIdentityContractService identityContractService; @Autowired private IdmIdentityRoleService identityRoleService; @Autowired private IdmIdentityService identityService; @Before public void init() { getHelper().enableProcessor(CopyRolesFromCurrentContractProcessor.PROCESSOR_NAME); } @After public void end() { getHelper().disableProcessor(CopyRolesFromCurrentContractProcessor.PROCESSOR_NAME); } @Test public void testCopyingRolesWithSkippedCheck() { getHelper().setConfigurationValue(ExtrasConfiguration.EXTRAS_COPY_ROLES_TO_NEW_CONTRACT_SKIP_POSITION_CHECK, "true"); IdmIdentityDto identity = getHelper().createIdentity(); IdmIdentityContractDto contract = identityContractService.getPrimeContract(identity.getId()); contract.setWorkPosition(getHelper().createTreeNode().getId()); contract = identityContractService.save(contract); IdmRoleDto role = getHelper().createRole(); getHelper().assignRoles(contract, role); IdmIdentityContractDto newContract = new IdmIdentityContractDto(); newContract.setIdentity(identity.getId()); newContract.setWorkPosition(getHelper().createTreeNode().getId()); newContract.setValidFrom(LocalDate.now().plusDays(15L)); newContract = identityContractService.save(newContract); // // check that role was copied to the new contract IdmIdentityRoleFilter identityRoleFilter = new IdmIdentityRoleFilter(); identityRoleFilter.setIdentityContractId(newContract.getId()); List<IdmIdentityRoleDto> newAssignedRoles = identityRoleService.find(identityRoleFilter, null).getContent(); Assert.assertEquals(1, newAssignedRoles.size()); Assert.assertEquals(role.getId(), newAssignedRoles.get(0).getRole()); getHelper().setConfigurationValue(ExtrasConfiguration.EXTRAS_COPY_ROLES_TO_NEW_CONTRACT_SKIP_POSITION_CHECK, "false"); } @Test public void testCopyingRolesWithoutSkippedCheck() { String position = getHelper().createName(); UUID workPositionId = getHelper().createTreeNode().getId(); IdmIdentityDto identity = getHelper().createIdentity(); IdmIdentityContractDto contract = identityContractService.getPrimeContract(identity.getId()); contract.setWorkPosition(workPositionId); contract.setPosition(position); 
contract = identityContractService.save(contract); IdmRoleDto role = getHelper().createRole(); getHelper().assignRoles(contract, role); IdmIdentityContractDto newContract = new IdmIdentityContractDto(); newContract.setIdentity(identity.getId()); newContract.setWorkPosition(getHelper().createTreeNode().getId()); newContract.setValidFrom(LocalDate.now().plusDays(15L)); newContract.setPosition(getHelper().createName()); newContract = identityContractService.save(newContract); // // check that role was not copied to the new contract because contract is on a // different position IdmIdentityRoleFilter identityRoleFilter = new IdmIdentityRoleFilter(); identityRoleFilter.setIdentityContractId(newContract.getId()); List<IdmIdentityRoleDto> newAssignedRoles = identityRoleService.find(identityRoleFilter, null).getContent(); Assert.assertEquals(0, newAssignedRoles.size()); // // create a new contract at the same position as the previous one newContract = new IdmIdentityContractDto(); newContract.setIdentity(identity.getId()); newContract.setWorkPosition(workPositionId); newContract.setValidFrom(LocalDate.now().plusDays(15L)); newContract.setPosition(position); newContract = identityContractService.save(newContract); identityRoleFilter = new IdmIdentityRoleFilter(); identityRoleFilter.setIdentityContractId(newContract.getId()); newAssignedRoles = identityRoleService.find(identityRoleFilter, null).getContent(); Assert.assertEquals(1, newAssignedRoles.size()); Assert.assertEquals(role.getId(), newAssignedRoles.get(0).getRole()); } @Test public void testCopyingRolesWithoutSkippedCheckTestRoleValidity() { String position = getHelper().createName(); UUID workPositionId = getHelper().createTreeNode().getId(); IdmIdentityDto identity = getHelper().createIdentity(); IdmIdentityContractDto contract = identityContractService.getPrimeContract(identity.getId()); contract.setWorkPosition(workPositionId); contract.setPosition(position); contract = identityContractService.save(contract); IdmRoleDto role = getHelper().createRole(); getHelper().assignRoles(contract, role); IdmIdentityRoleFilter identityRoleFilter = new IdmIdentityRoleFilter(); identityRoleFilter.setIdentityContractId(contract.getId()); List<IdmIdentityRoleDto> assignedRoles = identityRoleService.find(identityRoleFilter, null).getContent(); for (IdmIdentityRoleDto assignedRole : assignedRoles) { assignedRole.setValidFrom(LocalDate.now().minusDays(150L)); assignedRole.setValidTill(LocalDate.now().plusDays(150L)); identityRoleService.save(assignedRole); } IdmIdentityContractDto newContract = new IdmIdentityContractDto(); newContract.setIdentity(identity.getId()); newContract.setWorkPosition(workPositionId); newContract.setValidFrom(LocalDate.now().plusDays(15L)); newContract.setPosition(position); newContract = identityContractService.save(newContract); // // check that one role was added with the proper identityRoleFilter = new IdmIdentityRoleFilter(); identityRoleFilter.setIdentityContractId(newContract.getId()); List<IdmIdentityRoleDto> newAssignedRoles = identityRoleService.find(identityRoleFilter, null).getContent(); Assert.assertEquals(1, newAssignedRoles.size()); Assert.assertEquals(role.getId(), newAssignedRoles.get(0).getRole()); Assert.assertEquals(newContract.getValidFrom(), newAssignedRoles.get(0).getValidFrom()); Assert.assertEquals(assignedRoles.get(0).getValidTill(), newAssignedRoles.get(0).getValidTill()); } @Test public void testCopyingRolesWithoutSkippedCheckAutomaticRoles() { String position = getHelper().createName(); IdmTreeNodeDto 
workPosition = getHelper().createTreeNode(); IdmIdentityDto identity = getHelper().createIdentity(); String description = getHelper().createName(); identity.setDescription(description); identity = identityService.save(identity); // IdmIdentityContractDto contract = identityContractService.getPrimeContract(identity.getId()); contract.setValidTill(LocalDate.now().plusDays(15L)); contract.setWorkPosition(workPosition.getId()); contract.setPosition(position); contract = identityContractService.save(contract); IdmRoleDto roleNotAutomatic = getHelper().createRole(); getHelper().assignRoles(contract, roleNotAutomatic); // // create automatic role IdmRoleDto roleAutomatic = getHelper().createRole(); IdmAutomaticRoleAttributeDto automaticRole = getHelper().createAutomaticRole(roleAutomatic.getId()); getHelper().createAutomaticRoleRule(automaticRole.getId(), AutomaticRoleAttributeRuleComparison.EQUALS, AutomaticRoleAttributeRuleType.IDENTITY, IdmIdentity_.description.getName(), null, description); getHelper().recalculateAutomaticRoleByAttribute(automaticRole.getId()); IdmRoleDto roleAutomaticTwo = getHelper().createRole(); getHelper().createAutomaticRole(roleAutomaticTwo, workPosition); // IdmIdentityContractDto newContract = new IdmIdentityContractDto(); newContract.setIdentity(identity.getId()); newContract.setWorkPosition(workPosition.getId()); newContract.setValidFrom(LocalDate.now().plusDays(15L)); newContract.setPosition(position); newContract = identityContractService.save(newContract); // // check that only three roles are assigned (one copied, two automatic) // if automatic role was copied as well there would be more IdmIdentityRoleFilter identityRoleFilter = new IdmIdentityRoleFilter(); identityRoleFilter.setIdentityContractId(newContract.getId()); List<IdmIdentityRoleDto> newAssignedRoles = identityRoleService.find(identityRoleFilter, null).getContent(); Assert.assertEquals(3, newAssignedRoles.size()); } @Test public void testCopyingRolesWithoutSkippedCheckBusinessRoles() { String position = getHelper().createName(); IdmTreeNodeDto workPosition = getHelper().createTreeNode(); IdmIdentityDto identity = getHelper().createIdentity(); String description = getHelper().createName(); identity.setDescription(description); identity = identityService.save(identity); // IdmIdentityContractDto contract = identityContractService.getPrimeContract(identity.getId()); contract.setValidTill(LocalDate.now().plusDays(15L)); contract.setWorkPosition(workPosition.getId()); contract.setPosition(position); contract = identityContractService.save(contract); IdmRoleDto roleNotAutomatic = getHelper().createRole(); getHelper().assignRoles(contract, roleNotAutomatic); // // create business role IdmRoleDto businessTopRole = getHelper().createRole(); IdmRoleDto businessSubRole = getHelper().createRole(); getHelper().createRoleComposition(businessTopRole, businessSubRole); getHelper().assignRoles(contract, businessTopRole); // IdmIdentityContractDto newContract = new IdmIdentityContractDto(); newContract.setIdentity(identity.getId()); newContract.setWorkPosition(workPosition.getId()); newContract.setValidFrom(LocalDate.now().plusDays(15L)); newContract.setPosition(position); newContract = identityContractService.save(newContract); // // check that only three roles are assigned (two copied, one business role) // if business role was copied as well there would be more IdmIdentityRoleFilter identityRoleFilter = new IdmIdentityRoleFilter(); identityRoleFilter.setIdentityContractId(newContract.getId()); 
List<IdmIdentityRoleDto> newAssignedRoles = identityRoleService.find(identityRoleFilter, null).getContent(); Assert.assertEquals(3, newAssignedRoles.size()); } @Test public void testCopyingRolesWithoutSkippedCheckUpdate() { String position = getHelper().createName(); UUID workPositionId = getHelper().createTreeNode().getId(); IdmIdentityDto identity = getHelper().createIdentity(); IdmIdentityContractDto contract = identityContractService.getPrimeContract(identity.getId()); contract.setWorkPosition(workPositionId); contract.setPosition(position); contract = identityContractService.save(contract); IdmRoleDto role = getHelper().createRole(); getHelper().assignRoles(contract, role); IdmIdentityContractDto newContract = new IdmIdentityContractDto(); newContract.setIdentity(identity.getId()); newContract.setWorkPosition(getHelper().createTreeNode().getId()); newContract.setValidFrom(LocalDate.now().plusDays(15L)); newContract.setPosition(getHelper().createName()); newContract = identityContractService.save(newContract); // // check that role was not copied to the new contract because contract is on a // different position IdmIdentityRoleFilter identityRoleFilter = new IdmIdentityRoleFilter(); identityRoleFilter.setIdentityContractId(newContract.getId()); List<IdmIdentityRoleDto> newAssignedRoles = identityRoleService.find(identityRoleFilter, null).getContent(); Assert.assertEquals(0, newAssignedRoles.size()); // // create a new contract at the same position as the previous one newContract = new IdmIdentityContractDto(); newContract.setIdentity(identity.getId()); newContract.setWorkPosition(workPositionId); newContract.setValidFrom(LocalDate.now().plusDays(15L)); newContract.setPosition(position); newContract = identityContractService.save(newContract); identityRoleFilter = new IdmIdentityRoleFilter(); identityRoleFilter.setIdentityContractId(newContract.getId()); newAssignedRoles = identityRoleService.find(identityRoleFilter, null).getContent(); Assert.assertEquals(1, newAssignedRoles.size()); Assert.assertEquals(role.getId(), newAssignedRoles.get(0).getRole()); // // try empty update newContract = identityContractService.save(newContract); newAssignedRoles = identityRoleService.find(identityRoleFilter, null).getContent(); Assert.assertEquals(1, newAssignedRoles.size()); Assert.assertEquals(role.getId(), newAssignedRoles.get(0).getRole()); // // try update with assigned new roles to the prime valid contract IdmRoleDto roleTwo = getHelper().createRole(); getHelper().assignRoles(contract, roleTwo); newContract = identityContractService.save(newContract); newAssignedRoles = identityRoleService.find(identityRoleFilter, null).getContent(); Assert.assertEquals(2, newAssignedRoles.size()); } }
notthatbreezy/geotrellis
spark/src/main/scala/geotrellis/spark/io/hadoop/reader/SimpleRasterReader.scala
/* * Copyright (c) 2014 DigitalGlobe. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package geotrellis.spark.io.hdfs.reader import geotrellis.raster._ import geotrellis.spark.io.hadoop.formats._ import geotrellis.spark.utils._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path import org.apache.hadoop.io.MapFile import org.apache.hadoop.fs.FileUtil import org.apache.hadoop.fs.FileStatus import java.io.Closeable /* * An Iterable-based reader. Note that even though Iterables have a rich set of methods * this implementation currently would iterate through all tiles to perform any of the * operations. So for example last() would iterate through all tiles to find the last * tile. Clearly, this can be optimized further in at least two ways: * * 1. Point looksups on tile ids. Internally seeks the MapFile.Reader to the correct * location for fast performance * 2. Range lookups on ranges of tiles (with optional start/end values). Internally * seeks the MapFile.Reader to the start location and stops past the end of the * user-provided range * */ case class SimpleRasterReader(raster: Path, conf: Configuration) extends Iterable[(TileIdWritable, ArgWritable)] with Closeable { def close = iterator.close def iterator = new Iterator[(TileIdWritable, ArgWritable)] with Closeable { private val curKey: TileIdWritable = new TileIdWritable private val curValue: ArgWritable = new ArgWritable private var curPartition: Int = 0 // initialize readers and partitioner private val readers = getReaders def close = readers.foreach(r => if (r != null) r.close) override def hasNext = { if (curPartition >= readers.length) false else if (readers(curPartition).next(curKey, curValue)) true else { curPartition += 1 hasNext } } override def next = (curKey,curValue) private def getReaders: Array[MapFile.Reader] = { val fs = raster.getFileSystem(conf) val dirs = FileUtil.stat2Paths(fs.listStatus(raster)).sortBy(_.toUri.toString) def isData(fst: FileStatus) = fst.getPath.getName.equals("data") def isMapFileDir(path: Path) = fs.listStatus(path).find(isData(_)) match { case Some(f) => true case None => false } val readers = for { dir <- dirs if (isMapFileDir(dir)) } yield new MapFile.Reader(fs, dir.toUri().toString(), conf) readers } } } // TODO - replace with test object SimpleRasterReader { def main(args: Array[String]): Unit = { val raster = new Path("hdfs://localhost:9000/geotrellis/images/testcostdistance-gt-ingest/10") val conf = SparkUtils.hadoopConfiguration val reader = SimpleRasterReader(raster, conf) var count = 0 reader.foreach{ case(tw,aw) => { println(s"tileId=${tw.get}") count += 1 } } //val (tw,aw) = reader.last //println(s"last tile id = ${tw.get}") reader.close println(s"Got $count records") } }
ScarletteTout/PDFium
xfa/fxfa/cxfa_ffapp.cpp
<filename>xfa/fxfa/cxfa_ffapp.cpp // Copyright 2014 PDFium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // Original code copyright 2014 Foxit Software Inc. http://www.foxitsoftware.com #include "xfa/fxfa/cxfa_ffapp.h" #include <algorithm> #include <memory> #include <utility> #include <vector> #include "third_party/base/ptr_util.h" #include "third_party/base/stl_util.h" #include "xfa/fgas/font/cfgas_fontmgr.h" #include "xfa/fwl/cfwl_notedriver.h" #include "xfa/fwl/cfwl_widgetmgr.h" #include "xfa/fxfa/app/xfa_fwladapter.h" #include "xfa/fxfa/app/xfa_fwltheme.h" #include "xfa/fxfa/cxfa_ffdoc.h" #include "xfa/fxfa/cxfa_ffdochandler.h" #include "xfa/fxfa/cxfa_ffwidgethandler.h" #include "xfa/fxfa/cxfa_fontmgr.h" CXFA_FFApp::CXFA_FFApp(IXFA_AppProvider* pProvider) : m_pProvider(pProvider), m_pWidgetMgrDelegate(nullptr), m_pFWLApp(pdfium::MakeUnique<CFWL_App>(this)) {} CXFA_FFApp::~CXFA_FFApp() {} CXFA_FFDocHandler* CXFA_FFApp::GetDocHandler() { if (!m_pDocHandler) m_pDocHandler = pdfium::MakeUnique<CXFA_FFDocHandler>(); return m_pDocHandler.get(); } std::unique_ptr<CXFA_FFDoc> CXFA_FFApp::CreateDoc( IXFA_DocEnvironment* pDocEnvironment, CPDF_Document* pPDFDoc) { if (!pPDFDoc) return nullptr; auto pDoc = pdfium::MakeUnique<CXFA_FFDoc>(this, pDocEnvironment); if (!pDoc->OpenDoc(pPDFDoc)) return nullptr; return pDoc; } void CXFA_FFApp::SetDefaultFontMgr(std::unique_ptr<CXFA_DefFontMgr> pFontMgr) { if (!m_pFontMgr) m_pFontMgr = pdfium::MakeUnique<CXFA_FontMgr>(); m_pFontMgr->SetDefFontMgr(std::move(pFontMgr)); } CXFA_FontMgr* CXFA_FFApp::GetXFAFontMgr() const { return m_pFontMgr.get(); } CFGAS_FontMgr* CXFA_FFApp::GetFDEFontMgr() { if (!m_pFDEFontMgr) { #if _FXM_PLATFORM_ == _FXM_PLATFORM_WINDOWS_ m_pFDEFontMgr = CFGAS_FontMgr::Create(FX_GetDefFontEnumerator()); #else m_pFontSource = pdfium::MakeUnique<CFX_FontSourceEnum_File>(); m_pFDEFontMgr = CFGAS_FontMgr::Create(m_pFontSource.get()); #endif } return m_pFDEFontMgr.get(); } CXFA_FWLTheme* CXFA_FFApp::GetFWLTheme() { if (!m_pFWLTheme) m_pFWLTheme = pdfium::MakeUnique<CXFA_FWLTheme>(this); return m_pFWLTheme.get(); } CXFA_FWLAdapterWidgetMgr* CXFA_FFApp::GetWidgetMgr( CFWL_WidgetMgrDelegate* pDelegate) { if (!m_pAdapterWidgetMgr) { m_pAdapterWidgetMgr = pdfium::MakeUnique<CXFA_FWLAdapterWidgetMgr>(); pDelegate->OnSetCapability(FWL_WGTMGR_DisableForm); m_pWidgetMgrDelegate = pDelegate; } return m_pAdapterWidgetMgr.get(); } IFWL_AdapterTimerMgr* CXFA_FFApp::GetTimerMgr() const { return m_pProvider->GetTimerMgr(); } void CXFA_FFApp::ClearEventTargets() { m_pFWLApp->GetNoteDriver()->ClearEventTargets(); }
edigonzales/LandRegisterParcelDescription
src/generated/java/ch/admin/geo/schemas/bj/tgbv/gbbasistypen/_2/GrundstueckWert.java
<gh_stars>0 package ch.admin.geo.schemas.bj.tgbv.gbbasistypen._2; import java.math.BigDecimal; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.NormalizedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import javax.xml.datatype.XMLGregorianCalendar; /** * <p>Java class for GrundstueckWertType complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="GrundstueckWertType"&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;element name="Wert" type="{http://www.w3.org/2001/XMLSchema}decimal"/&gt; * &lt;element name="NameWert" type="{http://www.w3.org/2001/XMLSchema}normalizedString"/&gt; * &lt;element name="TypWert" type="{http://www.w3.org/2001/XMLSchema}NCName"/&gt; * &lt;element name="Datum" type="{http://www.w3.org/2001/XMLSchema}date" minOccurs="0"/&gt; * &lt;element name="NameDatum" type="{http://www.w3.org/2001/XMLSchema}normalizedString" minOccurs="0"/&gt; * &lt;element ref="{http://schemas.geo.admin.ch/BJ/TGBV/GBBasisTypen/2.1}extensions" minOccurs="0"/&gt; * &lt;/sequence&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "GrundstueckWertType", propOrder = { "wert", "nameWert", "typWert", "datum", "nameDatum", "extensions" }) @XmlRootElement(name = "GrundstueckWert") public class GrundstueckWert { @XmlElement(name = "Wert", required = true) protected BigDecimal wert; @XmlElement(name = "NameWert", required = true) @XmlJavaTypeAdapter(NormalizedStringAdapter.class) @XmlSchemaType(name = "normalizedString") protected String nameWert; @XmlElement(name = "TypWert", required = true) @XmlJavaTypeAdapter(CollapsedStringAdapter.class) @XmlSchemaType(name = "NCName") protected String typWert; @XmlElement(name = "Datum") @XmlSchemaType(name = "date") protected XMLGregorianCalendar datum; @XmlElement(name = "NameDatum") @XmlJavaTypeAdapter(NormalizedStringAdapter.class) @XmlSchemaType(name = "normalizedString") protected String nameDatum; protected Extensions extensions; /** * Gets the value of the wert property. * * @return * possible object is * {@link BigDecimal } * */ public BigDecimal getWert() { return wert; } /** * Sets the value of the wert property. * * @param value * allowed object is * {@link BigDecimal } * */ public void setWert(BigDecimal value) { this.wert = value; } /** * Gets the value of the nameWert property. * * @return * possible object is * {@link String } * */ public String getNameWert() { return nameWert; } /** * Sets the value of the nameWert property. * * @param value * allowed object is * {@link String } * */ public void setNameWert(String value) { this.nameWert = value; } /** * Gets the value of the typWert property. * * @return * possible object is * {@link String } * */ public String getTypWert() { return typWert; } /** * Sets the value of the typWert property. * * @param value * allowed object is * {@link String } * */ public void setTypWert(String value) { this.typWert = value; } /** * Gets the value of the datum property. 
* * @return * possible object is * {@link XMLGregorianCalendar } * */ public XMLGregorianCalendar getDatum() { return datum; } /** * Sets the value of the datum property. * * @param value * allowed object is * {@link XMLGregorianCalendar } * */ public void setDatum(XMLGregorianCalendar value) { this.datum = value; } /** * Gets the value of the nameDatum property. * * @return * possible object is * {@link String } * */ public String getNameDatum() { return nameDatum; } /** * Sets the value of the nameDatum property. * * @param value * allowed object is * {@link String } * */ public void setNameDatum(String value) { this.nameDatum = value; } /** * Gets the value of the extensions property. * * @return * possible object is * {@link Extensions } * */ public Extensions getExtensions() { return extensions; } /** * Sets the value of the extensions property. * * @param value * allowed object is * {@link Extensions } * */ public void setExtensions(Extensions value) { this.extensions = value; } }
edusalguero/rexoubapp
src/main/java/com/edusalguero/rexoubapp/application/server/harvester/ServerHarvesterResponse.java
package com.edusalguero.rexoubapp.application.server.harvester; import com.edusalguero.rexoubapp.application.datatransformer.DateConverter; import com.edusalguero.rexoubapp.application.monitor.harvester.HarvesterResponse; import com.edusalguero.rexoubapp.domain.model.server.harvester.Harvest; import com.edusalguero.rexoubapp.domain.model.server.harvester.ServerHarvester; public class ServerHarvesterResponse { private HarvesterResponse harvesterResponse; private Harvest harvest; private String serverHarvesterId; public ServerHarvesterResponse(ServerHarvester serverHarvester) { this.harvesterResponse = new HarvesterResponse(serverHarvester.harvester()); this.harvest = serverHarvester.getLastHarvest(); this.serverHarvesterId = serverHarvester.id(); } public String getHarvestDate() { return DateConverter.getFormattedDateOrEmptyString(harvest.getDate()); } public Harvest getHarvest() { return harvest; } public String getServerHarvesterId() { return serverHarvesterId; } public HarvesterResponse getHarvester() { return harvesterResponse; } }
djknit/timeclock
server/controllers/User/utilities/errors.js
module.exports = { determineUserInfoError };

function determineUserInfoError(err) {
  const { code, errors, errmsg } = err;
  let problemMessages = [];
  let problems = {};
  let status;
  if (code === 11000) {
    if (errmsg.indexOf('username') > -1) {
      return {
        message: 'That username is unavailable.',
        problems: { username: true },
        status: 422
      };
    }
    if (errmsg.indexOf('lowercaseEmail') > -1) {
      return {
        message: 'There is already an account for that email address.',
        problems: { email: true },
        status: 422
      };
    }
  }
  if (!errors) {
    return new Error('An unknown problem was encountered.');
  }
  if (errors.password) {
    problemMessages.push(errors.password.message);
    problems.password = true;
    status = 422;
  }
  if (errors.username) {
    problemMessages.push(errors.username.message);
    problems.username = true;
    status = 422;
  }
  if (errors.lowercaseEmail) {
    problemMessages.push(errors.lowercaseEmail.message.replace('lowercaseEmail', 'email'));
    problems.email = true;
    status = 422;
  }
  if (problemMessages.length > 0) {
    return { messages: problemMessages, problems, status };
  }
  return new Error('An unknown problem was encountered.');
}
gaganeggday/Tasks38
pkg/odo/cli/pipelines/pipelines.go
package pipelines

import (
	"fmt"

	"github.com/spf13/cobra"

	odoutil "github.com/openshift/odo/pkg/odo/util"
)

// RecommendedCommandName is the recommended pipelines command name.
const RecommendedCommandName = "pipelines"

// NewCmdComponent implements the pipelines odo command.
func NewCmdComponent(name, fullName string) *cobra.Command {
	bootstrapCmd := NewCmdBootstrap(BootstrapRecommendedCommandName, odoutil.GetFullName(fullName, BootstrapRecommendedCommandName))

	var pipelinesCmd = &cobra.Command{
		Use:     name,
		Short:   "Manage pipelines",
		Example: fmt.Sprintf("%s\n%s\n\n See sub-commands individually for more examples", fullName, BootstrapRecommendedCommandName),
		Run: func(cmd *cobra.Command, args []string) {
		},
	}

	pipelinesCmd.Flags().AddFlagSet(bootstrapCmd.Flags())
	pipelinesCmd.AddCommand(bootstrapCmd)

	pipelinesCmd.Annotations = map[string]string{"command": "main"}
	pipelinesCmd.SetUsageTemplate(odoutil.CmdUsageTemplate)
	return pipelinesCmd
}
graemepatt/BerryCamExpress
node_modules/grunt-styleguide/node_modules/kss/test/kss_section.js
var kss = require('../index.js'), KssStyleguide = kss.KssStyleguide, KssSection = kss.KssSection, KssModifier = kss.KssModifier, path = require('path'), assert = require('assert'), styleDirectory = path.normalize(__dirname + '/fixtures-styles/'), common = require('./common.js')(styleDirectory); suite('KssSection', function() { common.hasMethod(new KssSection({}), 'header'); common.hasMethod(new KssSection({}), 'description'); common.hasMethod(new KssSection({}), 'deprecated'); common.hasMethod(new KssSection({}), 'experimental'); common.hasMethod(new KssSection({}), 'modifiers'); common.hasMethod(new KssSection({}), 'firstModifier'); suite('#header', function() { common.testAllSections('returns section.data.header', '*.less|*.css', function(section) { assert.strictEqual(section.header(), section.data.header); }); }); suite('#description', function() { common.testAllSections('returns section.data.description', '*.less|*.css', function(section) { assert.strictEqual(section.description(), section.data.description); }); }); suite('#firstModifier', function() { common.testAllSections('returns section.data.modifiers[0], or first if not found', '*.less|*.css', function(section) { if (section.data.modifiers.length) { assert.equal(section.firstModifier(), section.modifiers(0)); } }); }); suite('#deprecated', function() { common.testAllSections('returns section.data.deprecated', '*.less|*.css', function(section) { assert.equal(section.deprecated(), section.data.deprecated); }); }); suite('#experimental', function() { common.testAllSections('returns section.data.experimental', '*.less|*.css', function(section) { assert.equal(section.experimental(), section.data.experimental); }); }); suite('#reference', function() { common.testAllSections('returns section.data.reference', '*.less|*.css', function(section) { assert.equal(section.reference(), section.data.reference); }); }); suite('#modifiers', function() { common.testAllSections('() returns section.data.modifiers', '*.less|*.css', function(section) { assert.strictEqual(section.modifiers(), section.data.modifiers); }); common.testAllSections('() all returned should be instances of KssModifier', '*.less|*.css', function(section) { var modifiers = section.modifiers(), i, l = modifiers.length; for (i = 0; i < l; i += 1) { assert.ok(modifiers[i] instanceof KssModifier); } }); common.testAllSections('(n) returns section.data.modifiers[n], or false if non-existent', '*.less|*.css', function(section) { var i, j = 5, l = section.data.modifiers.length; for (i = 0; i < j || i < l; i++) { if (i < l) { assert.deepEqual(section.modifiers(i), section.data.modifiers[i]); } else { assert.equal(section.modifiers(i), false); } } }); common.testAllSections('("n") coerces to a number if numerical', '*.less|*.css', function(section) { var i, j = 5, l = section.data.modifiers.length; for (i = 0; i < j || i < l; i++) { if (i < l) { assert.deepEqual(section.modifiers(i), section.data.modifiers[i]); } else { assert.equal(section.modifiers(i), false); } } }); common.testAllSections('("modifier") should search by name', '*.less|*.css', function(section) { var i, j, queries = [ '.red', '.yellow', ':hover', ':disabled' ], q = queries.length, l = section.data.modifiers.length; // Each modifier for (i = 0; i < l; i += 1) { // Each query for (j = 0; j < q; j += 1) { if (section.data.modifiers[i].data.name === queries[j]) { assert.deepEqual(section.modifiers(queries[j]), section.data.modifiers[i]); } } } }); common.testAllSections('("modifier") should only return false if not found', 
'*.less|*.css', function(section) { var i, l = section.data.modifiers.length; assert.equal(false, section.modifiers('__should_not_find___')); if (l) { assert.ok(section.modifiers(section.modifiers(0).data.name)); } }); }); });
mgrsantox/nmmis
nmmis/contrib/municipal/migrations/0004_auto_20200723_1714.py
# Generated by Django 3.0.8 on 2020-07-23 11:29

import django.contrib.gis.db.models.fields
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('municipal', '0003_auto_20200723_1712'),
    ]

    operations = [
        migrations.AlterField(
            model_name='road',
            name='geom',
            field=django.contrib.gis.db.models.fields.LineStringField(srid=4326),
        ),
    ]
joshelser/cloudbreak
datalake/src/main/java/com/sequenceiq/datalake/flow/delete/SdxDeleteActions.java
<gh_stars>0 package com.sequenceiq.datalake.flow.delete; import static com.sequenceiq.datalake.flow.delete.SdxDeleteEvent.SDX_DELETE_FAILED_HANDLED_EVENT; import static com.sequenceiq.datalake.flow.delete.SdxDeleteEvent.SDX_DELETE_FINALIZED_EVENT; import static com.sequenceiq.datalake.flow.delete.SdxDeleteEvent.SDX_STACK_DELETION_IN_PROGRESS_EVENT; import java.util.Map; import java.util.Optional; import javax.inject.Inject; import org.apache.commons.lang3.exception.ExceptionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.statemachine.StateContext; import org.springframework.statemachine.action.Action; import com.sequenceiq.cloudbreak.common.event.Selectable; import com.sequenceiq.datalake.entity.SdxClusterStatus; import com.sequenceiq.datalake.flow.SdxContext; import com.sequenceiq.datalake.flow.SdxEvent; import com.sequenceiq.datalake.flow.delete.event.StackDeletionFailedEvent; import com.sequenceiq.datalake.flow.delete.event.StackDeletionSuccessEvent; import com.sequenceiq.datalake.flow.delete.event.StackDeletionWaitRequest; import com.sequenceiq.datalake.service.AbstractSdxAction; import com.sequenceiq.datalake.service.sdx.ProvisionerService; import com.sequenceiq.datalake.service.sdx.SdxService; import com.sequenceiq.flow.core.FlowEvent; import com.sequenceiq.flow.core.FlowParameters; import com.sequenceiq.flow.core.FlowState; import com.sequenceiq.notification.NotificationService; import com.sequenceiq.notification.ResourceEvent; @Configuration public class SdxDeleteActions { private static final Logger LOGGER = LoggerFactory.getLogger(SdxDeleteActions.class); @Inject private SdxService sdxService; @Inject private ProvisionerService provisionerService; @Inject private NotificationService notificationService; @Bean(name = "SDX_DELETION_START_STATE") public Action<?, ?> sdxDeletion() { return new AbstractSdxAction<>(SdxEvent.class) { @Override protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, SdxEvent payload) { return new SdxContext(flowParameters, payload.getResourceId(), payload.getUserId()); } @Override protected void doExecute(SdxContext context, SdxEvent payload, Map<Object, Object> variables) throws Exception { LOGGER.info("Start stack deletion for SDX: {}", payload.getResourceId()); provisionerService.startStackDeletion(payload.getResourceId()); sendEvent(context, SDX_STACK_DELETION_IN_PROGRESS_EVENT.event(), payload); } @Override protected Object getFailurePayload(SdxEvent payload, Optional<SdxContext> flowContext, Exception ex) { return new StackDeletionFailedEvent(payload.getResourceId(), payload.getUserId(), ex); } }; } @Bean(name = "SDX_STACK_DELETION_IN_PROGRESS_STATE") public Action<?, ?> sdxStackDeletionInProgress() { return new AbstractSdxAction<>(SdxEvent.class) { @Override protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, SdxEvent payload) { return new SdxContext(flowParameters, payload.getResourceId(), payload.getUserId()); } @Override protected void doExecute(SdxContext context, SdxEvent payload, Map<Object, Object> variables) throws Exception { LOGGER.info("SDX stack deletion in progress: {}", payload.getResourceId()); notificationService.send(ResourceEvent.SDX_CLUSTER_DELETION_STARTED, context.getUserId()); sendEvent(context); } @Override protected Selectable 
createRequest(SdxContext context) { return new StackDeletionWaitRequest(context.getSdxId(), context.getUserId()); } @Override protected Object getFailurePayload(SdxEvent payload, Optional<SdxContext> flowContext, Exception ex) { return new StackDeletionFailedEvent(payload.getResourceId(), payload.getUserId(), ex); } }; } @Bean(name = "SDX_DELETION_FINISHED_STATE") public Action<?, ?> finishedAction() { return new AbstractSdxAction<>(StackDeletionSuccessEvent.class) { @Override protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, StackDeletionSuccessEvent payload) { return new SdxContext(flowParameters, payload.getResourceId(), payload.getUserId()); } @Override protected void doExecute(SdxContext context, StackDeletionSuccessEvent payload, Map<Object, Object> variables) throws Exception { LOGGER.info("SDX delete finalized: {}", payload.getResourceId()); notificationService.send(ResourceEvent.SDX_CLUSTER_DELETION_FINISHED, payload, context.getUserId()); sendEvent(context, SDX_DELETE_FINALIZED_EVENT.event(), payload); } @Override protected Object getFailurePayload(StackDeletionSuccessEvent payload, Optional<SdxContext> flowContext, Exception ex) { return null; } }; } @Bean(name = "SDX_DELETION_FAILED_STATE") public Action<?, ?> failedAction() { return new AbstractSdxAction<>(StackDeletionFailedEvent.class) { @Override protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, StackDeletionFailedEvent payload) { return new SdxContext(flowParameters, payload.getResourceId(), payload.getUserId()); } @Override protected void doExecute(SdxContext context, StackDeletionFailedEvent payload, Map<Object, Object> variables) throws Exception { Exception exception = payload.getException(); SdxClusterStatus deleteFailedStatus = SdxClusterStatus.DELETE_FAILED; LOGGER.info("Update SDX status to {} for resource: {}", deleteFailedStatus, payload.getResourceId(), exception); String statusReason = ExceptionUtils.getMessage(exception); sdxService.updateSdxStatus(payload.getResourceId(), deleteFailedStatus, statusReason); notificationService.send(ResourceEvent.SDX_CLUSTER_DELETION_FAILED, payload, context.getUserId()); sendEvent(context, SDX_DELETE_FAILED_HANDLED_EVENT.event(), payload); } @Override protected Object getFailurePayload(StackDeletionFailedEvent payload, Optional<SdxContext> flowContext, Exception ex) { return null; } }; } }
uncle-pier/spark-platform
spark-common/spark-common-datasource/src/main/java/com/spark/platform/common/datasource/emuns/DataScopeTypeEnum.java
package com.spark.platform.common.datasource.emuns;

import lombok.AllArgsConstructor;
import lombok.Getter;

/**
 * @ProjectName: spark-platform
 * @Package: com.spark.platform.common.base.datasource.emuns
 * @ClassName: DataScopeTypeEnum
 * @Author: wangdingfeng
 * @Description: Data scope (permission) enumeration
 * @Date: 2020/6/8 17:13
 * @Version: 1.0
 */
@Getter
@AllArgsConstructor
public enum DataScopeTypeEnum {

    /**
     * All data
     */
    ALL(1, "全部"),

    /**
     * Current level only
     */
    THIS_LEVEL(2, "本级"),

    /**
     * Custom scope
     */
    CUSTOMIZE(3, "自定义");

    private int type;
    private String description;
}
DwangoMediaVillage/marltas_core
dqn/learner.py
<reponame>DwangoMediaVillage/marltas_core """Base learner class""" import logging from dataclasses import dataclass from pathlib import Path from typing import Any, Optional, Tuple from dqn.model import ModelBase from dqn.utils import ConfigBase, Counter, EventObject, MovingAverage @dataclass class LearnerConfig(ConfigBase): """Configuration of learner. Attributes: batch_size: Size of mini-batch. target_sync_interval: Online update interval to copy online network to target network. gpu_id: Index of GPU. adam_lr: Learning rate (alpha) of adam optimizer. adam_eps: Epsilon of adam optimizer. double_dqn: Using double-DQN algorithm to compute TD-error or not. """ batch_size: int target_sync_interval: int gpu_id: Optional[int] = None adam_lr: float = 0.0001 adam_eps: float = 1e-8 double_dqn: bool = True @dataclass class LearnerStatus(EventObject): """Status of learner. Args: online_update: Number of online network updates. target_update: Number of target network updates. online_update_per_sec: Online updates per second. loss: Moving average of loss. td_error_mean: Moving average of td error mean. q_value_mean: Moving average of predicted Q-values. """ online_update: int target_update: int online_update_per_sec: Optional[float] extrinsic_loss: Optional[float] intrinsic_loss: Optional[float] extrinsic_td_error_mean: Optional[float] intrinsic_td_error_mean: Optional[float] extrinsic_q_value_mean: Optional[float] intrinsic_q_value_mean: Optional[float] rnd_loss_mean: Optional[float] episodic_curiosity_loss_mean: Optional[float] @dataclass class UpdateResult: extrinsic_td_error_mean: float extrinsic_q_value_mean: float extrinsic_loss_mean: float intrinsic_td_error_mean: float intrinsic_q_value_mean: float intrinsic_loss_mean: float rnd_loss_mean: float episodic_curiosity_loss_mean: float class LearnerBase: """Base of learner. Args: model: Online network model. config: Configuration of learner. """ def __init__(self, online_model: ModelBase, target_model: ModelBase, target_sync_interval: int, logger: logging.Logger = logging.getLogger(__name__)): self.logger = logger self.online_model = online_model self.target_model = target_model self.target_sync_interval = target_sync_interval # internal buffer for status self.extrinsic_loss_mean = MovingAverage(0.99) self.extrinsic_q_value_mean = MovingAverage(0.99) self.extrinsic_td_error_mean = MovingAverage(0.99) self.intrinsic_loss_mean = MovingAverage(0.99) self.intrinsic_q_value_mean = MovingAverage(0.99) self.intrinsic_td_error_mean = MovingAverage(0.99) self.online_update_counter = Counter() self.target_update_counter = Counter() self.rnd_loss_mean = MovingAverage(0.99) self.episodic_curiosity_loss_mean = MovingAverage(0.99) self.logger.info(f"Learner is initialized") # def init_optimizer(self) -> None: def update(self, batch: Any) -> Any: """Update online network (and target network). Args: batch: mini batch. Returns: loss: Loss object for priority update. 
""" loss, result = self.update_core(batch) # update status self.extrinsic_loss_mean.step(result.extrinsic_loss_mean) self.extrinsic_q_value_mean.step(result.extrinsic_q_value_mean) self.extrinsic_td_error_mean.step(result.extrinsic_td_error_mean) self.intrinsic_loss_mean.step(result.intrinsic_loss_mean) self.intrinsic_q_value_mean.step(result.intrinsic_q_value_mean) self.intrinsic_td_error_mean.step(result.intrinsic_td_error_mean) self.rnd_loss_mean.step(result.rnd_loss_mean) self.online_update_counter.step() self.episodic_curiosity_loss_mean.step(result.episodic_curiosity_loss_mean) # update target model if self.online_update_counter.count % self.target_sync_interval == 0: self.target_model.load_state_dict(self.online_model.state_dict()) self.target_update_counter.step() return loss def update_core(self, batch: Any) -> Tuple[Any, UpdateResult]: """Core implementation of online update. Args: batch: mini batch. Returns: loss_object: Loss object for priority update. result: UpdateResult object for stats. """ raise NotImplementedError def get_status(self) -> LearnerStatus: """Returns status of learner. Returns: learner_states: LearnerStatus object. """ return LearnerStatus(online_update=self.online_update_counter.count, target_update=self.target_update_counter.count, online_update_per_sec=self.online_update_counter.get_count_per_sec(), extrinsic_loss=self.extrinsic_loss_mean.average, extrinsic_td_error_mean=self.extrinsic_td_error_mean.average, extrinsic_q_value_mean=self.extrinsic_q_value_mean.average, intrinsic_loss=self.intrinsic_loss_mean.average, intrinsic_td_error_mean=self.intrinsic_td_error_mean.average, intrinsic_q_value_mean=self.intrinsic_q_value_mean.average, rnd_loss_mean=self.rnd_loss_mean.average, episodic_curiosity_loss_mean=self.episodic_curiosity_loss_mean.average) def get_model_param(self) -> bytes: """Returns bytes expression of online network parameters.""" raise NotImplementedError def save_model(self, log_dir: Path, global_step: int) -> None: """Save online model parameters to disk. Args: log_dir: Directory to save pickle file(s). """ raise NotImplementedError def load_online_model(self, snap_filename: Path) -> None: """Load online model parameters from disk. Args: snap_filename: Save path of pickle file. """ raise NotImplementedError def get_batch_size(self) -> int: """Returns size of mini-batch.""" raise NotImplementedError
jbrasileiro/resume-online
resume-online-frontend/resume-online-jfx/src/main/java/resumeonline/jfx/FXMLResourceLoader.java
package resumeonline.jfx;

import java.io.IOException;
import java.net.URL;
import java.util.ResourceBundle;

import javafx.fxml.FXMLLoader;

import resumeonline.commons.ResourceThreadLoader;
import resumeonline.commons.exeception.NoNewInstanceAllowed;

public final class FXMLResourceLoader {

    private static final String FXML_FOLDER = "fxml/";

    private FXMLResourceLoader() {
        super();
        throw new NoNewInstanceAllowed(getClass());
    }

    public static <T> T load(
        final String name,
        final ResourceBundle bundle) throws IOException {
        Thread currentThread = Thread.currentThread();
        URL resource = ResourceThreadLoader.getResource(currentThread, FXML_FOLDER.concat(name));
        return FXMLLoader.load(resource, bundle);
    }
}
wwjiang007/fuchsia-1
src/firmware/gigaboot/host/stubs.h
// Copyright 2021 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file defines stubs for device-only functionality that we don't
// need for unittests, but have to define for compilation to succeed.

#ifndef SRC_FIRMWARE_GIGABOOT_HOST_STUBS_H_
#define SRC_FIRMWARE_GIGABOOT_HOST_STUBS_H_

int puts16(char16_t *str) { return 0; }

#endif  // SRC_FIRMWARE_GIGABOOT_HOST_STUBS_H_
obecto/perper
functions/python/perper/cache/call_data.py
from collections import OrderedDict

from pyignite import GenericObjectMeta
from pyignite.datatypes import *


class CallData(
    metaclass=GenericObjectMeta,
    type_name="CallData",
    schema=OrderedDict(
        [
            ("agent", String),
            ("agentdelegate", String),
            ("delegate", String),
            ("calleragentdelegate", String),
            ("caller", String),
            ("finished", BoolObject),
            ("localtodata", BoolObject),
            ("result", BinaryObject),
            ("error", String),
            ("parameters", BinaryObject),
        ]
    ),
):
    pass
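A hypothetical usage sketch (editorial addition, not from the Perper repository): it assumes an Apache Ignite node is reachable on localhost:10800 and that a cache named "calls" may be created; the key and field values are made up, only the pyignite thin-client calls shown are standard API.

from pyignite import Client

client = Client()
client.connect("127.0.0.1", 10800)

# Store one CallData binary object and read it back.
calls = client.get_or_create_cache("calls")
calls.put("call-1", CallData(
    agent="agent-0",
    agentdelegate="",
    delegate="DoWork",
    calleragentdelegate="",
    caller="caller-0",
    finished=False,
    localtodata=False,
    result=None,
    error=None,
    parameters=None,
))
print(calls.get("call-1"))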
pferrel/incubator-predictionio
e2/src/test/scala/org/apache/predictionio/e2/engine/BinaryVectorizerTest.scala
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.predictionio.e2.engine

import org.apache.predictionio.e2.fixture.BinaryVectorizerFixture
import org.apache.predictionio.e2.fixture.SharedSparkContext
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.rdd.RDD
import org.scalatest.FlatSpec
import org.scalatest.Matchers

import scala.collection.immutable.HashMap
import scala.language.reflectiveCalls

class BinaryVectorizerTest extends FlatSpec with Matchers with SharedSparkContext
with BinaryVectorizerFixture{

  "toBinary" should "produce the following summed values:" in {
    val testCase = BinaryVectorizer(sc.parallelize(base.maps), base.properties)
    val vectorTwoA = testCase.toBinary(testArrays.twoA)
    val vectorTwoB = testCase.toBinary(testArrays.twoB)

    // Make sure vectors produced are the same size.
    vectorTwoA.size should be (vectorTwoB.size)

    // Test case for checking food value not listed in base.maps.
    testCase.toBinary(testArrays.one).toArray.sum should be (1.0)

    // Test cases for making sure indices are preserved.
    val sumOne = vecSum(vectorTwoA, vectorTwoB)

    exactly (1, sumOne) should be (2.0)
    exactly (2, sumOne) should be (0.0)
    exactly (2, sumOne) should be (1.0)

    val sumTwo = vecSum(Vectors.dense(sumOne), testCase.toBinary(testArrays.twoC))

    exactly (3, sumTwo) should be (1.0)
  }
}
dateolive/Hi-Dream-Blog
blog-api/src/main/java/com/datealive/common/PageResult.java
package com.datealive.common;

import lombok.Data;

import java.io.Serializable;
import java.util.List;

/**
 * @ClassName: PageResult
 * @Description: Generic wrapper for one page of query results.
 * @author: datealive
 * @date: 2021/1/31 22:25
 */
@Data
public class PageResult<T> implements Serializable {

    /**
     * Status code.
     */
    private Integer code;

    /**
     * Total number of pages.
     */
    private Integer totalPage;

    /**
     * Current page number.
     */
    private Integer currentPage;

    /**
     * Data for the current page.
     */
    private List<T> data;

    public PageResult(Integer code, Integer totalPage, Integer currentPage, List<T> data) {
        this.code = code;
        this.totalPage = totalPage;
        this.currentPage = currentPage;
        this.data = data;
    }
}
gitter-badger/pygsuite
tests/test_docs.py
from pygsuite import DefaultFonts, TextStyle, Color from pygsuite.docs.doc_elements.paragraph import Paragraph BRIGHT_GREEN_HEX = "#72FF33" def test_text(test_document): document = test_document docbody = document.body docbody.delete() docbody.add_text( "TEST_CUSTOM\n", style=TextStyle(font_size=18, font_weight=200, color=Color(hex=BRIGHT_GREEN_HEX)), ) docbody.add_text("TEST_DEFAULT\n", style=DefaultFonts.NORMAL_TEXT) docbody.add_text("TEST_INDEX\n", style=DefaultFonts.NORMAL_TEXT, position=1) document.flush() text = [item for item in document.body if isinstance(item, Paragraph)] assert text[0].text.strip() == "TEST_INDEX" assert text[2].text.strip() == "TEST_DEFAULT" # TODO: return style objects assert text[1].elements[0].style.font_size == 18 def test_paragraph(test_document): document = test_document docbody = document.body docbody.delete() docbody.add_text( "TEST_CUSTOM\n", style=TextStyle(font_size=18, font_weight=200, color=Color(hex=BRIGHT_GREEN_HEX)), ) docbody.flush() docbody.content[1].text = "TEST_CUSTOM_SETTER" docbody.add_text("INSERT\n", position=0) docbody.flush() docbody.paragraphs[1].elements[0].style = TextStyle( font_size=24, font_weight=500, color=Color(hex=BRIGHT_GREEN_HEX) ) docbody.flush() assert docbody.content[2].text.strip() == "TEST_CUSTOM_SETTER" assert docbody.paragraphs[1].elements[0].style.font_size == 24
gingerik/ol3
examples/topojson.js
import Map from '../src/ol/Map.js';
import View from '../src/ol/View.js';
import TopoJSON from '../src/ol/format/TopoJSON.js';
import TileLayer from '../src/ol/layer/Tile.js';
import VectorLayer from '../src/ol/layer/Vector.js';
import TileJSON from '../src/ol/source/TileJSON.js';
import VectorSource from '../src/ol/source/Vector.js';
import Fill from '../src/ol/style/Fill.js';
import Stroke from '../src/ol/style/Stroke.js';
import Style from '../src/ol/style/Style.js';


const raster = new TileLayer({
  source: new TileJSON({
    url: 'https://api.tiles.mapbox.com/v3/mapbox.world-dark.json?secure'
  })
});

const style = new Style({
  fill: new Fill({
    color: 'rgba(255, 255, 255, 0.6)'
  }),
  stroke: new Stroke({
    color: '#319FD3',
    width: 1
  })
});

const vector = new VectorLayer({
  source: new VectorSource({
    url: 'data/topojson/world-110m.json',
    format: new TopoJSON({
      // don't want to render the full world polygon (stored as 'land' layer),
      // which repeats all countries
      layers: ['countries']
    }),
    overlaps: false
  }),
  style: style
});

const map = new Map({
  layers: [raster, vector],
  target: 'map',
  view: new View({
    center: [0, 0],
    zoom: 1
  })
});
rkondratenko/oxAuth
common/src/main/java/org/gluu/oxauth/claims/Audience.java
package org.gluu.oxauth.claims; import org.gluu.oxauth.model.jwt.JwtClaims; import org.gluu.oxauth.model.registration.Client; /** * @author <NAME> */ public class Audience { private Audience() { } public static void setAudience(JwtClaims claims, Client client) { if (claims == null || client == null) { return; } claims.addAudience(client.getClientId()); client.getAttributes().getAdditionalAudience().forEach(claims::addAudience); } }
AntonGavr92/agavrikov
chapter_007/src/main/java/ru/job4j/threads/Time.java
package ru.job4j.threads;

import java.util.Date;

/**
 * Class that monitors the time spent by another thread.
 * @author agavrikov
 * @since 24.07.2017
 * @version 1
 */
public class Time implements Runnable {

    /**
     * Entry point.
     * @param arg command-line arguments.
     */
    public static void main(String[] arg) {
        Thread thread = new Thread(new Time());
        thread.start();
    }

    /**
     * Starts an additional thread and tracks how long it has been running.
     * When the time limit is exceeded, the method interrupts the worker
     * thread and then its own thread.
     */
    @Override
    public void run() {
        long timeStart = new Date().getTime();
        Thread countChar = new Thread(new CountChar());
        countChar.start();
        while (!Thread.currentThread().isInterrupted()) {
            if (new Date().getTime() - timeStart > 1) {
                countChar.interrupt();
                Thread.currentThread().interrupt();
            }
        }
    }
}
rLadia-demo/AttacknidPatch
decompiled_src/CFR/com/badlogic/gdx/physics/box2d/FixtureDef.java
/*
 * Decompiled with CFR 0_79.
 */
package com.badlogic.gdx.physics.box2d;

import com.badlogic.gdx.physics.box2d.Filter;
import com.badlogic.gdx.physics.box2d.Shape;

public class FixtureDef {
    public float density = 0.0f;
    public final Filter filter = new Filter();
    public float friction = 0.2f;
    public boolean isSensor = false;
    public float restitution = 0.0f;
    public Shape shape;
}
beardog-ukr/uncrustify-config-examples
examples/cmt_width/ex_80c.unc.cpp
#include <vector>
#include <iostream>
#include <algorithm>

int main() {
  int x = 10 + 10 + 12 + 13 + 14 + 15 + 16 + 17 + 18 + 19 + 20 + 21 + 22 + 23 + 24 + 25;

  // The quick brown fox jumps over the lazy dog. The quick brown fox
  // jumps over the lazy dog.
  std::cout << "x is " << x << '\n';

  return 0;
}
ministryofjustice/mtp-api
mtp_api/urls.py
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.http import HttpResponse
from django.utils.translation import gettext_lazy as _
from django.views.generic import RedirectView
from moj_irat.views import HealthcheckView, PingJsonView
from mtp_common.metrics.views import metrics_view

from mtp_auth.patches import patch_oauth2_provider_token_view
from .views import schema_view

patch_oauth2_provider_token_view()

urlpatterns = [
    url(r'^', include('prison.urls')),
    url(r'^', include('mtp_auth.urls')),
    url(r'^', include('transaction.urls')),
    url(r'^', include('account.urls')),
    url(r'^', include('payment.urls')),
    url(r'^', include('credit.urls')),
    url(r'^', include('security.urls')),
    url(r'^', include('service.urls')),
    url(r'^', include('disbursement.urls')),
    url(r'^', include('core.urls')),
    url(r'^', include('notification.urls')),
    url(r'^', include('performance.urls')),

    url(r'^oauth2/', include(('oauth2_provider.urls', 'oauth2_provider'), namespace='oauth2_provider')),
    url(r'^admin/', admin.site.urls),
    url(r'^admin/', include('django.conf.urls.i18n')),

    url(r'^ping.json$', PingJsonView.as_view(
        build_date_key='APP_BUILD_DATE',
        commit_id_key='APP_GIT_COMMIT',
        version_number_key='APP_BUILD_TAG',
    ), name='ping_json'),
    url(r'^healthcheck.json$', HealthcheckView.as_view(), name='healthcheck_json'),
    url(r'^metrics.txt$', metrics_view, name='prometheus_metrics'),

    url(r'^favicon.ico$', RedirectView.as_view(url=settings.STATIC_URL + 'images/favicon.ico', permanent=True)),
    url(r'^robots.txt$', lambda request: HttpResponse('User-agent: *\nDisallow: /', content_type='text/plain')),
    url(r'^\.well-known/security\.txt$', RedirectView.as_view(
        url='https://raw.githubusercontent.com/ministryofjustice/security-guidance'
            '/main/contact/vulnerability-disclosure-security.txt',
        permanent=True,
    )),

    url(r'^404.html$', lambda request: HttpResponse(
        _('Page not found'),
        content_type='text/plain', status=404,
    )),
    url(r'^500.html$', lambda request: HttpResponse(
        _('Sorry, something went wrong'),
        content_type='text/plain', status=500,
    )),

    url(r'^$', lambda request: HttpResponse(content_type='text/plain', status=204)),
]

if settings.ENVIRONMENT in ('test', 'local'):
    urlpatterns.extend([
        url(r'^swagger(?P<format>\.json|\.yaml)$', schema_view.without_ui(cache_timeout=0), name='schema-json'),
        url(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
        url(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
    ])
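A small illustrative smoke check (editorial addition, not part of the mtp-api repository): it assumes Django settings are configured for tests and simply exercises the named monitoring endpoints declared above through Django's test client; the function name is a placeholder.

from django.test import Client
from django.urls import reverse


def smoke_check():
    # Hit the health/monitoring URLs by their names from the urlconf above.
    client = Client()
    for name in ('ping_json', 'healthcheck_json', 'prometheus_metrics'):
        response = client.get(reverse(name))
        print(name, response.status_code)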
LongerVisionUSA/VTK
Filters/Generic/Testing/Cxx/otherCreation.cxx
/*========================================================================= Program: Visualization Toolkit Module: otherCreation.cxx Copyright (c) <NAME>, <NAME>, <NAME> All rights reserved. See Copyright.txt or http://www.kitware.com/Copyright.htm for details. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the above copyright notice for more information. =========================================================================*/ // .NAME // .SECTION Description // this program tests the creation of the BridgeDataSet #include "vtkBridgeDataSet.h" #include "vtkCellTypes.h" #include "vtkDoubleArray.h" #include "vtkGenericAdaptorCell.h" #include "vtkGenericAttribute.h" #include "vtkGenericAttributeCollection.h" #include "vtkGenericCellIterator.h" #include "vtkGenericPointIterator.h" #include "vtkIndent.h" #include "vtkLine.h" #include "vtkMath.h" #include "vtkPointData.h" #include "vtkTetra.h" #include "vtkTriangle.h" #include "vtkUnstructuredGrid.h" #include "vtkVertex.h" #include <cassert> #include <sstream> #include <string> //------------------------------------------------------------------------------ // Description: // Display message for a test result and return the test value int TestAssertion(ostream& strm, vtkIndent indent, const char* label, int assertion); int TestEmpty(ostream& strm); //------------------------------------------------------------------------------ // a dataset with points but no cells, and no pointdata and no celldata int TestWithPoints(ostream& strm); //------------------------------------------------------------------------------ // a dataset with points and cells, and no pointdata and no celldata int TestWithPointsAndCells(ostream& strm); //------------------------------------------------------------------------------ // a dataset with points and cells, pointdata but no celldata int TestWithPointsAndCellsAndPointData(ostream& strm); //------------------------------------------------------------------------------ // Description: // Display message for a test result and return the test value int TestAssertion(ostream& strm, vtkIndent indent, const char* label, int assertion) { strm << indent << "Test `" << label << "\': "; if (assertion) { strm << "passed." << endl; } else { strm << "FAILED!" << endl; } return assertion; } //------------------------------------------------------------------------------ // Description: // Call TestAssertion() and return with 1 if it fails, do nothing otherwise. 
// void TestAssertion(ostream &strm, // vtkIndent indent, // const *char label, // int assertion); #define MacroTest(strm, indent, label, assertion) \ if (!TestAssertion(strm, indent, label, assertion)) \ return 1 //------------------------------------------------------------------------------ int TestEmpty(ostream& strm) { vtkIndent indent; // actual test strm << "Test vtkBridgeDataSet Start" << endl; strm << "Create an empty vtkUnstructuredGrid" << endl; vtkUnstructuredGrid* g = vtkUnstructuredGrid::New(); strm << "Empty unstructured grid created" << endl; strm << "Create a vtkBridgeDataSet" << endl; vtkBridgeDataSet* ds = vtkBridgeDataSet::New(); strm << "vtkBridgeDataSet created" << endl; strm << "Init the vtkBridgeDataSet with the empty unstructured grid" << endl; ds->SetDataSet(g); strm << "vtkBridgeDataSet initialized with the empty unstructured grid" << endl; MacroTest(strm, indent, "number of points", ds->GetNumberOfPoints() == 0); MacroTest(strm, indent, "number of cells -1", ds->GetNumberOfCells(-1) == 0); MacroTest(strm, indent, "number of cells 0", ds->GetNumberOfCells(0) == 0); MacroTest(strm, indent, "number of cells 1", ds->GetNumberOfCells(1) == 0); MacroTest(strm, indent, "number of cells 2", ds->GetNumberOfCells(2) == 0); MacroTest(strm, indent, "number of cells 3", ds->GetNumberOfCells(3) == 0); MacroTest(strm, indent, "cell dimension", ds->GetCellDimension() == -1); strm << "GetCellTypes() start" << endl; vtkCellTypes* types = vtkCellTypes::New(); ds->GetCellTypes(types); MacroTest(strm, indent, "cell types", types->GetNumberOfTypes() == 0); types->Delete(); strm << "GetCellTypes() end" << endl; strm << "NewCellIterator() start" << endl; vtkGenericCellIterator* it = ds->NewCellIterator(-1); MacroTest(strm, indent, "empty cell iterator -1 exists", it != nullptr); it->Begin(); MacroTest(strm, indent, "empty cell iterator -1", it->IsAtEnd()); it->Delete(); it = ds->NewCellIterator(0); MacroTest(strm, indent, "empty cell iterator 0 exists", it != nullptr); it->Begin(); MacroTest(strm, indent, "empty cell iterator 0", it->IsAtEnd()); it->Delete(); it = ds->NewCellIterator(1); MacroTest(strm, indent, "empty cell iterator 1 exists", it != nullptr); it->Begin(); MacroTest(strm, indent, "empty cell iterator 1", it->IsAtEnd()); it->Delete(); it = ds->NewCellIterator(2); MacroTest(strm, indent, "empty cell iterator 2 exists", it != nullptr); it->Begin(); MacroTest(strm, indent, "empty cell iterator 2", it->IsAtEnd()); it->Delete(); it = ds->NewCellIterator(3); MacroTest(strm, indent, "empty cell iterator 3 exists", it != nullptr); it->Begin(); MacroTest(strm, indent, "empty cell iterator 3", it->IsAtEnd()); it->Delete(); strm << "NewCellIterator() end" << endl; strm << "NewPointIterator() start" << endl; vtkGenericPointIterator* pit = ds->NewPointIterator(); MacroTest(strm, indent, "empty point iterator exists", pit != nullptr); pit->Begin(); MacroTest(strm, indent, "empty point iterator", pit->IsAtEnd()); pit->Delete(); strm << "NewPointIterator() end" << endl; double bounds[6]; double center[3]; double* c; const double epsilon = 0.000001; // 10^{-6} strm << "GetBounds() start" << endl; const double* b = ds->GetBounds(); MacroTest(strm, indent, "volatile bounds exist", b != nullptr); MacroTest(strm, indent, "default volatile bounds", !vtkMath::AreBoundsInitialized(b)); ds->GetBounds(bounds); MacroTest(strm, indent, "default bounds", !vtkMath::AreBoundsInitialized(bounds)); c = ds->GetCenter(); MacroTest(strm, indent, "volatile center exists", c != nullptr); MacroTest(strm, 
indent, "default volatile center", (fabs(c[0]) < epsilon) && (fabs(c[1]) < epsilon) && (fabs(c[2]) < epsilon)); ds->GetCenter(center); MacroTest(strm, indent, "volatile center", (fabs(center[0]) < epsilon) && (fabs(center[1]) < epsilon) && (fabs(center[2]) < epsilon)); MacroTest(strm, indent, "diagonal length", fabs(ds->GetLength() - 2 * sqrt(3.0)) < epsilon); strm << "GetBounds() end" << endl; vtkGenericAttributeCollection* attributes; attributes = ds->GetAttributes(); MacroTest(strm, indent, "attributes exist", attributes != nullptr); MacroTest(strm, indent, "empty attributes", attributes->IsEmpty()); MacroTest(strm, indent, "empty attributes", attributes->GetNumberOfAttributes() == 0); MacroTest(strm, indent, "empty attributes", attributes->GetNumberOfComponents() == 0); MacroTest(strm, indent, "empty attributes", attributes->GetMaxNumberOfComponents() == 0); #if 0 strm<<"NewBoundaryIterator() start"<<endl; it=ds->NewBoundaryIterator(-1,0); MacroTest(strm,indent,"empty boundary iterator -1,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty boundary iterator -1,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(0,0); MacroTest(strm,indent,"empty boundary iterator 0,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 0,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(1,0); MacroTest(strm,indent,"empty boundary iterator 1,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 1,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(2,0); MacroTest(strm,indent,"empty boundary iterator 2,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 2,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(3,0); MacroTest(strm,indent,"empty boundary iterator 3,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 3,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(-1,1); MacroTest(strm,indent,"empty boundary iterator -1,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty boundary iterator -1,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(0,1); MacroTest(strm,indent,"empty boundary iterator 0,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 0,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(1,1); MacroTest(strm,indent,"empty boundary iterator 1,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 1,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(2,1); MacroTest(strm,indent,"empty boundary iterator 2,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 2,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(3,1); MacroTest(strm,indent,"empty boundary iterator 3,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 3,true",it->IsAtEnd()); it->Delete(); strm<<"NewBoundaryIterator() end"<<endl; #endif strm << "Delete the vtkBridgeDataSet" << endl; ds->Delete(); strm << "vtkBridgeDataSet deleted" << endl; strm << "Delete the empty vtkUnstructuredGrid" << endl; g->Delete(); strm << "Empty vtkUnstructuredGrid deleted" << endl; strm << "Test vtkBridgeDataSet creation End" << endl; // Do the same thing for: // 1. a dataset with points but no cells, and no pointdata and no celldata // 2. a dataset with points and cells, and no pointdata and no celldata // 3. a dataset with points and cells, and pointdata but no celldata // 4. 
a dataset with points and cells, and celldata but not pointdata // 5. a dataset with points and cells, and pointdata but celldata return 0; } //------------------------------------------------------------------------------ // a dataset with points but no cells, and no pointdata and no celldata int TestWithPoints(ostream& strm) { vtkIndent indent; vtkPoints* pts; // actual test strm << "Test vtkBridgeDataSet Start" << endl; strm << "Create an empty vtkUnstructuredGrid" << endl; vtkUnstructuredGrid* g = vtkUnstructuredGrid::New(); strm << "Empty unstructured grid created" << endl; pts = vtkPoints::New(); pts->InsertNextPoint(-1, -2, -3); pts->InsertNextPoint(4, 5, 6); strm << "Add points to the vtkUnstructuredGrid" << endl; g->SetPoints(pts); strm << "Points added to the vtkUnstructuredGrid" << endl; strm << "Create a vtkBridgeDataSet" << endl; vtkBridgeDataSet* ds = vtkBridgeDataSet::New(); strm << "vtkBridgeDataSet created" << endl; strm << "Init the vtkBridgeDataSet with the unstructured grid" << endl; ds->SetDataSet(g); strm << "vtkBridgeDataSet initialized with the unstructured grid" << endl; MacroTest(strm, indent, "number of points", ds->GetNumberOfPoints() == 2); MacroTest(strm, indent, "number of cells -1", ds->GetNumberOfCells(-1) == 0); MacroTest(strm, indent, "number of cells 0", ds->GetNumberOfCells(0) == 0); MacroTest(strm, indent, "number of cells 1", ds->GetNumberOfCells(1) == 0); MacroTest(strm, indent, "number of cells 2", ds->GetNumberOfCells(2) == 0); MacroTest(strm, indent, "number of cells 3", ds->GetNumberOfCells(3) == 0); MacroTest(strm, indent, "cell dimension", ds->GetCellDimension() == -1); strm << "GetCellTypes() start" << endl; vtkCellTypes* types = vtkCellTypes::New(); ds->GetCellTypes(types); MacroTest(strm, indent, "cell types", types->GetNumberOfTypes() == 0); types->Delete(); strm << "GetCellTypes() end" << endl; strm << "NewCellIterator() start" << endl; vtkGenericCellIterator* it = ds->NewCellIterator(-1); MacroTest(strm, indent, "empty cell iterator -1 exists", it != nullptr); it->Begin(); MacroTest(strm, indent, "empty cell iterator -1", it->IsAtEnd()); it->Delete(); it = ds->NewCellIterator(0); MacroTest(strm, indent, "empty cell iterator 0 exists", it != nullptr); it->Begin(); MacroTest(strm, indent, "empty cell iterator 0", it->IsAtEnd()); it->Delete(); it = ds->NewCellIterator(1); MacroTest(strm, indent, "empty cell iterator 1 exists", it != nullptr); it->Begin(); MacroTest(strm, indent, "empty cell iterator 1", it->IsAtEnd()); it->Delete(); it = ds->NewCellIterator(2); MacroTest(strm, indent, "empty cell iterator 2 exists", it != nullptr); it->Begin(); MacroTest(strm, indent, "empty cell iterator 2", it->IsAtEnd()); it->Delete(); it = ds->NewCellIterator(3); MacroTest(strm, indent, "empty cell iterator 3 exists", it != nullptr); it->Begin(); MacroTest(strm, indent, "empty cell iterator 3", it->IsAtEnd()); it->Delete(); strm << "NewCellIterator() end" << endl; double x[3]; strm << "NewPointIterator() start" << endl; vtkGenericPointIterator* pit = ds->NewPointIterator(); MacroTest(strm, indent, "point iterator exists", pit != nullptr); pit->Begin(); MacroTest(strm, indent, "point iterator", !pit->IsAtEnd()); pit->GetPosition(x); MacroTest(strm, indent, "point iterator", (x[0] == -1) && (x[1] == -2) && (x[2] == -3)); MacroTest(strm, indent, "point iterator", pit->GetId() == 0); pit->Next(); MacroTest(strm, indent, "point iterator", !pit->IsAtEnd()); pit->GetPosition(x); MacroTest(strm, indent, "point iterator", (x[0] == 4) && (x[1] == 5) && (x[2] == 
6)); MacroTest(strm, indent, "point iterator", pit->GetId() == 1); pit->Next(); MacroTest(strm, indent, "point iterator", pit->IsAtEnd()); pit->Delete(); strm << "NewPointIterator() end" << endl; double bounds[6]; double center[3]; double* c; const double epsilon = 0.000001; // 10^{-6} strm << "GetBounds() start" << endl; const double* b = ds->GetBounds(); MacroTest(strm, indent, "volatile bounds exist", b != nullptr); // strm<<"bounds=("<<b[0]<<','<<b[1]<<','<<b[2]<<','<<b[3]<<','<<b[4]<<','<<b[5]<<')'<<endl; MacroTest(strm, indent, "valid volatile bounds", (b[0] == -1) && (b[1] == 4) && (b[2] == -2) && (b[3] == 5) && (b[4] == -3) && (b[5] == 6)); ds->GetBounds(bounds); MacroTest(strm, indent, "valid bounds", (bounds[0] == -1) && (bounds[1] == 4) && (bounds[2] == -2) && (bounds[3] == 5) && (bounds[4] == -3) && (bounds[5] == 6)); c = ds->GetCenter(); MacroTest(strm, indent, "volatile center exists", c != nullptr); MacroTest(strm, indent, "volatile center", (fabs(c[0] - 1.5) < epsilon) && (fabs(c[1] - 1.5) < epsilon) && (fabs(c[2] - 1.5) < epsilon)); ds->GetCenter(center); MacroTest(strm, indent, "valid center", (fabs(center[0] - 1.5) < epsilon) && (fabs(center[1] - 1.5) < epsilon) && (fabs(center[2] - 1.5) < epsilon)); MacroTest(strm, indent, "diagonal length", fabs(ds->GetLength() - sqrt(155.0)) < epsilon); strm << "GetBounds() end" << endl; vtkGenericAttributeCollection* attributes; attributes = ds->GetAttributes(); MacroTest(strm, indent, "attributes exist", attributes != nullptr); MacroTest(strm, indent, "empty attributes", attributes->IsEmpty()); MacroTest(strm, indent, "empty attributes", attributes->GetNumberOfAttributes() == 0); MacroTest(strm, indent, "empty attributes", attributes->GetNumberOfComponents() == 0); MacroTest(strm, indent, "empty attributes", attributes->GetMaxNumberOfComponents() == 0); #if 0 strm<<"NewBoundaryIterator() start"<<endl; it=ds->NewBoundaryIterator(-1,0); MacroTest(strm,indent,"empty boundary iterator -1,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty boundary iterator -1,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(0,0); MacroTest(strm,indent,"empty boundary iterator 0,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 0,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(1,0); MacroTest(strm,indent,"empty boundary iterator 1,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 1,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(2,0); MacroTest(strm,indent,"empty boundary iterator 2,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 2,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(3,0); MacroTest(strm,indent,"empty boundary iterator 3,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 3,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(-1,1); MacroTest(strm,indent,"empty boundary iterator -1,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty boundary iterator -1,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(0,1); MacroTest(strm,indent,"empty boundary iterator 0,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 0,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(1,1); MacroTest(strm,indent,"empty boundary iterator 1,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 1,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(2,1); MacroTest(strm,indent,"empty 
boundary iterator 2,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 2,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(3,1); MacroTest(strm,indent,"empty boundary iterator 3,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 3,true",it->IsAtEnd()); it->Delete(); strm<<"NewBoundaryIterator() end"<<endl; #endif pts->Delete(); strm << "Delete the vtkBridgeDataSet" << endl; ds->Delete(); strm << "vtkBridgeDataSet deleted" << endl; strm << "Delete the vtkUnstructuredGrid" << endl; g->Delete(); strm << "vtkUnstructuredGrid deleted" << endl; strm << "Test vtkBridgeDataSet creation End" << endl; // Do the same thing for: // 2. a dataset with points and cells, and no pointdata and no celldata // 3. a dataset with points and cells, and pointdata but no celldata // 4. a dataset with points and cells, and celldata but not pointdata // 5. a dataset with points and cells, and pointdata but celldata return 0; } //------------------------------------------------------------------------------ // a dataset with points and cells, and no pointdata and no celldata int TestWithPointsAndCells(ostream& strm) { vtkIndent indent; vtkPoints* pts; // actual test strm << "----------------------------------------------------------" << endl; strm << "TestWithPointsAndCells Start" << endl; strm << "----------------------------------------------------------" << endl; strm << "Create an empty vtkUnstructuredGrid" << endl; vtkUnstructuredGrid* g = vtkUnstructuredGrid::New(); strm << "Empty unstructured grid created" << endl; pts = vtkPoints::New(); pts->InsertNextPoint(0, 0, 0); // 0 pts->InsertNextPoint(1, -1, 0); // 1 pts->InsertNextPoint(1, 1, 0); // 2 pts->InsertNextPoint(0.5, 0, 1); // 3 pts->InsertNextPoint(2, -1, 0); // 4 pts->InsertNextPoint(3, 0, 0); // 5 pts->InsertNextPoint(2, 1, 0); // 6 pts->InsertNextPoint(4, 0, 0); // 7 pts->InsertNextPoint(5, 0, 0); // 8 pts->InsertNextPoint(6, 0, 0); // 9 pts->InsertNextPoint(10, 0, 0); // 10 0 pts->InsertNextPoint(11, -1, 0); // 11 1,4 pts->InsertNextPoint(11, 1, 0); // 12 2,6 pts->InsertNextPoint(10.5, 0, 1); // 13 pts->InsertNextPoint(12, 0, 0); // 14 // 5,7 pts->InsertNextPoint(13, 0, 0); // 15 // 8,9 pts->InsertNextPoint(14, 0, 0); // extra point strm << "Add points to the vtkUnstructuredGrid" << endl; g->SetPoints(pts); strm << "Points added to the vtkUnstructuredGrid" << endl; vtkTetra* tetra = vtkTetra::New(); tetra->GetPointIds()->SetId(0, 0); tetra->GetPointIds()->SetId(1, 1); tetra->GetPointIds()->SetId(2, 2); tetra->GetPointIds()->SetId(3, 3); g->InsertNextCell(tetra->GetCellType(), tetra->GetPointIds()); tetra->Delete(); vtkTriangle* triangle = vtkTriangle::New(); triangle->GetPointIds()->SetId(0, 4); triangle->GetPointIds()->SetId(1, 5); triangle->GetPointIds()->SetId(2, 6); g->InsertNextCell(triangle->GetCellType(), triangle->GetPointIds()); triangle->Delete(); vtkLine* line = vtkLine::New(); line->GetPointIds()->SetId(0, 7); line->GetPointIds()->SetId(1, 8); g->InsertNextCell(line->GetCellType(), line->GetPointIds()); line->Delete(); vtkVertex* vertex = vtkVertex::New(); vertex->GetPointIds()->SetId(0, 9); g->InsertNextCell(vertex->GetCellType(), vertex->GetPointIds()); vertex->Delete(); tetra = vtkTetra::New(); tetra->GetPointIds()->SetId(0, 10); tetra->GetPointIds()->SetId(1, 11); tetra->GetPointIds()->SetId(2, 12); tetra->GetPointIds()->SetId(3, 13); g->InsertNextCell(tetra->GetCellType(), tetra->GetPointIds()); tetra->Delete(); triangle = vtkTriangle::New(); 
triangle->GetPointIds()->SetId(0, 11); triangle->GetPointIds()->SetId(1, 14); triangle->GetPointIds()->SetId(2, 12); g->InsertNextCell(triangle->GetCellType(), triangle->GetPointIds()); triangle->Delete(); line = vtkLine::New(); line->GetPointIds()->SetId(0, 14); line->GetPointIds()->SetId(1, 15); g->InsertNextCell(line->GetCellType(), line->GetPointIds()); line->Delete(); vertex = vtkVertex::New(); vertex->GetPointIds()->SetId(0, 15); g->InsertNextCell(vertex->GetCellType(), vertex->GetPointIds()); vertex->Delete(); strm << "Create a vtkBridgeDataSet" << endl; vtkBridgeDataSet* ds = vtkBridgeDataSet::New(); strm << "vtkBridgeDataSet created" << endl; strm << "Init the vtkBridgeDataSet with the unstructured grid" << endl; ds->SetDataSet(g); strm << "vtkBridgeDataSet initialized with the unstructured grid" << endl; MacroTest(strm, indent, "number of points", ds->GetNumberOfPoints() == 17); MacroTest(strm, indent, "number of cells -1", ds->GetNumberOfCells(-1) == 8); MacroTest(strm, indent, "number of cells 0", ds->GetNumberOfCells(0) == 2); MacroTest(strm, indent, "number of cells 1", ds->GetNumberOfCells(1) == 2); MacroTest(strm, indent, "number of cells 2", ds->GetNumberOfCells(2) == 2); MacroTest(strm, indent, "number of cells 3", ds->GetNumberOfCells(3) == 2); MacroTest(strm, indent, "cell dimension", ds->GetCellDimension() == -1); strm << "GetCellTypes() start" << endl; vtkCellTypes* types = vtkCellTypes::New(); ds->GetCellTypes(types); MacroTest(strm, indent, "cell types", types->GetNumberOfTypes() == 4); types->Delete(); strm << "GetCellTypes() end" << endl; strm << "NewCellIterator() start" << endl; int itNum = -1; int itCount = 4; int i; int count; std::string s; std::ostringstream ost; vtkGenericAdaptorCell* cab = nullptr; while (itNum < itCount) { vtkGenericCellIterator* it = ds->NewCellIterator(itNum); ost << "empty cell iterator " << itNum << " exists"; s = ost.str(); const char* cstring = s.c_str(); MacroTest(strm, indent, cstring, it != nullptr); it->Begin(); i = 0; count = ds->GetNumberOfCells(itNum); while (i < count) { ost.str(""); ost << "not finished cell iterator " << itNum; s = ost.str(); cstring = s.c_str(); MacroTest(strm, indent, cstring, !it->IsAtEnd()); ++i; cab = it->GetCell(); MacroTest(strm, indent, "cell at iterator position is set", cab != nullptr); it->Next(); } ost.str(""); ost << "Finished cell iterator " << itNum; s = ost.str(); cstring = s.c_str(); MacroTest(strm, indent, cstring, it->IsAtEnd()); it->Delete(); ++itNum; } strm << "NewCellIterator() end" << endl; double x[3]; double y[3]; strm << "NewPointIterator() start" << endl; vtkGenericPointIterator* pit = ds->NewPointIterator(); MacroTest(strm, indent, "point iterator exists", pit != nullptr); pit->Begin(); i = 0; count = ds->GetNumberOfPoints(); while (i < count) { MacroTest(strm, indent, "not finished point iterator", !pit->IsAtEnd()); pit->GetPosition(x); pts->GetPoint(i, y); MacroTest( strm, indent, "point iterator position", (x[0] == y[0]) && (x[1] == y[1]) && (x[2] == y[2])); MacroTest(strm, indent, "point iterator id", pit->GetId() == i); ++i; pit->Next(); } pit->Delete(); strm << "NewPointIterator() end" << endl; strm << " cell::GetPointIterator() start" << endl; vtkGenericCellIterator* it = ds->NewCellIterator(-1); it->Begin(); count = 0; pit = ds->NewPointIterator(); int count2 = 0; while (!it->IsAtEnd()) { cab = it->GetCell(); cab->GetPointIterator(pit); pit->Begin(); switch (count) { case 0: // tetra count2 = 0; while (!pit->IsAtEnd()) { MacroTest(strm, indent, "point iterator id", 
pit->GetId() == count2); pit->GetPosition(x); pts->GetPoint(pit->GetId(), y); MacroTest(strm, indent, "point iterator position", (x[0] == y[0]) && (x[1] == y[1]) && (x[2] == y[2])); pit->Next(); count2++; } break; case 1: // triangle count2 = 4; while (!pit->IsAtEnd()) { MacroTest(strm, indent, "point iterator id", pit->GetId() == count2); pit->GetPosition(x); pts->GetPoint(pit->GetId(), y); MacroTest(strm, indent, "point iterator position", (x[0] == y[0]) && (x[1] == y[1]) && (x[2] == y[2])); pit->Next(); count2++; } break; case 2: // line count2 = 7; while (!pit->IsAtEnd()) { MacroTest(strm, indent, "point iterator id", pit->GetId() == count2); pit->GetPosition(x); pts->GetPoint(pit->GetId(), y); MacroTest(strm, indent, "point iterator position", (x[0] == y[0]) && (x[1] == y[1]) && (x[2] == y[2])); pit->Next(); count2++; } break; case 3: // vertex count2 = 9; while (!pit->IsAtEnd()) { MacroTest(strm, indent, "point iterator id", pit->GetId() == count2); pit->GetPosition(x); pts->GetPoint(pit->GetId(), y); MacroTest(strm, indent, "point iterator position", (x[0] == y[0]) && (x[1] == y[1]) && (x[2] == y[2])); pit->Next(); count2++; } break; case 4: // tetra count2 = 10; while (!pit->IsAtEnd()) { MacroTest(strm, indent, "point iterator id", pit->GetId() == count2); pit->GetPosition(x); pts->GetPoint(pit->GetId(), y); MacroTest(strm, indent, "point iterator position", (x[0] == y[0]) && (x[1] == y[1]) && (x[2] == y[2])); pit->Next(); count2++; } break; case 5: // triangle count2 = 0; while (!pit->IsAtEnd()) { switch (count2) { case 0: MacroTest(strm, indent, "point iterator id", pit->GetId() == 11); break; case 1: MacroTest(strm, indent, "point iterator id", pit->GetId() == 14); break; case 2: MacroTest(strm, indent, "point iterator id", pit->GetId() == 12); break; default: MacroTest(strm, indent, "impossible case", 0); break; } pit->GetPosition(x); pts->GetPoint(pit->GetId(), y); MacroTest(strm, indent, "point iterator position", (x[0] == y[0]) && (x[1] == y[1]) && (x[2] == y[2])); pit->Next(); count2++; } break; case 6: // line count2 = 14; while (!pit->IsAtEnd()) { MacroTest(strm, indent, "point iterator id", pit->GetId() == count2); pit->GetPosition(x); pts->GetPoint(pit->GetId(), y); MacroTest(strm, indent, "point iterator position", (x[0] == y[0]) && (x[1] == y[1]) && (x[2] == y[2])); pit->Next(); count2++; } break; case 7: // vertex count2 = 15; while (!pit->IsAtEnd()) { MacroTest(strm, indent, "point iterator id", pit->GetId() == count2); pit->GetPosition(x); pts->GetPoint(pit->GetId(), y); MacroTest(strm, indent, "point iterator position", (x[0] == y[0]) && (x[1] == y[1]) && (x[2] == y[2])); pit->Next(); count2++; } break; default: MacroTest(strm, indent, "impossible case", 0); break; } ++count; it->Next(); } pit->Delete(); it->Delete(); strm << " cell::GetPointIterator() end" << endl; double bounds[6]; double center[3]; double* c; const double epsilon = 0.000001; // 10^{-6} strm << "GetBounds() start" << endl; const double* b = ds->GetBounds(); MacroTest(strm, indent, "volatile bounds exist", b != nullptr); strm << "bounds=(" << b[0] << ',' << b[1] << ',' << b[2] << ',' << b[3] << ',' << b[4] << ',' << b[5] << ')' << endl; MacroTest(strm, indent, "valid volatile bounds", (b[0] == 0) && (b[1] == 14) && (b[2] == -1) && (b[3] == 1) && (b[4] == 0) && (b[5] == 1)); ds->GetBounds(bounds); MacroTest(strm, indent, "valid bounds", (bounds[0] == 0) && (bounds[1] == 14) && (bounds[2] == -1) && (bounds[3] == 1) && (bounds[4] == 0) && (bounds[5] == 1)); c = ds->GetCenter(); MacroTest(strm, 
indent, "volatile center exists", c != nullptr); MacroTest(strm, indent, "volatile center", (fabs(c[0] - 7) < epsilon) && (fabs(c[1]) < epsilon) && (fabs(c[2] - 0.5) < epsilon)); ds->GetCenter(center); MacroTest(strm, indent, "valid center", (fabs(center[0] - 7) < epsilon) && (fabs(center[1]) < epsilon) && (fabs(center[2] - 0.5) < epsilon)); MacroTest(strm, indent, "diagonal length", fabs(ds->GetLength() - sqrt(201.0)) < epsilon); strm << "GetBounds() end" << endl; vtkGenericAttributeCollection* attributes = nullptr; attributes = ds->GetAttributes(); MacroTest(strm, indent, "attributes exist", attributes != nullptr); MacroTest(strm, indent, "empty attributes", attributes->IsEmpty()); MacroTest(strm, indent, "empty attributes", attributes->GetNumberOfAttributes() == 0); MacroTest(strm, indent, "empty attributes", attributes->GetNumberOfComponents() == 0); MacroTest(strm, indent, "empty attributes", attributes->GetMaxNumberOfComponents() == 0); strm << "vtkBridgeCell::GetBoundaryIterator() test start" << endl; // iterate over dataset cell // for each cell, get the boundaries of each dimension less than the cell // dimension // int i; // int count; // std::string s; // vtkOStrStreamWrapper *ost=0; // vtkGenericAdaptorCell *cab=0; int dim; it = ds->NewCellIterator(-1); MacroTest(strm, indent, "cell iterator on all data set cells exists", it != nullptr); it->Begin(); vtkGenericCellIterator* boundaries = ds->NewCellIterator(-1); // just for creation MacroTest(strm, indent, "boundaries exists", boundaries != nullptr); i = 0; count = ds->GetNumberOfCells(-1); vtkGenericAdaptorCell* cab2; while (i < count) { MacroTest(strm, indent, "not finished cell iterator", !it->IsAtEnd()); cab = it->GetCell(); dim = cab->GetDimension(); int currentDim = dim - 1; while (currentDim >= -1) { cab->GetBoundaryIterator(boundaries, currentDim); boundaries->Begin(); while (!boundaries->IsAtEnd()) { cab2 = boundaries->GetCell(); MacroTest(strm, indent, "the cell at iterator position is set", cab2 != nullptr); boundaries->Next(); } --currentDim; } ++i; it->Next(); } boundaries->Delete(); it->Delete(); strm << "vtkBridgeCell::GetBoundaryIterator() test end" << endl; #if 0 strm<<"NewBoundaryIterator() start"<<endl; it=ds->NewBoundaryIterator(-1,0); MacroTest(strm,indent,"empty boundary iterator -1,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty boundary iterator -1,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(0,0); MacroTest(strm,indent,"empty boundary iterator 0,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 0,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(1,0); MacroTest(strm,indent,"empty boundary iterator 1,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 1,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(2,0); MacroTest(strm,indent,"empty boundary iterator 2,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 2,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(3,0); MacroTest(strm,indent,"empty boundary iterator 3,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 3,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(-1,1); MacroTest(strm,indent,"empty boundary iterator -1,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty boundary iterator -1,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(0,1); MacroTest(strm,indent,"empty boundary iterator 0,true exists",it!=0); it->Begin(); 
MacroTest(strm,indent,"empty cell iterator 0,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(1,1); MacroTest(strm,indent,"empty boundary iterator 1,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 1,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(2,1); MacroTest(strm,indent,"empty boundary iterator 2,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 2,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(3,1); MacroTest(strm,indent,"empty boundary iterator 3,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 3,true",it->IsAtEnd()); it->Delete(); strm<<"NewBoundaryIterator() end"<<endl; #endif pts->Delete(); strm << "Delete the vtkBridgeDataSet" << endl; ds->Delete(); strm << "vtkBridgeDataSet deleted" << endl; strm << "Delete the vtkUnstructuredGrid" << endl; g->Delete(); strm << "vtkUnstructuredGrid deleted" << endl; strm << "Test vtkBridgeDataSet creation End" << endl; // Do the same thing for: // 3. a dataset with points and cells, and pointdata but no celldata // 4. a dataset with points and cells, and celldata but not pointdata // 5. a dataset with points and cells, and pointdata but celldata return 0; } //------------------------------------------------------------------------------ // a dataset with points and cells, pointdata but no celldata int TestWithPointsAndCellsAndPointData(ostream& strm) { vtkIndent indent; vtkPoints* pts; // actual test strm << "----------------------------------------------------------" << endl; strm << "TestWithPointsAndCellsAndPointData Start" << endl; strm << "----------------------------------------------------------" << endl; strm << "Create an empty vtkUnstructuredGrid" << endl; vtkUnstructuredGrid* g = vtkUnstructuredGrid::New(); strm << "Empty unstructured grid created" << endl; pts = vtkPoints::New(); pts->InsertNextPoint(0, 0, 0); // 0 pts->InsertNextPoint(1, -1, 0); // 1 pts->InsertNextPoint(1, 1, 0); // 2 pts->InsertNextPoint(0.5, 0, 1); // 3 pts->InsertNextPoint(2, -1, 0); // 4 pts->InsertNextPoint(3, 0, 0); // 5 pts->InsertNextPoint(2, 1, 0); // 6 pts->InsertNextPoint(4, 0, 0); // 7 pts->InsertNextPoint(5, 0, 0); // 8 pts->InsertNextPoint(6, 0, 0); // 9 pts->InsertNextPoint(10, 0, 0); // 10 0 pts->InsertNextPoint(11, -1, 0); // 11 1,4 pts->InsertNextPoint(11, 1, 0); // 12 2,6 pts->InsertNextPoint(10.5, 0, 1); // 13 pts->InsertNextPoint(12, 0, 0); // 14 // 5,7 pts->InsertNextPoint(13, 0, 0); // 15 // 8,9 pts->InsertNextPoint(14, 0, 0); // extra point strm << "Add points to the vtkUnstructuredGrid" << endl; g->SetPoints(pts); strm << "Points added to the vtkUnstructuredGrid" << endl; vtkTetra* tetra = vtkTetra::New(); tetra->GetPointIds()->SetId(0, 0); tetra->GetPointIds()->SetId(1, 1); tetra->GetPointIds()->SetId(2, 2); tetra->GetPointIds()->SetId(3, 3); g->InsertNextCell(tetra->GetCellType(), tetra->GetPointIds()); tetra->Delete(); vtkTriangle* triangle = vtkTriangle::New(); triangle->GetPointIds()->SetId(0, 4); triangle->GetPointIds()->SetId(1, 5); triangle->GetPointIds()->SetId(2, 6); g->InsertNextCell(triangle->GetCellType(), triangle->GetPointIds()); triangle->Delete(); vtkLine* line = vtkLine::New(); line->GetPointIds()->SetId(0, 7); line->GetPointIds()->SetId(1, 8); g->InsertNextCell(line->GetCellType(), line->GetPointIds()); line->Delete(); vtkVertex* vertex = vtkVertex::New(); vertex->GetPointIds()->SetId(0, 9); g->InsertNextCell(vertex->GetCellType(), vertex->GetPointIds()); vertex->Delete(); tetra = 
vtkTetra::New(); tetra->GetPointIds()->SetId(0, 10); tetra->GetPointIds()->SetId(1, 11); tetra->GetPointIds()->SetId(2, 12); tetra->GetPointIds()->SetId(3, 13); g->InsertNextCell(tetra->GetCellType(), tetra->GetPointIds()); tetra->Delete(); triangle = vtkTriangle::New(); triangle->GetPointIds()->SetId(0, 11); triangle->GetPointIds()->SetId(1, 14); triangle->GetPointIds()->SetId(2, 12); g->InsertNextCell(triangle->GetCellType(), triangle->GetPointIds()); triangle->Delete(); line = vtkLine::New(); line->GetPointIds()->SetId(0, 14); line->GetPointIds()->SetId(1, 15); g->InsertNextCell(line->GetCellType(), line->GetPointIds()); line->Delete(); vertex = vtkVertex::New(); vertex->GetPointIds()->SetId(0, 15); g->InsertNextCell(vertex->GetCellType(), vertex->GetPointIds()); vertex->Delete(); strm << "Add point data to the vtkUnstructuredGrid" << endl; int m = 0; vtkDoubleArray* attrib = vtkDoubleArray::New(); while (m < 17) { attrib->InsertNextValue(m + 100); ++m; } assert(g->GetPointData() != nullptr); g->GetPointData()->SetScalars(attrib); attrib->Delete(); attrib = nullptr; strm << "Point data added to the vtkUnstructuredGrid" << endl; strm << "Create a vtkBridgeDataSet" << endl; vtkBridgeDataSet* ds = vtkBridgeDataSet::New(); strm << "vtkBridgeDataSet created" << endl; strm << "Init the vtkBridgeDataSet with the unstructured grid" << endl; ds->SetDataSet(g); strm << "vtkBridgeDataSet initialized with the unstructured grid" << endl; MacroTest(strm, indent, "number of points", ds->GetNumberOfPoints() == 17); MacroTest(strm, indent, "number of cells -1", ds->GetNumberOfCells(-1) == 8); MacroTest(strm, indent, "number of cells 0", ds->GetNumberOfCells(0) == 2); MacroTest(strm, indent, "number of cells 1", ds->GetNumberOfCells(1) == 2); MacroTest(strm, indent, "number of cells 2", ds->GetNumberOfCells(2) == 2); MacroTest(strm, indent, "number of cells 3", ds->GetNumberOfCells(3) == 2); MacroTest(strm, indent, "cell dimension", ds->GetCellDimension() == -1); strm << "GetCellTypes() start" << endl; vtkCellTypes* types = vtkCellTypes::New(); ds->GetCellTypes(types); MacroTest(strm, indent, "cell types", types->GetNumberOfTypes() == 4); types->Delete(); strm << "GetCellTypes() end" << endl; strm << "NewCellIterator() start" << endl; int itNum = -1; int itCount = 4; int i; int count; std::string s; std::ostringstream ost; vtkGenericAdaptorCell* cab; while (itNum < itCount) { vtkGenericCellIterator* it = ds->NewCellIterator(itNum); ost << "empty cell iterator " << itNum << " exists"; s = ost.str(); const char* cstring = s.c_str(); MacroTest(strm, indent, cstring, it != nullptr); it->Begin(); i = 0; count = ds->GetNumberOfCells(itNum); while (i < count) { ost.str(""); ost << "not finished cell iterator " << itNum; s = ost.str(); cstring = s.c_str(); MacroTest(strm, indent, cstring, !it->IsAtEnd()); ++i; cab = it->GetCell(); MacroTest(strm, indent, "cell at current position is set", cab != nullptr); it->Next(); } ost.str(""); ost << "Finished cell iterator " << itNum; s = ost.str(); cstring = s.c_str(); MacroTest(strm, indent, cstring, it->IsAtEnd()); it->Delete(); ++itNum; } strm << "NewCellIterator() end" << endl; double x[3]; double y[3]; strm << "NewPointIterator() start" << endl; vtkGenericPointIterator* pit = ds->NewPointIterator(); MacroTest(strm, indent, "point iterator exists", pit != nullptr); pit->Begin(); i = 0; count = ds->GetNumberOfPoints(); while (i < count) { MacroTest(strm, indent, "not finished point iterator", !pit->IsAtEnd()); pit->GetPosition(x); pts->GetPoint(i, y); MacroTest( strm, 
indent, "point iterator position", (x[0] == y[0]) && (x[1] == y[1]) && (x[2] == y[2])); MacroTest(strm, indent, "point iterator id", pit->GetId() == i); ++i; pit->Next(); } pit->Delete(); strm << "NewPointIterator() end" << endl; double bounds[6]; double center[3]; double* c = nullptr; const double epsilon = 0.000001; // 10^{-6} strm << "GetBounds() start" << endl; const double* b = ds->GetBounds(); MacroTest(strm, indent, "volatile bounds exist", b != nullptr); strm << "bounds=(" << b[0] << ',' << b[1] << ',' << b[2] << ',' << b[3] << ',' << b[4] << ',' << b[5] << ')' << endl; MacroTest(strm, indent, "valid volatile bounds", (b[0] == 0) && (b[1] == 14) && (b[2] == -1) && (b[3] == 1) && (b[4] == 0) && (b[5] == 1)); ds->GetBounds(bounds); MacroTest(strm, indent, "valid bounds", (bounds[0] == 0) && (bounds[1] == 14) && (bounds[2] == -1) && (bounds[3] == 1) && (bounds[4] == 0) && (bounds[5] == 1)); c = ds->GetCenter(); MacroTest(strm, indent, "volatile center exists", c != nullptr); MacroTest(strm, indent, "volatile center", (fabs(c[0] - 7) < epsilon) && (fabs(c[1]) < epsilon) && (fabs(c[2] - 0.5) < epsilon)); ds->GetCenter(center); MacroTest(strm, indent, "valid center", (fabs(center[0] - 7) < epsilon) && (fabs(center[1]) < epsilon) && (fabs(center[2] - 0.5) < epsilon)); MacroTest(strm, indent, "diagonal length", fabs(ds->GetLength() - sqrt(201.0)) < epsilon); strm << "GetBounds() end" << endl; vtkGenericAttributeCollection* attributes = nullptr; attributes = ds->GetAttributes(); MacroTest(strm, indent, "attributes exist", attributes != nullptr); MacroTest(strm, indent, "not empty attributes", !attributes->IsEmpty()); MacroTest(strm, indent, "one attribute", attributes->GetNumberOfAttributes() == 1); MacroTest(strm, indent, "one scalar attribute", attributes->GetNumberOfComponents() == 1); MacroTest(strm, indent, "one scalar attribute", attributes->GetMaxNumberOfComponents() == 1); vtkGenericAttribute* attribute = nullptr; attribute = attributes->GetAttribute(0); MacroTest(strm, indent, "attribute exists", attribute != nullptr); MacroTest(strm, indent, "attribute name does not exist", attribute->GetName() == nullptr); int attribId; attribId = attributes->FindAttribute(""); MacroTest(strm, indent, "attribute not found", attribId == -1); g->GetPointData()->GetScalars()->SetName("pressure"); attribId = attributes->FindAttribute("pressure"); strm << "attribId=" << attribId << endl; MacroTest(strm, indent, "attribute found", attribId == 0); MacroTest(strm, indent, "attribute name exists", attribute->GetName() != nullptr); MacroTest(strm, indent, "valid attribute name", strcmp(attribute->GetName(), "pressure") == 0); MacroTest(strm, indent, "attribute components", attribute->GetNumberOfComponents() == 1); MacroTest(strm, indent, "attribute centering", attribute->GetCentering() == vtkPointCentered); MacroTest(strm, indent, "attribute type", attribute->GetComponentType() == VTK_DOUBLE); MacroTest(strm, indent, "attribute size", attribute->GetSize() == 17); double* range = attribute->GetRange(0); double myRange[2]; attribute->GetRange(0, myRange); MacroTest(strm, indent, "attribute component lower boundary", range[0] == 100); MacroTest(strm, indent, "attribute component upper boundary", range[1] == 116); MacroTest(strm, indent, "attribute component lower boundary", myRange[0] == 100); MacroTest(strm, indent, "attribute component upper boundary", myRange[1] == 116); MacroTest(strm, indent, "attribute max norm", fabs(attribute->GetMaxNorm() - 116) < 0.0001); strm << 
"vtkBridgeCell::GetBoundaryIterator() test start" << endl; // iterate over dataset cell // for each cell, get the boundaries of each dimension less than the cell // dimension // int i; // int count; // std::string s; // vtkOStrStreamWrapper *ost=0; // vtkGenericAdaptorCell *cab=0; int dim; vtkGenericCellIterator* it = ds->NewCellIterator(-1); MacroTest(strm, indent, "cell iterator on all data set cells exists", it != nullptr); it->Begin(); vtkGenericCellIterator* boundaries = ds->NewCellIterator(-1); // just for creation MacroTest(strm, indent, "boundaries exists", boundaries != nullptr); i = 0; count = ds->GetNumberOfCells(-1); vtkGenericAdaptorCell* cab2; while (i < count) { MacroTest(strm, indent, "not finished cell iterator", !it->IsAtEnd()); cab = it->GetCell(); dim = cab->GetDimension(); int currentDim = dim - 1; while (currentDim >= -1) { cab->GetBoundaryIterator(boundaries, currentDim); boundaries->Begin(); while (!boundaries->IsAtEnd()) { cab2 = boundaries->GetCell(); MacroTest(strm, indent, "the cell at iterator position is set", cab2 != nullptr); boundaries->Next(); } --currentDim; } ++i; it->Next(); } boundaries->Delete(); it->Delete(); strm << "vtkBridgeCell::GetBoundaryIterator() test end" << endl; // Description: // Attribute at all points of cell `c'. // \pre c_exists: c!=0 // \pre c_valid: !c->IsAtEnd() // \post result_exists: result!=0 // \post valid_result: sizeof(result)==GetNumberOfComponents()*c->GetCell()->GetNumberOfPoints() strm << "GetTuple() on cell iterator start" << endl; it = ds->NewCellIterator(-1); // tetra1 it->Begin(); double* tuples = attribute->GetTuple(it); double myTuples[4]; MacroTest(strm, indent, "tetra1, pt0", tuples[0] == 100); MacroTest(strm, indent, "tetra1, pt1", tuples[1] == 101); MacroTest(strm, indent, "tetra1, pt2", tuples[2] == 102); MacroTest(strm, indent, "tetra1, pt3", tuples[3] == 103); attribute->GetTuple(it, myTuples); MacroTest(strm, indent, "tetra1, pt0", myTuples[0] == 100); MacroTest(strm, indent, "tetra1, pt1", myTuples[1] == 101); MacroTest(strm, indent, "tetra1, pt2", myTuples[2] == 102); MacroTest(strm, indent, "tetra1, pt3", myTuples[3] == 103); // triangle1 it->Next(); tuples = attribute->GetTuple(it); MacroTest(strm, indent, "tri1, pt0", tuples[0] == 104); MacroTest(strm, indent, "tri1, pt1", tuples[1] == 105); MacroTest(strm, indent, "tri1, pt2", tuples[2] == 106); attribute->GetTuple(it, myTuples); MacroTest(strm, indent, "tri1, pt0", myTuples[0] == 104); MacroTest(strm, indent, "tri1, pt1", myTuples[1] == 105); MacroTest(strm, indent, "tri1, pt2", myTuples[2] == 106); // line1 it->Next(); tuples = attribute->GetTuple(it); MacroTest(strm, indent, "line1, pt0", tuples[0] == 107); MacroTest(strm, indent, "line1, pt1", tuples[1] == 108); attribute->GetTuple(it, myTuples); MacroTest(strm, indent, "line1, pt0", myTuples[0] == 107); MacroTest(strm, indent, "line1, pt1", myTuples[1] == 108); // vertex1 it->Next(); tuples = attribute->GetTuple(it); MacroTest(strm, indent, "vertex1, pt0", tuples[0] == 109); attribute->GetTuple(it, myTuples); MacroTest(strm, indent, "vertex1, pt0", myTuples[0] == 109); // tetra2 it->Next(); tuples = attribute->GetTuple(it); MacroTest(strm, indent, "tetra2, pt0", tuples[0] == 110); MacroTest(strm, indent, "tetra2, pt1", tuples[1] == 111); MacroTest(strm, indent, "tetra2, pt2", tuples[2] == 112); MacroTest(strm, indent, "tetra2, pt3", tuples[3] == 113); attribute->GetTuple(it, myTuples); MacroTest(strm, indent, "tetra2, pt0", myTuples[0] == 110); MacroTest(strm, indent, "tetra2, pt1", myTuples[1] == 
111); MacroTest(strm, indent, "tetra2, pt2", myTuples[2] == 112); MacroTest(strm, indent, "tetra2, pt3", myTuples[3] == 113); // triangle2 it->Next(); tuples = attribute->GetTuple(it); MacroTest(strm, indent, "tri2, pt0", tuples[0] == 111); MacroTest(strm, indent, "tri2, pt1", tuples[1] == 114); MacroTest(strm, indent, "tri2, pt2", tuples[2] == 112); attribute->GetTuple(it, myTuples); MacroTest(strm, indent, "tri2, pt0", myTuples[0] == 111); MacroTest(strm, indent, "tri2, pt1", myTuples[1] == 114); MacroTest(strm, indent, "tri2, pt2", myTuples[2] == 112); // line1 it->Next(); tuples = attribute->GetTuple(it); MacroTest(strm, indent, "line2, pt0", tuples[0] == 114); MacroTest(strm, indent, "line2, pt1", tuples[1] == 115); attribute->GetTuple(it, myTuples); MacroTest(strm, indent, "line2, pt0", myTuples[0] == 114); MacroTest(strm, indent, "line2, pt1", myTuples[1] == 115); // vertex2 it->Next(); tuples = attribute->GetTuple(it); MacroTest(strm, indent, "vertex2, pt0", tuples[0] == 115); attribute->GetTuple(it, myTuples); MacroTest(strm, indent, "vertex2, pt0", myTuples[0] == 115); it->Delete(); strm << "GetTuple() on cell iterator end" << endl; strm << "GetTuple() on point iterator start" << endl; pit = ds->NewPointIterator(); pit->Begin(); m = 100; while (!pit->IsAtEnd()) { tuples = attribute->GetTuple(pit); MacroTest(strm, indent, "valid point tuple", tuples[0] == m); attribute->GetTuple(pit, myTuples); MacroTest(strm, indent, "valid point tuple", myTuples[0] == m); pit->Next(); ++m; } pit->Delete(); strm << "GetTuple() on point iterator end" << endl; strm << "GetComponent() on cell iterator start" << endl; it = ds->NewCellIterator(-1); // tetra1 it->Begin(); attribute->GetComponent(0, it, myTuples); MacroTest(strm, indent, "tetra1, pt0", myTuples[0] == 100); MacroTest(strm, indent, "tetra1, pt1", myTuples[1] == 101); MacroTest(strm, indent, "tetra1, pt2", myTuples[2] == 102); MacroTest(strm, indent, "tetra1, pt3", myTuples[3] == 103); // triangle1 it->Next(); attribute->GetComponent(0, it, myTuples); MacroTest(strm, indent, "tri1, pt0", myTuples[0] == 104); MacroTest(strm, indent, "tri1, pt1", myTuples[1] == 105); MacroTest(strm, indent, "tri1, pt2", myTuples[2] == 106); // line1 it->Next(); attribute->GetComponent(0, it, myTuples); MacroTest(strm, indent, "line1, pt0", myTuples[0] == 107); MacroTest(strm, indent, "line1, pt1", myTuples[1] == 108); // vertex1 it->Next(); attribute->GetComponent(0, it, myTuples); MacroTest(strm, indent, "vertex1, pt0", myTuples[0] == 109); // tetra2 it->Next(); attribute->GetComponent(0, it, myTuples); MacroTest(strm, indent, "tetra2, pt0", myTuples[0] == 110); MacroTest(strm, indent, "tetra2, pt1", myTuples[1] == 111); MacroTest(strm, indent, "tetra2, pt2", myTuples[2] == 112); MacroTest(strm, indent, "tetra2, pt3", myTuples[3] == 113); // triangle2 it->Next(); attribute->GetComponent(0, it, myTuples); MacroTest(strm, indent, "tri2, pt0", myTuples[0] == 111); MacroTest(strm, indent, "tri2, pt1", myTuples[1] == 114); MacroTest(strm, indent, "tri2, pt2", myTuples[2] == 112); // line1 it->Next(); attribute->GetComponent(0, it, myTuples); MacroTest(strm, indent, "line2, pt0", myTuples[0] == 114); MacroTest(strm, indent, "line2, pt1", myTuples[1] == 115); // vertex2 it->Next(); attribute->GetComponent(0, it, myTuples); MacroTest(strm, indent, "vertex2, pt0", myTuples[0] == 115); it->Delete(); strm << "GetComponent() on cell iterator end" << endl; strm << "GetComponent() on point iterator start" << endl; pit = ds->NewPointIterator(); pit->Begin(); m = 100; 
while (!pit->IsAtEnd()) { MacroTest(strm, indent, "valid point tuple", attribute->GetComponent(0, pit) == m); pit->Next(); ++m; } pit->Delete(); strm << "GetComponent() on point iterator end" << endl; // InterpolateTuple() strm << "InterpolateTuple() start" << endl; it = ds->NewCellIterator(-1); // tetra1 it->Begin(); double pcoords[3]; pcoords[0] = 0; pcoords[1] = 0; pcoords[2] = 0; it->GetCell()->InterpolateTuple(attribute, pcoords, myTuples); MacroTest(strm, indent, "valid interpolation p0", myTuples[0] == 100); pcoords[0] = 1; pcoords[1] = 0; pcoords[2] = 0; it->GetCell()->InterpolateTuple(attribute, pcoords, myTuples); MacroTest(strm, indent, "valid interpolation p1", myTuples[0] == 101); pcoords[0] = 0; pcoords[1] = 1; pcoords[2] = 0; it->GetCell()->InterpolateTuple(attribute, pcoords, myTuples); MacroTest(strm, indent, "valid interpolation p2", myTuples[0] == 102); pcoords[0] = 0; pcoords[1] = 0; pcoords[2] = 1; it->GetCell()->InterpolateTuple(attribute, pcoords, myTuples); MacroTest(strm, indent, "valid interpolation p3", myTuples[0] == 103); pcoords[0] = 0.5; pcoords[1] = 0; pcoords[2] = 0; it->GetCell()->InterpolateTuple(attribute, pcoords, myTuples); MacroTest(strm, indent, "valid interpolation mid p0p1", myTuples[0] == 100.5); pcoords[0] = 0; pcoords[1] = 0.5; pcoords[2] = 0; it->GetCell()->InterpolateTuple(attribute, pcoords, myTuples); MacroTest(strm, indent, "valid interpolation mid p0p2", myTuples[0] == 101); pcoords[0] = 0; pcoords[1] = 0; pcoords[2] = 0.5; it->GetCell()->InterpolateTuple(attribute, pcoords, myTuples); MacroTest(strm, indent, "valid interpolation mid p0p3", myTuples[0] == 101.5); pcoords[0] = 0.5; pcoords[1] = 0.5; pcoords[2] = 0; it->GetCell()->InterpolateTuple(attribute, pcoords, myTuples); MacroTest(strm, indent, "valid interpolation mid p1p2", myTuples[0] == 101.5); pcoords[0] = 0.5; pcoords[1] = 0; pcoords[2] = 0.5; it->GetCell()->InterpolateTuple(attribute, pcoords, myTuples); MacroTest(strm, indent, "valid interpolation mid p1p3", myTuples[0] == 102); pcoords[0] = 0; pcoords[1] = 0.5; pcoords[2] = 0.5; it->GetCell()->InterpolateTuple(attribute, pcoords, myTuples); MacroTest(strm, indent, "valid interpolation mid p2p3", myTuples[0] == 102.5); it->Delete(); strm << "InterpolateTuple() end" << endl; #if 0 strm<<"NewBoundaryIterator() start"<<endl; it=ds->NewBoundaryIterator(-1,0); MacroTest(strm,indent,"empty boundary iterator -1,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty boundary iterator -1,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(0,0); MacroTest(strm,indent,"empty boundary iterator 0,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 0,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(1,0); MacroTest(strm,indent,"empty boundary iterator 1,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 1,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(2,0); MacroTest(strm,indent,"empty boundary iterator 2,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 2,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(3,0); MacroTest(strm,indent,"empty boundary iterator 3,false exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 3,false",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(-1,1); MacroTest(strm,indent,"empty boundary iterator -1,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty boundary iterator -1,true",it->IsAtEnd()); it->Delete(); 
it=ds->NewBoundaryIterator(0,1); MacroTest(strm,indent,"empty boundary iterator 0,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 0,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(1,1); MacroTest(strm,indent,"empty boundary iterator 1,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 1,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(2,1); MacroTest(strm,indent,"empty boundary iterator 2,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 2,true",it->IsAtEnd()); it->Delete(); it=ds->NewBoundaryIterator(3,1); MacroTest(strm,indent,"empty boundary iterator 3,true exists",it!=0); it->Begin(); MacroTest(strm,indent,"empty cell iterator 3,true",it->IsAtEnd()); it->Delete(); strm<<"NewBoundaryIterator() end"<<endl; #endif pts->Delete(); strm << "Delete the vtkBridgeDataSet" << endl; ds->Delete(); strm << "vtkBridgeDataSet deleted" << endl; strm << "Delete the vtkUnstructuredGrid" << endl; g->Delete(); strm << "vtkUnstructuredGrid deleted" << endl; strm << "Test vtkBridgeDataSet creation End" << endl; // Do the same thing for: // 4. a dataset with points and cells, and celldata but not pointdata // 5. a dataset with points and cells, and pointdata but celldata return 0; } int otherCreation(int vtkNotUsed(argc), char* vtkNotUsed(argv)[]) { if (TestEmpty(cout)) { return 1; } if (TestWithPoints(cout)) { return 1; } if (TestWithPointsAndCells(cout)) { return 1; } if (TestWithPointsAndCellsAndPointData(cout)) { return 1; } return 0; }
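The InterpolateTuple() expectations in the test above follow from linear shape functions over the tetrahedron's parametric coordinates. A small Python sketch (an illustration of the arithmetic, not VTK code) that reproduces the checked midpoint values, with the vertex attribute values 100..103 taken from the test and the standard linear tetra shape functions assumed:

# Linear tetrahedron interpolation: N0 = 1-r-s-t, N1 = r, N2 = s, N3 = t
def interpolate_tetra(values, r, s, t):
    n0 = 1.0 - r - s - t
    return n0 * values[0] + r * values[1] + s * values[2] + t * values[3]

values = [100.0, 101.0, 102.0, 103.0]                       # attribute at the four tetra points
assert interpolate_tetra(values, 0.5, 0.0, 0.0) == 100.5    # mid p0p1
assert interpolate_tetra(values, 0.0, 0.5, 0.0) == 101.0    # mid p0p2
assert interpolate_tetra(values, 0.0, 0.0, 0.5) == 101.5    # mid p0p3
assert interpolate_tetra(values, 0.5, 0.5, 0.0) == 101.5    # mid p1p2
assert interpolate_tetra(values, 0.5, 0.0, 0.5) == 102.0    # mid p1p3
assert interpolate_tetra(values, 0.0, 0.5, 0.5) == 102.5    # mid p2p3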
lecousin/java-framework-core
net.lecousin.core/src/test/java/net/lecousin/framework/core/tests/collections/TestAsyncCollection.java
<filename>net.lecousin.core/src/test/java/net/lecousin/framework/core/tests/collections/TestAsyncCollection.java package net.lecousin.framework.core.tests.collections; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import org.junit.Assert; import org.junit.Test; import net.lecousin.framework.collections.AsyncCollection; import net.lecousin.framework.collections.CollectionsUtil; import net.lecousin.framework.core.test.LCCoreAbstractTest; import net.lecousin.framework.mutable.MutableBoolean; public class TestAsyncCollection extends LCCoreAbstractTest { @Test public void testListen() { List<Integer> list = new LinkedList<>(); MutableBoolean done = new MutableBoolean(false); MutableBoolean err = new MutableBoolean(false); AsyncCollection.Listen<Integer> col = new AsyncCollection.Listen<>( (elements) -> { list.addAll(elements); }, () -> { done.set(true); }, (error) -> { err.set(true); } ); Assert.assertTrue(list.isEmpty()); Assert.assertFalse(done.get()); Assert.assertFalse(col.isDone()); Assert.assertFalse(err.get()); Assert.assertFalse(col.hasError()); col.newElements(Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111))); Assert.assertTrue(CollectionsUtil.equals(list, Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111)))); Assert.assertFalse(done.get()); Assert.assertFalse(col.isDone()); Assert.assertFalse(err.get()); Assert.assertFalse(col.hasError()); col.newElements(Arrays.asList(Integer.valueOf(2), Integer.valueOf(22))); Assert.assertTrue(CollectionsUtil.equals(list, Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111), Integer.valueOf(2), Integer.valueOf(22)))); Assert.assertFalse(done.get()); Assert.assertFalse(col.isDone()); Assert.assertFalse(err.get()); Assert.assertFalse(col.hasError()); col.done(); Assert.assertTrue(CollectionsUtil.equals(list, Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111), Integer.valueOf(2), Integer.valueOf(22)))); Assert.assertTrue(done.get()); Assert.assertTrue(col.isDone()); Assert.assertFalse(err.get()); Assert.assertFalse(col.hasError()); } @Test public void testListenError() { List<Integer> list = new LinkedList<>(); MutableBoolean done = new MutableBoolean(false); MutableBoolean err = new MutableBoolean(false); AsyncCollection.Listen<Integer> col = new AsyncCollection.Listen<>( (elements) -> { list.addAll(elements); }, () -> { done.set(true); }, (error) -> { err.set(true); } ); Assert.assertTrue(list.isEmpty()); Assert.assertFalse(done.get()); Assert.assertFalse(col.isDone()); Assert.assertFalse(err.get()); Assert.assertFalse(col.hasError()); col.newElements(Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111))); Assert.assertTrue(CollectionsUtil.equals(list, Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111)))); Assert.assertFalse(done.get()); Assert.assertFalse(col.isDone()); Assert.assertFalse(err.get()); Assert.assertFalse(col.hasError()); col.newElements(Arrays.asList(Integer.valueOf(2), Integer.valueOf(22))); Assert.assertTrue(CollectionsUtil.equals(list, Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111), Integer.valueOf(2), Integer.valueOf(22)))); Assert.assertFalse(done.get()); Assert.assertFalse(col.isDone()); Assert.assertFalse(err.get()); Assert.assertFalse(col.hasError()); col.error(new Exception()); Assert.assertTrue(CollectionsUtil.equals(list, Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111), Integer.valueOf(2), 
Integer.valueOf(22)))); Assert.assertFalse(done.get()); Assert.assertFalse(col.isDone()); Assert.assertTrue(err.get()); Assert.assertTrue(col.hasError()); } @Test public void testListenErrorWithoutListener() { List<Integer> list = new LinkedList<>(); MutableBoolean done = new MutableBoolean(false); AsyncCollection.Listen<Integer> col = new AsyncCollection.Listen<>( (elements) -> { list.addAll(elements); }, () -> { done.set(true); }, null ); Assert.assertTrue(list.isEmpty()); Assert.assertFalse(done.get()); Assert.assertFalse(col.isDone()); Assert.assertFalse(col.hasError()); col.newElements(Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111))); Assert.assertTrue(CollectionsUtil.equals(list, Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111)))); Assert.assertFalse(done.get()); Assert.assertFalse(col.isDone()); Assert.assertFalse(col.hasError()); col.newElements(Arrays.asList(Integer.valueOf(2), Integer.valueOf(22))); Assert.assertTrue(CollectionsUtil.equals(list, Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111), Integer.valueOf(2), Integer.valueOf(22)))); Assert.assertFalse(done.get()); Assert.assertFalse(col.isDone()); Assert.assertFalse(col.hasError()); col.error(new Exception()); Assert.assertTrue(CollectionsUtil.equals(list, Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111), Integer.valueOf(2), Integer.valueOf(22)))); Assert.assertFalse(done.get()); Assert.assertFalse(col.isDone()); Assert.assertTrue(col.hasError()); } @Test public void testKeep() { MutableBoolean done = new MutableBoolean(false); AsyncCollection.Keep<Integer> col = new AsyncCollection.Keep<>(); col.ondone(() -> { done.set(true); }); Assert.assertTrue(col.getCurrentElements().isEmpty()); Assert.assertFalse(done.get()); col.newElements(Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111))); Assert.assertTrue(CollectionsUtil.equals(col.getCurrentElements(), Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111)))); Assert.assertFalse(done.get()); col.newElements(Arrays.asList(Integer.valueOf(2), Integer.valueOf(22))); Assert.assertTrue(CollectionsUtil.equals(col.getCurrentElements(), Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111), Integer.valueOf(2), Integer.valueOf(22)))); Assert.assertFalse(done.get()); Assert.assertFalse(col.isDone()); col.done(); Assert.assertTrue(CollectionsUtil.equals(col.getCurrentElements(), Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111), Integer.valueOf(2), Integer.valueOf(22)))); Assert.assertTrue(done.get()); Assert.assertTrue(col.isDone()); } @Test public void testAggregator() { AsyncCollection.Keep<Integer> main = new AsyncCollection.Keep<>(); AsyncCollection.Aggregator<Integer> aggr = new AsyncCollection.Aggregator<>(3, main); MutableBoolean done = new MutableBoolean(false); main.ondone(() -> { done.set(true); }); Assert.assertTrue(main.getCurrentElements().isEmpty()); Assert.assertFalse(done.get()); Assert.assertFalse(main.hasError()); aggr.newElements(Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111))); Assert.assertTrue(CollectionsUtil.equals(main.getCurrentElements(), Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111)))); Assert.assertFalse(done.get()); Assert.assertFalse(main.isDone()); Assert.assertFalse(main.hasError()); aggr.done(); Assert.assertFalse(done.get()); Assert.assertFalse(main.isDone()); 
Assert.assertFalse(main.hasError()); aggr.newElements(Arrays.asList(Integer.valueOf(2), Integer.valueOf(22))); Assert.assertTrue(CollectionsUtil.equals(main.getCurrentElements(), Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111), Integer.valueOf(2), Integer.valueOf(22)))); Assert.assertFalse(done.get()); Assert.assertFalse(main.isDone()); Assert.assertFalse(main.hasError()); aggr.done(); Assert.assertFalse(done.get()); Assert.assertFalse(main.isDone()); Assert.assertFalse(main.hasError()); aggr.newElements(Arrays.asList(Integer.valueOf(3))); Assert.assertTrue(CollectionsUtil.equals(main.getCurrentElements(), Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111), Integer.valueOf(2), Integer.valueOf(22), Integer.valueOf(3)))); Assert.assertFalse(done.get()); Assert.assertFalse(main.isDone()); Assert.assertFalse(aggr.isDone()); Assert.assertFalse(main.hasError()); Assert.assertFalse(aggr.hasError()); aggr.done(); Assert.assertTrue(done.get()); Assert.assertTrue(main.isDone()); Assert.assertTrue(aggr.isDone()); Assert.assertTrue(CollectionsUtil.equals(main.getCurrentElements(), Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111), Integer.valueOf(2), Integer.valueOf(22), Integer.valueOf(3)))); Assert.assertFalse(main.hasError()); } @Test public void testAggregatorError() { AsyncCollection.Keep<Integer> main = new AsyncCollection.Keep<>(); AsyncCollection.Aggregator<Integer> aggr = new AsyncCollection.Aggregator<>(3, main); MutableBoolean done = new MutableBoolean(false); main.ondone(() -> { done.set(true); }); Assert.assertTrue(main.getCurrentElements().isEmpty()); Assert.assertFalse(done.get()); Assert.assertFalse(main.hasError()); aggr.newElements(Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111))); Assert.assertTrue(CollectionsUtil.equals(main.getCurrentElements(), Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111)))); Assert.assertFalse(done.get()); Assert.assertFalse(main.isDone()); Assert.assertFalse(main.hasError()); aggr.done(); Assert.assertFalse(done.get()); Assert.assertFalse(main.isDone()); Assert.assertFalse(main.hasError()); aggr.newElements(Arrays.asList(Integer.valueOf(2), Integer.valueOf(22))); Assert.assertTrue(CollectionsUtil.equals(main.getCurrentElements(), Arrays.asList(Integer.valueOf(1), Integer.valueOf(11), Integer.valueOf(111), Integer.valueOf(2), Integer.valueOf(22)))); Assert.assertFalse(done.get()); Assert.assertFalse(main.isDone()); Assert.assertFalse(main.hasError()); aggr.error(new Exception()); Assert.assertFalse(done.get()); Assert.assertFalse(main.isDone()); Assert.assertTrue(main.hasError()); aggr.done(); Assert.assertFalse(done.get()); Assert.assertFalse(main.isDone()); Assert.assertTrue(main.hasError()); aggr.done(); Assert.assertFalse(done.get()); Assert.assertFalse(aggr.isDone()); Assert.assertFalse(main.isDone()); Assert.assertTrue(main.hasError()); } }
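The aggregator tests above pin down the intended contract: elements are forwarded to the target collection immediately, the target is marked done only after done() has been signalled the declared number of times, and an error propagates without ever completing the target. A conceptual Python sketch of that contract (an illustration only, not the lecousin API):

class KeepCollection:
    def __init__(self):
        self.elements, self.done, self.error = [], False, None
    def new_elements(self, items): self.elements.extend(items)
    def mark_done(self): self.done = True
    def mark_error(self, err): self.error = err

class Aggregator:
    def __init__(self, expected_done_calls, target):
        self.remaining = expected_done_calls
        self.target = target
    def new_elements(self, items):
        self.target.new_elements(items)          # forward immediately
    def done(self):
        self.remaining -= 1
        if self.remaining == 0 and self.target.error is None:
            self.target.mark_done()              # complete only after the last done()
    def error(self, err):
        self.target.mark_error(err)              # an error never completes the target

main = KeepCollection()
aggr = Aggregator(3, main)
for batch in ([1, 11, 111], [2, 22], [3]):
    aggr.new_elements(batch)
    aggr.done()
assert main.done and main.elements == [1, 11, 111, 2, 22, 3]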
rcooke-ast/VICE
vice/src/multizone/hydrodiskstars.c
/* * This file implements the interaction between the hydrodiskstars object and * and the multizone object, specifically the fast-tracked star particle setup. */ #include <stdlib.h> #include "hydrodiskstars.h" #include "../toolkit/hydrodiskstars.h" #include "../utils.h" #include "../singlezone.h" /* The hydrodiskstars object that drives this module */ static HYDRODISKSTARS *HDS; /* * Set the hydrodiskstars object globally. * * Parameters * ========== * address: The address of the hydrodiskstars object * * Notes * ===== * This is necessary to avoid issues with the Cython compiler. Since the * multizone and hydrodiskstars objects are separate, the _hds attribute of * the c_hydrodiskstars object cannot be accessed by the multizone object. * * header: hydrodiskstars.h */ extern void set_hydrodiskstars_object(unsigned long address) { HDS = (HYDRODISKSTARS *) ((void *) address); } /* * Setup the zone history for a single tracer object born in a given zone and * at a given timestep. * * Parameters * ========== * mz: The multizone object * hds: The hydrodiskstars object * t: A pointer to the tracer object being set up * birth_zone: The zone of birth * birth_timestep: The timestep of birth * analog_index: The index of the analog star particle in the hds data * * Returns * ======= * 0 on success, 1 on failure. * * header: hydrodiskstars.h */ extern unsigned short setup_hydrodisk_tracer(MULTIZONE mz, TRACER *t, unsigned int birth_zone, unsigned long birth_timestep, long analog_index) { /* The timestep size plus time and radius at which the star is born */ double dt = (*mz.zones[0]).dt; double birth_time = birth_timestep * dt; double birth_radius = ( ((*HDS).rad_bins[birth_zone] + (*HDS).rad_bins[birth_zone + 1u]) / 2 ); /* In case of sudden migration, this can't be done in the for-loop */ double migration_time = rand_range(birth_time, HYDRODISK_END_TIME); /* * The analog star particle will already be assigned by calling the * hydrodiskstars object in python, retaining the user's ability to write * additional output when subclassing the hydrodiskstars object. */ unsigned long i, N = n_timesteps(*mz.zones[0]); t -> zone_history = (int *) malloc (N * sizeof(int)); for (i = 0ul; i < N; i++) { if (i < birth_timestep) { /* Zone number is always -1 until it is born */ t -> zone_history[i] = -1; } else if (i == birth_timestep || birth_timestep >= N - BUFFER) { /* * This is either the timestep of birth, or the star forms in the * buffer timesteps. In either case, the zone number must be the * birth zone. */ t -> zone_history[i] = (signed) birth_zone; } else if (i >= N - BUFFER) { /* * If this timestep is in the buffer, assign it to value from * just outside the buffer. */ t -> zone_history[i] = (*t).zone_history[N - BUFFER - 1ul]; } else if (mz.simple && i != N - BUFFER - 1ul) { /* * If running in simple mode, the zone history should always be * the birth zone right up until the buffer, at which point it * switches. The second condition in the if statement allows this * algorithm to naturally proceed to the case-switch block in * below in the else-condition for exactly one iteration of the * for-loop to achieve this. */ t -> zone_history[i] = (signed) birth_zone; } else { /* * At this intermediate time, use the calczone_* functions that * the hydrodiskstars object employs anyway to determine the zone * number. 
*/ switch(checksum((*HDS).mode)) { case LINEAR_MIGRATION: t -> zone_history[i] = (int) calczone_linear(*HDS, birth_time, birth_radius, HYDRODISK_END_TIME, analog_index, i * dt); break; case SUDDEN_MIGRATION: t -> zone_history[i] = (int) calczone_sudden(*HDS, migration_time, birth_radius, analog_index, i * dt); break; case DIFFUSION_MIGRATION: t -> zone_history[i] = (int) calczone_diffusive(*HDS, birth_time, birth_radius, HYDRODISK_END_TIME, analog_index, i * dt); break; default: return 1u; /* error handling */ } } } t -> timestep_origin = birth_timestep; t -> zone_origin = birth_zone; if (mz.simple) { t -> zone_current = (unsigned) t -> zone_history[N - BUFFER]; } else { t -> zone_current = birth_zone; } return 0u; }
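The comments in setup_hydrodisk_tracer spell out the zone-history rules in prose; the following is a compact Python paraphrase of that control flow. It is a sketch only: calczone stands in for the calczone_linear / calczone_sudden / calczone_diffusive dispatch, and the parameter names are descriptive, not the VICE API.

def build_zone_history(n_timesteps, buffer, birth_timestep, birth_zone, simple, calczone):
    history = []
    for i in range(n_timesteps):
        if i < birth_timestep:
            history.append(-1)                                 # star not yet born
        elif i == birth_timestep or birth_timestep >= n_timesteps - buffer:
            history.append(birth_zone)                         # birth step, or star born inside the buffer
        elif i >= n_timesteps - buffer:
            history.append(history[n_timesteps - buffer - 1])  # buffer: freeze at the last pre-buffer zone
        elif simple and i != n_timesteps - buffer - 1:
            history.append(birth_zone)                         # simple mode: stay in the birth zone
        else:
            history.append(calczone(i))                        # analog-based zone; in simple mode this
                                                               # runs only at the final pre-buffer step
    return history

# e.g. a star born at timestep 3 in zone 5 whose analog never migrates:
print(build_zone_history(10, 2, 3, 5, simple=False, calczone=lambda i: 5))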
entanmo/mall
okex-java-sdk-api/src/main/java/com/okcoin/commons/okex/open/api/bean/spot/result/OrderInfo.java
<filename>okex-java-sdk-api/src/main/java/com/okcoin/commons/okex/open/api/bean/spot/result/OrderInfo.java
package com.okcoin.commons.okex.open.api.bean.spot.result;

public class OrderInfo {
    /**
     * Order id
     */
    private Long order_id;
    /**
     * Price information for limit-type orders
     */
    private String price;
    /**
     * Price (notional) information for market-type orders
     */
    private String notional;
    /**
     * Order quantity
     */
    private String size;
    /**
     * Average fill price
     */
    //private String avg_price;
    /**
     * Order placement time
     */
    private String timestamp;
    /**
     * Filled quantity
     */
    private String filled_size;
    /**
     * Order status: -1 cancelled, 0 unfilled
     */
    private String status;
    /**
     * Order side: buy/sell
     */
    private String side;
    /**
     * Order type: limit/market
     */
    private String type;
    /**
     * Trading pair (instrument)
     */
    private String instrument_id;
    /**
     * Filled amount in quote currency
     */
    private String filled_notional;

    public Long getOrder_id() { return this.order_id; }
    public void setOrder_id(final Long order_id) { this.order_id = order_id; }
    public String getPrice() { return this.price; }
    public void setPrice(final String price) { this.price = price; }
    public String getNotional() { return this.notional; }
    public void setNotional(final String notional) { this.notional = notional; }
    public String getSize() { return this.size; }
    public void setSize(final String size) { this.size = size; }
    public String getTimestamp() { return this.timestamp; }
    public void setTimestamp(final String timestamp) { this.timestamp = timestamp; }
    public String getFilled_size() { return this.filled_size; }
    public void setFilled_size(final String filled_size) { this.filled_size = filled_size; }
    public String getStatus() { return this.status; }
    public void setStatus(final String status) { this.status = status; }
    public String getSide() { return this.side; }
    public void setSide(final String side) { this.side = side; }
    public String getType() { return this.type; }
    public void setType(final String type) { this.type = type; }
    public String getInstrument_id() { return this.instrument_id; }
    public void setInstrument_id(final String instrument_id) { this.instrument_id = instrument_id; }
    public String getFilled_notional() { return this.filled_notional; }
    public void setFilled_notional(final String filled_notional) { this.filled_notional = filled_notional; }
}
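As a hedged illustration of how these fields are meant to be read (price applies to limit orders, notional to market orders, status -1 means cancelled and 0 unfilled), here is a short Python sketch. The dict keys mirror the getters above; the concrete values and any behaviour beyond what the class comments state are assumptions, not OKEx documentation.

def describe_order(order):
    price_info = order["price"] if order["type"] == "limit" else order["notional"]
    status = {"-1": "cancelled", "0": "unfilled"}.get(order["status"], order["status"])
    return (f"{order['side']} {order['size']} {order['instrument_id']} "
            f"({order['type']}, {price_info}) -> {status}, filled {order['filled_size']}")

print(describe_order({"type": "limit", "price": "3950.5", "notional": "",
                      "side": "buy", "size": "0.1", "instrument_id": "BTC-USDT",
                      "status": "0", "filled_size": "0"}))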
msn2106/Coding-Websites
src/Codechef/KAVGMAT.java
package Codechef;

import java.util.Arrays;
import java.util.Scanner;

public class KAVGMAT {
    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);
        int t = sc.nextInt();
        while (t-- > 0) {
            int n = sc.nextInt();
            int m = sc.nextInt();
            int k = sc.nextInt();
            int[][] arr = new int[n][m];
            for (int i = 0; i < n; i++) {
                for (int j = 0; j < m; j++) {
                    arr[i][j] = sc.nextInt();
                }
            }
            long startTime = System.currentTimeMillis();
            // Counts only the single cells with value >= k (the 1x1 squares);
            // larger square submatrices are not handled yet.
            int worthy = 0;
            int l = 1;                          // unused: left over from an unfinished approach
            int limit = n < m ? n : m;          // unused
            long sum = 0;                       // unused
            for (int i = 0; i < n; i++) {
                for (int j = 0; j < m; j++) {
                    if (arr[i][j] >= k) {
                        worthy++;
                    }
                    System.out.print(arr[i][j] + " ");
                }
                System.out.println();
            }
            System.out.println(worthy);
            System.out.println();
            long endTime = System.currentTimeMillis();
            System.out.println("Time taken:" + (endTime - startTime));
        }
        sc.close();
    }

    // Function to find the (integer-truncated) average of the passed matrix - O(n^2). Currently unused.
    public static long matrixAvg(int[][] arr) {
        long avg = 0;
        int length = arr.length;
        int width = arr[0].length;
        int totalElements = length * width;
        long totalSum = 0;
        for (int i = 0; i < length; i++) {
            for (int j = 0; j < width; j++) {
                totalSum = totalSum + arr[i][j];
            }
        }
        avg = totalSum / totalElements;
        return avg;
    }
}
/* tc:-
1
3 3 4
2 2 3
3 4 5
4 5 5
*/
/* tc sol:-
7
*/
// Not solved
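The attempt above only counts cells with value >= K, which covers just the 1x1 squares of the stated test case (5 of the expected 7). Assuming the usual KAVGMAT statement, where rows and columns are non-decreasing and every square submatrix with average >= K is counted, a common approach combines 2D prefix sums with a binary search on the side length for each top-left corner; the search is valid because, with sorted rows and columns, the average of a square anchored at a fixed corner is non-decreasing in its side. A hedged Python sketch of that idea (not the author's solution):

def count_squares(a, k):
    n, m = len(a), len(a[0])
    # 2D prefix sums, 1-indexed
    p = [[0] * (m + 1) for _ in range(n + 1)]
    for i in range(n):
        for j in range(m):
            p[i + 1][j + 1] = a[i][j] + p[i][j + 1] + p[i + 1][j] - p[i][j]

    def sq_sum(i, j, s):
        # sum of the s x s square whose top-left corner is (i, j), 0-indexed
        return p[i + s][j + s] - p[i][j + s] - p[i + s][j] + p[i][j]

    total = 0
    for i in range(n):
        for j in range(m):
            max_s = min(n - i, m - j)
            lo, hi = 1, max_s + 1
            while lo < hi:                      # smallest side with sum >= k * side^2
                mid = (lo + hi) // 2
                if sq_sum(i, j, mid) >= k * mid * mid:
                    hi = mid
                else:
                    lo = mid + 1
            total += max_s - lo + 1             # every side in [lo, max_s] also qualifies
    return total

A = [[2, 2, 3],
     [3, 4, 5],
     [4, 5, 5]]
assert count_squares(A, 4) == 7                 # matches the expected answer in the comment above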
tzhanl/azure-sdk-for-python
scripts/pylint_custom_plugin/pylint_guidelines_checker.py
# ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ """ Pylint custom checkers for SDK guidelines: C4717 - C4738 """ import logging import astroid from pylint.checkers import BaseChecker from pylint.interfaces import IAstroidChecker logger = logging.getLogger(__name__) class ClientConstructorTakesCorrectParameters(BaseChecker): __implements__ = IAstroidChecker name = "client-constructor" priority = -1 msgs = { "C4717": ( "Client constructor is missing a credential parameter. See details:" " https://azure.github.io/azure-sdk/python_design.html#constructors-and-factory-methods", "missing-client-constructor-parameter-credential", "All client types should accept a credential parameter.", ), "C4718": ( "Client constructor is missing a **kwargs parameter. See details:" " https://azure.github.io/azure-sdk/python_design.html#constructors-and-factory-methods", "missing-client-constructor-parameter-kwargs", "All client types should accept a **kwargs parameter.", ) } options = ( ( "ignore-missing-client-constructor-parameter-credential", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow client constructors without a credential parameter", }, ), ( "ignore-missing-client-constructor-parameter-kwargs", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow client constructors without a **kwargs parameter", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(ClientConstructorTakesCorrectParameters, self).__init__(linter) def visit_functiondef(self, node): """Visits the constructor within a client class and checks that it has credential and kwargs parameters. :param node: function node :type node: ast.FunctionDef :return: None """ try: if node.name == "__init__" and node.parent.name.endswith("Client") and \ node.parent.name not in self.ignore_clients: arg_names = [argument.name for argument in node.args.args] if "credential" not in arg_names: self.add_message( msg_id="missing-client-constructor-parameter-credential", node=node, confidence=None ) if not node.args.kwarg: self.add_message( msg_id="missing-client-constructor-parameter-kwargs", node=node, confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if constructor has correct parameters.") pass class ClientHasKwargsInPoliciesForCreateConfigurationMethod(BaseChecker): __implements__ = IAstroidChecker name = "configuration-policies-kwargs" priority = -1 msgs = { "C4719": ( "A policy in the create_configuration() function is missing a **kwargs argument. See details:" " https://azure.github.io/azure-sdk/python_design.html#constructors-and-factory-methods", "config-missing-kwargs-in-policy", "All policies should take a **kwargs parameter.", ) } options = ( ( "ignore-config-missing-kwargs-in-policy", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow clients instantiate a policy without a kwargs parameter.", }, ), ) def __init__(self, linter=None): super(ClientHasKwargsInPoliciesForCreateConfigurationMethod, self).__init__(linter) def visit_functiondef(self, node): """Visits the any method called `create_configuration` or `create_config` and checks that every policy in the method contains a kwargs parameter. 
:param node: function node :type node: ast.FunctionDef :return: None """ try: if node.name == "create_configuration" or node.name == "create_config": node.decorators = None for idx in range(len(node.body)): # Gets each line of the method as a string line = list(node.get_children())[idx].as_string() if line.find("Policy") != -1: if line.find("**kwargs") == -1: self.add_message( msg_id="config-missing-kwargs-in-policy", node=list(node.get_children())[idx], confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if kwargs parameter in policies.") pass class ClientHasApprovedMethodNamePrefix(BaseChecker): __implements__ = IAstroidChecker name = "client-approved-method-name-prefix" priority = -1 msgs = { "C4720": ( "Client is not using an approved method name prefix. See details:" " https://azure.github.io/azure-sdk/python_design.html#service-operations", "unapproved-client-method-name-prefix", "All clients should use the preferred verbs for method names.", ) } options = ( ( "ignore-unapproved-client-method-name-prefix", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow clients to not use preferred method name prefixes", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(ClientHasApprovedMethodNamePrefix, self).__init__(linter) def visit_classdef(self, node): """Visits every class in file and checks if it is a client. If it is a client, checks that approved method name prefixes are present. :param node: class node :type node: ast.ClassDef :return: None """ try: if node.name.endswith("Client") and node.name not in self.ignore_clients: client_methods = [child for child in node.get_children() if child.is_function] approved_prefixes = ["get", "list", "create", "upsert", "set", "update", "replace", "append", "add", "delete", "remove", "begin"] for idx, method in enumerate(client_methods): if method.name.startswith("__") or "_exists" in method.name or method.name.startswith("_") \ or method.name.startswith("from"): continue prefix = method.name.split("_")[0] if prefix.lower() not in approved_prefixes: self.add_message( msg_id="unapproved-client-method-name-prefix", node=client_methods[idx], confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if client has approved method name prefix.") pass class ClientMethodsUseKwargsWithMultipleParameters(BaseChecker): __implements__ = IAstroidChecker name = "client-method-multiple-parameters" priority = -1 msgs = { "C4721": ( "Client has too many positional arguments. Use keyword-only arguments." " See details: https://azure.github.io/azure-sdk/python_introduction.html#method-signatures", "client-method-has-more-than-5-positional-arguments", "Client method should use keyword-only arguments for some parameters.", ) } options = ( ( "ignore-client-method-has-more-than-5-positional-arguments", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow client method to have more than 5 positional arguments", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(ClientMethodsUseKwargsWithMultipleParameters, self).__init__(linter) def visit_functiondef(self, node): """Visits every method in the client and checks that it doesn't have more than 5 positional arguments. 
:param node: function node :type node: ast.FunctionDef :return: None """ try: if node.parent.name.endswith("Client") and node.is_method() and node.parent.name not in self.ignore_clients: # Only bother checking method signatures with > 6 parameters (don't include self/cls/etc) if len(node.args.args) > 6: positional_args = len(node.args.args) - len(node.args.defaults) if positional_args > 6: self.add_message( msg_id="client-method-has-more-than-5-positional-arguments", node=node, confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if kwargs is used for multiple parameters.") pass visit_asyncfunctiondef = visit_functiondef class ClientMethodsHaveTypeAnnotations(BaseChecker): __implements__ = IAstroidChecker name = "client-method-type-annotations" priority = -1 msgs = { "C4722": ( "Client method is missing type annotations/comments, return type annotations/comments, or " "mixing type annotations and comments. See details: " " https://azure.github.io/azure-sdk/python_introduction.html#types-or-not", "client-method-missing-type-annotations", "Client method should use type annotations.", ) } options = ( ( "ignore-client-method-missing-type-annotations", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow client method without type annotations", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(ClientMethodsHaveTypeAnnotations, self).__init__(linter) def visit_functiondef(self, node): """Visits every method in the client and checks that all type comments/annotations and type returns are present. :param node: function node :type node: ast.FunctionDef :return: None """ try: if node.parent.name.endswith("Client") and node.is_method() and node.parent.name not in self.ignore_clients: if not node.name.startswith("_") or node.name == "__init__": # Checks that method has python 2/3 type comments or annotations as shown here: # https://www.python.org/dev/peps/pep-0484/#suggested-syntax-for-python-2-7-and-straddling-code # check for type comments if node.type_comment_args is None or node.type_comment_returns is None: # type annotations default to a list of None when not present, # so need extra logic here to check for any hints that may be present type_annotations = [type_hint for type_hint in node.args.annotations if type_hint is not None] # check for type annotations # node.args.args is a list of ast.AssignName arguments # node.returns is the type annotation return # Note that if the method returns nothing it will be of type ast.Const.NoneType if (type_annotations == [] and len(node.args.args) > 1) or node.returns is None: self.add_message( msg_id="client-method-missing-type-annotations", node=node, confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if client methods missing type annotations.") pass visit_asyncfunctiondef = visit_functiondef class ClientMethodsHaveTracingDecorators(BaseChecker): __implements__ = IAstroidChecker name = "client-method-has-tracing-decorator" priority = -1 msgs = { "C4723": ( "Client method is missing the distributed tracing decorator - `distributed_trace`. See details:" " https://azure.github.io/azure-sdk/python_implementation.html#distributed-tracing", "client-method-missing-tracing-decorator", "Client method should support distributed tracing.", ), "C4724": ( "Client async method is missing the distributed tracing decorator - `distributed_trace_async`. 
" " See details: https://azure.github.io/azure-sdk/python_implementation.html#distributed-tracing", "client-method-missing-tracing-decorator-async", "Client method should support distributed tracing.", ), } options = ( ( "ignore-client-method-missing-tracing-decorator", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow client method without tracing decorator.", }, ), ( "ignore-client-method-missing-tracing-decorator-async", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow client method without tracing decorator.", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(ClientMethodsHaveTracingDecorators, self).__init__(linter) def visit_functiondef(self, node): """Visits every method in the client and checks that a distributed tracing decorator is present. Ignores private methods, from_connection_string, and methods that retrieve child clients. node.decoratornames() returns a set of the method's decorator names. :param node: function node :type node: ast.FunctionDef :return: None """ try: if node.parent.name.endswith("Client") and node.is_method() and not node.name.startswith("_") and \ node.parent.name not in self.ignore_clients: if node.args.kwarg and "azure.core.tracing.decorator.distributed_trace" not in node.decoratornames() \ and "builtins.classmethod" not in node.decoratornames(): self.add_message( msg_id="client-method-missing-tracing-decorator", node=node, confidence=None ) except AttributeError: pass def visit_asyncfunctiondef(self, node): """Visits every method in the client and checks that a distributed tracing decorator is present. Ignores private methods, from_connection_string, and methods that retrieve child clients. node.decoratornames() returns a set of the method's decorator names. :param node: function node :type node: ast.FunctionDef :return: None """ try: if node.parent.name.endswith("Client") and node.is_method() and not node.name.startswith("_") and \ node.parent.name not in self.ignore_clients: if node.args.kwarg and "azure.core.tracing.decorator_async.distributed_trace_async" not in \ node.decoratornames() and "builtins.classmethod" not in node.decoratornames(): self.add_message( msg_id="client-method-missing-tracing-decorator-async", node=node, confidence=None ) except AttributeError: pass class ClientsDoNotUseStaticMethods(BaseChecker): __implements__ = IAstroidChecker name = "client-does-not-use-static-methods" priority = -1 msgs = { "C4725": ( "Client should not use static methods (staticmethod). See details:" " https://azure.github.io/azure-sdk/python_introduction.html#method-signatures", "client-method-should-not-use-static-method", "Client method should not use staticmethod.", ), } options = ( ( "ignore-client-method-should-not-use-static-method", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow client method to use staticmethod.", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(ClientsDoNotUseStaticMethods, self).__init__(linter) def visit_functiondef(self, node): """Visits every method in the client and checks that it does not use staticmethod. 
:param node: function node :type node: ast.FunctionDef :return: None """ try: if node.parent.name.endswith("Client") and node.is_method() and node.parent.name not in self.ignore_clients: # ignores private methods or methods that don't have any decorators if not node.name.startswith("_") and node.decorators is not None: if "builtins.staticmethod" in node.decoratornames(): self.add_message( msg_id="client-method-should-not-use-static-method", node=node, confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if client methods do not use staticmethods.") pass visit_asyncfunctiondef = visit_functiondef class FileHasCopyrightHeader(BaseChecker): __implements__ = IAstroidChecker name = "file-has-copyright-header" priority = -1 msgs = { "C4726": ( "File is missing a copyright header. See details:" " https://azure.github.io/azure-sdk/policies_opensource.html", "file-needs-copyright-header", "Every source file should have a copyright header.", ), } options = ( ( "ignore-file-needs-copyright-header", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow file without a copyright header.", }, ), ) def __init__(self, linter=None): super(FileHasCopyrightHeader, self).__init__(linter) def visit_module(self, node): """Visits every file and checks that a copyright header is present. :param node: module node :type node: ast.Module :return: None """ try: if not node.package: # don't throw an error on an __init__.py file header = node.stream().read(200).lower() if header.find(b'copyright') == -1: self.add_message( msg_id="file-needs-copyright-header", node=node, confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if file is missing a copyright header.") pass class ClientUsesCorrectNamingConventions(BaseChecker): __implements__ = IAstroidChecker name = "client-naming-conventions" priority = -1 msgs = { "C4727": ( "Client is using an incorrect naming convention. See details:" " https://azure.github.io/azure-sdk/python_introduction.html#naming-conventions", "client-incorrect-naming-convention", "Client method should use correct naming conventions.", ) } options = ( ( "ignore-client-incorrect-naming-convention", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow client to use incorrect naming conventions.", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(ClientUsesCorrectNamingConventions, self).__init__(linter) def visit_classdef(self, node): """Visits every class in file and checks if it is a client. Checks that correct naming convention is used for the client. Also checks that any class constants use uppercase. 
:param node: class node :type node: ast.ClassDef :return: None """ # check for correct capitalization for "Client" and whatever the first letter of the prefix is if "_" in node.name or node.name.endswith("client") or node.name[0] != node.name[0].upper(): if not node.name.startswith("_") and node.name not in self.ignore_clients: self.add_message( msg_id="client-incorrect-naming-convention", node=node, confidence=None ) # check for correct naming convention in any class constants if node.name.endswith("Client"): for idx in range(len(node.body)): try: const_name = node.body[idx].targets[0].name if const_name != const_name.upper(): self.add_message( msg_id="client-incorrect-naming-convention", node=node.body[idx], confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if client uses correct naming conventions.") pass # check that methods in client class do not use camelcase try: for func in node.body: if func.name != func.name.lower() and not func.name.startswith("_"): self.add_message( msg_id="client-incorrect-naming-convention", node=func, confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if client uses correct naming conventions.") pass class ClientMethodsHaveKwargsParameter(BaseChecker): __implements__ = IAstroidChecker name = "client-methods-have-kwargs" priority = -1 msgs = { "C4728": ( "Client method is missing a **kwargs parameter. See details:" " https://azure.github.io/azure-sdk/python_design.html#constructors-and-factory-methods", "client-method-missing-kwargs", "All client methods should accept a kwargs parameter.", ), } options = ( ( "ignore-client-method-missing-kwargs", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow client method without a kwargs parameter", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(ClientMethodsHaveKwargsParameter, self).__init__(linter) def visit_functiondef(self, node): """Visits every method in the client and checks that it has a kwargs parameter. :param node: function node :type node: ast.FunctionDef :return: None """ try: if node.parent.name.endswith("Client") and node.is_method() and node.parent.name not in self.ignore_clients: # avoid false positive with @property if node.decorators is not None: if "builtins.property" in node.decoratornames(): return if not node.name.startswith("_") and \ ("azure.core.tracing.decorator.distributed_trace" in node.decoratornames() or "azure.core.tracing.decorator_async.distributed_trace_async" in node.decoratornames()): if not node.args.kwarg: self.add_message( msg_id="client-method-missing-kwargs", node=node, confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if client uses kwargs parameter in method.") pass visit_asyncfunctiondef = visit_functiondef class ClientMethodNamesDoNotUseDoubleUnderscorePrefix(BaseChecker): __implements__ = IAstroidChecker name = "client-methods-no-double-underscore" priority = -1 msgs = { "C4729": ( "Client method name should not use a double underscore prefix. 
See details:" " https://azure.github.io/azure-sdk/python_introduction.html#public-vs-private", "client-method-name-no-double-underscore", "Client method names should not use a leading double underscore prefix.", ), } options = ( ( "ignore-client-method-name-no-double-underscore", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow client method to have double underscore prefix.", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] acceptable_names = ["__init__", "__enter__", "__exit__", "__aenter__", "__aexit__"] def __init__(self, linter=None): super(ClientMethodNamesDoNotUseDoubleUnderscorePrefix, self).__init__(linter) def visit_functiondef(self, node): """Visits every method in the client and checks that no name begins with a double underscore. :param node: function node :type node: ast.FunctionDef :return: None """ try: if node.parent.name.endswith("Client") and node.is_method() and node.parent.name not in self.ignore_clients: if node.name.startswith("__") and node.name not in self.acceptable_names: self.add_message( msg_id="client-method-name-no-double-underscore", node=node, confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if client method name does not use double underscore prefix.") pass visit_asyncfunctiondef = visit_functiondef class ClientDocstringUsesLiteralIncludeForCodeExample(BaseChecker): __implements__ = IAstroidChecker name = "client-docstring-literal-include" priority = -1 msgs = { "C4730": ( "Client docstring should use a literal include directive for the code example. See details:" " https://azure.github.io/azure-sdk/python_documentation.html#code-snippets", "client-docstring-use-literal-include", "Client/methods should use literal include directives for code examples.", ), } options = ( ( "ignore-client-docstring-use-literal-include", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow client to use code block.", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(ClientDocstringUsesLiteralIncludeForCodeExample, self).__init__(linter) def visit_classdef(self, node): """Visits every class in file and checks if it is a client. Also checks that the class constructor uses literalinclude over a code-block for the code example. :param node: class node :type node: ast.ClassDef :return: None """ try: if node.name.endswith("Client") and node.name not in self.ignore_clients: if node.doc.find("code-block") != -1: self.add_message( msg_id="client-docstring-use-literal-include", node=node, confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if client uses literalinclude over code-block.") pass def visit_functiondef(self, node): """Visits every method in the client class and checks that it uses literalinclude over a code-block for the code example. 
:param node: function node :type node: ast.FunctionDef :return: None """ try: if node.parent.name.endswith("Client") and node.parent.name not in self.ignore_clients and node.is_method(): if node.doc.find("code-block") != -1: self.add_message( msg_id="client-docstring-use-literal-include", node=node, confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if client uses literalinclude over code-block.") pass visit_asyncfunctiondef = visit_functiondef class AsyncClientCorrectNaming(BaseChecker): __implements__ = IAstroidChecker name = "async-client-correct-naming" priority = -1 msgs = { "C4731": ( "Async client should not include `Async` in the client name. See details:" " https://azure.github.io/azure-sdk/python_design.html#async-support", "async-client-bad-name", "Async clients should not have async in the name.", ), } options = ( ( "ignore-async-client-bad-name", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow async client to include async in its name.", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(AsyncClientCorrectNaming, self).__init__(linter) def visit_classdef(self, node): """Visits every class in file and checks that an async client does not use async in its name. :param node: class node :type node: ast.ClassDef :return: None """ try: # avoid false positive when async name is used with a base class. if node.name.endswith("Client") and "async" in node.name.lower() and "base" not in node.name.lower(): if not node.name.startswith("_") and node.name not in self.ignore_clients: self.add_message( msg_id="async-client-bad-name", node=node, confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if async client uses correct naming.") pass class SpecifyParameterNamesInCall(BaseChecker): __implements__ = IAstroidChecker name = "specify-parameter-names" priority = -1 msgs = { "C4732": ( "Specify the parameter names when calling methods with more than 2 required positional parameters." " See details: https://azure.github.io/azure-sdk/python_introduction.html#method-signatures", "specify-parameter-names-in-call", "You should specify the parameter names when the method has more than two positional arguments.", ) } options = ( ( "ignore-specify-parameter-names-in-call", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Call the method without specifying parameter names.", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(SpecifyParameterNamesInCall, self).__init__(linter) def visit_call(self, node): """Visits every call in the client and checks that it specifies the parameter name in the call if there are more than 2 require positional parameters. 
:param node: call node :type node: ast.Call :return: None """ try: klass = node.parent.parent.parent function = node.parent.parent if klass.name.endswith("Client") and klass.name not in self.ignore_clients and function.is_method(): # node.args represent positional arguments if len(node.args) > 2 and node.func.attrname != "format": self.add_message( msg_id="specify-parameter-names-in-call", node=node, confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if client methods specify parameters name in call.") pass class ClientListMethodsUseCorePaging(BaseChecker): __implements__ = IAstroidChecker name = "client-list-methods-use-paging" priority = -1 msgs = { "C4733": ( "Operations that return collections should return a value that implements the Paging protocol. See details:" " https://azure.github.io/azure-sdk/python_design.html#response-formats", "client-list-methods-use-paging", "Client methods that return collections should use the Paging protocol.", ), } options = ( ( "ignore-client-list-methods-use-paging", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow collections method to not use paging protocol.", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(ClientListMethodsUseCorePaging, self).__init__(linter) def visit_functiondef(self, node): """Visits every method in the client and checks that any list_ methods return an ItemPaged or AsyncItemPaged value. :param node: function node :type node: ast.FunctionDef :return: None """ try: if node.parent.name.endswith("Client") and node.parent.name not in self.ignore_clients and node.is_method(): if node.name.startswith("list"): try: # infer_call_result gives the method return value as a string returns = next(node.infer_call_result()).as_string() if returns.find("ItemPaged") == -1 and returns.find("AsyncItemPaged") == -1: self.add_message( msg_id="client-list-methods-use-paging", node=node, confidence=None ) except (astroid.exceptions.InferenceError, AttributeError): # astroid can't always infer the return logger.debug("Pylint custom checker failed to check if client list method uses core paging.") pass except AttributeError: logger.debug("Pylint custom checker failed to check if client list method uses core paging.") pass class ClientLROMethodsUseCorePolling(BaseChecker): __implements__ = IAstroidChecker name = "client-lro-methods-use-polling" priority = -1 msgs = { "C4734": ( "Long running operations should return a value that implements the Poller protocol. See details:" " https://azure.github.io/azure-sdk/python_design.html#response-formats", "client-lro-methods-use-polling", "Long running operations should use the polling protocol.", ), } options = ( ( "ignore-client-lro-methods-use-polling", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow LRO method to not use polling protocol.", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(ClientLROMethodsUseCorePolling, self).__init__(linter) def visit_functiondef(self, node): """Visits every method in the client and checks that any begin_ methods return an LROPoller value. 
:param node: function node :type node: ast.FunctionDef :return: None """ try: if node.parent.name.endswith("Client") and node.parent.name not in self.ignore_clients and node.is_method(): if node.name.startswith("begin"): try: # infer_call_result gives the method return value as a string returns = next(node.infer_call_result()).as_string() if returns.find("LROPoller") == -1: self.add_message( msg_id="client-lro-methods-use-polling", node=node, confidence=None ) except (astroid.exceptions.InferenceError, AttributeError): # astroid can't always infer the return logger.debug("Pylint custom checker failed to check if client begin method uses core polling.") pass except AttributeError: logger.debug("Pylint custom checker failed to check if client begin method uses core polling.") pass class ClientLROMethodsUseCorrectNaming(BaseChecker): __implements__ = IAstroidChecker name = "client-lro-methods-use-correct-naming" priority = -1 msgs = { "C4735": ( "Methods that return an LROPoller should be prefixed with `begin_`. See details:" " https://azure.github.io/azure-sdk/python_design.html#service-operations", "lro-methods-use-correct-naming", "Methods that return an LROPoller should be prefixed with `begin_`.", ), } options = ( ( "ignore-client-lro-methods-use-correct-naming", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow LRO method to use a different name.", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(ClientLROMethodsUseCorrectNaming, self).__init__(linter) self.is_client = [] def visit_classdef(self, node): """Visits every class in file and checks if it is a client. :param node: class node :type node: ast.ClassDef :return: None """ if node.name.endswith("Client") and node.name not in self.ignore_clients: self.is_client.append(True) else: self.is_client.append(False) def visit_return(self, node): if self.is_client and self.is_client[-1]: try: # check for a return value of LROPoller in client class if node.value.func.name == "LROPoller": # get the method in which LROPoller is returned method = node.value.func.scope() if not method.name.startswith("begin") and not method.name.startswith("_"): self.add_message( msg_id="lro-methods-use-correct-naming", node=method, confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if client method with polling uses correct naming.") pass class ClientConstructorDoesNotHaveConnectionStringParam(BaseChecker): __implements__ = IAstroidChecker name = "client-conn-str-not-in-constructor" priority = -1 msgs = { "C4736": ( "The constructor must not take a connection string. See details: " "https://azure.github.io/azure-sdk/python_design.html#constructors-and-factory-methods", "connection-string-should-not-be-constructor-param", "Client should have a method to create the client with a connection string.", ), } options = ( ( "ignore-connection-string-should-not-be-constructor-param", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow client to use connection string param in constructor.", }, ), ) ignore_clients = ["PipelineClient", "AsyncPipelineClient"] def __init__(self, linter=None): super(ClientConstructorDoesNotHaveConnectionStringParam, self).__init__(linter) def visit_classdef(self, node): """Visits every class in file and checks if it is a client. If it is a client, it checks that a connection string parameter is not used in the constructor. 
:param node: class node :type node: ast.ClassDef :return: None """ try: if node.name.endswith("Client") and node.name not in self.ignore_clients: for func in node.body: if func.name == "__init__": for argument in func.args.args: if argument.name == "connection_string" or argument.name == "conn_str": self.add_message( msg_id="connection-string-should-not-be-constructor-param", node=node, confidence=None ) except AttributeError: logger.debug("Pylint custom checker failed to check if client uses connection string param in constructor.") pass class PackageNameDoesNotUseUnderscoreOrPeriod(BaseChecker): __implements__ = IAstroidChecker name = "package-name-incorrect" priority = -1 msgs = { "C4737": ( "Package name should not use an underscore or period. Replace with dash (-). See details: " "https://azure.github.io/azure-sdk/python_implementation.html#packaging", "package-name-incorrect", "Package name should use dashes instead of underscore or period.", ), } options = ( ( "ignore-package-name-incorrect", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow package name to have a different naming convention.", }, ), ) def __init__(self, linter=None): super(PackageNameDoesNotUseUnderscoreOrPeriod, self).__init__(linter) def visit_module(self, node): """Visits setup.py and checks that its package name follows correct naming convention. :param node: module node :type node: ast.Module :return: None """ try: if node.file.endswith("setup.py"): for nod in node.body: if isinstance(nod, astroid.Assign): if nod.targets[0].name == "PACKAGE_NAME": package = nod.value if package.value.find(".") != -1 or package.value.find("_") != -1: self.add_message( msg_id="package-name-incorrect", node=node, confidence=None ) except Exception: logger.debug("Pylint custom checker failed to check if package name is correct.") pass class ServiceClientUsesNameWithClientSuffix(BaseChecker): __implements__ = IAstroidChecker name = "client-name-incorrect" priority = -1 msgs = { "C4738": ( "Service client types should use a `Client` suffix. See details: " "https://azure.github.io/azure-sdk/python_design.html#clients", "client-suffix-needed", "Client should use the correct suffix.", ), } options = ( ( "ignore-client-suffix-needed", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow the client to have a different suffix.", }, ), ) def __init__(self, linter=None): super(ServiceClientUsesNameWithClientSuffix, self).__init__(linter) def visit_module(self, node): """Visits a file that has "client" in the file name and checks that the service client uses a `Client` suffix. :param node: module node :type node: ast.Module :return: None """ try: # ignore base clients if node.file.endswith("base_client.py") or node.file.endswith("base_client_async.py"): return if node.file.endswith("client.py") or node.file.endswith("client_async.py"): has_client_suffix = False for idx in range(len(node.body)): if isinstance(node.body[idx], astroid.ClassDef): if node.body[idx].name.endswith("Client"): has_client_suffix = True if has_client_suffix is False: self.add_message( msg_id="client-suffix-needed", node=node, confidence=None ) except Exception: logger.debug("Pylint custom checker failed to check if service client has a client suffix.") pass class CheckDocstringParameters(BaseChecker): __implements__ = IAstroidChecker name = "check-docstrings" priority = -1 msgs = { "C4739": ( 'Params missing in docstring: "%s". 
See details: ' 'https://azure.github.io/azure-sdk/python_documentation.html#docstrings', "docstring-missing-param", "Docstring missing for param.", ), "C4740": ( 'Param types missing in docstring: "%s". See details: ' 'https://azure.github.io/azure-sdk/python_documentation.html#docstrings', "docstring-missing-type", "Docstring missing for param type.", ), "C4741": ( "A return doc is missing in the docstring. See details: " "https://azure.github.io/azure-sdk/python_documentation.html#docstrings", "docstring-missing-return", "Docstring missing for return doc.", ), "C4742": ( "A return type is missing in the docstring. See details: " "https://azure.github.io/azure-sdk/python_documentation.html#docstrings", "docstring-missing-rtype", "Docstring missing for return type.", ), "C4743": ( '"%s" not found as a parameter. Use :keyword type myarg: if a keyword argument. See details: ' 'https://azure.github.io/azure-sdk/python_documentation.html#docstrings', "docstring-should-be-keyword", "Docstring should use keywords.", ), } options = ( ( "ignore-docstring-missing-param", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow a docstring param mismatch.", }, ), ( "ignore-docstring-missing-type", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow a docstring param type mismatch.", }, ), ( "ignore-docstring-missing-return", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow a docstring return doc mismatch", }, ), ( "ignore-docstring-missing-rtype", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow a docstring rtype mismatch", }, ), ( "ignore-docstring-should-be-keyword", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow a docstring to not use keyword for documentation.", }, ), ) def __init__(self, linter=None): super(CheckDocstringParameters, self).__init__(linter) def check_parameters(self, node): """Parse the docstring for any params and types and compares it to the function's parameters. Throws a pylint error if... 1. Missing param in docstring. 2. Missing a param type in the docstring. 3. Missing a return doc in the docstring when a function returns something. 4. Missing an rtype in the docstring when a function returns something. 5. Extra params in docstring that aren't function parameters. Change to keywords. 
:param node: ast.ClassDef or ast.FunctionDef :return: None """ arg_names = [] # specific case for constructor where docstring found in class def if isinstance(node, astroid.ClassDef): for constructor in node.body: if isinstance(constructor, astroid.FunctionDef) and constructor.name == "__init__": arg_names = [arg.name for arg in constructor.args.args] break if isinstance(node, astroid.FunctionDef): arg_names = [arg.name for arg in node.args.args] try: # not every method will have a docstring so don't crash here, just return docstring = node.doc.split(":") except AttributeError: return docparams = {} for idx, line in enumerate(docstring): # this param has its type on a separate line if line.startswith("param") and line.count(" ") == 1: param = line.split("param ")[1] docparams[param] = None # this param has its type on the same line if line.startswith("param") and line.count(" ") == 2: _, param_type, param = line.split(" ") docparams[param] = param_type if line.startswith("type"): param = line.split("type ")[1] if param in docparams: docparams[param] = docstring[idx+1] # check that all params are documented missing_params = [] for param in arg_names: if param == "self" or param == "cls": continue if param not in docparams: missing_params.append(param) if missing_params: self.add_message( msg_id="docstring-missing-param", args=(", ".join(missing_params)), node=node, confidence=None ) # check if we have a type for each param and check if documented params that should be keywords missing_types = [] should_be_keywords = [] for param in docparams: if docparams[param] is None: missing_types.append(param) if param not in arg_names: should_be_keywords.append(param) if missing_types: self.add_message( msg_id="docstring-missing-type", args=(", ".join(missing_types)), node=node, confidence=None ) if should_be_keywords: self.add_message( msg_id="docstring-should-be-keyword", args=(", ".join(should_be_keywords)), node=node, confidence=None ) def check_return(self, node): """Checks if function returns anything. If return found, checks that the docstring contains a return doc and rtype. :param node: ast.FunctionDef :return: None """ try: returns = next(node.infer_call_result()).as_string() if returns == "None": return except (astroid.exceptions.InferenceError, AttributeError): # this function doesn't return anything, just return return try: # not every method will have a docstring so don't crash here, just return docstring = node.doc.split(":") except AttributeError: return has_return, has_rtype = False, False for line in docstring: if line.startswith("return"): has_return = True if line.startswith("rtype"): has_rtype = True if has_return is False: self.add_message( msg_id="docstring-missing-return", node=node, confidence=None ) if has_rtype is False: self.add_message( msg_id="docstring-missing-rtype", node=node, confidence=None ) def visit_classdef(self, node): """Visits every class in the file and finds the constructor. Makes a call to compare class docstring with constructor params. :param node: ast.ClassDef :return: None """ try: for func in node.body: if isinstance(func, astroid.FunctionDef) and func.name == "__init__": self.check_parameters(node) except Exception: logger.debug("Pylint custom checker failed to check docstrings.") pass def visit_functiondef(self, node): """Visits every function in the file and makes calls to check docstring parameters and return statements. 
:param node: ast.FunctionDef :return: None """ try: if node.name == "__init__": return self.check_parameters(node) self.check_return(node) except Exception: logger.debug("Pylint custom checker failed to check docstrings.") pass # this line makes it work for async functions visit_asyncfunctiondef = visit_functiondef class CheckForPolicyUse(BaseChecker): __implements__ = IAstroidChecker name = "check-for-policies" priority = -1 msgs = { "C4739": ( "You should include a UserAgentPolicy in your HTTP pipeline. See details: " "https://azure.github.io/azure-sdk/python_implementation.html#network-operations", "missing-user-agent-policy", "You should include a UserAgentPolicy in the HTTP Pipeline.", ), "C4740": ( "You should include a LoggingPolicy in your HTTP pipeline. See details: " "https://azure.github.io/azure-sdk/python_implementation.html#network-operations", "missing-logging-policy", "You should include a LoggingPolicy in the HTTP Pipeline.", ), "C4741": ( "You should include a RetryPolicy in your HTTP pipeline. See details: " "https://azure.github.io/azure-sdk/python_implementation.html#network-operations", "missing-retry-policy", "You should include a RetryPolicy in the HTTP Pipeline.", ), "C4742": ( "You should include a DistributedTracingPolicy in your HTTP pipeline. See details: " "https://azure.github.io/azure-sdk/python_implementation.html#network-operations", "missing-distributed-tracing-policy", "You should include a DistributedTracingPolicy in the HTTP Pipeline.", ), } options = ( ( "ignore-missing-user-agent-policy", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow the client to not have a UserAgentPolicy", }, ), ( "ignore-missing-logging-policy", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow the client to not have a LoggingPolicy", }, ), ( "ignore-missing-retry-policy", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow the client to not have a RetryPolicy", }, ), ( "ignore-missing-distributed-tracing-policy", { "default": False, "type": "yn", "metavar": "<y_or_n>", "help": "Allow the client to not have a DistributedTracingPolicy", }, ), ) def __init__(self, linter=None): super(CheckForPolicyUse, self).__init__(linter) self.node_to_use = None self.has_policies = set() self.ran_at_package_level = False self.disable_logging_error = False self.disable_user_agent_error = False self.disable_tracing_error = False self.disable_retry_error = False def visit_function(self, node, policy): """Visits the function and searches line by line for the policy being used. Also searches for if the policy came from the azure.core.configuration object. :param node: ast.FunctionDef :param policy: The policy imported in the file. :return: None """ for func in node.body: if isinstance(func, astroid.FunctionDef): for idx, item in enumerate(func.body): try: line = list(node.get_children())[idx].as_string() if line.find(policy) != -1: self.has_policies.add(policy) if line.find("config.logging_policy") != -1: self.has_policies.add("NetworkTraceLoggingPolicy") if line.find("config.retry_policy") != -1: self.has_policies.add("RetryPolicy") if line.find("config.user_agent_policy") != -1: self.has_policies.add("UserAgentPolicy") except IndexError: pass def visit_class(self, klass, policy): """Visits any classes in the file and then makes a call to search its methods for the policy being used. :param klass: A class within the file :param policy: The policy imported in the file. 
:return: None """ for idx, node in enumerate(klass): if isinstance(node, astroid.ClassDef): self.visit_function(node, policy) def visit_module(self, node): """Visits every file in the package and searches for policies as base classes or custom policies. If a core policy is imported in a file in calls helper methods to check that the policy was used in the code. This pylint checker is different from the others as it collects information across many files and then reports any errors. Due to this difference, disable commands must be searched for manually. :param node: ast.Module :return: None """ # only throw the error if pylint was run at package level since it needs to check all the files # infer run location based on the location of the init file highest in dir hierarchy if node.package: # the init file count = node.file.split("azure-sdk-for-python")[1].count("-") if node.file.split("azure-sdk-for-python")[1].count("\\") <= (5 + count) and \ node.file.split("azure-sdk-for-python")[1].count("/") <= (5 + count): self.ran_at_package_level = True # not really a good place to throw the pylint error, so we'll do it on the init file. # By running this checker on all the files first and then reporting errors, pylint disables need to be # done manually for some reason if node.file.endswith("__init__.py") and self.node_to_use is None: header = node.stream().read(200).lower() if header.find(b'disable') != -1: if header.find(b'missing-logging-policy') != -1: self.disable_logging_error = True if header.find(b'missing-user-agent-policy') != -1: self.disable_user_agent_error = True if header.find(b'missing-distributed-tracing-policy') != -1: self.disable_tracing_error = True if header.find(b'missing-retry-policy') != -1: self.disable_retry_error = True self.node_to_use = node for idx in range(len(node.body)): # Check if the core policy is the base class for some custom policy, or a custom policy is being used # and we try our best to find it based on common naming conventions. if isinstance(node.body[idx], astroid.ClassDef): if "NetworkTraceLoggingPolicy" in node.body[idx].basenames: self.has_policies.add("NetworkTraceLoggingPolicy") if node.body[idx].name.find("LoggingPolicy") != -1: self.has_policies.add("NetworkTraceLoggingPolicy") if "RetryPolicy" in node.body[idx].basenames or "AsyncRetryPolicy" in node.body[idx].basenames: self.has_policies.add("RetryPolicy") if node.body[idx].name.find("RetryPolicy") != -1: self.has_policies.add("RetryPolicy") if "UserAgentPolicy" in node.body[idx].basenames: self.has_policies.add("UserAgentPolicy") if node.body[idx].name.find("UserAgentPolicy") != -1: self.has_policies.add("UserAgentPolicy") if "DistributedTracingPolicy" in node.body[idx].basenames: self.has_policies.add("DistributedTracingPolicy") if node.body[idx].name.find("TracingPolicy") != -1: self.has_policies.add("DistributedTracingPolicy") # policy is imported in this file, let's check that it gets used in the code if isinstance(node.body[idx], astroid.ImportFrom): for imp, pol in enumerate(node.body[idx].names): if node.body[idx].names[imp][0].endswith("Policy") and \ node.body[idx].names[imp][0] not in self.has_policies: self.visit_class(node.body, node.body[idx].names[imp][0]) def close(self): """This method is inherited from BaseChecker and called at the very end of linting a module. It reports any errors and does a final check for any pylint disable statements. 
:return: None """ if self.ran_at_package_level: if self.disable_logging_error is False: if "NetworkTraceLoggingPolicy" not in self.has_policies: self.add_message( msg_id="missing-logging-policy", node=self.node_to_use, confidence=None ) if self.disable_retry_error is False: if "RetryPolicy" not in self.has_policies: self.add_message( msg_id="missing-retry-policy", node=self.node_to_use, confidence=None ) if self.disable_user_agent_error is False: if "UserAgentPolicy" not in self.has_policies: self.add_message( msg_id="missing-user-agent-policy", node=self.node_to_use, confidence=None ) if self.disable_tracing_error is False: if "DistributedTracingPolicy" not in self.has_policies: self.add_message( msg_id="missing-distributed-tracing-policy", node=self.node_to_use, confidence=None ) # if a linter is registered in this function then it will be checked with pylint def register(linter): linter.register_checker(ClientsDoNotUseStaticMethods(linter)) linter.register_checker(ClientConstructorTakesCorrectParameters(linter)) linter.register_checker(ClientMethodsUseKwargsWithMultipleParameters(linter)) linter.register_checker(ClientMethodsHaveTypeAnnotations(linter)) linter.register_checker(ClientUsesCorrectNamingConventions(linter)) linter.register_checker(ClientMethodsHaveKwargsParameter(linter)) linter.register_checker(ClientHasKwargsInPoliciesForCreateConfigurationMethod(linter)) linter.register_checker(AsyncClientCorrectNaming(linter)) linter.register_checker(FileHasCopyrightHeader(linter)) linter.register_checker(ClientMethodNamesDoNotUseDoubleUnderscorePrefix(linter)) linter.register_checker(SpecifyParameterNamesInCall(linter)) linter.register_checker(ClientConstructorDoesNotHaveConnectionStringParam(linter)) linter.register_checker(PackageNameDoesNotUseUnderscoreOrPeriod(linter)) linter.register_checker(ServiceClientUsesNameWithClientSuffix(linter)) # disabled by default, use pylint --enable=check-docstrings if you want to use it linter.register_checker(CheckDocstringParameters(linter)) # Rules are disabled until false positive rate improved # linter.register_checker(CheckForPolicyUse(linter)) # linter.register_checker(ClientHasApprovedMethodNamePrefix(linter)) # linter.register_checker(ClientMethodsHaveTracingDecorators(linter)) # linter.register_checker(ClientDocstringUsesLiteralIncludeForCodeExample(linter)) # linter.register_checker(ClientListMethodsUseCorePaging(linter)) # linter.register_checker(ClientLROMethodsUseCorePolling(linter)) # linter.register_checker(ClientLROMethodsUseCorrectNaming(linter))
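
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original checkers): a hypothetical
# client method whose docstring satisfies CheckDocstringParameters above.
# Every positional parameter is documented with a type, extra arguments use
# :keyword:, and the return value has both :return: and :rtype:. The class,
# method, and parameter names below are made up for illustration only.
class _ExampleClient(object):
    def get_thing(self, name, **kwargs):
        """Fetch a thing by name.

        :param str name: The name of the thing to fetch.
        :keyword int timeout: Optional timeout in seconds.
        :return: The requested thing.
        :rtype: dict
        """
        return {"name": name}

# The checkers above are wired up through register(); the plugin is typically
# loaded with pylint's --load-plugins option, e.g.
# `pylint --load-plugins=<module_name_of_this_file> <package_dir>`, where the
# exact module name depends on how this file is packaged (an assumption here).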
yizhe-ang/MMSceneGraph
mmdet/models/anchor_heads/__init__.py
from .anchor_head import AnchorHead from .atss_head import ATSSHead from .fcos_head import FCOSHead from .fovea_head import FoveaHead from .free_anchor_retina_head import FreeAnchorRetinaHead from .ga_retina_head import GARetinaHead from .ga_rpn_head import GARPNHead from .guided_anchor_head import FeatureAdaption, GuidedAnchorHead from .reppoints_head import RepPointsHead from .dense_reppoints_head import DenseRepPointsHead from .retina_head import RetinaHead from .retina_sepbn_head import RetinaSepBNHead from .rpn_head import RPNHead from .ssd_head import SSDHead __all__ = [ 'AnchorHead', 'GuidedAnchorHead', 'FeatureAdaption', 'RPNHead', 'GARPNHead', 'RetinaHead', 'RetinaSepBNHead', 'GARetinaHead', 'SSDHead', 'FCOSHead', 'RepPointsHead', 'FoveaHead', 'FreeAnchorRetinaHead', 'ATSSHead', 'DenseRepPointsHead' ]
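
# ---------------------------------------------------------------------------
# Illustrative note (not part of the original module): the heads exported here
# are normally registered with mmdet's HEADS registry and instantiated from a
# config dict rather than constructed directly. A sketch of that pattern, kept
# as a comment so nothing executes at import time; the keyword values are
# assumptions for illustration only:
#
#     from mmdet.models import builder
#
#     bbox_head_cfg = dict(
#         type='RetinaHead',   # any name listed in __all__ above
#         num_classes=81,
#         in_channels=256,
#         feat_channels=256)
#     bbox_head = builder.build_head(bbox_head_cfg)
#
# The registry lookup resolves 'RetinaHead' to the class imported above.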
CaffeineShawn/gdutday-wechat
staticData/introdution.js
export const introdution = {
  //一段为一个项
  introdution: [
    'gdutday由五位广东工业大学爱好技术的同学开发,为同学们提供查询课表,考试安排,入馆二维码等功能',
    '本小程序在ui,交互和自定义化做出不同的尝试,希望给带你最好的体验感受。',
    '如果你发现了bug,或者有什么建议或者想法,我们非常欢迎你加入gdutday反馈群:1076030174,我们一起让gdutday变得更好 :)。',
  ],
  team1: [
    {
      name: 'Cerbur',
      head: 'https://gdutday.gitee.io/textpage/images/pic10.jpg',
      introdution: '一级退堂鼓选手奶盖犬犬',
      position: '后台'
    },
    {
      name: '星星',
      head: 'https://7869-xingxinglieo-tecft-1300484036.tcb.qcloud.la/head/xingxing.jpeg?sign=a68e51976e2df87e89581315c0761462&t=1575894047',
      introdution: '为何代码越写越烂?',
      position: '前端'
    },
  ],
  cooperation: [
    {
      name: '广工电子科技协会',
      head: 'https://gdutday.gitee.io/textpage/images/pic11.png',
      introdution: '合作协会',
      position: '技术支持'
    },
    {
      name: '电子科技协会网络组',
      head: 'https://gdutday.gitee.io/textpage/images/pic12.jpg',
      introdution: '技术合作',
      position: '技术支持'
    },
    {
      name: '研发中心工作室',
      head: 'https://cdn.nlark.com/yuque/0/2020/jpeg/517627/1604407939923-13428294-4388-43c0-9e26-85a34c5560fb.jpeg',
      introdution: '代码贡献',
      position: '技术支持'
    },
  ],
  team0: [
    {
      name: 'Mosarin',
      head: 'https://gdutday.gitee.io/textpage/images/pic13.jpg',
      introdution: '摸鱼工具人',
      position: '负责人'
    },
    {
      name: '星星',
      head: 'https://7869-xingxinglieo-tecft-1300484036.tcb.qcloud.la/head/xingxing.jpeg?sign=a68e51976e2df87e89581315c0761462&t=1575894047',
      introdution: '一个小前端路过',
      position: '前端'
    },
    {
      name: 'Cerbur',
      head: 'https://gdutday.gitee.io/textpage/images/pic10.jpg',
      introdution: '一级退堂鼓选手奶盖犬犬',
      position: '后台'
    },
    {
      name: 'seehin',
      head: 'https://7869-xingxinglieo-tecft-1300484036.tcb.qcloud.la/head/ruxuan.jpeg?sign=045bc9280413b10b085e8589732fbd00&t=1575893942',
      introdution: '等等我们还有这个项目?',
      position: '后台'
    },
    {
      name: 'YoungYang',
      head: 'https://7869-xingxinglieo-tecft-1300484036.tcb.qcloud.la/head/zeqin.jpeg?sign=0980c169d737f50528e0efae7e6a9d59&t=1575893905',
      introdution: '爬',
      position: '爬虫'
    },
  ],
}
exoplanetvetting/DAVE
fileio/test_loadMultipleDetrendings.py
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 28 16:35:57 2016

@author: fergal

$Id$
$URL$
"""

import loadMultipleDetrendings as lmd
import os


def test_smoke():
    epic = 211816003
    campaign = 5
    dataStorePath = os.path.join(os.environ['HOME'], ",mastio")
    detrendTypes = ["PDC", "Everest", "Agp", "sff"]

    #If this doesn't crash, then all detrendings were loaded.
    return lmd.loadMultipleDetrendings(epic, campaign, dataStorePath, detrendTypes)
NickPepper/MacScreensavers
rssavers-0.2/src/Plasma/Plasma.cpp
/* * Copyright (C) 1999-2010 <NAME> * * This file is part of Plasma. * * Plasma is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published * by the Free Software Foundation; either version 2 of the License, * or (at your option) any later version. * * Plasma is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ // Plasma screen saver #ifdef WIN32 #include <windows.h> #include <rsWin32Saver/rsWin32Saver.h> #include <process.h> #include <time.h> #include <regstr.h> #include <commctrl.h> #include <resource.h> #endif #ifdef RS_XSCREENSAVER #include <rsXScreenSaver/rsXScreenSaver.h> #endif #include <stdio.h> #include <math.h> #include <rsText/rsText.h> #include <GL/gl.h> #include <GL/glu.h> #define PIx2 6.28318530718f #define NUMCONSTS 18 #define TEXSIZE 1024 // Globals #ifdef WIN32 LPCTSTR registryPath = ("Software\\Really Slick\\Plasma"); HGLRC hglrc; HDC hdc; #endif int readyToDraw = 0; float frameTime = 0.0f; float aspectRatio; float wide; float high; float c[NUMCONSTS]; // constant float ct[NUMCONSTS]; // temporary value of constant float cv[NUMCONSTS]; // velocity of constant float position[TEXSIZE][TEXSIZE][2]; float plasma[TEXSIZE][TEXSIZE][3]; float plasmamap[TEXSIZE * TEXSIZE * 3]; unsigned int tex; int plasmasize = 64; // text output rsText* textwriter; // Parameters edited in the dialog box int dZoom; int dFocus; int dSpeed; int dResolution; // Useful random number macros // Don't forget to initialize with srand() inline int rsRandi(int x){ return rand() % x; } inline float rsRandf(float x){ return x * (float(rand()) / float(RAND_MAX)); } // Find absolute value and truncate to 1.0 inline float fabstrunc(float f){ if(f >= 0.0f) return(f <= 1.0f ? f : 1.0f); else return(f >= -1.0f ? 
-f : 1.0f); } void draw(){ int i, j; float rgb[3]; float temp; static float focus = float(dFocus) / 50.0f + 0.3f; static float maxdiff = 0.004f * float(dSpeed); static int index; //Update constants for(i=0; i<NUMCONSTS; i++){ ct[i] += cv[i]; if(ct[i] > PIx2) ct[i] -= PIx2; c[i] = sinf(ct[i]) * focus; } // Update colors for(i=0; i<plasmasize; i++){ for(j=0; j<int(float(plasmasize) / aspectRatio); j++){ // Calculate vertex colors rgb[0] = plasma[i][j][0]; rgb[1] = plasma[i][j][1]; rgb[2] = plasma[i][j][2]; plasma[i][j][0] = 0.7f * (c[0] * position[i][j][0] + c[1] * position[i][j][1] + c[2] * (position[i][j][0] * position[i][j][0] + 1.0f) + c[3] * position[i][j][0] * position[i][j][1] + c[4] * rgb[1] + c[5] * rgb[2]); plasma[i][j][1] = 0.7f * (c[6] * position[i][j][0] + c[7] * position[i][j][1] + c[8] * position[i][j][0] * position[i][j][0] + c[9] * (position[i][j][1] * position[i][j][1] - 1.0f) + c[10] * rgb[0] + c[11] * rgb[2]); plasma[i][j][2] = 0.7f * (c[12] * position[i][j][0] + c[13] * position[i][j][1] + c[14] * (1.0f - position[i][j][0] * position[i][j][1]) + c[15] * position[i][j][1] * position[i][j][1] + c[16] * rgb[0] + c[17] * rgb[1]); // Don't let the colors change too much temp = plasma[i][j][0] - rgb[0]; if(temp > maxdiff) plasma[i][j][0] = rgb[0] + maxdiff; if(temp < -maxdiff) plasma[i][j][0] = rgb[0] - maxdiff; temp = plasma[i][j][1] - rgb[1]; if(temp > maxdiff) plasma[i][j][1] = rgb[1] + maxdiff; if(temp < -maxdiff) plasma[i][j][1] = rgb[1] - maxdiff; temp = plasma[i][j][2] - rgb[2]; if(temp > maxdiff) plasma[i][j][2] = rgb[2] + maxdiff; if(temp < -maxdiff) plasma[i][j][2] = rgb[2] - maxdiff; // Put colors into texture index = (i * TEXSIZE + j) * 3; plasmamap[index] = fabstrunc(plasma[i][j][0]); plasmamap[index+1] = fabstrunc(plasma[i][j][1]); plasmamap[index+2] = fabstrunc(plasma[i][j][2]); } } // Update texture glPixelStorei(GL_UNPACK_ROW_LENGTH, TEXSIZE); glBindTexture(GL_TEXTURE_2D, tex); glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, int(float(plasmasize) / aspectRatio), plasmasize, GL_RGB, GL_FLOAT, plasmamap); // Draw it // The "- 1" cuts off right and top edges to get rid of blending to black float texright = float(plasmasize - 1) / float(TEXSIZE); float textop = float(int(float(plasmasize) / aspectRatio) - 1) / float(TEXSIZE); glBegin(GL_TRIANGLE_STRIP); glTexCoord2f(0.0f, 0.0f); glVertex2f(0.0f, 0.0f); glTexCoord2f(0.0f, texright); glVertex2f(1.0f, 0.0f); glTexCoord2f(textop, 0.0f); glVertex2f(0.0f, 1.0f); glTexCoord2f(textop, texright); glVertex2f(1.0f, 1.0f); glEnd(); // print text static float totalTime = 0.0f; totalTime += frameTime; static std::string str; static int frames = 0; ++frames; if(frames == 20){ str = "FPS = " + to_string(20.0f / totalTime); totalTime = 0.0f; frames = 0; } if(kStatistics){ glMatrixMode(GL_PROJECTION); glPushMatrix(); glLoadIdentity(); glOrtho(0.0f, 50.0f * aspectRatio, 0.0f, 50.0f, -1.0f, 1.0f); glMatrixMode(GL_MODELVIEW); glPushMatrix(); glLoadIdentity(); glTranslatef(1.0f, 48.0f, 0.0f); glColor3f(1.0f, 0.6f, 0.0f); textwriter->draw(str); glPopMatrix(); glMatrixMode(GL_PROJECTION); glPopMatrix(); } #ifdef WIN32 wglSwapLayerBuffers(hdc, WGL_SWAP_MAIN_PLANE); #endif #ifdef RS_XSCREENSAVER glXSwapBuffers(xdisplay, xwindow); #endif } void idleProc(){ // update time static rsTimer timer; frameTime = timer.tick(); if(readyToDraw && !isSuspended && !checkingPassword) draw(); } void setPlasmaSize(){ if(aspectRatio >= 1.0f){ wide = 30.0f / float(dZoom); high = wide / aspectRatio; } else{ high = 30.0f / float(dZoom); wide = high * aspectRatio; } // 
Set resolution of plasma if(aspectRatio >= 1.0f) plasmasize = int(float(dResolution * TEXSIZE) * 0.01f); else plasmasize = int(float(dResolution * TEXSIZE) * aspectRatio * 0.01f); for(int i=0; i<plasmasize; i++){ for(int j=0; j<int(float(plasmasize) / aspectRatio); j++){ plasma[i][j][0] = 0.0f; plasma[i][j][1] = 0.0f; plasma[i][j][2] = 0.0f; position[i][j][0] = float(i * wide) / float(plasmasize - 1) - (wide * 0.5f); position[i][j][1] = float(j * high) / (float(plasmasize) / aspectRatio - 1.0f) - (high * 0.5f); } } } void setDefaults(){ dZoom = 10; dFocus = 30; dSpeed = 20; dResolution = 25; dFrameRateLimit = 30; } #ifdef RS_XSCREENSAVER void handleCommandLine(int argc, char* argv[]){ setDefaults(); getArgumentsValue(argc, argv, std::string("-zoom"), dZoom, 1, 100); getArgumentsValue(argc, argv, std::string("-focus"), dFocus, 1, 100); getArgumentsValue(argc, argv, std::string("-speed"), dSpeed, 1, 100); getArgumentsValue(argc, argv, std::string("-resolution"), dResolution, 1, 100); } void reshape(int width, int height){ glViewport(0, 0, width, height); glMatrixMode(GL_PROJECTION); glLoadIdentity(); aspectRatio = float(width) / float(height); gluOrtho2D(0.0f, 1.0f, 0.0f, 1.0f); glMatrixMode(GL_MODELVIEW); setPlasmaSize(); } #endif #ifdef WIN32 void initSaver(HWND hwnd){ RECT rect; // Window initialization hdc = GetDC(hwnd); setBestPixelFormat(hdc); hglrc = wglCreateContext(hdc); GetClientRect(hwnd, &rect); wglMakeCurrent(hdc, hglrc); glViewport(rect.left, rect.top, rect.right - rect.left, rect.bottom - rect.top); aspectRatio = float(rect.right) / float(rect.bottom); glMatrixMode(GL_PROJECTION); glLoadIdentity(); gluOrtho2D(0.0f, 1.0f, 0.0f, 1.0f); #endif #ifdef RS_XSCREENSAVER void initSaver(){ #endif int i, j; srand((unsigned)time(NULL)); glMatrixMode(GL_MODELVIEW); glLoadIdentity(); // Initialize constants for(i=0; i<NUMCONSTS; i++){ ct[i] = rsRandf(PIx2); cv[i] = rsRandf(0.005f * float(dSpeed)) + 0.0001f; } // Make texture glGenTextures(1, &tex); glBindTexture(GL_TEXTURE_2D, tex); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP); glTexImage2D(GL_TEXTURE_2D, 0, 3, TEXSIZE, TEXSIZE, 0, GL_RGB, GL_FLOAT, plasmamap); glEnable(GL_TEXTURE_2D); glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL); setPlasmaSize(); // Initialize text textwriter = new rsText; readyToDraw = 1; } #ifdef RS_XSCREENSAVER void cleanUp(){ ; } #endif #ifdef WIN32 void cleanUp(HWND hwnd){ // Kill device context ReleaseDC(hwnd, hdc); wglMakeCurrent(NULL, NULL); wglDeleteContext(hglrc); } // Initialize all user-defined stuff void readRegistry(){ LONG result; HKEY skey; DWORD valtype, valsize, val; setDefaults(); result = RegOpenKeyEx(HKEY_CURRENT_USER, registryPath, 0, KEY_READ, &skey); if(result != ERROR_SUCCESS) return; valsize=sizeof(val); result = RegQueryValueEx(skey, "Zoom", 0, &valtype, (LPBYTE)&val, &valsize); if(result == ERROR_SUCCESS) dZoom = val; result = RegQueryValueEx(skey, "Focus", 0, &valtype, (LPBYTE)&val, &valsize); if(result == ERROR_SUCCESS) dFocus = val; result = RegQueryValueEx(skey, "Speed", 0, &valtype, (LPBYTE)&val, &valsize); if(result == ERROR_SUCCESS) dSpeed = val; result = RegQueryValueEx(skey, "Resolution", 0, &valtype, (LPBYTE)&val, &valsize); if(result == ERROR_SUCCESS) dResolution = val; result = RegQueryValueEx(skey, "FrameRateLimit", 0, &valtype, (LPBYTE)&val, &valsize); if(result == 
ERROR_SUCCESS) dFrameRateLimit = val; RegCloseKey(skey); } // Save all user-defined stuff void writeRegistry(){ LONG result; HKEY skey; DWORD val, disp; result = RegCreateKeyEx(HKEY_CURRENT_USER, registryPath, 0, NULL, REG_OPTION_NON_VOLATILE, KEY_WRITE, NULL, &skey, &disp); if(result != ERROR_SUCCESS) return; val = dZoom; RegSetValueEx(skey, "Zoom", 0, REG_DWORD, (CONST BYTE*)&val, sizeof(val)); val = dFocus; RegSetValueEx(skey, "Focus", 0, REG_DWORD, (CONST BYTE*)&val, sizeof(val)); val = dSpeed; RegSetValueEx(skey, "Speed", 0, REG_DWORD, (CONST BYTE*)&val, sizeof(val)); val = dResolution; RegSetValueEx(skey, "Resolution", 0, REG_DWORD, (CONST BYTE*)&val, sizeof(val)); val = dFrameRateLimit; RegSetValueEx(skey, "FrameRateLimit", 0, REG_DWORD, (CONST BYTE*)&val, sizeof(val)); RegCloseKey(skey); } BOOL aboutProc(HWND hdlg, UINT msg, WPARAM wpm, LPARAM lpm){ switch(msg){ case WM_CTLCOLORSTATIC: if(HWND(lpm) == GetDlgItem(hdlg, WEBPAGE)){ SetTextColor(HDC(wpm), RGB(0,0,255)); SetBkColor(HDC(wpm), COLORREF(GetSysColor(COLOR_3DFACE))); return(int(GetSysColorBrush(COLOR_3DFACE))); } break; case WM_COMMAND: switch(LOWORD(wpm)){ case IDOK: case IDCANCEL: EndDialog(hdlg, LOWORD(wpm)); break; case WEBPAGE: ShellExecute(NULL, "open", "http://www.reallyslick.com/", NULL, NULL, SW_SHOWNORMAL); } } return FALSE; } void initControls(HWND hdlg){ char cval[16]; SendDlgItemMessage(hdlg, ZOOM, TBM_SETRANGE, 0, LPARAM(MAKELONG(DWORD(1), DWORD(100)))); SendDlgItemMessage(hdlg, ZOOM, TBM_SETPOS, 1, LPARAM(dZoom)); SendDlgItemMessage(hdlg, ZOOM, TBM_SETLINESIZE, 0, LPARAM(1)); SendDlgItemMessage(hdlg, ZOOM, TBM_SETPAGESIZE, 0, LPARAM(5)); sprintf(cval, "%d", dZoom); SendDlgItemMessage(hdlg, ZOOMTEXT, WM_SETTEXT, 0, LPARAM(cval)); SendDlgItemMessage(hdlg, FOCUS, TBM_SETRANGE, 0, LPARAM(MAKELONG(DWORD(1), DWORD(100)))); SendDlgItemMessage(hdlg, FOCUS, TBM_SETPOS, 1, LPARAM(dFocus)); SendDlgItemMessage(hdlg, FOCUS, TBM_SETLINESIZE, 0, LPARAM(1)); SendDlgItemMessage(hdlg, FOCUS, TBM_SETPAGESIZE, 0, LPARAM(5)); sprintf(cval, "%d", dFocus); SendDlgItemMessage(hdlg, FOCUSTEXT, WM_SETTEXT, 0, LPARAM(cval)); SendDlgItemMessage(hdlg, SPEED, TBM_SETRANGE, 0, LPARAM(MAKELONG(DWORD(1), DWORD(100)))); SendDlgItemMessage(hdlg, SPEED, TBM_SETPOS, 1, LPARAM(dSpeed)); SendDlgItemMessage(hdlg, SPEED, TBM_SETLINESIZE, 0, LPARAM(1)); SendDlgItemMessage(hdlg, SPEED, TBM_SETPAGESIZE, 0, LPARAM(5)); sprintf(cval, "%d", dSpeed); SendDlgItemMessage(hdlg, SPEEDTEXT, WM_SETTEXT, 0, LPARAM(cval)); SendDlgItemMessage(hdlg, RESOLUTION, TBM_SETRANGE, 0, LPARAM(MAKELONG(DWORD(1), DWORD(100)))); SendDlgItemMessage(hdlg, RESOLUTION, TBM_SETPOS, 1, LPARAM(dResolution)); SendDlgItemMessage(hdlg, RESOLUTION, TBM_SETLINESIZE, 0, LPARAM(1)); SendDlgItemMessage(hdlg, RESOLUTION, TBM_SETPAGESIZE, 0, LPARAM(5)); sprintf(cval, "%d", dResolution); SendDlgItemMessage(hdlg, RESOLUTIONTEXT, WM_SETTEXT, 0, LPARAM(cval)); initFrameRateLimitSlider(hdlg, FRAMERATELIMIT, FRAMERATELIMITTEXT); } BOOL screenSaverConfigureDialog(HWND hdlg, UINT msg, WPARAM wpm, LPARAM lpm){ int ival; char cval[16]; switch(msg){ case WM_INITDIALOG: InitCommonControls(); readRegistry(); initControls(hdlg); return TRUE; case WM_COMMAND: switch(LOWORD(wpm)){ case IDOK: dZoom = SendDlgItemMessage(hdlg, ZOOM, TBM_GETPOS, 0, 0); dFocus = SendDlgItemMessage(hdlg, FOCUS, TBM_GETPOS, 0, 0); dSpeed = SendDlgItemMessage(hdlg, SPEED, TBM_GETPOS, 0, 0); dResolution = SendDlgItemMessage(hdlg, RESOLUTION, TBM_GETPOS, 0, 0); dFrameRateLimit = SendDlgItemMessage(hdlg, FRAMERATELIMIT, TBM_GETPOS, 0, 
0); writeRegistry(); // Fall through case IDCANCEL: EndDialog(hdlg, LOWORD(wpm)); break; case DEFAULTS: setDefaults(); initControls(hdlg); break; case ABOUT: DialogBox(mainInstance, MAKEINTRESOURCE(DLG_ABOUT), hdlg, DLGPROC(aboutProc)); } return TRUE; case WM_HSCROLL: if(HWND(lpm) == GetDlgItem(hdlg, ZOOM)){ ival = SendDlgItemMessage(hdlg, ZOOM, TBM_GETPOS, 0, 0); sprintf(cval, "%d", ival); SendDlgItemMessage(hdlg, ZOOMTEXT, WM_SETTEXT, 0, LPARAM(cval)); } if(HWND(lpm) == GetDlgItem(hdlg, FOCUS)){ ival = SendDlgItemMessage(hdlg, FOCUS, TBM_GETPOS, 0, 0); sprintf(cval, "%d", ival); SendDlgItemMessage(hdlg, FOCUSTEXT, WM_SETTEXT, 0, LPARAM(cval)); } if(HWND(lpm) == GetDlgItem(hdlg, SPEED)){ ival = SendDlgItemMessage(hdlg, SPEED, TBM_GETPOS, 0, 0); sprintf(cval, "%d", ival); SendDlgItemMessage(hdlg, SPEEDTEXT, WM_SETTEXT, 0, LPARAM(cval)); } if(HWND(lpm) == GetDlgItem(hdlg, RESOLUTION)){ ival = SendDlgItemMessage(hdlg, RESOLUTION, TBM_GETPOS, 0, 0); sprintf(cval, "%d", ival); SendDlgItemMessage(hdlg, RESOLUTIONTEXT, WM_SETTEXT, 0, LPARAM(cval)); } if(HWND(lpm) == GetDlgItem(hdlg, FRAMERATELIMIT)) updateFrameRateLimitSlider(hdlg, FRAMERATELIMIT, FRAMERATELIMITTEXT); return TRUE; } return FALSE; } LONG screenSaverProc(HWND hwnd, UINT msg, WPARAM wpm, LPARAM lpm){ switch(msg){ case WM_CREATE: readRegistry(); initSaver(hwnd); readyToDraw = 1; break; case WM_DESTROY: readyToDraw = 0; cleanUp(hwnd); break; } return defScreenSaverProc(hwnd, msg, wpm, lpm); } #endif // WIN32
changziming/VTour
node_modules/grommet/es6/components/Calendar/stories/Simple.js
import React, { useState } from 'react'; import { storiesOf } from '@storybook/react'; import { Box, Calendar, Grommet } from 'grommet'; import { grommet } from 'grommet/themes'; var SimpleCalendar = function SimpleCalendar() { var _useState = useState(), date = _useState[0], setDate = _useState[1]; var onSelect = function onSelect(nextDate) { setDate(nextDate !== date ? nextDate : undefined); }; return React.createElement(Grommet, { theme: grommet }, React.createElement(Box, { align: "center", pad: "large" }, React.createElement(Calendar, { date: date, onSelect: onSelect, size: "small", bounds: ['2018-09-08', '2020-12-13'] })), React.createElement(Box, { align: "center", pad: "large" }, React.createElement(Calendar, { date: date, daysOfWeek: true, onSelect: onSelect, size: "small", bounds: ['2018-09-08', '2020-12-13'] }))); }; storiesOf('Calendar', module).add('Simple', function () { return React.createElement(SimpleCalendar, null); });
lianxiaopang/camel-store-api
apps/qfile/admin.py
import os from django.conf import urls from django.contrib import admin from django.http import HttpResponseRedirect from django.shortcuts import render from django.utils.safestring import mark_safe from . import models, multi_upload, validators, settings @admin.register(models.File) class FileAdmin(admin.ModelAdmin): list_display = ['thumbnail', 'label', 'download_btn', 'update_at'] search_fields = ['label'] change_form_template = 'file_change_form.html' list_per_page = 30 save_as = True def get_urls(self): super_urls = super().get_urls() custom_urls = [ urls.url( r'^file_upload_zip/$', self.admin_site.admin_view(self.upload_zip), name='file_upload_zip' ) ] return custom_urls + super_urls def upload_zip(self, request): context = { 'title': '批量上传文件', 'app_label': self.model._meta.app_label, 'opts': self.model._meta, 'has_change_permission': self.has_change_permission(request) } if request.method == 'POST': form = multi_upload.UploadZipForm(request.POST, request.FILES) if form.is_valid(): form.save() return HttpResponseRedirect('..') else: form = multi_upload.UploadZipForm() context['form'] = form context['adminform'] = admin.helpers.AdminForm( form=form, fieldsets=list([(None, {'fields': form.base_fields})]), prepopulated_fields={} ) return render(request, 'admin/qfile/file/upload_zip.html', context) def thumbnail(self, obj): attr = validators.FileValidator.get_file_attr(validators.FileValidator.get_file_suffix(str(obj.file.name))) if attr == 'image': src = obj.get_file_url else: src = os.path.join(settings.STATIC_URL, "{}.jpg".format(attr)) return mark_safe('<img height="35" width="35" src="{}" />'.format(src)) thumbnail.short_description = "缩略图" def download_btn(self, obj): return mark_safe('<a class="button" download="" href="{}">下载</a>'.format(obj.get_file_url)) download_btn.short_description = "操作"
kasworld/goguelike-single
game/glclient/meshmaker_ao.go
// Copyright 2014,2015,2016,2017,2018,2019,2020,2021 <NAME> (<EMAIL>) // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package glclient import ( "math" "github.com/kasworld/goguelike-single/enum/factiontype" "github.com/kasworld/h4o/geometry" "github.com/kasworld/h4o/graphic" "github.com/kasworld/h4o/material" "github.com/kasworld/h4o/math32" ) // manage active object var aoAttrib = [factiontype.FactionType_Count]struct { Co string }{ factiontype.Black: {"black"}, factiontype.Maroon: {"maroon"}, factiontype.Red: {"red"}, factiontype.Green: {"green"}, factiontype.Olive: {"olive"}, factiontype.DarkOrange: {"darkorange"}, factiontype.Lime: {"lime"}, factiontype.Chartreuse: {"chartreuse"}, factiontype.Yellow: {"yellow"}, factiontype.Navy: {"navy"}, factiontype.Purple: {"purple"}, factiontype.DeepPink: {"deeppink"}, factiontype.Teal: {"teal"}, factiontype.Salmon: {"salmon"}, factiontype.SpringGreen: {"springgreen"}, factiontype.LightGreen: {"lightgreen"}, factiontype.Khaki: {"khaki"}, factiontype.Blue: {"blue"}, factiontype.DarkViolet: {"darkviolet"}, factiontype.Magenta: {"magenta"}, factiontype.DodgerBlue: {"dodgerblue"}, factiontype.MediumSlateBlue: {"mediumslateblue"}, factiontype.Violet: {"violet"}, factiontype.Cyan: {"cyan"}, factiontype.Aquamarine: {"aquamarine"}, factiontype.White: {"white"}, } func newActiveObjMat(ft factiontype.FactionType) *material.Standard { return material.NewStandard(math32.NewColor(aoAttrib[ft].Co)) } func newActiveObjGeo(ft factiontype.FactionType) *geometry.Geometry { return geometry.NewCylinder(0.3, 1, 16, 8, true, true) } func (mm *MeshMaker) initActiveObj(dataFolder string) { // do nothing } func (mm *MeshMaker) newActiveObj(ft factiontype.FactionType) *graphic.Mesh { var mat *material.Standard if mat = mm.aoMat[ft]; mat == nil { mat = newActiveObjMat(ft) mm.aoMat[ft] = mat } var geo *geometry.Geometry if geo = mm.aoGeo[ft]; geo == nil { geo = newActiveObjGeo(ft) mm.aoGeo[ft] = geo } return graphic.NewMesh(geo, mat) } func (mm *MeshMaker) GetActiveObj(ft factiontype.FactionType, x, y int) *graphic.Mesh { mm.aoInUse.Inc(ft) var mesh *graphic.Mesh freeSize := len(mm.aoMeshFreeList[ft]) if freeSize > 0 { mesh = mm.aoMeshFreeList[ft][freeSize-1] mm.aoMeshFreeList[ft] = mm.aoMeshFreeList[ft][:freeSize-1] } else { mesh = mm.newActiveObj(ft) mesh.RotateX(math.Pi / 2) } mesh.SetPositionX(float32(x)) mesh.SetPositionY(float32(y)) mesh.SetPositionZ(0.5) mesh.SetUserData(ft) return mesh } func (mm *MeshMaker) PutActiveObj(mesh *graphic.Mesh) { ft := mesh.UserData().(factiontype.FactionType) mm.aoInUse.Dec(ft) mm.aoMeshFreeList[ft] = append(mm.aoMeshFreeList[ft], mesh) }
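
// ---------------------------------------------------------------------------
// Illustrative note (not part of the original file): GetActiveObj and
// PutActiveObj implement a simple per-faction free list, so callers are
// expected to return meshes to the pool instead of discarding them. A usage
// sketch, where mm is assumed to be an initialized *MeshMaker and the scene
// graph calls are hypothetical:
//
//	mesh := mm.GetActiveObj(factiontype.Red, 3, 4) // reuses a pooled mesh if one exists
//	scene.Add(mesh)                                // hypothetical scene graph call
//	// ... later, when the active object leaves the view ...
//	scene.Remove(mesh)
//	mm.PutActiveObj(mesh) // returns the mesh to the faction's free list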
ProximaB/das
controller/admin/user.go
package admin

import (
	"encoding/json"

	"github.com/ProximaB/das/auth"
	"github.com/ProximaB/das/businesslogic"
	"github.com/ProximaB/das/controller/util"
	"github.com/ProximaB/das/viewmodel"
	"net/http"
)

type AdminUserManagementServer struct {
	auth.IAuthenticationStrategy
	accountRepo businesslogic.IAccountRepository
}

func NewAdminUserManagementServer(auth auth.IAuthenticationStrategy, accountRepo businesslogic.IAccountRepository) AdminUserManagementServer {
	return AdminUserManagementServer{
		auth,
		accountRepo,
	}
}

// SearchUserHandler handles GET /api/v1/admin/user. Only administrators may
// search user accounts; matching accounts are returned as a JSON array.
func (server AdminUserManagementServer) SearchUserHandler(w http.ResponseWriter, r *http.Request) {
	currentUser, _ := server.GetCurrentUser(r)
	if !currentUser.HasRole(businesslogic.AccountTypeAdministrator) {
		util.RespondJsonResult(w, http.StatusBadRequest, "Not authorized to search user accounts", nil)
		return
	}

	searchCriteriaDTO := new(viewmodel.SearchAccountDTO)
	parseErr := util.ParseRequestData(r, searchCriteriaDTO)
	if parseErr != nil {
		util.RespondJsonResult(w, http.StatusBadRequest, "Invalid search criteria data", nil)
		return
	}

	criteria := businesslogic.SearchAccountCriteria{}
	searchCriteriaDTO.Populate(&criteria)

	results, err := server.accountRepo.SearchAccount(criteria)
	if err != nil {
		util.RespondJsonResult(w, http.StatusInternalServerError, "An internal error occurred while searching user accounts", nil)
		return
	}

	data := make([]viewmodel.AccountDTO, 0)
	for _, each := range results {
		dto := viewmodel.AccountDTO{}
		dto.Extract(each)
		data = append(data, dto)
	}

	output, _ := json.Marshal(data)
	w.Write(output)
}
bzikarsky/php-driver
ext/util/types.c
<filename>ext/util/types.c /** * Copyright 2015-2017 DataStax, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "php_driver.h" #include "php_driver_globals.h" #include "php_driver_types.h" #include "util/types.h" #include <zend_smart_str.h> #include "src/Bigint.h" #include "src/Smallint.h" #include "src/Tinyint.h" #include "src/Blob.h" #include "src/Decimal.h" #include "src/Duration.h" #include "src/Float.h" #include "src/Inet.h" #include "src/Timestamp.h" #include "src/Date.h" #include "src/Time.h" #include "src/Timeuuid.h" #include "src/Uuid.h" #include "src/Varint.h" #include "src/Type/Tuple.h" #include "src/Type/UserType.h" struct node_s { struct node_s *parent; const char *name; size_t name_length; struct node_s *first_child; struct node_s *last_child; struct node_s *next_sibling; struct node_s *prev_sibling; }; static int hex_value(int c) { if (c >= '0' && c <= '9') { return c - '0'; } else if (c >= 'A' && c <= 'F') { return c - 'A' + 10; } else if (c >= 'a' && c <= 'f') { return c - 'a' + 10; } return -1; } static char* php_driver_from_hex(const char* hex, size_t hex_length) { size_t i, c = 0; size_t size = hex_length / 2; char *result; if ((hex_length & 1) == 1) { /* Invalid if not divisible by 2 */ return NULL; } result = emalloc(size + 1); for (i = 0; i < size; ++i) { int half0 = hex_value(hex[i * 2]); int half1 = hex_value(hex[i * 2 + 1]); if (half0 < 0 || half1 < 0) { efree(result); return NULL; } result[c++] = (char)(((uint8_t)half0 << 4) | (uint8_t)half1); } result[size] = '\0'; return result; } static zval php_driver_create_type(struct node_s *node); static zval php_driver_tuple_from_data_type(const CassDataType *data_type) { zval ztype; php_driver_type *type; size_t i, count; count = cass_data_sub_type_count(data_type); ztype = php_driver_type_tuple(); type = PHP_DRIVER_GET_TYPE(&(ztype)); for (i = 0; i < count; ++i) { zval sub_type = php_driver_type_from_data_type( cass_data_type_sub_data_type(data_type, i)); php_driver_type_tuple_add(type, &(sub_type) ); } return ztype; } static zval php_driver_tuple_from_node(struct node_s *node) { zval ztype; php_driver_type *type; struct node_s *current; ztype = php_driver_type_tuple(); type = PHP_DRIVER_GET_TYPE(&(ztype)); for (current = node->first_child; current != NULL; current = current->next_sibling) { zval sub_type = php_driver_create_type(current); php_driver_type_tuple_add(type, &(sub_type) ); } return ztype; } static zval php_driver_user_type_from_data_type(const CassDataType *data_type) { zval ztype; php_driver_type *type; const char *type_name, *keyspace; size_t type_name_len, keyspace_len; size_t i, count; count = cass_data_sub_type_count(data_type); ztype = php_driver_type_user_type(); type = PHP_DRIVER_GET_TYPE(&(ztype)); cass_data_type_type_name(data_type, &type_name, &type_name_len); type->data.udt.type_name = estrndup(type_name, type_name_len); cass_data_type_keyspace(data_type, &keyspace, &keyspace_len); type->data.udt.keyspace = estrndup(keyspace, keyspace_len); for (i = 0; i < count; ++i) { const char *name; size_t 
name_length; zval sub_type = php_driver_type_from_data_type( cass_data_type_sub_data_type(data_type, i)); cass_data_type_sub_type_name(data_type, i, &name, &name_length); php_driver_type_user_type_add(type, name, name_length, &(sub_type)); } return ztype; } static zval php_driver_user_type_from_node(struct node_s *node) { zval ztype; php_driver_type *type; struct node_s *current = node->first_child; ztype = php_driver_type_user_type(); type = PHP_DRIVER_GET_TYPE(&(ztype)); if (current) { type->data.udt.keyspace = estrndup(current->name, current->name_length); current = current->next_sibling; } if (current) { type->data.udt.type_name = php_driver_from_hex(current->name, current->name_length); current = current->next_sibling; } for (; current; current = current->next_sibling) { zval sub_type; char *name = php_driver_from_hex(current->name, current->name_length); current = current->next_sibling; if (!current) { efree(name); break; } sub_type = php_driver_create_type(current); php_driver_type_user_type_add(type, name, strlen(name), &(sub_type)); efree(name); } return ztype; } zval php_driver_type_from_data_type(const CassDataType *data_type) { zval ztype; zval key_type; zval value_type; const char *class_name; size_t class_name_length; CassValueType type = cass_data_type_type(data_type); ZVAL_UNDEF(&(ztype)); switch (type) { #define XX_SCALAR(name, value) \ case value: \ ztype = php_driver_type_scalar(value); \ break; PHP_DRIVER_SCALAR_TYPES_MAP(XX_SCALAR) #undef XX_SCALAR case CASS_VALUE_TYPE_CUSTOM: cass_data_type_class_name(data_type, &class_name, &class_name_length); ztype = php_driver_type_custom(class_name, class_name_length); break; case CASS_VALUE_TYPE_LIST: value_type = php_driver_type_from_data_type( cass_data_type_sub_data_type(data_type, 0)); ztype = php_driver_type_collection(&(value_type)); break; case CASS_VALUE_TYPE_MAP: key_type = php_driver_type_from_data_type( cass_data_type_sub_data_type(data_type, 0)); value_type = php_driver_type_from_data_type( cass_data_type_sub_data_type(data_type, 1)); ztype = php_driver_type_map(&(key_type), &(value_type)); break; case CASS_VALUE_TYPE_SET: value_type = php_driver_type_from_data_type( cass_data_type_sub_data_type(data_type, 0)); ztype = php_driver_type_set(&(value_type)); break; case CASS_VALUE_TYPE_TUPLE: ztype = php_driver_tuple_from_data_type(data_type); break; case CASS_VALUE_TYPE_UDT: ztype = php_driver_user_type_from_data_type(data_type); break; default: break; } return ztype; } int php_driver_type_validate(zval *object, const char *object_name) { if (!instanceof_function(Z_OBJCE_P(object), php_driver_type_scalar_ce) && !instanceof_function(Z_OBJCE_P(object), php_driver_type_collection_ce) && !instanceof_function(Z_OBJCE_P(object), php_driver_type_map_ce) && !instanceof_function(Z_OBJCE_P(object), php_driver_type_set_ce) && !instanceof_function(Z_OBJCE_P(object), php_driver_type_tuple_ce) && !instanceof_function(Z_OBJCE_P(object), php_driver_type_user_type_ce)) { throw_invalid_argument(object, object_name, "a valid " PHP_DRIVER_NAMESPACE "\\Type"); return 0; } return 1; } static inline int collection_compare(php_driver_type *type1, php_driver_type *type2) { return php_driver_type_compare(PHP_DRIVER_GET_TYPE(&(type1->data.collection.value_type)), PHP_DRIVER_GET_TYPE(&(type2->data.collection.value_type))); } static inline int map_compare(php_driver_type *type1, php_driver_type *type2) { int result; result = php_driver_type_compare(PHP_DRIVER_GET_TYPE(&(type1->data.map.key_type)), PHP_DRIVER_GET_TYPE(&(type2->data.map.key_type))); 
if (result != 0) return result; result = php_driver_type_compare(PHP_DRIVER_GET_TYPE(&(type1->data.map.value_type)), PHP_DRIVER_GET_TYPE(&(type2->data.map.value_type))); if (result != 0) return result; return 0; } static inline int set_compare(php_driver_type *type1, php_driver_type *type2) { return php_driver_type_compare(PHP_DRIVER_GET_TYPE(&(type1->data.set.value_type)), PHP_DRIVER_GET_TYPE(&(type2->data.set.value_type))); } static inline int tuple_compare(php_driver_type *type1, php_driver_type *type2) { HashPosition pos1; HashPosition pos2; zval *current1; zval *current2; if (zend_hash_num_elements(&type1->data.tuple.types) != zend_hash_num_elements(&type2->data.tuple.types)) { return zend_hash_num_elements(&type1->data.tuple.types) < zend_hash_num_elements(&type2->data.tuple.types) ? -1 : 1; } zend_hash_internal_pointer_reset_ex(&type1->data.tuple.types, &pos1); zend_hash_internal_pointer_reset_ex(&type2->data.tuple.types, &pos2); while (CASS_ZEND_HASH_GET_CURRENT_DATA_EX(&type1->data.tuple.types, current1, &pos1) && CASS_ZEND_HASH_GET_CURRENT_DATA_EX(&type2->data.tuple.types, current2, &pos2)) { php_driver_type *sub_type1 = PHP_DRIVER_GET_TYPE(current1); php_driver_type *sub_type2 = PHP_DRIVER_GET_TYPE(current2); int result = php_driver_type_compare(sub_type1, sub_type2); if (result != 0) return result; zend_hash_move_forward_ex(&type1->data.tuple.types, &pos1); zend_hash_move_forward_ex(&type2->data.tuple.types, &pos2); } return 0; } static inline int user_type_compare(php_driver_type *type1, php_driver_type *type2) { HashPosition pos1; HashPosition pos2; zend_string *key1; zend_string *key2; zval *current1; zval *current2; if (zend_hash_num_elements(&type1->data.udt.types) != zend_hash_num_elements(&type2->data.udt.types)) { return zend_hash_num_elements(&type1->data.udt.types) < zend_hash_num_elements(&type2->data.udt.types) ? -1 : 1; } zend_hash_internal_pointer_reset_ex(&type1->data.udt.types, &pos1); zend_hash_internal_pointer_reset_ex(&type2->data.udt.types, &pos2); while (zend_hash_get_current_key_ex(&type1->data.udt.types, &key1, ((void *) 0), &pos1) == HASH_KEY_IS_STRING && zend_hash_get_current_key_ex(&type2->data.udt.types, &key2, ((void *) 0), &pos2) == HASH_KEY_IS_STRING && CASS_ZEND_HASH_GET_CURRENT_DATA_EX(&type1->data.udt.types, current1, &pos1) && CASS_ZEND_HASH_GET_CURRENT_DATA_EX(&type2->data.udt.types, current2, &pos2)) { int result; php_driver_type *sub_type1 = PHP_DRIVER_GET_TYPE(current1); php_driver_type *sub_type2 = PHP_DRIVER_GET_TYPE(current2); result = cass_string_compare(key1, key2); if (result != 0) return result; result = php_driver_type_compare(sub_type1, sub_type2); if (result != 0) return result; zend_hash_move_forward_ex(&type1->data.udt.types, &pos1); zend_hash_move_forward_ex(&type2->data.udt.types, &pos2); } return 0; } static inline int is_string_type(CassValueType type) { return type == CASS_VALUE_TYPE_VARCHAR || type == CASS_VALUE_TYPE_TEXT; } int php_driver_type_compare(php_driver_type *type1, php_driver_type *type2) { if (type1->type != type2->type) { if (is_string_type(type1->type) && is_string_type(type2->type)) { /* varchar and text are aliases */ return 0; } return type1->type < type2->type ? 
-1 : 1; } else { switch (type1->type) { case CASS_VALUE_TYPE_LIST: return collection_compare(type1, type2); case CASS_VALUE_TYPE_MAP: return map_compare(type1, type2); case CASS_VALUE_TYPE_SET: return set_compare(type1, type2); case CASS_VALUE_TYPE_TUPLE: return tuple_compare(type1, type2); case CASS_VALUE_TYPE_UDT: return user_type_compare(type1, type2); default: break; } return 0; } } static inline void collection_string(php_driver_type *type, smart_str *string) { smart_str_appendl(string, "list<", 5); php_driver_type_string(PHP_DRIVER_GET_TYPE(&(type->data.collection.value_type)), string); smart_str_appendl(string, ">", 1); } static inline void map_string(php_driver_type *type, smart_str *string) { smart_str_appendl(string, "map<", 4); php_driver_type_string(PHP_DRIVER_GET_TYPE(&(type->data.map.key_type)), string); smart_str_appendl(string, ", ", 2); php_driver_type_string(PHP_DRIVER_GET_TYPE(&(type->data.map.value_type)), string); smart_str_appendl(string, ">", 1); } static inline void set_string(php_driver_type *type, smart_str *string) { smart_str_appendl(string, "set<", 4); php_driver_type_string(PHP_DRIVER_GET_TYPE(&(type->data.set.value_type)), string); smart_str_appendl(string, ">", 1); } static inline void tuple_string(php_driver_type *type, smart_str *string) { zval *current; int first = 1; smart_str_appendl(string, "tuple<", 6); ZEND_HASH_FOREACH_VAL(&type->data.tuple.types, current) { php_driver_type *sub_type = PHP_DRIVER_GET_TYPE(current); if (!first) smart_str_appendl(string, ", ", 2); first = 0; php_driver_type_string(sub_type, string); } ZEND_HASH_FOREACH_END(); smart_str_appendl(string, ">", 1); } static inline void user_type_string(php_driver_type *type, smart_str *string) { char *name; zval *current; int first = 1; if (type->data.udt.type_name) { if (type->data.udt.keyspace) { smart_str_appendl(string, type->data.udt.keyspace, strlen(type->data.udt.keyspace)); smart_str_appendl(string, ".", 1); } smart_str_appendl(string, type->data.udt.type_name, strlen(type->data.udt.type_name)); } else { smart_str_appendl(string, "userType<", 9); CASS_ZEND_HASH_FOREACH_STR_KEY_VAL(&type->data.udt.types, name, current) { php_driver_type *sub_type = PHP_DRIVER_GET_TYPE(current); if (!first) smart_str_appendl(string, ", ", 2); first = 0; smart_str_appendl(string, name, strlen(name)); smart_str_appendl(string, ":", 1); php_driver_type_string(sub_type, string); } ZEND_HASH_FOREACH_END(); smart_str_appendl(string, ">", 1); } } void php_driver_type_string(php_driver_type *type, smart_str *string) { switch (type->type) { #define XX_SCALAR(name, value) \ case value: \ smart_str_appendl(string, #name, strlen(#name)); \ break; PHP_DRIVER_SCALAR_TYPES_MAP(XX_SCALAR) #undef XX_SCALAR case CASS_VALUE_TYPE_LIST: collection_string(type, string); break; case CASS_VALUE_TYPE_MAP: map_string(type, string); break; case CASS_VALUE_TYPE_SET: set_string(type, string); break; case CASS_VALUE_TYPE_TUPLE: tuple_string(type, string); break; case CASS_VALUE_TYPE_UDT: user_type_string(type, string); break; default: smart_str_appendl(string, "invalid", 7); break; } } static zval php_driver_type_scalar_new(CassValueType type) { zval ztype; php_driver_type *scalar; object_init_ex(&(ztype), php_driver_type_scalar_ce); scalar = PHP_DRIVER_GET_TYPE(&(ztype)); scalar->type = type; scalar->data_type = cass_data_type_new(type); return ztype; } const char * php_driver_scalar_type_name(CassValueType type) { switch (type) { #define XX_SCALAR(name, value) \ case value: \ return #name; PHP_DRIVER_SCALAR_TYPES_MAP(XX_SCALAR) 
#undef XX_SCALAR default: return "invalid"; } } static void php_driver_varchar_init(INTERNAL_FUNCTION_PARAMETERS) { char *string; size_t string_len; if (zend_parse_parameters(ZEND_NUM_ARGS(), "s", &string, &string_len) == FAILURE) { return; } RETVAL_STRINGL(string, string_len); } static void php_driver_ascii_init(INTERNAL_FUNCTION_PARAMETERS) { php_driver_varchar_init(INTERNAL_FUNCTION_PARAM_PASSTHRU); } static void php_driver_boolean_init(INTERNAL_FUNCTION_PARAMETERS) { zend_bool value; if (zend_parse_parameters(ZEND_NUM_ARGS(), "b", &value) == FAILURE) { return; } RETURN_BOOL(value); } static void php_driver_counter_init(INTERNAL_FUNCTION_PARAMETERS) { php_driver_bigint_init(INTERNAL_FUNCTION_PARAM_PASSTHRU); } static void php_driver_double_init(INTERNAL_FUNCTION_PARAMETERS) { double value; if (zend_parse_parameters(ZEND_NUM_ARGS(), "d", &value) == FAILURE) { return; } RETURN_DOUBLE(value); } static void php_driver_int_init(INTERNAL_FUNCTION_PARAMETERS) { long value; if (zend_parse_parameters(ZEND_NUM_ARGS(), "l", &value) == FAILURE) { return; } RETURN_LONG(value); } static void php_driver_text_init(INTERNAL_FUNCTION_PARAMETERS) { php_driver_varchar_init(INTERNAL_FUNCTION_PARAM_PASSTHRU); } #define TYPE_INIT_METHOD(t) php_driver_ ## t ## _init void php_driver_scalar_init(INTERNAL_FUNCTION_PARAMETERS) { php_driver_type *self = PHP_DRIVER_GET_TYPE(getThis()); #define XX_SCALAR(name, value) \ if (self->type == value) { \ TYPE_INIT_METHOD(name)(INTERNAL_FUNCTION_PARAM_PASSTHRU); \ } PHP_DRIVER_SCALAR_TYPES_MAP(XX_SCALAR) #undef XX_SCALAR } #undef TYPE_INIT_METHOD #define TYPE_CODE(m) type_ ## m zval php_driver_type_scalar(CassValueType type) { zval result; ZVAL_UNDEF(&(result)); #define XX_SCALAR(name, value) \ if (value == type) { \ if (Z_ISUNDEF(PHP_DRIVER_G(TYPE_CODE(name)))) { \ PHP_DRIVER_G(TYPE_CODE(name)) = php_driver_type_scalar_new(type); \ } \ Z_ADDREF_P(&(PHP_DRIVER_G(TYPE_CODE(name)))); \ return PHP_DRIVER_G(TYPE_CODE(name)); \ } PHP_DRIVER_SCALAR_TYPES_MAP(XX_SCALAR) #undef XX_SCALAR zend_throw_exception_ex(php_driver_invalid_argument_exception_ce, 0, "Invalid type"); return result; } #undef TYPE_CODE zval php_driver_type_map(zval *key_type, zval *value_type) { zval ztype; php_driver_type *map; php_driver_type *sub_type; object_init_ex(&(ztype), php_driver_type_map_ce); map = PHP_DRIVER_GET_TYPE(&(ztype)); if (!Z_ISUNDEF_P(key_type)) { sub_type = PHP_DRIVER_GET_TYPE(key_type); cass_data_type_add_sub_type(map->data_type, sub_type->data_type); } if (!Z_ISUNDEF_P(value_type)) { sub_type = PHP_DRIVER_GET_TYPE(value_type); cass_data_type_add_sub_type(map->data_type, sub_type->data_type); } map->data.map.key_type = *key_type; map->data.map.value_type = *value_type; return ztype; } zval php_driver_type_map_from_value_types(CassValueType key_type, CassValueType value_type) { zval ztype; php_driver_type *map; php_driver_type *sub_type; object_init_ex(&(ztype), php_driver_type_map_ce); map = PHP_DRIVER_GET_TYPE(&(ztype)); map->data.map.key_type = php_driver_type_scalar(key_type); map->data.map.value_type = php_driver_type_scalar(value_type); sub_type = PHP_DRIVER_GET_TYPE(&(map->data.map.key_type)); cass_data_type_add_sub_type(map->data_type, sub_type->data_type); sub_type = PHP_DRIVER_GET_TYPE(&(map->data.map.value_type)); cass_data_type_add_sub_type(map->data_type, sub_type->data_type); return ztype; } zval php_driver_type_set(zval *value_type) { zval ztype; php_driver_type *set; php_driver_type *sub_type; object_init_ex(&(ztype), php_driver_type_set_ce); set = 
PHP_DRIVER_GET_TYPE(&(ztype)); if (!Z_ISUNDEF_P(value_type)) { sub_type = PHP_DRIVER_GET_TYPE(value_type); cass_data_type_add_sub_type(set->data_type, sub_type->data_type); } set->data.set.value_type = *value_type; return ztype; } zval php_driver_type_set_from_value_type(CassValueType type) { zval ztype; php_driver_type *set; php_driver_type *sub_type; object_init_ex(&(ztype), php_driver_type_set_ce); set = PHP_DRIVER_GET_TYPE(&(ztype)); set->data.set.value_type = php_driver_type_scalar(type); sub_type = PHP_DRIVER_GET_TYPE(&(set->data.set.value_type)); cass_data_type_add_sub_type(set->data_type, sub_type->data_type); return ztype; } zval php_driver_type_collection(zval *value_type) { zval ztype; php_driver_type *collection; php_driver_type *sub_type; object_init_ex(&(ztype), php_driver_type_collection_ce); collection = PHP_DRIVER_GET_TYPE(&(ztype)); if (!Z_ISUNDEF_P(value_type)) { sub_type = PHP_DRIVER_GET_TYPE(value_type); cass_data_type_add_sub_type(collection->data_type, sub_type->data_type); } collection->data.collection.value_type = *value_type; return ztype; } zval php_driver_type_collection_from_value_type(CassValueType type) { zval ztype; php_driver_type *collection; php_driver_type *sub_type; object_init_ex(&(ztype), php_driver_type_collection_ce); collection = PHP_DRIVER_GET_TYPE(&(ztype)); collection->data.collection.value_type = php_driver_type_scalar(type); sub_type = PHP_DRIVER_GET_TYPE(&(collection->data.collection.value_type)); cass_data_type_add_sub_type(collection->data_type, sub_type->data_type); return ztype; } zval php_driver_type_tuple() { zval ztype; object_init_ex(&(ztype), php_driver_type_tuple_ce); return ztype; } zval php_driver_type_user_type() { zval ztype; php_driver_type *user_type; object_init_ex(&(ztype), php_driver_type_user_type_ce); user_type = PHP_DRIVER_GET_TYPE(&(ztype)); user_type->data_type = cass_data_type_new(CASS_VALUE_TYPE_UDT); return ztype; } zval php_driver_type_custom(const char *name, size_t name_length) { zval ztype; php_driver_type *custom; object_init_ex(&(ztype), php_driver_type_custom_ce); custom = PHP_DRIVER_GET_TYPE(&(ztype)); custom->data.custom.class_name = estrndup(name, name_length); return ztype; } #define EXPECTING_TOKEN(expected) \ zend_throw_exception_ex(php_driver_invalid_argument_exception_ce, 0, \ "Unexpected %s at position %d in string \"%s\", expected " expected, \ describe_token(token), ((int) (str - validator) - 1), validator \ ); \ return FAILURE; enum token_type { TOKEN_ILLEGAL = 0, TOKEN_PAREN_OPEN, TOKEN_PAREN_CLOSE, TOKEN_COMMA, TOKEN_COLON, TOKEN_NAME, TOKEN_END }; enum parser_state { STATE_CLASS = 0, STATE_AFTER_CLASS, STATE_AFTER_PARENS, STATE_END }; static const char * describe_token(enum token_type token) { switch (token) { case TOKEN_ILLEGAL: return "illegal character"; break; case TOKEN_PAREN_OPEN: return "opening parenthesis"; break; case TOKEN_PAREN_CLOSE: return "closing parenthesis"; break; case TOKEN_COMMA: return "comma"; break; case TOKEN_COLON: return "colon"; break; case TOKEN_NAME: return "alphanumeric character"; break; case TOKEN_END: return "end of string"; break; default: return "unknown token"; } } static int isletter(char ch) { return isalnum(ch) || ch == '.'; } static enum token_type next_token(const char *str, size_t len, const char **token_str, size_t *token_len, const char **str_out, size_t *len_out) { enum token_type type; unsigned int i = 0; char c = str[i]; if (len == 0) { return TOKEN_END; } if (isalnum(c)) { type = TOKEN_NAME; while (i < len) { if (!isletter(str[i])) { break; } 
i++; } } else { switch (c) { case '\0': type = TOKEN_END; break; case '(': type = TOKEN_PAREN_OPEN; i++; break; case ')': type = TOKEN_PAREN_CLOSE; i++; break; case ',': type = TOKEN_COMMA; i++; break; case ':': type = TOKEN_COLON; i++; break; default: type = TOKEN_ILLEGAL; } } *token_str = &(str[0]); *token_len = i; *str_out = &(str[i]); *len_out = len - i; return type; } static struct node_s * php_driver_parse_node_new() { struct node_s *node; node = emalloc(sizeof(struct node_s)); node->parent = NULL; node->name = NULL; node->name_length = 0; node->first_child = NULL; node->last_child = NULL; node->next_sibling = NULL; node->prev_sibling = NULL; return node; } static void php_driver_parse_node_free(struct node_s *node) { if (node->first_child) { php_driver_parse_node_free(node->first_child); node->first_child = NULL; } node->last_child = NULL; if (node->next_sibling) { php_driver_parse_node_free(node->next_sibling); node->next_sibling = NULL; } efree(node); } static int php_driver_parse_class_name(const char *validator, size_t validator_len, struct node_s **result) { const char *str; size_t len; const char *token_str; size_t token_len; enum parser_state state; enum token_type token; struct node_s *root; struct node_s *node; struct node_s *child; token_str = NULL; token_len = 0; state = STATE_CLASS; str = validator; len = validator_len; root = php_driver_parse_node_new(); node = root; while (1) { token = next_token(str, len, &token_str, &token_len, &str, &len); if (token == TOKEN_ILLEGAL) { zend_throw_exception_ex(php_driver_invalid_argument_exception_ce, 0, "Illegal character \"%c\" at position %d in \"%s\"", *token_str, ((int) (str - validator) - 1), validator); php_driver_parse_node_free(root); return FAILURE; } if (state == STATE_AFTER_PARENS) { if (token == TOKEN_COMMA) { if (node->parent == NULL) { EXPECTING_TOKEN("end of string"); } state = STATE_CLASS; child = php_driver_parse_node_new(); child->parent = node->parent; child->prev_sibling = node; node->next_sibling = child; node->parent->last_child = child; node = child; continue; } else if (token == TOKEN_PAREN_CLOSE) { if (node->parent == NULL) { EXPECTING_TOKEN("end of string"); } node = node->parent; continue; } else if (token == TOKEN_END) { break; } else { EXPECTING_TOKEN("a comma, a closing parenthesis or an end of string"); } } if (state == STATE_AFTER_CLASS) { if (token == TOKEN_PAREN_OPEN) { state = STATE_CLASS; child = php_driver_parse_node_new(); child->parent = node; if (node->first_child == NULL) { node->first_child = child; } if (node->last_child) { node->last_child->next_sibling = child; } child->prev_sibling = node->last_child; node->last_child = child; node = child; continue; } else if (token == TOKEN_COMMA || token == TOKEN_COLON) { state = STATE_CLASS; child = php_driver_parse_node_new(); child->parent = node->parent; child->prev_sibling = node; node->next_sibling = child; node->parent->last_child = child; node = child; continue; } else if (token == TOKEN_PAREN_CLOSE) { state = STATE_AFTER_PARENS; node = node->parent; continue; } else if (token == TOKEN_END) { break; } else { php_driver_parse_node_free(root); EXPECTING_TOKEN("opening/closing parenthesis or comma"); } } if (state == STATE_CLASS) { if (token != TOKEN_NAME) { php_driver_parse_node_free(root); EXPECTING_TOKEN("fully qualified class name"); } state = STATE_AFTER_CLASS; node->name = token_str; node->name_length = token_len; } } *result = root; return SUCCESS; } static CassValueType php_driver_lookup_type(struct node_s *node) { if 
(strncmp("org.apache.cassandra.db.marshal.AsciiType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_ASCII; } if (strncmp("org.apache.cassandra.db.marshal.LongType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_BIGINT; } if (strncmp("org.apache.cassandra.db.marshal.ShortType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_SMALL_INT; } if (strncmp("org.apache.cassandra.db.marshal.ByteType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_TINY_INT; } if (strncmp("org.apache.cassandra.db.marshal.BytesType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_BLOB; } if (strncmp("org.apache.cassandra.db.marshal.BooleanType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_BOOLEAN; } if (strncmp("org.apache.cassandra.db.marshal.CounterColumnType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_COUNTER; } if (strncmp("org.apache.cassandra.db.marshal.DecimalType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_DECIMAL; } if (strncmp("org.apache.cassandra.db.marshal.DurationType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_DURATION; } if (strncmp("org.apache.cassandra.db.marshal.DoubleType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_DOUBLE; } if (strncmp("org.apache.cassandra.db.marshal.FloatType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_FLOAT; } if (strncmp("org.apache.cassandra.db.marshal.InetAddressType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_INET; } if (strncmp("org.apache.cassandra.db.marshal.Int32Type", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_INT; } if (strncmp("org.apache.cassandra.db.marshal.UTF8Type", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_VARCHAR; } if (strncmp("org.apache.cassandra.db.marshal.TimestampType", node->name, node->name_length) == 0 || strncmp("org.apache.cassandra.db.marshal.DateType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_TIMESTAMP; } if (strncmp("org.apache.cassandra.db.marshal.SimpleDateType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_DATE; } if (strncmp("org.apache.cassandra.db.marshal.TimeType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_TIME; } if (strncmp("org.apache.cassandra.db.marshal.UUIDType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_UUID; } if (strncmp("org.apache.cassandra.db.marshal.IntegerType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_VARINT; } if (strncmp("org.apache.cassandra.db.marshal.TimeUUIDType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_TIMEUUID; } if (strncmp("org.apache.cassandra.db.marshal.MapType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_MAP; } if (strncmp("org.apache.cassandra.db.marshal.SetType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_SET; } if (strncmp("org.apache.cassandra.db.marshal.ListType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_LIST; } if (strncmp("org.apache.cassandra.db.marshal.TupleType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_TUPLE; } if (strncmp("org.apache.cassandra.db.marshal.UserType", node->name, node->name_length) == 0) { return CASS_VALUE_TYPE_UDT; } return CASS_VALUE_TYPE_CUSTOM; } static void php_driver_node_dump_to(struct node_s *node, smart_str *text) { smart_str_appendl(text, node->name, node->name_length); if (node->first_child) { smart_str_appendl(text, "(", 1); 
php_driver_node_dump_to(node->first_child, text); smart_str_appendl(text, ")", 1); } if (node->next_sibling) { smart_str_appendl(text, ", ", 2); php_driver_node_dump_to(node->next_sibling, text); } } static zval php_driver_create_type(struct node_s *node) { CassValueType type = CASS_VALUE_TYPE_UNKNOWN; /* Skip wrapper types */ while (node && (strncmp("org.apache.cassandra.db.marshal.FrozenType", node->name, node->name_length) == 0 || strncmp("org.apache.cassandra.db.marshal.ReversedType", node->name, node->name_length) == 0 || strncmp("org.apache.cassandra.db.marshal.CompositeType", node->name, node->name_length) == 0)) { node = node->first_child; } if (node) { type = php_driver_lookup_type(node); } if (type == CASS_VALUE_TYPE_UNKNOWN) { zval undef; ZVAL_UNDEF(&(undef)); return undef; } if (type == CASS_VALUE_TYPE_CUSTOM) { zval ztype; smart_str class_name = {0}; php_driver_node_dump_to(node, &class_name); ztype = php_driver_type_custom(CASS_SMART_STR_VAL(class_name), CASS_SMART_STR_LEN(class_name)); smart_str_free(&class_name); return ztype; } else if (type == CASS_VALUE_TYPE_MAP) { zval key_type; zval value_type; if (node->first_child) { key_type = php_driver_create_type(node->first_child); value_type = php_driver_create_type(node->first_child->next_sibling); } else { ZVAL_UNDEF(&(key_type)); ZVAL_UNDEF(&(value_type)); } return php_driver_type_map(&(key_type), &(value_type)); } else if (type == CASS_VALUE_TYPE_LIST) { zval value_type; if (node->first_child) { value_type = php_driver_create_type(node->first_child); } else { ZVAL_UNDEF(&(value_type)); } return php_driver_type_collection(&(value_type)); } else if (type == CASS_VALUE_TYPE_SET) { zval value_type; if (node->first_child) { value_type = php_driver_create_type(node->first_child); } else { ZVAL_UNDEF(&(value_type)); } return php_driver_type_set(&(value_type)); } else if (type == CASS_VALUE_TYPE_TUPLE) { return php_driver_tuple_from_node(node); } else if (type == CASS_VALUE_TYPE_UDT) { return php_driver_user_type_from_node(node); } return php_driver_type_scalar(type); } int php_driver_parse_column_type(const char *validator, size_t validator_len, int *reversed_out, int *frozen_out, zval *type_out) { struct node_s *root; struct node_s *node = NULL; cass_bool_t reversed = 0; cass_bool_t frozen = 0; if (php_driver_parse_class_name(validator, validator_len, &root) == FAILURE) { return FAILURE; } node = root; while (node) { if (strncmp("org.apache.cassandra.db.marshal.ReversedType", node->name, node->name_length) == 0) { reversed = 1; node = node->first_child; continue; } if (strncmp("org.apache.cassandra.db.marshal.FrozenType", node->name, node->name_length) == 0) { frozen = 1; node = node->first_child; continue; } if (strncmp("org.apache.cassandra.db.marshal.CompositeType", node->name, node->name_length) == 0) { node = node->first_child; continue; } break; } if (node == NULL) { php_driver_parse_node_free(root); zend_throw_exception_ex(php_driver_invalid_argument_exception_ce, 0, "Invalid type"); return FAILURE; } *reversed_out = reversed; *frozen_out = frozen; *type_out = php_driver_create_type(node); php_driver_parse_node_free(root); return SUCCESS; }
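/*
 * Hedged usage sketch (not part of the original source): one way the validator
 * parser defined above might be driven. The validator string and the surrounding
 * request/zval lifecycle are illustrative assumptions only.
 *
 *   int reversed = 0, frozen = 0;
 *   zval type;
 *   const char *validator =
 *       "org.apache.cassandra.db.marshal.ReversedType(org.apache.cassandra.db.marshal.Int32Type)";
 *
 *   if (php_driver_parse_column_type(validator, strlen(validator),
 *                                    &reversed, &frozen, &type) == SUCCESS) {
 *     // The ReversedType wrapper is unwrapped: reversed == 1, frozen == 0,
 *     // and `type` holds the scalar int Type object.
 *     zval_ptr_dtor(&type);
 *   }
 */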
jiantao88/shareMap
app/src/main/java/com/fenxiangditu/sharemap/ui/map/LocationActivity.java
package com.fenxiangditu.sharemap.ui.map;

import android.Manifest;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.provider.Settings;
import android.support.annotation.NonNull;
import android.support.v7.app.AlertDialog;
import android.text.TextUtils;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;

import com.amap.api.location.AMapLocation;
import com.amap.api.location.AMapLocationClient;
import com.amap.api.location.AMapLocationClientOption;
import com.amap.api.location.AMapLocationListener;
import com.amap.api.maps2d.AMap;
import com.amap.api.maps2d.CameraUpdateFactory;
import com.amap.api.maps2d.LocationSource;
import com.amap.api.maps2d.MapView;
import com.amap.api.maps2d.model.BitmapDescriptorFactory;
import com.amap.api.maps2d.model.LatLng;
import com.amap.api.maps2d.model.MyLocationStyle;
import com.amap.api.services.geocoder.GeocodeAddress;
import com.amap.api.services.geocoder.GeocodeQuery;
import com.amap.api.services.geocoder.GeocodeResult;
import com.amap.api.services.geocoder.GeocodeSearch;
import com.amap.api.services.geocoder.RegeocodeResult;

import com.fenxiangditu.sharemap.ui.base.BaseActivity;
import com.fenxiangditu.sharemap.utils.BrandUtils;
import com.fenxiangditu.sharemap.utils.ToastUtils;

import com.zaaach.citypicker.CityPicker;
import com.zaaach.citypicker.adapter.OnPickListener;
import com.zaaach.citypicker.model.City;
import com.zaaach.citypicker.model.HotCity;
import com.zaaach.citypicker.model.LocatedCity;

import java.util.ArrayList;
import java.util.List;

import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import pub.devrel.easypermissions.EasyPermissions;
import sharemap.R;

/**
 * <pre>
 * @author : zhangjiantao
 * time   : 2018/06/07
 * desc   :
 * version: 1.0
 * </pre>
 */
public class LocationActivity extends BaseActivity implements LocationSource, EasyPermissions.PermissionCallbacks {

    // AMapLocationClient instance used to request location fixes
    public AMapLocationClient mLocationClient = null;
    // Callback listener supplied by the map when this LocationSource is activated
    public OnLocationChangedListener mLocationListener;

    @BindView(R.id.tv_city)
    TextView mTvCity;
    @BindView(R.id.et_location)
    EditText mEtLocation;
    @BindView(R.id.iv_location_close)
    ImageView mIvLocationClose;

    // Options controlling how the location client behaves
    public AMapLocationClientOption mLocationOption = null;

    @BindView(R.id.mv_location)
    MapView mapView;
    @BindView(R.id.lv_location)
    LinearLayout mLvLocation;
    @BindView(R.id.tv_address)
    TextView mTvAddress;
    @BindView(R.id.btn_location_confirm)
    Button mBtnLocationConfirm;
    @BindView(R.id.ll_location_info)
    LinearLayout mLlLocationInfo;

    private AMap aMap;
    private static final int STROKE_COLOR = Color.argb(180, 3, 145, 255);
    private static final int FILL_COLOR = Color.argb(10, 0, 0, 180);
    private AMapLocation mAMapLocation;

    /**
     * Runtime permissions that need to be checked and requested.
     */
    protected String[] needPermissions = {
            Manifest.permission.ACCESS_COARSE_LOCATION,
            Manifest.permission.ACCESS_FINE_LOCATION,
            Manifest.permission.WRITE_EXTERNAL_STORAGE,
            Manifest.permission.READ_EXTERNAL_STORAGE,
            Manifest.permission.READ_PHONE_STATE
    };

    private static final int PERMISSON_REQUESTCODE = 0;

    @Override
    protected void receiveEvent(Object object) {
    }

    @Override
    protected String registerEvent() {
        return null;
    }

    @Override
    protected int getLayoutId() {
        return R.layout.activity_location;
    }

    @Override
    protected boolean initToolbar() {
        return false;
    }

    @Override
    protected void getIntent(Intent intent) {
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        ButterKnife.bind(this);
        mapView.onCreate(savedInstanceState);
        if (aMap == null) {
            aMap = mapView.getMap();
            setUpMap();
        }
    }

    /**
     * Configure the AMap instance.
     */
    private void setUpMap() {
        aMap.setLocationSource(this);                          // register this Activity as the location source
        aMap.getUiSettings().setMyLocationButtonEnabled(true); // show the default "my location" button
        // true shows the location layer and enables locating; false hides it (the default is false)
        aMap.setMyLocationEnabled(true);
        setupLocationStyle();
    }

    private void setupLocationStyle() {
        // Customize the system location "blue dot"
        MyLocationStyle myLocationStyle = new MyLocationStyle();
        // Custom icon for the location dot
        myLocationStyle.myLocationIcon(BitmapDescriptorFactory.fromResource(R.drawable.poi_marker_pressed));
        // Stroke color of the accuracy circle
        myLocationStyle.strokeColor(STROKE_COLOR);
        // Stroke width of the accuracy circle
        myLocationStyle.strokeWidth(5);
        // Fill color of the accuracy circle
        myLocationStyle.radiusFillColor(FILL_COLOR);
        // Apply the custom style to the map
        aMap.setMyLocationStyle(myLocationStyle);
    }

    /**
     * Must be overridden to forward the lifecycle event to the MapView.
     */
    @Override
    protected void onResume() {
        super.onResume();
        mapView.onResume();
    }

    /**
     * Must be overridden to forward the lifecycle event to the MapView.
     */
    @Override
    protected void onPause() {
        super.onPause();
        mapView.onPause();
        deactivate();
    }

    /**
     * Must be overridden to forward the lifecycle event to the MapView.
     */
    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        mapView.onSaveInstanceState(outState);
    }

    /**
     * Must be overridden to forward the lifecycle event to the MapView.
     */
    @Override
    protected void onDestroy() {
        super.onDestroy();
        mapView.onDestroy();
        if (null != mLocationClient) {
            mLocationClient.onDestroy();
        }
    }

    @Override
    public void activate(OnLocationChangedListener onLocationChangedListener) {
        mLocationListener = onLocationChangedListener;
        mLocationOption = new AMapLocationClientOption();
        mLocationOption.setLocationMode(AMapLocationClientOption.AMapLocationMode.Hight_Accuracy);
        mLocationOption.setOnceLocation(true);
        mLocationClient = new AMapLocationClient(getApplicationContext());
        mLocationClient.setLocationListener(new AMapLocationListener() {
            @Override
            public void onLocationChanged(AMapLocation aMapLocation) {
                mAMapLocation = aMapLocation;
                String city = aMapLocation.getCity();
                if (aMapLocation.getErrorCode() == 0) {
                    if (!TextUtils.isEmpty(city)) {
                        mTvCity.setText(city);
                    }
                    mTvAddress.setText(aMapLocation.getAddress());
                    mLocationListener.onLocationChanged(aMapLocation);
                }
            }
        });
        aMap.moveCamera(CameraUpdateFactory.zoomBy(10));
        /*
         * Set the location purpose; three scenes are currently supported
         * (sign-in, transport, sport; no scene by default).
         */
        mLocationOption.setLocationPurpose(AMapLocationClientOption.AMapLocationPurpose.SignIn);
        if (null != mLocationClient) {
            mLocationClient.setLocationOption(mLocationOption);
            // After setting the purpose it is best to stop once and restart so the scene mode takes effect
            mLocationClient.stopLocation();
            mLocationClient.startLocation();
        }
    }

    /**
     * Check runtime permissions and request any that are missing.
     */
    private void checkPerm() {
        // String[] params = {Manifest.permission.READ_PHONE_STATE, Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.CAMERA};
        if (!EasyPermissions.hasPermissions(this, needPermissions)) {
            EasyPermissions.requestPermissions(this, getString(R.string.request_permission),
                    PERMISSON_REQUESTCODE, needPermissions);
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == PERMISSON_REQUESTCODE) {
            checkPerm();
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        EasyPermissions.onRequestPermissionsResult(requestCode, permissions, grantResults, this);
    }

    @Override
    public void onPermissionsGranted(int requestCode, List<String> perms) {
    }

    @Override
    public void onPermissionsDenied(int requestCode, List<String> perms) {
        if (EasyPermissions.somePermissionPermanentlyDenied(this, perms)) {
            showMissingPermissionDialog();
        }
    }

    public void settingPermissionActivity() {
        // Check whether this is a Xiaomi (MIUI) ROM
        if (TextUtils.equals(BrandUtils.getSystemInfo().getOs(), BrandUtils.SYS_MIUI)) {
            Intent miuiIntent = new Intent("miui.intent.action.APP_PERM_EDITOR");
            miuiIntent.putExtra("extra_pkgname", getPackageName());
            // Check whether an Activity exists that can handle this intent
            List<ResolveInfo> resolveInfos = getPackageManager().queryIntentActivities(miuiIntent, PackageManager.MATCH_DEFAULT_ONLY);
            if (resolveInfos.size() > 0) {
                startActivityForResult(miuiIntent, PERMISSON_REQUESTCODE);
                return;
            }
        }
        // Not MIUI: open the standard Android application-details settings page
        Intent intent = new Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
        Uri uri = Uri.fromParts("package", getPackageName(), null);
        intent.setData(uri);
        // Launch the settings screen so the user can grant the missing permissions
        startActivityForResult(intent, PERMISSON_REQUESTCODE);
    }

    /**
     * Show a dialog explaining that required permissions are missing.
     *
     * @since 2.5.0
     */
    private void showMissingPermissionDialog() {
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setTitle(R.string.notifyTitle);
        builder.setMessage(R.string.notifyMsg);
        // Decline: leave the screen
        builder.setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                finish();
            }
        });
        builder.setPositiveButton(R.string.setting, new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                settingPermissionActivity();
            }
        });
        builder.setCancelable(false);
        builder.show();
    }

    /**
     * Stop locating and release the location client.
     */
    @Override
    public void deactivate() {
        mLocationListener = null;
        if (mLocationClient != null) {
            mLocationClient.stopLocation();
            mLocationClient.onDestroy();
        }
        mLocationClient = null;
    }

    @OnClick({R.id.tv_city, R.id.iv_location_close, R.id.btn_location_confirm})
    public void onViewClicked(View view) {
        switch (view.getId()) {
            case R.id.tv_city:
                getPublicCity();
                break;
            case R.id.iv_location_close:
                finish();
                break;
            case R.id.btn_location_confirm:
                Intent intent = new Intent(LocationActivity.this, LocationUploadActivity.class);
                startActivity(intent);
                break;
            default:
        }
    }

    private void getPublicCity() {
        List<HotCity> hotCities = new ArrayList<>();
        hotCities.add(new HotCity("北京", "北京", "101010100"));
        hotCities.add(new HotCity("上海", "上海", "101020100"));
        hotCities.add(new HotCity("广州", "广东", "101280101"));
        hotCities.add(new HotCity("深圳", "广东", "101280601"));
        hotCities.add(new HotCity("杭州", "浙江", "101210101"));
        if (mAMapLocation != null) {
            // Once a location fix is available, feed the located city into the picker
            CityPicker.getInstance().setLocatedCity(new LocatedCity(mAMapLocation.getCity(), mAMapLocation.getProvince(), mAMapLocation.getCityCode()));
        }
        CityPicker.getInstance()
                .setFragmentManager(getSupportFragmentManager()) // this call is mandatory
                .enableAnimation(true)                           // enable animations
                .setHotCities(hotCities)                         // popular cities shown at the top
                .setOnPickListener(new OnPickListener() {
                    @Override
                    public void onPick(int position, City data) {
                        Toast.makeText(getApplicationContext(), data.getName(), Toast.LENGTH_SHORT).show();
                        getLatlon(data.getName());
                        mTvCity.setText(data.getName());
                    }

                    @Override
                    public void onLocate() {
                    }
                })
                .show();
    }

    private void getLatlon(String cityName) {
        GeocodeSearch geocodeSearch = new GeocodeSearch(this);
        geocodeSearch.setOnGeocodeSearchListener(new GeocodeSearch.OnGeocodeSearchListener() {
            @Override
            public void onRegeocodeSearched(RegeocodeResult regeocodeResult, int i) {
            }

            @Override
            public void onGeocodeSearched(GeocodeResult geocodeResult, int i) {
                if (i == 1000) {
                    if (geocodeResult != null && geocodeResult.getGeocodeAddressList() != null
                            && geocodeResult.getGeocodeAddressList().size() > 0) {
                        GeocodeAddress geocodeAddress = geocodeResult.getGeocodeAddressList().get(0);
                        double latitude = geocodeAddress.getLatLonPoint().getLatitude();      // latitude
                        double longititude = geocodeAddress.getLatLonPoint().getLongitude();  // longitude
                        String adcode = geocodeAddress.getAdcode();                           // district code
                        LatLng latLng = new LatLng(latitude, longititude);
                        aMap.moveCamera(CameraUpdateFactory.newLatLngZoom(latLng, 12));
                    } else {
                        ToastUtils.showToast(LocationActivity.this, "地址名出錯"); // "bad address name"
                    }
                }
            }
        });
        GeocodeQuery geocodeQuery = new GeocodeQuery(cityName.trim(), "29");
        geocodeSearch.getFromLocationNameAsyn(geocodeQuery);
    }
}
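// Hedged usage sketch (not part of the original file): checkPerm() above is only reached
// from onActivityResult(), so no permission prompt appears when the screen first opens.
// If an up-front check is wanted, one option (an assumption about the intended flow, not a
// confirmed part of this project) is to invoke it at the end of onCreate():
//
//     @Override
//     protected void onCreate(Bundle savedInstanceState) {
//         super.onCreate(savedInstanceState);
//         ButterKnife.bind(this);
//         mapView.onCreate(savedInstanceState);
//         ...
//         checkPerm(); // request location/storage/phone-state permissions up front
//     }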