Dataset Preview Go to dataset viewer
repo_name (string)path (string)size (string)content (string)license (string)
chenyihan/Simple-SQLite-ORM-Android
src/org/cyy/fw/android/dborm/sqlite/InsertLogic.java
904
package org.cyy.fw.android.dborm.sqlite; import org.cyy.fw.android.dborm.sqlite.DBAccessTemplate.IDBAccessLogic; import android.database.sqlite.SQLiteDatabase; import android.text.TextUtils; /** * * * @author cyy * @version [V1.0, 2013-5-7] */ class InsertLogic implements IDBAccessLogic<long[]> { InsertItem[] insertItems; public InsertLogic(InsertItem[] insertItems) { this.insertItems = insertItems; } @Override public long[] doAccessLogic(SQLiteDatabase db) { if (insertItems == null) { return null; } long[] rowIds = new long[insertItems.length]; // insert item for (int i = 0; i < insertItems.length; i++) { InsertItem ins = insertItems[i]; if (TextUtils.isEmpty(ins.tableName)) { continue; } rowIds[i] = db.insert(ins.tableName, null, ins.contentValues); } return rowIds; } @Override public boolean isOpenTransaction() { return true; } }
isc
sdeleon28/coda
src/music/tests/chords.test.js
7999
import { expect } from 'chai'; import { NOTES } from '../constants'; import Chord from '../Chord'; import chordSpecs from '../specs/chords'; const cMajor = { id: 'CM', specId: 'M', notes: ['C', 'E', 'G'], label: 'C Major', }; const cMinor = { id: 'Cm', specId: 'm', notes: ['C', 'D#', 'G'], label: 'C Minor', }; const cSeventh = { id: 'C7', specId: '7', notes: 'C E G A#'.split(' '), label: 'C 7th', }; const cMajorSeventh = { id: 'CM7', specId: 'M7', notes: 'C E G B'.split(' '), label: 'C Major 7th', }; const cMinorSeventh = { id: 'Cm7', specId: 'm7', notes: 'C D# G A#'.split(' '), label: 'C Minor 7th', }; const cSixth = { id: 'C6', specId: '6', notes: 'C E G A'.split(' '), label: 'C 6th', }; const cMinorSixth = { id: 'Cm6', specId: 'm6', notes: 'C D# G A'.split(' '), label: 'C Minor 6th', }; const cDiminished = { id: 'Cdim', specId: 'dim', notes: 'C D# F#'.split(' '), label: 'C Diminished', }; const cDiminishedSeventh = { id: 'Cdim7', specId: 'dim7', notes: 'C D# F# A'.split(' '), label: 'C Diminished 7th', }; const cHalfDiminishedSeventh = { id: 'C7b5', specId: '7b5', notes: 'C D# F# A#'.split(' '), label: 'C Half Diminished 7th', }; const cAugmented = { id: 'Caug', specId: 'aug', notes: 'C E G#'.split(' '), label: 'C Augmented', }; const cSeventhSharpFive = { id: 'C7#5', specId: '7#5', notes: 'C E G# A#'.split(' '), label: 'C 7th #5', }; const cNinth = { id: 'C9', specId: '9', notes: 'C E G A# D'.split(' '), label: 'C 9th', }; const cSeventhSharpNine = { id: 'C7#9', specId: '7#9', notes: 'C E G A# D#'.split(' '), label: 'C 7th #9', }; const cMajorNinth = { id: 'CM9', specId: 'M9', notes: 'C E G B D'.split(' '), label: 'C Major 9th', }; const cAddedNinth = { id: 'Cadd9', specId: 'add9', notes: 'C E G D'.split(' '), label: 'C Added 9th', }; const cMinorNinth = { id: 'Cm9', specId: 'm9', notes: 'C D# G A# D'.split(' '), label: 'C Minor 9th', }; const cMinorAddedNinth = { id: 'Cmadd9', specId: 'madd9', notes: 'C D# G D'.split(' '), label: 'C Minor Added 9th', }; const 
cEleventh = { id: 'C11', specId: '11', notes: 'C G A# D F'.split(' '), label: 'C 11th', }; const cMinorEleventh = { id: 'Cm11', specId: 'm11', notes: 'C D# G A# D F'.split(' '), label: 'C Minor 11th', }; const cSeventhSharpEleven = { id: 'C7#11', specId: '7#11', notes: 'C E G A# F#'.split(' '), label: 'C 7th #11', }; const cMajorSeventhSharpEleven = { id: 'CM7#11', specId: 'M7#11', notes: 'C E G B D F#'.split(' '), label: 'C Major 7th #11', }; const cThirteenth = { id: 'C13', specId: '13', notes: 'C E G A# D A'.split(' '), label: 'C 13th', }; const cMajorThirteenth = { id: 'CM13', specId: 'M13', notes: 'C E G B D A'.split(' '), label: 'C Major 13th', }; const cMinorThirteenth = { id: 'Cm13', specId: 'm13', notes: 'C D# G A# D F A'.split(' '), label: 'C Minor 13th', }; const cSus4 = { id: 'Csus4', specId: 'sus4', notes: 'C F G'.split(' '), label: 'C Suspended 4th', }; const cSus2 = { id: 'Csus2', specId: 'sus2', notes: 'C D G'.split(' '), label: 'C Suspended 2nd', }; const cPowerChord = { id: 'C5', specId: '5', notes: 'C G'.split(' '), label: 'C 5th', }; const fSharpMinor = { id: 'F#m', specId: 'm', notes: ['F#', 'A', 'C#'], label: 'F# Minor', }; describe('Chord', () => { describe('make', () => { it('should make the C Major chord properly', () => { expect(Chord.make('C', 'M')).to.deep.equal(cMajor); }); it('should make the C Minor chord properly', () => { expect(Chord.make('C', 'm')).to.deep.equal(cMinor); }); it('should make the C Dominant 7th chord properly', () => { expect(Chord.make('C', '7')).to.deep.equal(cSeventh); }); it('should make the C Major 7th chord properly', () => { expect(Chord.make('C', 'M7')).to.deep.equal(cMajorSeventh); }); it('should make the C Minor 7th chord properly', () => { expect(Chord.make('C', 'm7')).to.deep.equal(cMinorSeventh); }); it('should make the C 6th chord properly', () => { expect(Chord.make('C', '6')).to.deep.equal(cSixth); }); it('should make the C Minor 6th chord properly', () => { expect(Chord.make('C', 
'm6')).to.deep.equal(cMinorSixth); }); it('should make the C Diminished chord properly', () => { expect(Chord.make('C', 'dim')).to.deep.equal(cDiminished); }); it('should make the C Diminished 7th chord properly', () => { expect(Chord.make('C', 'dim7')).to.deep.equal(cDiminishedSeventh); }); it('should make the C Half Diminished 7th chord properly', () => { expect(Chord.make('C', '7b5')).to.deep.equal(cHalfDiminishedSeventh); }); it('should make the C Augmented chord properly', () => { expect(Chord.make('C', 'aug')).to.deep.equal(cAugmented); }); it('should make the C 7th #5 chord properly', () => { expect(Chord.make('C', '7#5')).to.deep.equal(cSeventhSharpFive); }); it('should make the C 9th chord properly', () => { expect(Chord.make('C', '9')).to.deep.equal(cNinth); }); it('should make the C 7th #9 chord properly', () => { expect(Chord.make('C', '7#9')).to.deep.equal(cSeventhSharpNine); }); it('should make the C Major Ninth chord properly', () => { expect(Chord.make('C', 'M9')).to.deep.equal(cMajorNinth); }); it('should make the C Added 9th chord properly', () => { expect(Chord.make('C', 'add9')).to.deep.equal(cAddedNinth); }); it('should make the C Minor 9th chord properly', () => { expect(Chord.make('C', 'm9')).to.deep.equal(cMinorNinth); }); it('should make the C Minor Added 9th chord properly', () => { expect(Chord.make('C', 'madd9')).to.deep.equal(cMinorAddedNinth); }); it('should make the C 11th chord properly', () => { expect(Chord.make('C', '11')).to.deep.equal(cEleventh); }); it('should make the C Minor 11th chord properly', () => { expect(Chord.make('C', 'm11')).to.deep.equal(cMinorEleventh); }); it('should make the C 7th #11 chord properly', () => { expect(Chord.make('C', '7#11')).to.deep.equal(cSeventhSharpEleven); }); it('should make the C Major 7th #11 chord properly', () => { expect(Chord.make('C', 'M7#11')).to.deep.equal(cMajorSeventhSharpEleven); }); it('should make the C 13 chord properly', () => { expect(Chord.make('C', 
'13')).to.deep.equal(cThirteenth); }); it('should make the C Major 13th chord properly', () => { expect(Chord.make('C', 'M13')).to.deep.equal(cMajorThirteenth); }); it('should make the C Minor 13th chord properly', () => { expect(Chord.make('C', 'm13')).to.deep.equal(cMinorThirteenth); }); it('should make the C Suspended 4th chord properly', () => { expect(Chord.make('C', 'sus4')).to.deep.equal(cSus4); }); it('should make the C Suspended 2nd chord properly', () => { expect(Chord.make('C', 'sus2')).to.deep.equal(cSus2); }); it('should make the C Power Chord (the only chord you need) properly', () => { expect(Chord.make('C', '5')).to.deep.equal(cPowerChord); }); }); describe('makeForAllRoots', () => { it('should generate one chord for each possible root', () => { const allScales = Chord.makeForAllRoots('M'); expect(allScales.length).to.equal(12); expect(allScales[0]).to.deep.equal(cMajor); }); }); describe('makeAll', () => { const chords = Chord.makeAll(); it('should return the appropriate amount of chords', () => { expect(chords.length).to.equal(chordSpecs.length * NOTES.length); }); it('should contain the F#m chord', () => { expect(chords).to.contain(fSharpMinor); }); }); describe('filterScalesByNotes', () => { expect(Chord.filterByNotes([cMajor, fSharpMinor], 'C G'.split(' '))) .to.deep.equal([cMajor]); }); });
isc
paddor/cztop
lib/cztop/version.rb
37
module CZTop VERSION = "1.0.0" end
isc
jussi-kalliokoski/depdiff
bin/depdiff.js
2364
#!/usr/bin/env node /*jshint maxlen:180 */ "use strict"; var fs = require("fs"); var _ = require("lodash"); var depdiff = require("../lib"); var outputStream = process.stdout; var write = function (data) { outputStream.write(data); }; var actions = module.exports = { displayHelp: function () { process.stderr.write( "Usage: depdiff <command>\n" + "\n" + "Available commands:\n" + " help Displays this message.\n" + " list-dependencies Prints the list of package information into stdout as JSON.\n" + " list-changes <old-package-path> Prints the list of package changes between current and old version (as provided by the JSON in the old-package-path).\n" + "\n" + "Options:\n" + " -o, --output <file> Prints the results into a file instead of stdout.\n" + "\n" ); }, listDependencies: function () { write(JSON.stringify(depdiff.listDependencies())); }, listChanges: function (filepath) { var oldDependencies = JSON.parse(fs.readFileSync(filepath)); var newDependencies = depdiff.listDependencies(); var changes = depdiff.listChanges(oldDependencies, newDependencies); _.each(changes, function (change) { var sign = change.type === "old" ? "-" : "+"; write(sign + " " + change.path + " (" + change.version + ")\n"); }); }, }; var availableCommands = { "help": actions.displayHelp, "--help": actions.displayHelp, "-h": actions.displayHelp, "list-dependencies": actions.listDependencies, "list-changes": actions.listChanges, }; var isBeingRunAsAnExecutable = require.main === module; if ( isBeingRunAsAnExecutable ) { var args = require("minimist")(process.argv.slice(3)); var outputFile = args.o || args.output; if ( outputFile && typeof outputFile === "string" ) { outputStream = fs.createWriteStream(outputFile, { encoding: "utf8" }); } var command = process.argv[2]; if ( !_.has(availableCommands, command) ) { process.stderr.write("Command not found: `" + command + "`\n"); process.exit(1); } availableCommands[command].apply(null, process.argv.slice(3)); }
isc
io7m/jcamera
com.io7m.jcamera.examples.jogl/src/main/java/com/io7m/jcamera/examples/jogl/ExampleFPSStyleMouseAdapter.java
2416
/* * Copyright © 2021 Mark Raynsford <code@io7m.com> https://www.io7m.com * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR * IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ package com.io7m.jcamera.examples.jogl; import com.io7m.jcamera.JCameraFPSStyleInputType; import com.io7m.jcamera.JCameraFPSStyleMouseRegion; import com.io7m.jcamera.JCameraRotationCoefficientsMutable; import com.jogamp.newt.event.MouseAdapter; import com.jogamp.newt.event.MouseEvent; import java.util.concurrent.atomic.AtomicReference; /** * The mouse adapter used to handle mouse events. */ // CHECKSTYLE_JAVADOC:OFF public final class ExampleFPSStyleMouseAdapter extends MouseAdapter { private final AtomicReference<JCameraFPSStyleMouseRegion> mouse_region; private final JCameraFPSStyleInputType input; private final ExampleFPSStyleSimulationType sim; private final JCameraRotationCoefficientsMutable rotations; public ExampleFPSStyleMouseAdapter( final AtomicReference<JCameraFPSStyleMouseRegion> in_mouse_region, final ExampleFPSStyleSimulationType in_sim, final JCameraRotationCoefficientsMutable in_rotations) { this.mouse_region = in_mouse_region; this.input = in_sim.getInput(); this.sim = in_sim; this.rotations = in_rotations; } @Override public void mouseMoved( final MouseEvent e) { assert e != null; /* * If the camera is enabled, get the rotation coefficients for the mouse * movement. 
*/ if (this.sim.cameraIsEnabled()) { this.rotations.from( this.mouse_region.get().coefficients( e.getX(), e.getY())); this.input.addRotationAroundHorizontal(this.rotations.horizontal()); this.input.addRotationAroundVertical(this.rotations.vertical()); } } }
isc
i-am-tom/schemer
src/Formatter/Text.php
2390
<?php namespace Schemer\Formatter; /** * String formatter. */ class Text extends FormatterAbstract { /** * The value will be a string. */ public function __construct() { $this->transformations = [ function ($value) : string { return (string) $value; } ]; } /** * The string will be transformed to lowercase. * @return Schemer\Formatter\Text */ public function lowercase() : Text { return $this->pipe('strtolower'); } /** * Substrings will be replaced according to a regex. * @param string $regex * @param string $replacement * @return Schemer\Formatter\Text */ public function replace(string $regex, string $replacement) : Text { return $this->pipe( function (string $value) use ($regex, $replacement) { return preg_replace($regex, $replacement, $value); } ); } /** * Given characters will be translated. * @param string $from * @param string $to * @return Schemer\Formatter\Text */ public function translate(string $from, string $to) : Text { return $this->pipe( function (string $value) use ($from, $to) : string { return strtr($value, $from, $to); } ); } /** * Given characters will be stripped from both string ends. * @param string $mask * @return Schemer\Formatter\Text */ public function trim(string $mask = " \t\n\r\0\x0B") : Text { return $this->pipe( function (string $value) use ($mask) : string { return trim($value, $mask); } ); } /** * The string will be truncated at a given length. * @param int $maximum * @return Schemer\Formatter\Text */ public function truncate(int $maximum) : Text { return $this->pipe( function (string $value) use ($maximum) : string { return strlen($value) > $maximum ? substr($value, 0, $maximum) : $value; } ); } /** * The string will be uppercase. * @return Schemer\Formatter\Text */ public function uppercase() : Text { return $this->pipe('strtoupper'); } }
isc
Japroriple/ripple-client
src/js/services/domainalias.js
3072
/** * DOMAIN ALIAS * * The domain alias service resolves ripple address to domains. * * In the AccountRoot entry of any ripple account users can provide a reference * to a domain they own. Ownership of the domain is verified via the ripple.txt * magic file. * * This service provides both the lookup in the ledger and the subsequent * verification via ripple.txt. */ var module = angular.module('domainalias', ['network', 'rippletxt']); module.factory('rpDomainAlias', ['$q', '$rootScope', 'rpNetwork', 'rpRippleTxt', function ($q, $scope, net, txt) { var aliases = {}; /** * Validates a domain against an object parsed from ripple.txt data. * * @private */ function validateDomain(domain, address, data) { // Validate domain if (!data.domain || data.domain.length !== 1 || data.domain[0] !== domain) { return false; } // Validate address if (!data.accounts) { return false; } for (var i = 0, l = data.accounts.length; i < l; i++) { if (data.accounts[i] === address) { return true; } } return false; } function getAliasForAddress(address) { var aliasPromise = $q.defer(); if (aliases[address] && aliases[address].checked) { if (aliases[address].domain) { aliasPromise.resolve(aliases[address].domain); } else { aliasPromise.reject(new Error("Invalid domain")); } return aliasPromise.promise; } net.remote.request_account_info(address) .on('success', function (data) { if (data.account_data.Domain) { $scope.$apply(function () { var domain = sjcl.codec.utf8String.fromBits(sjcl.codec.hex.toBits(data.account_data.Domain)); var txtData = txt.get(domain); txtData.then( function (data) { if(validateDomain(domain, address, data)) { aliases[address] = { checked: true, domain: domain }; aliasPromise.resolve(domain); } else { aliases[address] = { checked: true, domain: false }; aliasPromise.reject(new Error("Invalid domain")); } }, function (error) { aliases[address] = { checked: true, domain: false }; aliasPromise.reject(new Error(error)); } ); }); } else { aliases[address] = { checked: true, 
domain: false }; aliasPromise.reject(new Error("No domain found")); } }) .on('error', function () { aliasPromise.reject(new Error("No domain found")); }) .request(); return aliasPromise.promise; } return { getAliasForAddress: getAliasForAddress }; }]);
isc
DealerNextDoor/ApolloDev
src/org/apollo/game/model/inter/bank/BankUtils.java
3327
package org.apollo.game.model.inter.bank; import org.apollo.game.model.Inventory; import org.apollo.game.model.Item; import org.apollo.game.model.Player; import org.apollo.game.model.def.ItemDefinition; import org.apollo.game.model.inter.InterfaceListener; import org.apollo.game.model.inv.InventoryListener; import org.apollo.game.model.inv.SynchronizationInventoryListener; /** * Contains bank-related utility methods. * * @author Graham */ public final class BankUtils { /** * Deposits an item into the player's bank. * * @param player The player. * @param slot The slot. * @param id The id. * @param amount The amount. * @return {@code false} if the chain should be broken. */ public static boolean deposit(Player player, int slot, int id, int amount) { if (amount == 0) { return true; } Inventory inventory = player.getInventory(); Inventory bank = player.getBank(); Item item = inventory.get(slot); int newId = ItemDefinition.noteToItem(item.getId()); if (bank.freeSlots() == 0 && !bank.contains(item.getId())) { bank.forceCapacityExceeded(); return true; } int removed; if (amount > 1) { inventory.stopFiringEvents(); } try { removed = inventory.remove(item.getId(), amount); } finally { if (amount > 1) { inventory.startFiringEvents(); } } if (amount > 1) { inventory.forceRefresh(); } bank.add(newId, removed); return true; } /** * Opens a player's bank. * * @param player The player. 
*/ public static void openBank(Player player) { InventoryListener invListener = new SynchronizationInventoryListener(player, BankConstants.SIDEBAR_INVENTORY_ID); InventoryListener bankListener = new SynchronizationInventoryListener(player, BankConstants.BANK_INVENTORY_ID); player.getInventory().addListener(invListener); player.getBank().addListener(bankListener); player.getInventory().forceRefresh(); player.getBank().forceRefresh(); InterfaceListener interListener = new BankInterfaceListener(player, invListener, bankListener); player.getInterfaceSet().openWindowWithSidebar(interListener, BankConstants.BANK_WINDOW_ID, BankConstants.SIDEBAR_ID); } /** * Withdraws an item from a player's bank. * * @param player The player. * @param slot The slot. * @param id The id. * @param amount The amount. * @return {@code false} if the chain should be broken. */ public static boolean withdraw(Player player, int slot, int id, int amount) { if (amount == 0) { return true; } Inventory inventory = player.getInventory(); Inventory bank = player.getBank(); Item item = bank.get(slot); if (amount >= item.getAmount()) { amount = item.getAmount(); } int newId = player.isWithdrawingNotes() ? ItemDefinition.itemToNote(item.getId()) : item.getId(); if (inventory.freeSlots() == 0 && !(inventory.contains(newId) && ItemDefinition.lookup(newId).isStackable())) { inventory.forceCapacityExceeded(); return true; } int remaining = inventory.add(newId, amount); bank.stopFiringEvents(); try { bank.remove(item.getId(), amount - remaining); bank.shift(); } finally { bank.startFiringEvents(); } bank.forceRefresh(); return true; } /** * Default private constructor to prevent instantiation. */ private BankUtils() { } }
isc
andrejewski/ascii-codes
index.js
1748
var symbolIndex = [ "NUL", "SOH", "STX", "ETX", "EOT", "ENQ", "ACK", "BEL", "BS", "TAB", "LF", "VT", "FF", "CR", "SO", "SI", "DLE", "DC1", "DC2", "DC3", "DC4", "NAK", "SYN", "ETB", "CAN", "EM", "SUB", "ESC", "FS", "GS", "RS", "US", " ", "!", "\"", "#", "$", "%", "&", "'", "(", ")", "*", "+", ",", "-", ".", "/", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", ":", ";", "<", "=", ">", "?", "@", "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "[", "\\", "]", "^", "_", "`", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "{", "|", "}", "~" ]; function decimalFromHex(n) { return parseInt(n, 16); } function hexFromDecimal(n) { return n.toString(16); } function decimalFromOctal(n) { return parseInt(n, 8); } function octalFromDecimal(n) { return n.toString(8); } function symbolForDecimal(n) { return symbolIndex[n]; } function symbolForHex(n) { return symbolForDecimal(decimalFromHex(n)); } function symbolForOctal(n) { return symbolForDecimal(decimalFromOctal(n)); } function decimalForSymbol(s) { return symbolIndex.indexOf(s); } function hexForSymbol(s) { return hexFromDecimal(decimalForSymbol(s)); } function octalForSymbol(s) { return octalFromDecimal(decimalForSymbol(s)); } module.exports = { symbolForDecimal: symbolForDecimal, symbolForHex: symbolForHex, symbolForOctal: symbolForOctal, decimalForSymbol: decimalForSymbol, hexForSymbol: hexForSymbol, octalForSymbol: octalForSymbol, symbolIndex: symbolIndex };
isc
gatecat/prjoxide
fuzzers/LIFCL/067-copy-ebr/fuzzer.py
228
import database import libpyprjoxide def main(): db = libpyprjoxide.Database(database.get_db_root()) libpyprjoxide.copy_db(db, "LIFCL", "EBR_10", ["TRUNK_L_EBR_10", ], "PEWC", "") if __name__ == '__main__': main()
isc
jgillich/picoCDN
plugins.js
1081
var uglify = require('uglify-js'), beautify = require('js-beautify').js_beautify, sass = require('node-sass'), coffee = require('coffee-script'), marked = require('marked'), sanitizer = require('caja-sanitizer'); module.exports = { javascript: { uglify: function (text) { return uglify.minify(text, { fromString: true }).code; }, beautify: function (text) { return beautify(text, { indent_size: 4 }); } }, scss: { render: function (text) { this.contentType = 'text/css'; return sass.renderSync({ data: text }); } }, coffeescript: { compile: function (text) { this.contentType = 'application/javascript'; return coffee.compile(text); } }, markdown: { render: function (text) { this.escape = false; this.contentType = 'text/html'; return sanitizer.sanitize(marked(text)); } } };
isc
BitGo/prova
wire/msggetaddr_test.go
2778
// Copyright (c) 2013-2016 The btcsuite developers // Copyright (c) 2017 BitGo // Use of this source code is governed by an ISC // license that can be found in the LICENSE file. package wire import ( "bytes" "reflect" "testing" "github.com/davecgh/go-spew/spew" ) // TestGetAddr tests the MsgGetAddr API. func TestGetAddr(t *testing.T) { pver := ProtocolVersion // Ensure the command is expected value. wantCmd := "getaddr" msg := NewMsgGetAddr() if cmd := msg.Command(); cmd != wantCmd { t.Errorf("NewMsgGetAddr: wrong command - got %v want %v", cmd, wantCmd) } // Ensure max payload is expected value for latest protocol version. // Num addresses (varInt) + max allowed addresses. wantPayload := uint32(0) maxPayload := msg.MaxPayloadLength(pver) if maxPayload != wantPayload { t.Errorf("MaxPayloadLength: wrong max payload length for "+ "protocol version %d - got %v, want %v", pver, maxPayload, wantPayload) } return } // TestGetAddrWire tests the MsgGetAddr wire encode and decode for various // protocol versions. func TestGetAddrWire(t *testing.T) { msgGetAddr := NewMsgGetAddr() msgGetAddrEncoded := []byte{} tests := []struct { in *MsgGetAddr // Message to encode out *MsgGetAddr // Expected decoded message buf []byte // Wire encoding pver uint32 // Protocol version for wire encoding }{ // Latest protocol version. { msgGetAddr, msgGetAddr, msgGetAddrEncoded, ProtocolVersion, }, // Protocol version BIP0035Version. { msgGetAddr, msgGetAddr, msgGetAddrEncoded, BIP0035Version, }, // Protocol version BIP0031Version. { msgGetAddr, msgGetAddr, msgGetAddrEncoded, BIP0031Version, }, // Protocol version NetAddressTimeVersion. { msgGetAddr, msgGetAddr, msgGetAddrEncoded, NetAddressTimeVersion, }, // Protocol version MultipleAddressVersion. { msgGetAddr, msgGetAddr, msgGetAddrEncoded, MultipleAddressVersion, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { // Encode the message to wire format. 
var buf bytes.Buffer err := test.in.BtcEncode(&buf, test.pver) if err != nil { t.Errorf("BtcEncode #%d error %v", i, err) continue } if !bytes.Equal(buf.Bytes(), test.buf) { t.Errorf("BtcEncode #%d\n got: %s want: %s", i, spew.Sdump(buf.Bytes()), spew.Sdump(test.buf)) continue } // Decode the message from wire format. var msg MsgGetAddr rbuf := bytes.NewReader(test.buf) err = msg.BtcDecode(rbuf, test.pver) if err != nil { t.Errorf("BtcDecode #%d error %v", i, err) continue } if !reflect.DeepEqual(&msg, test.out) { t.Errorf("BtcDecode #%d\n got: %s want: %s", i, spew.Sdump(msg), spew.Sdump(test.out)) continue } } }
isc
OpenCampaign/opencampaign
db/migrate/20151107000004_create_basic_auths.rb
452
class CreateBasicAuths < ActiveRecord::Migration def up create_table :basic_auths do |t| t.column :email, :string, null: false t.column :password_digest, :string t.column :username, :string t.column :identity_id, :integer end change_table :basic_auths do |t| t.index :email, { unique: true } t.index :username t.index :identity_id end end def down drop_table :basic_auths end end
isc
taschik/ramcloud
src/SessionAlarm.cc
8080
/* Copyright (c) 2011-2012 Stanford University * * Permission to use, copy, modify, and distribute this software for any purpose * with or without fee is hereby granted, provided that the above copyright * notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR(S) DISCLAIM ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL AUTHORS BE LIABLE FOR ANY * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF * CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ #include "Buffer.h" #include "Cycles.h" #include "SessionAlarm.h" namespace RAMCloud { /** * Constructor for SessionAlarm objects. * \param timer * Shared structure that will manage this alarm. Usually comes from * a Context object. * \param session * The transport session to monitor in this alarm. Any RPCs in this * session should result in calls to rpcStarted and rpcFinished. * \param timeoutMs * If this many milliseconds elapse in an RPC with no sign of life from * from the server, then the session will be aborted. After half this * time has elapsed we will send a ping RPC to the server; as long as * it responds to the pings there will be no abort. */ SessionAlarm::SessionAlarm(SessionAlarmTimer* timer, Transport::Session* session, int timeoutMs) : session(session) , timer(timer) , timerIndex(0) , outstandingRpcs(0) , waitingForResponseMs(0) , pingMs(timeoutMs/2) , abortMs(timeoutMs) { // Because of estimation errors in SessionAlarmTimer, we need to enforce // a minimum threshold for pingMs. if (pingMs < 3*SessionAlarmTimer::TIMER_INTERVAL_MS) { pingMs = 3*SessionAlarmTimer::TIMER_INTERVAL_MS; abortMs = 2*pingMs; } } /** * Destructor for SessionAlarm objects. 
*/ SessionAlarm::~SessionAlarm() { while (outstandingRpcs > 0) { rpcFinished(); } } /** * This method is invoked whenever an RPC is initiated on the session * associated with this alarm. As long as there are outstanding RPCs * for the session, we will make sure that either (a) RPCs are completing * or (b) the server is capable of receiving and responding to ping * requests (which effectively makes (a) true). If a long period of time * goes by without either of these conditions being satisfied, then the * abort method is invoked on the session. */ void SessionAlarm::rpcStarted() { outstandingRpcs++; if (outstandingRpcs == 1) { timerIndex = timer->activeAlarms.size(); timer->activeAlarms.push_back(this); if (timerIndex == 0) { // Before now there were no active alarms, so make sure the // timer is running. // Note: in some situations dispatch->currentTime may be stale // but this is should be OK; it will simply result in an extra // timer wakeup, resulting in waitingForResponseMs overestimating // by up to TIMER_INTERVAL_MS. This approach saves the time of // reading the clock every time an RPC starts. timer->start(timer->owner->currentTime + timer->timerIntervalTicks); } } } /** * This method must be invoked whenever an RPC completes on the session * associated with this alarm. */ void SessionAlarm::rpcFinished() { outstandingRpcs--; if (outstandingRpcs == 0) { assert(timerIndex < timer->activeAlarms.size()); assert(timer->activeAlarms[timerIndex] == this); // Deleting the element at timerIndex by // copying the tail element to timerIndex and deleting // the tail element timer->activeAlarms[timerIndex] = timer->activeAlarms.back(); timer->activeAlarms[timerIndex]->timerIndex = timerIndex; timer->activeAlarms.pop_back(); // Note: we don't turn off the timer here, even if there are no // active RPCs. Just let the timer fire, and it will turn itself // off if there are still no active RPCs. 
However, it's pretty // likely that more RPCs will start soon, in which case we might // as well save the overhead of stopping and restarting the timer. } waitingForResponseMs = 0; } /** * Constructor for PingRpc: initiates a ping RPC and returns once the RPC * has been initiated, without waiting for it to complete. * * \param context * Overall information about this RAMCloud server or client. * \param session * Send the ping on this session. */ SessionAlarmTimer::PingRpc::PingRpc(Context* context, Transport::SessionRef session) : RpcWrapper(sizeof(WireFormat::Ping::Response)) , context(context) { this->session = session; WireFormat::Ping::Request* reqHdr( allocHeader<WireFormat::Ping>()); reqHdr->callerId = ServerId().getId(); send(); } /** * Returns true if the ping RPC completed successfully, false otherwise. */ bool SessionAlarmTimer::PingRpc::succeeded() { return (getState() == RpcState::FINISHED) && (responseHeader != NULL) && (responseHeader->status == STATUS_OK); } /** * Constructor for SessionAlarmTimer objects. */ SessionAlarmTimer::SessionAlarmTimer(Context* context) : Dispatch::Timer(*context->dispatch) , context(context) , activeAlarms() , timerIntervalTicks(Cycles::fromNanoseconds(TIMER_INTERVAL_MS * 1000000)) , pings() { } /** * Destructor for SessionAlarmTimer objects. */ SessionAlarmTimer::~SessionAlarmTimer() { for (PingMap::iterator it = pings.begin(); it != pings.end(); it++) { delete it->second; } while (!activeAlarms.empty()) { activeAlarms[0]->rpcFinished(); } } /** * This method is invoked by the dispatcher every TIMER_INTERVAL_MS when * there are active RPCs. It scans all of the active sessions, checking * for slow server responses and issuing pings if needed to make sure that * the servers are still alive. 
*/ void SessionAlarmTimer::handleTimerEvent() { foreach (SessionAlarm* alarm, activeAlarms) { alarm->waitingForResponseMs += TIMER_INTERVAL_MS; if (alarm->waitingForResponseMs < alarm->pingMs) continue; if (alarm->waitingForResponseMs > alarm->abortMs) { RAMCLOUD_LOG(WARNING, "Aborting %s after %d ms (server not responding)", alarm->session->getRpcInfo().c_str(), alarm->waitingForResponseMs); alarm->session->abort(); continue; } if (pings.find(alarm) != pings.end()) { // We have already sent a ping RPC for this alarm; no need to // send another. continue; } // It's time to initiate a ping RPC to make sure the server is still // alive. pings[alarm] = new PingRpc(context, alarm->session); RAMCLOUD_TEST_LOG("sent ping"); } // Clean up ping RPCs that completed successfully. for (PingMap::iterator it = pings.begin(); it != pings.end(); ) { PingMap::iterator current = it; PingRpc* rpc = current->second; it++; if (rpc->isReady()) { if (rpc->succeeded()) { RAMCLOUD_LOG(NOTICE, "Waiting for %s (ping succeeded)", current->first->session->getRpcInfo().c_str()); } else { RAMCLOUD_LOG(NOTICE, "Waiting for %s (ping failed)", current->first->session->getRpcInfo().c_str()); } delete rpc; pings.erase(current); } } if (!activeAlarms.empty()) { // Reschedule this timer. start(owner->currentTime + timerIntervalTicks); } } } // namespace RAMCloud
isc
AWildridge/ProtoScape
src/org/apollo/game/event/impl/PrivateChatEvent.java
1702
package org.apollo.game.event.impl;

import org.apollo.game.event.Event;
import org.apollo.util.NameUtil;

/**
 * An event representing a private chat message sent to a friend.
 */
public class PrivateChatEvent extends Event {

	/**
	 * The friend's display name.
	 */
	private final String friend;

	/**
	 * The friend's name encoded as a base-37 long.
	 */
	private final long friendlong;

	/**
	 * The friend's rights.
	 */
	private int friendrights;

	/**
	 * The uncompressed message text.
	 */
	private final String message;

	/**
	 * The compressed message payload.
	 */
	private final byte[] emessage;

	/**
	 * The last message id.
	 */
	private int lastid;

	/**
	 * Creates a new private chat event. The friend's base-37 encoding is
	 * computed eagerly from the supplied name.
	 *
	 * @param uncompressed The uncompressed message text.
	 * @param compressed The compressed message payload.
	 * @param friend The friend's display name.
	 */
	public PrivateChatEvent(String uncompressed, byte[] compressed, String friend) {
		this.message = uncompressed;
		this.emessage = compressed;
		this.friend = friend;
		this.friendlong = NameUtil.encodeBase37(this.friend);
	}

	/**
	 * Gets the friend's display name.
	 *
	 * @return The friend's name.
	 */
	public String getFriend() {
		return friend;
	}

	/**
	 * Gets the uncompressed message text.
	 *
	 * @return The message.
	 */
	public String getMessage() {
		return message;
	}

	/**
	 * Gets the compressed message payload.
	 *
	 * @return The compressed message bytes.
	 */
	public byte[] getMessageCompressed() {
		return emessage;
	}

	/**
	 * Gets the friend's name as a base-37 encoded long.
	 *
	 * @return The encoded name.
	 */
	public long getFriendLong() {
		return friendlong;
	}

	/**
	 * Gets the friend's rights.
	 *
	 * @return The rights value.
	 */
	public int getFriendRights() {
		return friendrights;
	}

	/**
	 * Sets the friend's rights.
	 *
	 * @param rights The rights value.
	 */
	public void setFriendRights(int rights) {
		this.friendrights = rights;
	}

	/**
	 * Gets the last message id.
	 *
	 * @return The last id.
	 */
	public int getLastId() {
		return lastid;
	}

	/**
	 * Sets the last private chat message id.
	 *
	 * @param id The message id.
	 */
	public void setLastId(int id) {
		this.lastid = id;
	}
}
isc
fw1121/annot-nf
bin/genes_gff3_to_csv.lua
3108
#!/usr/bin/env gt
--[[
  Copyright (c) 2015 Sascha Steinbiss <ss34@sanger.ac.uk>
  Copyright (c) 2015 Genome Research Ltd

  Permission to use, copy, modify, and distribute this software for any
  purpose with or without fee is hereby granted, provided that the above
  copyright notice and this permission notice appear in all copies.

  THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
  WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
  MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
  SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
  WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
  ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
  IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
]]

-- Prints usage information and terminates the process.
function usage()
  io.stderr:write(string.format("Usage: %s <GFF>\n" , arg[0]))
  os.exit(1)
end

if #arg < 1 then
  usage()
end

package.path = gt.script_dir .. "/?.lua;" .. package.path
require("lib")

-- Maps transcript ID -> polypeptide feature node that derives from it.
peps = {}

-- First pass: collect polypeptide nodes, keyed by their Derives_from
-- attribute (the parent transcript ID).
gather_v = gt.custom_visitor_new()
function gather_v:visit_feature(fn)
  for node in fn:children() do
    if node:get_type() == "polypeptide" then
      local dfrom = node:get_attribute("Derives_from")
      if dfrom then
        peps[dfrom] = node
      end
    end
  end
  return 0
end

-- Returns the first product term of the polypeptide derived from the
-- transcript with the given ID, or nil when no product is recorded.
local function product_for(id)
  if id and peps[id] then
    local pr = peps[id]:get_attribute("product")
    if pr then
      local pr_a = gff3_extract_structure(pr)
      return pr_a[1].term
    end
  end
  return nil
end

-- Second pass: emit one tab-separated line per annotated transcript:
-- gene ID, gene type, product, seqid, start, end, strand.
output_v = gt.custom_visitor_new()
function output_v:visit_feature(fn)
  for node in fn:children() do
    local gprod
    local gtype
    if node:get_type() == "mRNA" then
      gprod = product_for(node:get_attribute("ID"))
      if gprod then
        gtype = "coding"
      end
    elseif node:get_type() == "pseudogenic_transcript" then
      gprod = product_for(node:get_attribute("ID"))
      if gprod then
        gtype = "pseudogene"
      end
    elseif node:get_type():match("RNA$") then
      -- Other *RNA types (tRNA, rRNA, ...): report the type itself as
      -- the product.
      gtype = "non_coding"
      gprod = node:get_type()
    end
    if gtype and gprod then
      print(fn:get_attribute("ID") .. "\t" .. gtype .. "\t" .. gprod
              .. "\t" .. fn:get_seqid()
              .. "\t" .. fn:get_range():get_start()
              .. "\t" .. fn:get_range():get_end()
              .. "\t" .. fn:get_strand())
    end
  end
  return 0
end

-- Generic stream that applies a visitor to every tree it pulls through.
vis_stream = gt.custom_stream_new_unsorted()
function vis_stream:next_tree()
  local node = self.instream:next_tree()
  if node then
    node:accept(self.v)
  end
  return node
end

-- Pass 1: gather polypeptides.
vis_stream.instream = gt.gff3_in_stream_new_sorted(arg[1])
vis_stream.v = gather_v
local gn = vis_stream:next_tree()
while (gn) do
  gn = vis_stream:next_tree()
end

-- Pass 2: re-read the input and produce the CSV output.
vis_stream.instream = gt.gff3_in_stream_new_sorted(arg[1])
vis_stream.v = output_v
gn = vis_stream:next_tree()
while (gn) do
  gn = vis_stream:next_tree()
end
isc
Cosrnos/LynxJS
old/Geometry/Rectangle.js
2659
/*
 * Lynx Project
 * Started August 2013
 * ------------------------------------------------------
 * This file is covered under the LynxJS Game Library
 * License. Please read license.txt for more information
 * on usage of this library.
 * ------------------------------------------------------
 * File Name: Rectangle.js
 * Description: A Rectangle shape.
 * Notes: The given point (pX, pY) is always the TOP LEFT point, just like any other object in HTML5
 * Global Variables: Lynx.Rectangle, Lynx.Rect
 */
Lynx.Rectangle = function (pX, pY, pWidth, pHeight) {
	var that = {};

	// HTML5 coordinates: the y axis grows downward, so the bottom edge is
	// at pY + pHeight and the right edge at pX + pWidth.
	that.Points = {
		TopLeft: new Lynx.P(pX, pY),
		TopRight: new Lynx.P(pX + pWidth, pY),
		BottomRight: new Lynx.P(pX + pWidth, pY + pHeight),
		BottomLeft: new Lynx.P(pX, pY + pHeight)
	};

	//Should rename these since they could be transformed elsewhere.
	that.Lines = {
		North: new Lynx.L(that.Points.TopLeft, that.Points.TopRight),
		East: new Lynx.L(that.Points.TopRight, that.Points.BottomRight),
		South: new Lynx.L(that.Points.BottomLeft, that.Points.BottomRight),
		West: new Lynx.L(that.Points.TopLeft, that.Points.BottomLeft)
	};

	/**
	 * Description: Finds the Width of the rectangle
	 *
	 * @this {Lynx.Rectangle}
	 * @return {decimal} The width
	 */
	Object.defineProperty(that, "Width", {
		get: function () {
			return this.Lines.North.Length;
		}
	});

	/**
	 * Description: Finds the height of the rectangle
	 *
	 * @this {Lynx.Rectangle}
	 * @return {decimal} The height
	 */
	Object.defineProperty(that, "Height", {
		get: function () {
			return this.Lines.East.Length;
		}
	});

	/**
	 * Description: Finds the area of the rectangle
	 *
	 * @this {Lynx.Rectangle}
	 * @return {decimal} The area
	 */
	Object.defineProperty(that, "Area", {
		get: function () {
			return this.Width * this.Height;
		}
	});

	/**
	 * Description: Finds the perimeter of the rectangle
	 *
	 * @this {Lynx.Rectangle}
	 * @return {decimal} The perimeter
	 */
	Object.defineProperty(that, "Perimeter", {
		get: function () {
			return this.Width * 2 + this.Height * 2;
		}
	});

	/**
	 * Description: Tests whether a given point lies inside the rectangle
	 * (boundary inclusive).
	 *
	 * @this {Lynx.Rectangle}
	 * @param {Lynx.Point} <pPoint> The point to test
	 * @return {boolean} Whether or not the point is inside the rectangle.
	 */
	that.Contains = (function (pPoint) {
		// With a y-down coordinate system TopLeft holds the minimum x/y and
		// BottomRight the maximum x/y.
		return (pPoint.X >= this.Points.TopLeft.X &&
			pPoint.Y >= this.Points.TopLeft.Y &&
			pPoint.X <= this.Points.BottomRight.X &&
			pPoint.Y <= this.Points.BottomRight.Y);
	}).bind(that);

	return that;
};

Lynx.Rect = function (pX, pY, pWidth, pHeight) {
	return Lynx.Rectangle(pX, pY, pWidth, pHeight);
};
isc
bellbind/unicharadata
embed-udjson.js
393
"use strict";

const fs = require("fs");

// Inline the contents of UnicodeData.json into the raw source so that the
// generated unicharadata.js is self-contained (no runtime require of the
// JSON file).
const rawSource = fs.readFileSync("unicharadata-raw.js", "utf8");
const jsonText = fs.readFileSync("UnicodeData.json", "utf8");

// The exact require expression in the raw source that gets substituted.
const needle = `require("./UnicodeData.json")`;

// Lint suppressions for the (large, machine-generated) embedded data.
const banner = `/* eslint comma-spacing: 0, indent: 0, max-len: 0 */
`;

fs.writeFileSync("unicharadata.js", banner + rawSource.replace(needle, jsonText));
isc
simeonpp/Trip-Destination
Source/TripDestination/Services/TripDestination.Services.Data/Contracts/INewsletterServices.cs
366
namespace TripDestination.Services.Data.Contracts
{
    using System.Linq;
    using Common.Infrastructure.Models;
    using TripDestination.Data.Models;

    /// <summary>
    /// Data services for managing newsletter subscriptions.
    /// </summary>
    public interface INewsletterServices
    {
        /// <summary>
        /// Returns a queryable over all newsletter subscriptions.
        /// </summary>
        IQueryable<Newsletter> GetAll();

        /// <summary>
        /// Creates a newsletter subscription for the given e-mail address.
        /// </summary>
        /// <param name="email">Subscriber e-mail address.</param>
        /// <param name="ip">Client IP address the request originated from.</param>
        /// <param name="userAgent">Client user-agent string.</param>
        /// <returns>A response model describing the outcome of the operation.</returns>
        BaseResponseAjaxModel Create(string email, string ip, string userAgent);

        /// <summary>
        /// Deletes the subscription with the given identifier.
        /// </summary>
        /// <param name="id">Identifier of the subscription to delete.</param>
        void Delete(int id);
    }
}
isc
dileepa79/goeasy
app/whatisgoingon/whatisgoingon-request.ts
82
// Request payload describing a page of "what is going on" results.
export class WhatIsGoingOnRequest {
    // Page index to fetch — NOTE(review): zero- vs one-based is not visible
    // here; confirm against the backend API.
    pageNo: number;
    // Number of items per page.
    pageSize: number;
}
isc
io7m/jfprop
io7m-jfprop-server/src/main/java/com/io7m/jfprop/JFPAdminCommandRemoteAdd.java
2308
/* * Copyright © 2014 <code@io7m.com> http://io7m.com * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR * IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ package com.io7m.jfprop; import java.io.IOException; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.eclipse.jetty.server.Request; import com.io7m.jfunctional.Pair; import com.io7m.jlog.LogUsableType; /** * Command to add a remote. 
*/ public final class JFPAdminCommandRemoteAdd extends JFPAdminHandlerAbstract { JFPAdminCommandRemoteAdd( final JFPServerConfigType in_config, final JFPAdminDatabaseType db, final LogUsableType in_log) { super(in_config, db, in_log); } @Override public Pair<Integer, byte[]> handleAuthenticated( final String target, final Request base_request, final HttpServletRequest request, final JFPAdminDatabaseTransactionType transaction) throws JFPException, IOException { try { final Map<String, String[]> params = request.getParameterMap(); assert params != null; final JFPRemote remote = JFPRemote.fromParameters(params); final Integer id = transaction.remoteAdd(remote); return Pair.pair(HttpServletResponse.SC_OK, Integer .toString(id) .getBytes("UTF-8")); } catch (final JFPExceptionInvalidArgument e) { return Pair.pair(HttpServletResponse.SC_BAD_REQUEST, e .getMessage() .getBytes("UTF-8")); } catch (final JFPExceptionNonexistent e) { return Pair.pair(HttpServletResponse.SC_BAD_REQUEST, e .getMessage() .getBytes("UTF-8")); } } }
isc
ejhumphrey/harmonic-cnn
hcnn/driver.py
34382
"""Top-level routines, including: * extract_features * train_model * find_best_model * predict * analyze * fit_and_predict_one * fit_and_predict_cross_validation """ import boltons.fileutils import datetime import glob import json import logging import numpy as np import os import pandas as pd import shutil import sklearn.metrics import hcnn.common.config as C import hcnn.common.utils as utils import hcnn.data.cqt import hcnn.data.dataset import hcnn.train.models as models import hcnn.train.streams as streams import hcnn.evaluate.analyze import hcnn.evaluate.model_selection as MS import hcnn.evaluate.predict logger = logging.getLogger(__name__) class EarlyStoppingException(Exception): pass class StaleFeaturesError(Exception): pass class NoFeaturesException(Exception): pass def get_slicer_from_feature(feature_mode): if 'wcqt' == feature_mode: slicer = streams.wcqt_slices elif 'hcqt' == feature_mode: slicer = streams.hcqt_slices else: slicer = streams.cqt_slices return slicer class Driver(object): "Controller class for running experiments and holding state." @classmethod def available_experiments(cls, config_path): model_dir = os.path.expanduser( C.Config.load(config_path)['paths']['model_dir']) return [x for x in os.listdir(model_dir) if os.path.isdir(os.path.join(model_dir, x))] def __init__(self, config, partitions=None, model_name=None, experiment_name=None, dataset=None, load_features=True, skip_load_dataset=False, skip_features=False, skip_training=False, skip_cleaning=False ): """ Parameters ---------- config: str OR hcnn.config.Config Path to a config yaml file, or an instantiated config. The later is for testing; typical use case would be to load by string. partitions : str in ['rwc', 'philharmonia', 'uiowa'] Selects which datsaet will be used as the test set. OR if the dataset given has a 'partitions' section in the config, uses the partition file located at the key given. Can only be None when extracting features; otherwise, this value is required. 
experiment_name : str or None Name of the experiment. This is used to name the files/parameters saved. It is required for many but not all functions. model_name : str or None Use this to specify the model configuration to use. Otherwise, tries to load it from the config. dataset : hcnn.data.dataset.Dataset or None If None, tries to use the config to load the default dataset, using the features specified in feature_mode. Otherwise, uses the data given. [Intended for testing! Use the config if possible.] load_features : bool If true, attempts to load the features files from the dataset. """ if isinstance(config, str): self.config = C.Config.load(config) else: self.config = config self.experiment_name = experiment_name self.skip_features = skip_features self.skip_training = skip_training self.skip_cleaning = skip_cleaning # Initialize common paths 'n variables. self._init(model_name) if not skip_load_dataset: self.load_dataset(dataset=dataset, load_features=load_features) if partitions: self.setup_partitions(partitions) @property def selected_dataset(self): return self.config['data/selected'] @property def dataset_config(self): return self.config['data/{}'.format(self.selected_dataset)] @property def dataset_index(self): return self.dataset_config['notes_index'] @property def data_root(self): return self.dataset_config['root'] @property def feature_dir(self): return os.path.expanduser( self.config['paths/feature_dir']) @property def features_path(self): dataset_fn = os.path.basename(self.dataset_index) return os.path.join(self.feature_dir, dataset_fn) def _init(self, model_name): if model_name is not None: self.model_definition = model_name else: self.model_definition = self.config["model"] if self.model_definition: self.feature_mode = self.model_definition.split('_')[0] else: self.feature_mode = None self.max_files_per_class = self.config.get( "training/max_files_per_class", None) self.dataset = None if self.experiment_name: self._model_dir = os.path.join( 
os.path.expanduser(self.config["paths/model_dir"]), self.experiment_name) self._experiment_config_path = os.path.join( self._model_dir, self.config['experiment/config_path']) # if these don't exist, we're not actually running anything if self.model_definition and self.feature_mode: utils.create_directory(self._model_dir) @property def param_format_str(self): # Lazy instantiation for param_format_str if not hasattr(self, "_param_format_str") or \ self._param_format_str is None: # set up the param formatter. max_iterations = self.config['training/max_iterations'] params_zero_pad = int(np.ceil(np.log10(max_iterations))) param_format_str = self.config['experiment/params_format'] # insert the zero padding into the format string. self._param_format_str = param_format_str.format(params_zero_pad) return self._param_format_str def _format_params_fn(self, model_iter): "Convert the model iteration index into a params filename." return self.param_format_str.format(model_iter) def _format_predictions_fn(self, model_iter): return os.path.join( self._cv_model_dir, self.config.get('experiment/predictions_format', None).format( model_iter)) def _format_analysis_fn(self, model_iter): return os.path.join( self._cv_model_dir, self.config.get('experiment/analysis_format', None).format( model_iter)) def check_features_input(self): """Check to make sure everything's ready to run, including: * existing features """ if "cqt" not in self.dataset.to_df().columns: logger.error("No features for input data; please extract first.") return False return True def load_dataset(self, dataset=None, load_features=True): """Load the selected dataset in specified in the config file. Parameters ---------- load_features : bool If true, tries to load the features version of the dataset, else just loads the original specified version. """ # Always start by loading the dataset. if dataset: logger.info("load_dataset() - Using dataset passed as a parameter") # If it's a str, it's a path. 
if isinstance(dataset, str): self.dataset = hcnn.data.dataset.Dataset.load( dataset, data_root=self.data_root) elif isinstance(dataset, hcnn.data.dataset.Dataset): self.dataset = dataset else: logger.info(utils.colored( "load_dataset() - loading from {}".format(self.dataset_index))) self.dataset = hcnn.data.dataset.Dataset.load( self.dataset_index, data_root=self.data_root) logger.info(utils.colored("load_dataset() ... complete")) assert len(self.dataset) > 0 # If we want the features, additionally add it to the dataset. if load_features: logger.info(utils.colored("load_dataset() - extracting features.")) self.dataset = self.extract_features() def load_partition_df(self, test_partition): partition_file = self.dataset_config['partitions'][test_partition] return pd.read_csv(partition_file, index_col=0) def setup_partitions(self, test_partition): """Given the partition, setup the sets.""" # If the dataset we have selected has partitions if 'partitions' in self.dataset_config: data_df = self.dataset.to_df() self.partitions_df = self.load_partition_df(test_partition) # set the train_set, valid_set, test_set from the original dataset # using the indexes from teh partition_file. 
self.train_set = hcnn.data.dataset.Dataset( data_df.loc[( self.partitions_df['partition'] == 'train')]) self.valid_set = hcnn.data.dataset.Dataset( data_df.loc[( self.partitions_df['partition'] == 'valid')]) self.test_set = hcnn.data.dataset.Dataset( data_df.loc[( self.partitions_df['partition'] == 'test')]) assert (len(self.train_set) + len(self.valid_set) + len(self.test_set)) == len(self.dataset) else: raise ValueError( "partition files must be supplied for this dataset.") self._init_cross_validation(test_partition) def _init_cross_validation(self, test_set): self._cv_model_dir = os.path.join(self._model_dir, test_set) self._params_dir = os.path.join( self._cv_model_dir, self.config["experiment/params_dir"]) self._training_loss_path = os.path.join( self._cv_model_dir, self.config['experiment/training_loss']) if os.path.exists(self._cv_model_dir): logger.warning("Cleaning old experiment: {}".format( self._cv_model_dir)) utils.create_directory(self._cv_model_dir, # aka if DO the clean, recreate. 
recreate=(not self.skip_cleaning)) utils.create_directory(self._params_dir) def print_stats(self): dataset_df = self.dataset.to_df() datasets = ["rwc", "uiowa", "philharmonia"] def print_datasetcount(dataset): print("{:<20} {:<30}".format( "{} count".format(dataset), len(dataset_df[dataset_df["dataset"] == dataset]))) for dataset in datasets: print_datasetcount(dataset) def print_dataset_instcount(df, instrument): inst_filter = df[df["instrument"] == instrument] print("{:<20} {:<30} {:<30} {:<30}".format( "{} count".format(instrument), len(inst_filter[inst_filter["dataset"] == "rwc"]), len(inst_filter[inst_filter["dataset"] == "uiowa"]), len(inst_filter[inst_filter["dataset"] == "philharmonia"]))) classmap = hcnn.common.labels.InstrumentClassMap() print("---------------------------") print("Datasets-Instrument count / dataset") print("---------------------------") print(utils.colored("{:<20} {:<30} {:<30} {:<30}".format( "item", "rwc", "uiowa", "philharmonia"))) for inst in sorted(dataset_df["instrument"].unique()): if inst in classmap.allnames: print_dataset_instcount(dataset_df, inst) @property def feature_ds_path(self): return os.path.join( self.feature_dir, os.path.basename(self.dataset_index)) def load_existing_features(self, as_dataset=True): if os.path.exists(self.feature_ds_path): if as_dataset: return hcnn.data.dataset.Dataset.load(self.feature_ds_path) else: return pd.read_csv(self.feature_ds_path, index_col=0) def extract_features(self): """Extract CQTs from all files collected in collect.""" if self.skip_features: logger.info(utils.colored("--skip_features selected; " "loading from the constructed dataframe instead.")) updated_ds = self.load_existing_features() else: logger.info(utils.colored("Extracting features.")) updated_ds = hcnn.data.cqt.cqt_from_dataset( self.dataset, self.feature_dir, **self.config["features/cqt"]) if updated_ds is not None and \ len(updated_ds) == len(self.dataset): updated_ds.save(self.feature_ds_path) return updated_ds def 
train_model(self): """ Train a model, writing intermediate params to disk. Trains for max_iterations or max_time, whichever is fewer. [Specified in the config.] """ if self.skip_training: logger.info(utils.colored("--skip_training specified - skipping")) return True assert hasattr(self, 'train_set') and hasattr(self, 'valid_set') logger.info("Starting training for experiment: {}".format( self.experiment_name)) # Save the config we used in the model directory, just in case. self.config.save(self._experiment_config_path) # Duration parameters max_iterations = self.config['training/max_iterations'] max_time = self.config['training/max_time'] # in seconds # Collect various necessary parameters t_len = self.config['training/t_len'] batch_size = self.config['training/batch_size'] n_targets = self.config['training/n_targets'] logger.debug("Hyperparams:\nt_len: {}\nbatch_size: {}\n" "n_targets: {}\nmax_iterations: {}\nmax_time: {}s or {}h" .format(t_len, batch_size, n_targets, max_iterations, max_time, (max_time / 60. 
/ 60.))) slicer = get_slicer_from_feature(self.feature_mode) # Set up our streamer logger.info("[{}] Setting up streamer".format(self.experiment_name)) slice_logger = utils.SliceLogger() streamer = streams.InstrumentStreamer( self.train_set.to_df(), slicer, slicer_kwargs={'slice_logger': slice_logger}, t_len=t_len, batch_size=batch_size) # create our model logger.info("[{}] Setting up model: {}".format(self.experiment_name, self.model_definition)) network_def = getattr(models, self.model_definition)(t_len, n_targets) model = models.NetworkManager(network_def) iter_print_freq = self.config.get( 'training/iteration_print_frequency', None) iter_write_freq = self.config.get( 'training/iteration_write_frequency', None) timers = utils.TimerHolder() iter_count = 0 train_stats = pd.DataFrame(columns=['timestamp', 'batch_train_dur', 'iteration', 'loss']) min_train_loss = np.inf timers.start("train") logger.info("[{}] Beginning training loop at {}".format( self.experiment_name, timers.get("train"))) try: timers.start(("stream", iter_count)) for batch in streamer: timers.end(("stream", iter_count)) timers.start(("batch_train", iter_count)) loss = model.train(batch) timers.end(("batch_train", iter_count)) row = dict(timestamp=timers.get_end( ("batch_train", iter_count)), batch_train_dur=timers.get( ("batch_train", iter_count)), iteration=iter_count, loss=loss) train_stats.loc[len(train_stats)] = row # Time Logging logger.debug("[Iter timing] iter: {} | loss: {} | " "stream: {} | train: {}".format( iter_count, loss, timers.get(("stream", iter_count)), timers.get(("batch_train", iter_count)))) # Print status if iter_print_freq and (iter_count % iter_print_freq == 0): mean_train_loss = \ train_stats["loss"][-iter_print_freq:].mean() output_str = ("Iteration: {} | Mean_Train_loss: {}" .format(iter_count, utils.conditional_colored( mean_train_loss, min_train_loss))) # On some small probability, do a randomly sampled # validation so we can see approximately how we're doing # on the 
validation set. if np.random.random() < .3: timers.start(("sampled_validation", iter_count)) valid_loss = self.sampled_validation_loss( model, slicer, t_len) output_str += " | Sampled_Valid_loss: {:0.4f}".format( valid_loss) timers.end(("sampled_validation", iter_count)) output_str += " | Val_time: {:0.2f}s".format( timers.get(( "sampled_validation", iter_count)).total_seconds()) logger.info(output_str) min_train_loss = min(mean_train_loss, min_train_loss) # Print the mean times for the last n frames logger.debug("Mean stream time: {}, Mean train time: {}" .format( timers.mean( "stream", iter_count - iter_print_freq, iter_count), timers.mean( "batch_train", iter_count - iter_print_freq, iter_count))) # save model, maybe if iter_write_freq and (iter_count % iter_write_freq == 0): save_path = os.path.join( self._params_dir, self.param_format_str.format(iter_count)) logger.debug("Writing params to {}".format(save_path)) model.save(save_path) slice_log = os.path.join(self._cv_model_dir, "slice_log.csv") slice_logger.save(slice_log) if datetime.datetime.now() > \ (timers.get("train") + datetime.timedelta( seconds=max_time)): raise EarlyStoppingException("Max Time reached") iter_count += 1 timers.start(("stream", iter_count)) # Stopping conditions if (iter_count >= max_iterations): raise EarlyStoppingException("Max Iterations Reached") except KeyboardInterrupt: logger.warn(utils.colored("Training Cancelled", "red")) print("User cancelled training at epoch:", iter_count) except EarlyStoppingException as e: logger.warn( utils.colored("Training Stopped for {}".format(e), "red")) print("Training halted for: ", e) timers.end("train") # Print final training loss logger.info("Total iterations: {}".format(iter_count)) logger.info("Trained for {}".format(timers.get("train"))) logger.info("Final training loss: {}".format( train_stats["loss"].iloc[-1])) # Make sure to save the final iteration's model. 
save_path = os.path.join( self._params_dir, self.param_format_str.format(iter_count)) model.save(save_path) logger.info("Completed training for experiment: {}".format( self.experiment_name)) # Save training loss logger.info("Writing training stats to {}".format( self._training_loss_path)) train_stats.to_pickle( self._training_loss_path) # We need these files for models election, so make sure they exist return os.path.exists(self._training_loss_path) def sampled_validation_loss(self, model, slicer, t_len): sample_valuation_set = self.valid_set.to_df().sample(500) validation_df = hcnn.evaluate.predict.predict_many( sample_valuation_set, model, slicer, t_len, show_progress=False) return validation_df['loss'].mean() def find_best_model(self): """Perform model selection on the validation set with a binary search for minimum validation loss. (Bayesean optimization might be another approach?) Parameters ---------- validation_df : pd.DataFrame Name of the held out dataset (used to specify the valid file) Returns ------- results_df : pd.DataFrame DataFrame containing the resulting losses. """ logger.info("Finding best model for {}".format( utils.colored(self.experiment_name, "magenta"))) # Commenting out skipping a previous model selection for exisitng file. 
# if not self.check_features_input(): # logger.error("find_best_model features missing invalid.") # return False validation_df = self.valid_set.to_df() # load all necessary config parameters from the ORIGINAL config original_config = C.Config.load(self._experiment_config_path) validation_error_file = os.path.join( self._cv_model_dir, original_config['experiment/validation_loss']) slicer = get_slicer_from_feature(self.feature_mode) t_len = original_config['training/t_len'] # if not os.path.exists(validation_error_file): model_files = glob.glob( os.path.join(self._params_dir, "params*.npz")) if len(model_files) > 0: result_df, best_model = MS.CompleteLinearWeightedF1Search( model_files, validation_df, slicer, t_len, show_progress=True)() result_df.to_pickle(validation_error_file) best_path = os.path.join(self._params_dir, original_config['experiment/best_params']) shutil.copyfile(best_model['model_file'], best_path) else: logger.warn(utils.colored( "No param files exist yet; did you skip training without " "running this model yet?", "red")) result_df = pd.DataFrame() # else: # logger.info("Model Search already done; printing previous results") # result_df = pd.read_pickle(validation_error_file) # # make sure model_iteration is an int so sorting makes sense. # result_df["model_iteration"].apply(int) # logger.info("\n{}".format( # result_df.sort_values("model_iteration"))) return result_df def select_best_iteration(self, model_selection_df): """Given the model selection df, return the iteration which produced the best model. Returns ------- best_model : int The iteration number which produced the best model. """ best = model_selection_df.loc[model_selection_df["mean_acc"].argmax()] return best["model_iteration"] def predict(self, model_iter): """Generates a prediction for *all* files, and writes them to disk as a dataframe. 
If features_df_override, replace the features_df with this dataframe (for testing) """ if not self.check_features_input(): logger.error("predict - features missing.") return False logger.info("Evaluating experient {} with params from iter {}".format( utils.colored(self.experiment_name, "magenta"), utils.colored(model_iter, "cyan"))) selected_param_file = self._format_params_fn(model_iter) original_config = C.Config.load(self._experiment_config_path) params_file = os.path.join(self._params_dir, selected_param_file) slicer = get_slicer_from_feature(self.feature_mode) logger.info("Deserializing Network & Params...") model = models.NetworkManager.deserialize_npz(params_file) dataset_df = self.dataset.to_df() logger.debug("Predicting across {} files.".format( len(dataset_df['cqt'].nonzero()[0]))) predictions_df_path = self._format_predictions_fn(model_iter) t_len = original_config['training/t_len'] logger.info("Running evaluation on all files...") predictions_df = hcnn.evaluate.predict.predict_many( dataset_df, model, slicer, t_len, show_progress=True) predictions_df.to_pickle(predictions_df_path) return predictions_df def analyze_from_predictions(self, model_iter, test_set): """Loads predictions from a file before calling analyze.""" original_config = C.Config.load(self._experiment_config_path) analyzer = hcnn.evaluate.analyze.PredictionAnalyzer.from_config( original_config, self.experiment_name, model_iter, test_set) analysis_path = self._format_analysis_fn(model_iter) logger.info("Saving analysis to:".format(analysis_path)) analyzer.save(analysis_path) return os.path.exists(analysis_path) def analyze(self, predictions, model_iter): logger.info("Evaluating experient {} with params from {}".format( utils.colored(self.experiment_name, "magenta"), utils.colored(model_iter, "cyan"))) analyzer = hcnn.evaluate.analyze.PredictionAnalyzer(predictions) analysis_path = self._format_analysis_fn(model_iter) logger.info("Saving analysis to:".format(analysis_path)) 
analyzer.save(analysis_path) return os.path.exists(analysis_path) def fit_and_predict_one(self, test_set, skip_training=False): """On a particular model, with a given set * train * model_selection * predict * analyze * Write all outputs to a file Parameters ---------- skip_training : boolean For situations where you need to re-run model selection and prediction, skip up to model selection. Returns ------- success : true if succeeded. """ self.setup_partitions(test_set) logger.info("Beginning fit_and_predict_one:{}".format(test_set)) result = False # Step 0: initialize the data for the current splits. # self.setup_data_splits(test_set) # Step 1: train result = self.train_model() # Step 2: model selection if result: results_df = self.find_best_model() if results_df.empty: return False best_iter = self.select_best_iteration(results_df) # Step 3: predictions predictions = self.predict(best_iter) # Step 4: analysis if result: self.analyze(predictions, best_iter) else: logger.error("Problem predicting on {}".format(test_set)) else: logger.error("Problem with training on {}".format(test_set)) logger.info("Completed fit_and_predict_one:{}. Result={}" .format(test_set, result)) return result def fit_and_predict_cross_validation(self, skip_training=False): """Master loop for running cross validation across all datasets. Parameters ---------- skip_training : boolean For situations where you need to re-run model selection and prediction, skip up to model selection. Returns ------- success : bool True if succeeded end-to-end, False if anything failed. """ logger.info("Beginning fit_and_predict_cross_validation") results = [] for test_set in ["rwc", "uiowa", "philharmonia"]: results.append(self.fit_and_predict_one(test_set, skip_training=False)) final_result = all(results) logger.info("Completed fit_and_predict_cross_validation. 
Result={}" .format(final_result)) return final_result def validate_data(self): return True def collect_results(self, result_dir): """ Moves the following files to result_dir/experiment_name: - [hold_out_set]/training_loss.pkl - [hold_out_set]/validation_loss.pkl - [hold_out_set]/model_[param_number]_predictions.pkl Parameters ---------- result_dir : str The root destination results directory. """ if not self.experiment_name: logger.error("No valid experiment_name; can't collect_results.") return False # Make sure result_dir/experiment name exists results_output_dir = os.path.join(result_dir, self.experiment_name) boltons.fileutils.mkdir_p(results_output_dir) # For each hold_out_set # TODO: find a master place - config file maybe? to make it # so we stop re-writing these. experiment_results = { "experiment": self.experiment_name } for dataset in ['rwc', 'uiowa', 'philharmonia']: source_dir = os.path.join(self._model_dir, dataset) destination_dir = os.path.join(results_output_dir, dataset) boltons.fileutils.mkdir_p(destination_dir) training_loss_fn = self.config['experiment/training_loss'] training_loss_source = os.path.join(source_dir, training_loss_fn) training_loss_dest = os.path.join(destination_dir, training_loss_fn) validation_loss_fn = self.config['experiment/validation_loss'] validation_loss_source = os.path.join(source_dir, validation_loss_fn) validation_loss_dest = os.path.join(destination_dir, validation_loss_fn) # Copy the training and validation loss if os.path.isfile(training_loss_source): shutil.copyfile(training_loss_source, training_loss_dest) if os.path.isfile(validation_loss_source): shutil.copyfile(validation_loss_source, validation_loss_dest) # Now, the prediction file. But we have to make sure it # matches the format! 
prediction_glob = os.path.join(source_dir, "model_*_predictions.pkl") prediction_files = glob.glob(prediction_glob) prediction_file = (prediction_files[0] if len(prediction_files) > 0 else None) if prediction_file: pred_destination = os.path.join(destination_dir, os.path.basename(prediction_file)) prediction_df = pd.read_pickle(prediction_file).dropna() prediction_df = pd.DataFrame([ x for index, x in prediction_df.iterrows() if dataset in index]) # To make this easy, we drop the nan's here. # Possibly this is going to bit me later. y_true = (prediction_df['y_true'] if 'y_true' in prediction_df else prediction_df['target']).astype(np.int) y_pred = (prediction_df['y_pred'] if 'y_pred' in prediction_df else prediction_df['vote']).astype(np.int) new_prediction_df = pd.concat([y_pred, y_true], axis=1, keys=['y_pred', 'y_true']) new_prediction_df.to_pickle(pred_destination) experiment_results[dataset] = { 'prediction_file': pred_destination, 'mean_accuracy': float(sklearn.metrics.accuracy_score( y_true, y_pred)), 'mean_precision': float(sklearn.metrics.precision_score( y_true, y_pred, average='macro')), # weighted? 'mean_recall': float(sklearn.metrics.recall_score( y_true, y_pred, average='macro')), # weighted? 'mean_f1': float(sklearn.metrics.f1_score( y_true, y_pred, average='macro')), # weighted? 'class_precision': sklearn.metrics.precision_score( y_true, y_pred, average=None).tolist(), # weighted? 'class_recall': sklearn.metrics.recall_score( y_true, y_pred, average=None).tolist(), # weighted? 'class_f1': sklearn.metrics.f1_score( y_true, y_pred, average=None).tolist(), # weighted? 'sample_weight': np.array( new_prediction_df['y_true'].value_counts()).tolist() } experiment_results_file = os.path.join( results_output_dir, "experiment_results.json") with open(experiment_results_file, 'w') as fh: json.dump(experiment_results, fh, indent=2) return True
isc
mashavorob/lfds
perftest/queues/queuetest.cpp
1548
/*
 * queuetest.cpp
 *
 * Created on: Jun 2, 2015
 * Author: masha
 */

// Bandwidth performance tests for the xtomic queue variants.  Each queue
// flavour is wired into the perf-test framework by a static Registrar
// instance, so registration happens during static initialization (before
// main() runs).

#include <xtomic/queue.hpp>
#include <xtomic/aux/inttypes.hpp>

#include <vector>

#include "testfactory.hpp"
#include "queuetest.hpp"
#include "stdqueue.hpp"

namespace xtomic
{
namespace perftest
{
namespace queues
{

// Payload type pushed through every queue under test.
typedef xtomic::uint64_t item_type;

// Fixed-size, single-producer / single-consumer queue (wait-free variant).
typedef xtomic::queue<item_type, xtomic::Queue::FixedSize,
        xtomic::Queue::OneProducer, xtomic::Queue::OneConsumer> wait_free_queue_type;

// Fixed-size, many-producers / single-consumer queue (lock-free variant).
typedef xtomic::queue<item_type, xtomic::Queue::FixedSize,
        xtomic::Queue::ManyProducers, xtomic::Queue::OneConsumer> lock_free_one_consumer_queue_type;

// Fixed-size, many-producers / many-consumers queue (lock-free variant).
typedef xtomic::queue<item_type, xtomic::Queue::FixedSize,
        xtomic::Queue::ManyProducers, xtomic::Queue::ManyConsumers> lock_free_many_consumers_queue_type;

// std::queue adapter used as the baseline for comparison.
typedef adapter::stdqueue<item_type> stl_queue_type;

// Registers a bandwidth test ("MItems/sec") for the given queue type with
// the perf-test factory.  The registration side effect lives entirely in the
// m_factory member's constructor.
template<typename Queue>
class Registrar
{
private:
    typedef Queue queue_type;
    typedef BandwithTester<queue_type> tester_type;
    typedef PerfTestFactoryImpl<tester_type> factory_type;
public:
    Registrar(const char* queue_name) :
            m_factory("queues", queue_name, "bandwith", "MItems/sec")
    {
    }
private:
    factory_type m_factory;
};

// One registrar per queue flavour under test.
static Registrar<wait_free_queue_type> s_wfq("wait free queue");
static Registrar<lock_free_one_consumer_queue_type> s_lfscq("lock free single consumer queue");
static Registrar<lock_free_many_consumers_queue_type> s_lfmcq("lock free many consumers queue");
static Registrar<stl_queue_type> s_stdq("std::queue");

}
}
}
isc
vhermecz/python-disksorted
disksorted.py
5753
# -*- coding: utf-8 -*-
"""
Simple helper for sorting when your ordinary memory wont cut it.
"""
import itertools
import sys
import tempfile
import operator
import json
import marshal
import functools
try:
    import cPickle as pickle
except ImportError:  # FIX: bare except masked real errors; Python 3 has no cPickle
    import pickle
import heapq

__author__ = 'Vajk Hermecz'
__email__ = 'vhermecz@gmail.com'
__version__ = '0.9'
__all__ = ['disksorted', 'diskiterator', 'merge', 'SERIALIZER_PICKLE', 'SERIALIZER_JSON',
           'SERIALIZER_MARSHAL']


def chunks(iterable, size):
    """
    Split an iterator into chunks of at most `size` items.
    @see: http://stackoverflow.com/a/434314/1442987
    """
    it = iter(iterable)
    chunk = tuple(itertools.islice(it, size))
    while chunk:
        yield chunk
        chunk = tuple(itertools.islice(it, size))


def key_to_reverse_order(key_fn):
    # FIXME: applying twice should remove
    """Wrap a key function so every rich comparison is inverted."""
    class K(object):
        __slots__ = ['obj']

        def __init__(self, obj, *args):
            self.obj = key_fn(obj)

        def __lt__(self, other):
            return operator.gt(self.obj, other.obj)

        def __gt__(self, other):
            return operator.lt(self.obj, other.obj)

        def __eq__(self, other):
            return operator.eq(self.obj, other.obj)

        def __le__(self, other):
            return operator.ge(self.obj, other.obj)

        def __ge__(self, other):
            return operator.le(self.obj, other.obj)

        def __ne__(self, other):
            return operator.ne(self.obj, other.obj)

        def __hash__(self):
            raise TypeError('hash not implemented')
    return K


MERGE_SENTINEL = object()


def merge(chunks, key=None, reverse=False):
    '''
    Merge sorted iterators together

    :param chunks: to be merged.
    :param key: specifies a function of one argument that is used to extract a comparison key
        from each list element.
    :param reverse: is a boolean value. If set to True, then the list elements are sorted as if
        each comparison were reversed.
    '''
    # NOTE: consider using heapq.merge
    key = key or (lambda x: x)
    if reverse:
        key = key_to_reverse_order(key)
    # Seed the heap with one sentinel per stream.  The priority tuple (0,)
    # sorts before any real priority (1, key), and the unique stream index
    # breaks ties so the records themselves are never compared by heapq.
    heap = [(((0, ), idx), MERGE_SENTINEL) for idx in range(len(chunks))]
    heapq.heapify(heap)
    while heap:
        (_, stream_idx), record = heapq.heappop(heap)
        # FIX: identity test instead of `!=` — a record with a permissive or
        # raising __eq__ must not be confused with the sentinel object.
        if record is not MERGE_SENTINEL:
            yield record
        try:
            record = next(chunks[stream_idx])
            heapq.heappush(heap, (((1, key(record)), stream_idx), record))
        except StopIteration:
            pass


def _json_dump(payload, fp):
    json.dump(payload, fp)
    fp.write("\n")


def _json_load(fp):
    return json.loads(next(fp))


# Each serializer is a (dump, load, filemode) triple.
SERIALIZER_PICKLE = (functools.partial(pickle.dump, protocol=-1), pickle.load, "w+b")
SERIALIZER_JSON = (_json_dump, _json_load, "w+t")
SERIALIZER_MARSHAL = (marshal.dump, marshal.load, "w+b")


def diskiterator(iterable, fp=None, serializer=SERIALIZER_PICKLE):
    '''
    Cache iterator to disk

    :param iterable: to be cached
    :param fp: is the file-object to be used. (tempfile to be used if omitted.)
    :param serializer: defines the methods to be used for transfering data between disk and
        memory.
    :type fp: file|NoneType
    :type serializer: (function, function)
    '''
    dump, load, filemode = serializer

    def chunk_writer(chunk, fp=None):
        # Write the items in sublists of 128 and terminate with an empty
        # sublist so the reader knows where the stream ends.
        fp = fp or tempfile.TemporaryFile(mode=filemode)
        for subchunk in chunks(chunk, 128):
            dump(list(subchunk), fp)
        dump(list(), fp)
        fp.seek(0)
        return fp

    def chunk_reader(fp):
        try:
            while True:
                sublist = load(fp)
                if not sublist:
                    break
                for item in sublist:
                    yield item
        finally:
            # Best-effort close; the temp file may already be gone.
            try:
                fp.close()
            except Exception:
                pass

    return chunk_reader(chunk_writer(iterable, fp=fp))


def disksorted(iterable, key=None, reverse=False, chunksize=sys.maxsize,
               serializer=SERIALIZER_PICKLE):
    '''
    Sorting function for collections not fitting into memory
    NOTE: Uses temporary files

    :param iterable: of items to be sorted
    :param key: specifies a function of one argument that is used to extract a comparison key
        from each list element.
    :param reverse: is a boolean value. If set to True, then the list elements are sorted as if
        each comparison were reversed.
    :param chunksize: specifies the largest number of items to be held in memory at once.
    :param serializer: defines the methods to be used for transfering data between disk and
        memory.
    :type key: function|NoneType
    :type reverse: bool
    :type chunksize: int|NoneType
    :type serializer: (function, function)
    '''
    if chunksize < 1:
        raise ValueError("chunksize to be positive integer")
    single = True
    pieces = []
    chunk = []
    for chunk in chunks(iterable, chunksize):
        chunk = sorted(chunk, key=key, reverse=reverse)
        # A full chunk means the input may not fit in memory: switch to the
        # disk-backed merge path for this and all further chunks.
        if len(chunk) == chunksize:
            single = False
        if not single:
            pieces.append(diskiterator(chunk, serializer=serializer))
    if not single:
        chunk = merge(pieces, key, reverse)
    for item in chunk:
        yield item


if sys.version_info[0] == 2:
    # Python 2 shim: additionally accept the legacy `cmp` argument.
    _disksorted = disksorted

    def disksorted(iterable, cmp=None, key=None, reverse=False, chunksize=sys.maxint,
                   serializer=SERIALIZER_PICKLE):
        if cmp:
            key = functools.cmp_to_key(cmp)
        return _disksorted(iterable, key=key, reverse=reverse, chunksize=chunksize,
                           serializer=serializer)
    disksorted.__doc__ = _disksorted.__doc__
isc
damienmortini/dlib
node_modules/@gltf-transform/core/src/core.ts
990
/** @module core */

// Barrel file: re-exports the public API of @gltf-transform/core.

// Document container, extension mechanism, and JSON interchange form.
export { Document, Transform, TransformContext } from './document';
export { JSONDocument } from './json-document';
export { Extension } from './extension';

// Property-graph node types (the glTF object model).
export {
	Accessor,
	Animation,
	AnimationChannel,
	AnimationSampler,
	Buffer,
	Camera,
	ExtensionProperty,
	Property,
	Material,
	Mesh,
	Node,
	Primitive,
	PrimitiveTarget,
	Root,
	Scene,
	Skin,
	Texture,
	TextureInfo,
	TextureLink,
	AttributeLink,
	IndexLink,
	COPY_IDENTITY,
} from './properties';

// Underlying dependency graph primitives.
export { Graph, GraphChild, GraphChildList, Link } from './graph/';

// I/O backends and read/write contexts.
export { PlatformIO, NodeIO, WebIO, ReaderContext, WriterContext } from './io/';

// Utility helpers.
export {
	BufferUtils,
	ColorUtils,
	FileUtils,
	ImageUtils,
	ImageUtilsFormat,
	Logger,
	MathUtils,
	bounds,
	uuid,
} from './utils/';

// Shared constants and type aliases.
export {
	TypedArray,
	TypedArrayConstructor,
	PropertyType,
	Format,
	TextureChannel,
	VertexLayout,
	vec2,
	vec3,
	vec4,
	mat3,
	mat4,
	bbox,
	GLB_BUFFER,
	VERSION,
} from './constants';

// Raw glTF JSON schema types.
export { GLTF } from './types/gltf';
isc
klpdotorg/tada-frontend
app/components/Institution/InsufficientPermissionMsg.js
379
import React from 'react'; const InsufficientPermissionMsg = () => { return ( <div> <div className="alert alert-danger"> <i className="fa fa-lock fa-lg" aria-hidden="true" /> Insufficient Privileges. Please contact administrator for permissions to modify the institution. </div> </div> ); }; export { InsufficientPermissionMsg };
isc
newbreedofgeek/react-stepzilla
src/examples/redux/reducer.js
390
// Redux reducer for the stepzilla example.  State shape:
//   say        – greeting string shown in the UI
//   activeStep – index of the wizard step currently displayed
const INITIAL_STATE = { say: '', activeStep: 0 };

const reducer = (state = INITIAL_STATE, action) => {
  if (action.type === 'HELLO_REDUX') {
    return { ...state, say: 'Hello World Redux' };
  }
  if (action.type === 'BYE_REDUX') {
    return { ...state, say: '' };
  }
  if (action.type === 'UPDATE_ACTIVE_STEP') {
    return { ...state, activeStep: action.payload };
  }
  return state;
};

export default reducer;
isc
io7m/r2
com.io7m.r2.rendering.translucent.api/src/main/java/com/io7m/r2/rendering/translucent/api/R2TranslucentBatchedType.java
2292
/*
 * Copyright © 2016 <code@io7m.com> http://io7m.com
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
 * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
 * IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

package com.io7m.r2.rendering.translucent.api;

import com.io7m.jcanephora.renderstate.JCGLBlendState;
import com.io7m.jcanephora.renderstate.JCGLCullingState;
import com.io7m.jfunctional.PartialBiFunctionType;
import com.io7m.r2.annotations.R2ImmutableStyleType;
import com.io7m.r2.instances.R2InstanceBatchedType;
import com.io7m.r2.shaders.translucent.api.R2ShaderTranslucentInstanceBatchedType;
import org.immutables.value.Value;

import java.util.Optional;

/**
 * The type of batched instance translucents.
 *
 * @param <M> The type of shader parameters
 */

@R2ImmutableStyleType
@Value.Immutable
public interface R2TranslucentBatchedType<M> extends R2TranslucentType<M>
{
  /**
   * Visitor dispatch: a batched translucent always selects the
   * {@code on_batched} case, passing itself to the function.
   */
  @Override
  default <A, B, E extends Throwable> B matchTranslucent(
    final A context,
    final PartialBiFunctionType<A, R2TranslucentSingleType<M>, B, E> on_single,
    final PartialBiFunctionType<A, R2TranslucentBatchedType<M>, B, E> on_batched,
    final PartialBiFunctionType<A, R2TranslucentBillboardedType<M>, B, E> on_billboarded)
    throws E
  {
    return on_batched.call(context, this);
  }

  /**
   * @return The instance to be rendered
   */

  @Value.Parameter
  R2InstanceBatchedType instance();

  /**
   * @return The shader
   */

  @Value.Parameter
  R2ShaderTranslucentInstanceBatchedType<M> shader();

  @Override
  @Value.Parameter
  M shaderParameters();

  @Override
  @Value.Parameter
  Optional<JCGLBlendState> blending();

  @Override
  @Value.Parameter
  JCGLCullingState culling();
}
isc
mortenae/webgl_bsp_thing
index.js
1186
// Global error hook: surface uncaught errors in the document title so they
// are visible even without devtools.
window.onerror = function(message, source, line) {
  //alert(line + ": " + message);
  document.title = message;
}

// Seconds elapsed since the first call (the start time is recorded lazily
// on a window-global).
function getTime() {
  if(!window.start) window.start = new Date().getTime();
  return (new Date().getTime() - window.start) / 1000.0;
}

// Overlay element hosting one progress line per in-flight download.
var overlay = document.createElement("div");
overlay.style.zIndex = "1000";

var downloads_counter = 0;
var downloads = {};

// Fetch `uri` via XHR and invoke `callback(response)` when it completes.
// *.bsp / *.bin resources are requested as ArrayBuffers; everything else is
// delivered as text.  Progress is rendered into the shared overlay.
function download(uri, callback) {
  var id = downloads_counter++;
  var entry = {};
  entry.element = document.createElement("section");
  overlay.appendChild(entry.element);
  downloads[id] = entry;

  var request = new XMLHttpRequest();
  request.open("GET", uri, true);
  if((/\.(bsp|bin)$/i).test(uri))
    request.responseType = "arraybuffer";
  request.onload = function(event) {
    overlay.removeChild(downloads[id].element);
    // FIX: was `downloads[id] = undefined`, which kept the key alive in the
    // map forever; delete removes the entry entirely.
    delete downloads[id];
    // NOTE(review): onload also fires for HTTP error statuses (e.g. 404),
    // so callers currently receive the error body as if it were the
    // resource.  A request.status check would change behavior, so it is
    // only flagged here.
    callback(request.response);
  }
  request.onprogress = function(event) {
    if(event.lengthComputable) {
      downloads[id].element.innerHTML = "downloading \"" + uri + "\" " +
          Math.round(event.loaded * 100 / event.total) + "%";
    }
  }
  request.send(null);
}

// Attach the overlay once the DOM is ready.
document.addEventListener("DOMContentLoaded", function() {
  document.body.appendChild(overlay);
}, false);
isc
acdenisSK/serenity
src/utils/argument_convert/channel.rs
7132
use super::ArgumentConvert;
use crate::{model::prelude::*, prelude::*};

/// Error that can be returned from [`Channel::convert`].
#[non_exhaustive]
#[derive(Debug)]
pub enum ChannelParseError {
    /// When channel retrieval via HTTP failed
    Http(SerenityError),
    /// The provided channel string failed to parse, or the parsed result cannot be found in the
    /// cache.
    NotFoundOrMalformed,
}

impl std::error::Error for ChannelParseError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            Self::Http(e) => Some(e),
            Self::NotFoundOrMalformed => None,
        }
    }
}

impl std::fmt::Display for ChannelParseError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Http(_) => f.write_str("Failed to request channel via HTTP"),
            Self::NotFoundOrMalformed => f.write_str("Channel not found or unknown format"),
        }
    }
}

// True when `channel` is a guild channel or category owned by `guild`.
// Private (DM) channels never belong to a guild.
fn channel_belongs_to_guild(channel: &Channel, guild: GuildId) -> bool {
    match channel {
        Channel::Guild(channel) => channel.guild_id == guild,
        Channel::Category(channel) => channel.guild_id == guild,
        Channel::Private(_channel) => false,
    }
}

// Resolve `s` to a channel: first as a raw numeric ID or a `<#id>` mention
// (fetched via HTTP), then as a case-insensitive name search over the cached
// guild channels.
async fn lookup_channel_global(ctx: &Context, s: &str) -> Result<Channel, ChannelParseError> {
    if let Some(channel_id) = s.parse::<u64>().ok().or_else(|| crate::utils::parse_channel(s)) {
        return ChannelId(channel_id).to_channel(ctx).await.map_err(ChannelParseError::Http);
    }

    let channels = ctx.cache.channels.read().await;
    if let Some(channel) =
        channels.values().find(|channel| channel.name.eq_ignore_ascii_case(s)).cloned()
    {
        return Ok(Channel::Guild(channel));
    }

    Err(ChannelParseError::NotFoundOrMalformed)
}

/// Look up a Channel by a string case-insensitively.
///
/// Lookups are done via the local guild. If in DMs, the global cache is used instead.
///
/// The cache feature needs to be enabled.
///
/// The lookup strategy is as follows (in order):
/// 1. Lookup by ID.
/// 2. [Lookup by mention](`crate::utils::parse_channel`).
/// 3. Lookup by name.
#[cfg(feature = "cache")]
#[async_trait::async_trait]
impl ArgumentConvert for Channel {
    type Err = ChannelParseError;

    async fn convert(
        ctx: &Context,
        guild_id: Option<GuildId>,
        _channel_id: Option<ChannelId>,
        s: &str,
    ) -> Result<Self, Self::Err> {
        let channel = lookup_channel_global(ctx, s).await?;

        // Don't yield for other guilds' channels
        if let Some(guild_id) = guild_id {
            if !channel_belongs_to_guild(&channel, guild_id) {
                return Err(ChannelParseError::NotFoundOrMalformed);
            }
        };

        Ok(channel)
    }
}

/// Error that can be returned from [`GuildChannel::convert`].
#[non_exhaustive]
#[derive(Debug)]
pub enum GuildChannelParseError {
    /// When channel retrieval via HTTP failed
    Http(SerenityError),
    /// The provided channel string failed to parse, or the parsed result cannot be found in the
    /// cache.
    NotFoundOrMalformed,
    /// When the referenced channel is not a guild channel
    NotAGuildChannel,
}

impl std::error::Error for GuildChannelParseError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            Self::Http(e) => Some(e),
            Self::NotFoundOrMalformed => None,
            Self::NotAGuildChannel => None,
        }
    }
}

impl std::fmt::Display for GuildChannelParseError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Http(_) => f.write_str("Failed to request channel via HTTP"),
            Self::NotFoundOrMalformed => f.write_str("Channel not found or unknown format"),
            Self::NotAGuildChannel => f.write_str("Channel is not a guild channel"),
        }
    }
}

/// Look up a GuildChannel by a string case-insensitively.
///
/// Lookup is done by the global cache, hence the cache feature needs to be enabled.
///
/// For more information, see the ArgumentConvert implementation for [`Channel`]
#[cfg(feature = "cache")]
#[async_trait::async_trait]
impl ArgumentConvert for GuildChannel {
    type Err = GuildChannelParseError;

    // Delegates to Channel::convert and narrows the result to the Guild
    // variant, remapping the error types.
    async fn convert(
        ctx: &Context,
        guild_id: Option<GuildId>,
        channel_id: Option<ChannelId>,
        s: &str,
    ) -> Result<Self, Self::Err> {
        match Channel::convert(ctx, guild_id, channel_id, s).await {
            Ok(Channel::Guild(channel)) => Ok(channel),
            Ok(_) => Err(GuildChannelParseError::NotAGuildChannel),
            Err(ChannelParseError::Http(e)) => Err(GuildChannelParseError::Http(e)),
            Err(ChannelParseError::NotFoundOrMalformed) => {
                Err(GuildChannelParseError::NotFoundOrMalformed)
            },
        }
    }
}

/// Error that can be returned from [`ChannelCategory::convert`].
#[non_exhaustive]
#[derive(Debug)]
pub enum ChannelCategoryParseError {
    /// When channel retrieval via HTTP failed
    Http(SerenityError),
    /// The provided channel string failed to parse, or the parsed result cannot be found in the
    /// cache.
    NotFoundOrMalformed,
    /// When the referenced channel is not a channel category
    NotAChannelCategory,
}

impl std::error::Error for ChannelCategoryParseError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            Self::Http(e) => Some(e),
            Self::NotFoundOrMalformed => None,
            Self::NotAChannelCategory => None,
        }
    }
}

impl std::fmt::Display for ChannelCategoryParseError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Http(_) => f.write_str("Failed to request channel via HTTP"),
            Self::NotFoundOrMalformed => f.write_str("Channel not found or unknown format"),
            Self::NotAChannelCategory => f.write_str("Channel is not a channel category"),
        }
    }
}

/// Look up a ChannelCategory by a string case-insensitively.
///
/// Lookup is done by the global cache, hence the cache feature needs to be enabled.
///
/// For more information, see the ArgumentConvert implementation for [`Channel`]
#[cfg(feature = "cache")]
#[async_trait::async_trait]
impl ArgumentConvert for ChannelCategory {
    type Err = ChannelCategoryParseError;

    // Delegates to Channel::convert and narrows the result to the Category
    // variant, remapping the error types.
    async fn convert(
        ctx: &Context,
        guild_id: Option<GuildId>,
        channel_id: Option<ChannelId>,
        s: &str,
    ) -> Result<Self, Self::Err> {
        match Channel::convert(ctx, guild_id, channel_id, s).await {
            Ok(Channel::Category(channel)) => Ok(channel),
            // TODO: accomodate issue #1352 somehow
            Ok(_) => Err(ChannelCategoryParseError::NotAChannelCategory),
            Err(ChannelParseError::Http(e)) => Err(ChannelCategoryParseError::Http(e)),
            Err(ChannelParseError::NotFoundOrMalformed) => {
                Err(ChannelCategoryParseError::NotFoundOrMalformed)
            },
        }
    }
}
isc
route4me/route4me-nodejs-sdk
examples/Routes/get-routes-from-date-range.js
859
"use strict"

const path = require("path")
const debug = require("debug")("route4me-node:examples")
const chai = require("chai")

require("../init-examples-suite")
const helper = require("./../../test/helper")

helper.describeIntegration(helper.toSuiteName(__filename), function T() {
	this.timeout(5000)
	this.slow(3000)
	it(path.basename(__filename), (done) => {
		// const Route4Me = require("route4me-node")
		const expect = chai.expect
		const apiKey = "11111111111111111111111111111111"
		const route4me = new Route4Me(apiKey)

		const options = {
			offset: 0,
			limit: 10,
			start_date: "2019-10-15",
			end_date: "2019-10-20"
		}

		route4me.Routes.list(options, (err, routes) => {
			debug("error ", err)
			debug("result ", routes)
			// Expectations about result
			expect(err).is.null
			expect(routes).is.an("array")
			// FIX: `done()` was previously called synchronously right after
			// issuing the request, so Mocha could finish the test before this
			// callback (and its assertions) ever ran.  Completing the test
			// from inside the callback makes the assertions actually count.
			done()
		})
	})
})
isc
io7m/jparasol
io7m-jparasol-compiler-core/src/main/java/com/io7m/jparasol/untyped/ast/checked/UASTCVertexShaderVisitorType.java
2095
/*
 * Copyright © 2014 <code@io7m.com> http://io7m.com
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
 * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
 * IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

package com.io7m.jparasol.untyped.ast.checked;

import java.util.List;

import com.io7m.jparasol.untyped.ast.checked.UASTCDeclaration.UASTCDShaderVertex;
import com.io7m.jparasol.untyped.ast.checked.UASTCDeclaration.UASTCDShaderVertexInput;
import com.io7m.jparasol.untyped.ast.checked.UASTCDeclaration.UASTCDShaderVertexOutput;
import com.io7m.jparasol.untyped.ast.checked.UASTCDeclaration.UASTCDShaderVertexOutputAssignment;
import com.io7m.jparasol.untyped.ast.checked.UASTCDeclaration.UASTCDShaderVertexParameter;

// CHECKSTYLE_JAVADOC:OFF

/**
 * Visitor over checked-AST vertex shader declarations.  The per-part visit
 * methods produce the intermediate values (PI, PP, PO, L, O) which are then
 * handed back, already collected into lists, to {@link #vertexShaderVisit}
 * to produce the final result VS.
 *
 * Type parameters: VS = shader result, PI = input, PP = parameter,
 * PO = output, L = local, O = output assignment, E = exception type thrown
 * by visit methods.
 */
public interface UASTCVertexShaderVisitorType<VS, PI, PP, PO, L, O, E extends Throwable>
{
  // Final step: combine all visited parts into the shader result.
  VS vertexShaderVisit(
    final List<PI> inputs,
    final List<PP> parameters,
    final List<PO> outputs,
    final List<L> locals,
    final List<O> output_assignments,
    final UASTCDShaderVertex v)
    throws E;

  PI vertexShaderVisitInput(
    final UASTCDShaderVertexInput i)
    throws E;

  // Called before local declarations are visited; returns the sub-visitor
  // used for them.
  UASTCVertexShaderLocalVisitorType<L, E> vertexShaderVisitLocalsPre()
    throws E;

  PO vertexShaderVisitOutput(
    final UASTCDShaderVertexOutput o)
    throws E;

  O vertexShaderVisitOutputAssignment(
    final UASTCDShaderVertexOutputAssignment a)
    throws E;

  PP vertexShaderVisitParameter(
    final UASTCDShaderVertexParameter p)
    throws E;
}
isc
Aryk/dead_simple_cms
lib/dead_simple_cms/section/builder.rb
2758
module DeadSimpleCMS
  class Section
    # Public: A Builder class which provides a nice DSL to describe different sections of the site that a user wants to modify
    # through a CMS.
    class Builder

      attr_accessor :group_hierarchy
      attr_reader :section

      delegate :display, :extend, :to => :current_group

      # Defines a DSL method named after klass.builder_method_name which
      # instantiates that attribute class, attaches it to the innermost open
      # group (or the section's root group) and registers it on the section.
      def self.define_attribute_builder_method(klass)
        class_eval <<-RUBY, __FILE__, __LINE__ + 1
          def #{klass.builder_method_name}(identifier, options={})
            group_hierarchy = self.group_hierarchy.presence || [section.root_group] # fallback on the root group
            attribute = #{klass}.new(identifier, options.merge(:group_hierarchy => group_hierarchy, :section => section))
            group_hierarchy.last.add_attribute(attribute)
            section.add_attribute(attribute)
          end
        RUBY
      end

      # The DSL block is instance_eval'd, so everything it calls resolves
      # against this builder.
      def initialize(section, &block)
        @section = section
        @group_hierarchy = []
        instance_eval(&block)
      end

      # Opens a (possibly preconfigured) group.  Accepts either an explicit
      # identifier plus options, or a single `identifier => configuration`
      # hash pair.  The block is evaluated with the new group on top of the
      # hierarchy.
      def group(*args, &block)
        options = args.extract_options!
        attribute_options_by_identifier = options.delete(:attribute_options) || {}
        identifier = args.first
        # If no identifier provided, first key, value pair of the hash is the identifier => group_configuration.
        identifier, group_configuration = options.shift unless identifier
        unless group_configuration.is_a?(DeadSimpleCMS::Group::Configuration)
          group_configuration = DeadSimpleCMS.group_configurations[group_configuration]
        end
        options.update(group_configuration.options) if group_configuration
        group = Group.new(identifier, options)
        nest_group(group) do
          if group_configuration
            # Preconfigured groups contribute their presenter/renderer and a
            # set of predeclared attributes (possibly overridden per-call).
            display(group_configuration.presenter_class, &group_configuration.render_proc)
            group_configuration.attribute_arguments.each do |attribute_identifier, (attribute_type, attribute_options)|
              attribute_options = attribute_options.merge(attribute_options_by_identifier[attribute_identifier] || {})
              send(attribute_type, attribute_identifier, attribute_options)
            end
          end
          instance_eval(&block) if block_given?
        end
      end

      private

      # Public: Returns the current group. Since Section inherits from Group, it is also considered a group as well.
      def current_group
        group_hierarchy.last || section
      end

      # Pushes `group` onto the hierarchy for the duration of the block and
      # always restores the previous hierarchy, even if the block raises.
      def nest_group(group)
        tmp = group_hierarchy
        current_group.add_group(group) # chain it with the last group or section if its top-level.
        self.group_hierarchy += [group]
        yield
      ensure
        self.group_hierarchy = tmp
      end

    end # Builder
  end
end
mit
aerdman-aws/QlikApp
QlikApp/app/js/messagePane/messagePane.ts
1496
/// <reference path="../../../Scripts/typings/angularjs/angular.d.ts" />
/// <reference path="../typings.d.ts" />
'use strict';

// Directive wrapper: renders the message-pane template on an isolate scope
// and binds it to MessagePaneController.
class MessagePaneDirective implements ng.IDirective {
    static $inject = [];
    constructor() { }
    scope = {};
    templateUrl = './app/js/messagePane/messagePane.html';
    controller = 'MessagePaneController';
};

// Controller: loads the message list from messageService and reloads it
// whenever the service flags its collection as dirty.
class MessagePaneController implements qlik.IMessagePaneController {
    static $inject = ['$scope', 'messageService'];
    constructor(private $scope: qlik.IMessagePaneScope, private messageService: qlik.IMessageService) {
        $scope.controller = this;
        this.loadMessages();
        $scope.$watch('controller.messageService.isDirty', (newValue: boolean, oldValue: boolean): void => {
            if (!oldValue && newValue) { //if message collection wasn't dirty, but now is dirty...
                this.loadMessages(); //... reload the messages
            }
        });
    }

    // Fetch all messages and publish them on the scope.
    private loadMessages(): void {
        this.messageService.getAll().then((messages: qlik.IMessage[]) => {
            this.$scope.messages = messages;
        });
    }

    // Fetch one message's detail and show the palindrome verdict.
    loadDetails(id: number): void {
        this.messageService.get(id).then((messageDetail: qlik.IMessageDetail) => {
            alert('"' + messageDetail.Message.Body + '" is ' + (!messageDetail.IsPalindrome ? 'not ' : '') + 'a palindrome');
        });
    }

    // Delete a message; the service's dirty flag triggers the reload above.
    deleteMessage(id: number): void {
        this.messageService.delete(id);
    }
}

angular.module('qlik.messagePane', [])
    .directive('qlikMessagePane', () => new MessagePaneDirective())
    .controller('MessagePaneController', MessagePaneController);
mit
ArcherSys/ArcherSys
Lua/examples/luatask/test.lua
1909
require 'task'

TEST = {}

-- Print a table of the currently registered tasks
-- (index, script name, pending message count, registered id).
function TEST.ts()
	local tl = task.list()
	io.stdout:write( '\nID SCRIPT COUNT REG/ID\n' )
	io.stdout:write( '---- ---------------- --------- ----------------\n' )
	if tl then
		for i, t in pairs( tl ) do
			io.stdout:write( string.format( '%4d %-16.16s %9d %-16.16s\n',
				i, t.script or arg[0], t.msgcount, t.id or '' ) )
		end
	end
end

-- Minimal interactive shell: `ts` lists tasks, `run <script> [args...]`
-- spawns a new task, `quit` exits.
function TEST.main( arg )
	task.register( 'Main' )
	local cmd = 'ts'
	while cmd ~= 'quit' do
		if cmd == 'ts' then
			TEST.ts()
		elseif string.sub( cmd, 1, 3 ) == 'run' then
			local rarg = {}
			string.gsub( cmd, '([^ ]+)', function( x ) table.insert( rarg, x ) end )
			if not rarg[2] then
				io.stdout:write( 'Run what?\n' )
			else
				local script = rarg[2]
				-- Drop the 'run' keyword and the script name so that only
				-- the task's own arguments remain in rarg.
				table.remove( rarg, 1)
				table.remove( rarg, 1)
				local tsk = task.create( script, rarg )
				if tsk == -1 then
					io.stdout:write( "-> Can't expand task list.\n" )
				elseif tsk == -2 then
					io.stdout:write( "-> Can't strdup file name.\n" )
				elseif tsk == -3 then
					io.stdout:write( "-> Can't create message queue.\n" )
				elseif tsk == -4 then
					io.stdout:write( "-> Can't create os thread.\n" )
				elseif tsk == -11 then
					io.stdout:write( "-> The library seems corrupt.\n" )
				else
					io.stdout:write( "-> Task ", tsk, " started.\n" )
				end
			end
		end
		io.stdout:write( 'TEST> ' )
		io.stdout:flush()
		-- FIX: read() returns nil on EOF; treating that as 'quit' prevents
		-- the loop from spinning forever when stdin is closed.
		cmd = io.stdin:read() or 'quit'
	end
	io.stdout:write( '\nTEST terminated\n' )
	os.exit( 0 )
end

TEST.main( arg )
mit
larios96/RicardoWEB
phpchat/demo/demo27_dice.class.php
3416
<?php
/**
 * Dice rolling,
 * test routines at the end of this file
 *
 * @author Alessandro Pasotti www.itopen.it
 * @copyright (C) itOpen 2006
 * @licence LGPL
 *
 * Valid strings:
 * xdx
 * xdxx
 * xdxxx
 * xdxxx+x
 * xdxxx-x
 */

class Dice {

	/** Parsed launch: keys 'launch', 'faces' and (optionally) 'bias'. */
	var $command;

	/** Error messages accumulated by the last check() call. */
	var $errors = array();

	/**
	 * Parse and validate a launch string such as "2d6" or "3d10+2".
	 * On success the parsed command is stored for roll(); on failure an
	 * error message is queued (see error_get()).
	 *
	 * @param string $text launch description
	 * @return bool true when the string is a valid launch
	 */
	function check($text){
		$this->errors = array();
		// FIX: was `$this->command = '';` — assigning array keys to a string
		// scalar is broken on modern PHP; reset to an empty array instead.
		$this->command = array();
		if(preg_match('/^([0-9]+)d([0-9]{1,3})([\+-][0-9]+)?$/', $text, $matches)){
			$this->command['launch'] = (int) $matches[1];
			$this->command['faces'] = (int) $matches[2];
			// Now go for corrections
			if(count($matches) == 4){
				$this->command['bias'] = $matches[3];
			}
			if(!($this->command['launch'] && $this->command['faces'])){
				//print_r($matches);
				$this->errors[] = "Be serious, not null dice please.";
				return false;
			}
		} else {
			//print_r($matches);
			// Too long
			//$this->errors[] = "'$text' is not a valid string for a dice launch. Valid strings match the following patterns xdyyy, xdyyy+z or xdyyy-z where x, y and z are digits, you can have up to three y.";
			$this->errors[] = 'Not valid. Valid launches are like xdyyy';
			return false;
		}
		$this->text = $text;
		// NOTE: the original reseeded the RNG here with
		// srand((double)microtime()*1000000).  Since PHP 4.2 rand() seeds
		// itself, and reseeding from microtime() on every call only weakens
		// the randomness, so the call was dropped.
		return true;
	}

	/**
	 * Roll the launch parsed by the last successful check() and return an
	 * HTML summary, e.g. "2d6 &#187; + 3 + 5 = <strong>8</strong>".
	 */
	function roll(){
		$sum = 0;
		$result = $this->text . ' &#187; ' ;
		for($i = 0; $i < $this->command['launch']; $i++){
			$launchresult = rand(1, $this->command['faces']);
			$sum += $launchresult;
			$result .= ' + ' . $launchresult;
		}
		// FIX: was `count($this->command) == 3`, which silently breaks if a
		// key is ever added or removed; test the optional key explicitly.
		if(isset($this->command['bias'])){
			$sum += $this->command['bias'];
			$result .= ' [' . $this->command['bias'] . ']';
		}
		return $result . ' = ' . '<strong>' . $sum . '</strong>';
	}

	/** Return the queued error messages joined with <br/>, or '' if none. */
	function error_get(){
		if(!count($this->errors)){
			return '';
		} else {
			return join("<br />\n", $this->errors);
		}
	}

	/** Self-test: run a fixed set of valid and invalid launch strings. */
	function test(){
		// Valid
		$testvalid = array( '1d1' , '2d2' , '9d6' , '1d99' , '1d999' , '1d100' , '1d6+1' , '1d6-9' );
		// Not valid
		$testnotvalid = array( '0d6' , '99d6' , '1d1000' , '1d000' , '1d000' , '1d6+99' , '1d6+10' , 'xad--' );
		print "<br />\n---------------------------------------<br />\n";
		print "Dice: testing valid launches" . "<br />\n";
		$valid = 0;
		foreach($testvalid as $t){
			if($this->check($t)){
				$valid ++;
				print $this->roll() . "\n";
			} else {
				print $this->error_get(). "\n";
			}
		}
		print "<br />\n" . "Valid launches: " . $valid . '/' . count($testvalid) . "<br />\n";
		print "<br />\n" . "Dice: testing notvalid launches" . "<br />\n";
		$valid = 0;
		foreach($testnotvalid as $t){
			if($this->check($t)){
				$valid ++;
				print $this->roll() . "\n";
			} else {
				print $this->error_get(). "\n";
			}
			print "---------------------------------------<br />\n";
		}
		print "<br />\n" . "Notvalid launches: " . (count($testnotvalid) - $valid) . '/' . count($testnotvalid) . "<br />\n";
	}
}

/*
 * Uncomment for testing
 */
/*/
$d = new Dice();
$d->test();
//*/

?>
mit
BeneathTheInk/temple-selector
test/lib.js
1332
var expect = require("./utils/expect"); var fromSelector = require("../"); var Temple = require("templejs"); describe("fromSelector Tests", function() { it("fromSelector should take a basic css selector and return a new Temple Element binding.", function() { var b = fromSelector("span.a-class#myid"); expect(b).to.be.instanceof(Temple.Element); expect(b.tagname).to.equal("span"); expect(b.node.className).to.equal("a-class"); expect(b.node.id).to.equal("myid"); }); it("fromSelector should accept multiple classes and keep their order.", function() { var b = fromSelector("span.class1.class2#myid.class3"); expect(b).to.be.instanceof(Temple.Element); expect(b.tagname).to.equal("span"); expect(b.node.className).to.equal("class1 class2 class3"); }); it("fromSelector should accept only one id", function() { expect(function() { fromSelector("span#id1.myclass#id2"); }).to.throw(Error); }); it("fromSelector should accept attribute tags, with and without quotes", function() { var b = fromSelector("span[attr1=foo][attr2='bar'][attr3=\"baz\"]"); expect(b).to.be.instanceof(Temple.Element); expect(b.tagname).to.equal("span"); expect(b.getAttribute("attr1")).to.equal("foo"); expect(b.getAttribute("attr2")).to.equal("bar"); expect(b.getAttribute("attr3")).to.equal("baz"); }); });
mit
sethgerou/DESC.org
app/models/page.rb
96
class Page < ApplicationRecord validates_presence_of :title validates_presence_of :body end
mit
Data2Semantics/mustard
mustard-learners/src/main/java/org/data2semantics/mustard/learners/evaluation/AUCPR.java
6170
package org.data2semantics.mustard.learners.evaluation; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.data2semantics.mustard.learners.Prediction; import org.data2semantics.mustard.learners.evaluation.utils.AUCUtils; import org.data2semantics.mustard.learners.evaluation.utils.ValueLabelPair; import org.data2semantics.mustard.utils.Pair; /** * Average area under the Precision Recall curve * * @author Gerben * */ public class AUCPR implements EvaluationFunction { public double computeScore(double[] target, Prediction[] prediction) { double auc = 0; Map<Integer,Double> foldSizes = new HashMap<Integer,Double>(); List<Double> classes = new ArrayList<Double>(); for (int i = 0; i < target.length; i++) { if (!classes.contains(target[i])) { // faster with a set, but classes should be short classes.add(target[i]); } int fold = 1; if (prediction[i].getFold() > 0 && !prediction[i].isProbabilities()) { // if we output probabilities, then folds are comparable, if we have SVM decision values, then they might not be. 
fold = prediction[i].getFold(); } if (!foldSizes.containsKey(fold)) { foldSizes.put(fold, 0.0); } foldSizes.put(fold, foldSizes.get(fold)+1); } Collections.sort(classes); for (int fold : foldSizes.keySet()) { double aucTemp = 0; if (prediction[0].isPairWise()) { Map<Pair<Double,Double>, List<ValueLabelPair>> classifiers = new HashMap<Pair<Double,Double>, List<ValueLabelPair>>(); for (int i = 0; i < classes.size(); i++) { for (int j = i+1; j < classes.size(); j++) { classifiers.put(new Pair<Double,Double>(classes.get(i), classes.get(j)), new ArrayList<ValueLabelPair>()); classifiers.put(new Pair<Double,Double>(classes.get(j), classes.get(i)), new ArrayList<ValueLabelPair>()); } } for (int i = 0; i < target.length; i++) { if (prediction[i].getFold() == fold) { int classIndex = getClassIndex(prediction[i].getClassLabels(), target[i]); Map<Pair<Double,Double>, ValueLabelPair> map = getRelevantDecisionValuesPairwise(target[i], classIndex, prediction[i].isProbabilities(), prediction[i].getClassLabels(), prediction[i].getDecisionValue()); for (Pair<Double,Double> p : map.keySet()) { // add the new decision values to the relevant classifiers classifiers.get(p).add(map.get(p)); } } } for (Pair<Double,Double> p : classifiers.keySet()) { aucTemp += AUCUtils.computePRAuc(classifiers.get(p)); } aucTemp /= (double) classifiers.size(); aucTemp *= foldSizes.get(fold) / (double) target.length; auc += aucTemp; } else { // not pairwise Map<Double, List<ValueLabelPair>> classifiers = new HashMap<Double, List<ValueLabelPair>>(); for (Double label : classes) { classifiers.put(label, new ArrayList<ValueLabelPair>()); } for (int i = 0; i < target.length; i++) { if (prediction[i].getFold() == fold) { Map<Double, ValueLabelPair> map = getRelevantDecisionValues(target[i], prediction[i].getClassLabels(), prediction[i].getDecisionValue()); for (Double d : map.keySet()) { classifiers.get(d).add(map.get(d)); } } } for (Double d : classifiers.keySet()) { aucTemp += 
AUCUtils.computePRAuc(classifiers.get(d)); } aucTemp /= (double) classifiers.size(); aucTemp *= foldSizes.get(fold) / (double) target.length; auc += aucTemp; } } return auc; } private Map<Double, ValueLabelPair> getRelevantDecisionValues(double label, int[] classLabels, double[] decVals) { Map<Double, ValueLabelPair> map = new HashMap<Double,ValueLabelPair>(); for (int i = 0; i < classLabels.length; i++) { if (classLabels[i] == (int)label) { map.put(new Double(classLabels[i]), new ValueLabelPair(decVals[i], true)); } else { map.put(new Double(classLabels[i]), new ValueLabelPair(decVals[i], false)); } } return map; } private Map<Pair<Double,Double>, ValueLabelPair> getRelevantDecisionValuesPairwise(double label, int classIndex, boolean probabilities, int[] classLabels, double[] decVals) { Map<Pair<Double,Double>, ValueLabelPair> map = new HashMap<Pair<Double,Double>,ValueLabelPair>(); int decValIndex = 0; // to keep track of the index in the decVals array for (int i = 0; i < classLabels.length; i++) { for (int j = i+1; j < classLabels.length; j++) { if (i == classIndex || j == classIndex) { // if we are dealing with the current class Pair<Double,Double> idPair; double decVal; // Since the PR curve is not symmetric like the ROC curve, we add both classifiers idPair = new Pair<Double,Double>(new Double(classLabels[i]),new Double(classLabels[j])); decVal = decVals[decValIndex]; if (idPair.getFirst() == label) { // if the first one is the current class, then it is positive map.put(idPair, new ValueLabelPair(decVal, true)); } else { map.put(idPair, new ValueLabelPair(decVal, false)); } idPair = new Pair<Double,Double>(new Double(classLabels[j]),new Double(classLabels[i])); if (!probabilities) { decVal = -decVals[decValIndex]; } else { decVal = 1 - decVals[decValIndex]; } if (idPair.getFirst() == label) { // if the first one is the current class, then it is positive map.put(idPair, new ValueLabelPair(decVal, true)); } else { map.put(idPair, new ValueLabelPair(decVal, 
false)); } } decValIndex++; } } return map; } private int getClassIndex(int[] classLabels, double label) { for (int i = 0; i < classLabels.length; i++) { if (classLabels[i] == (int) label) { return i; } } return -1; } public boolean isBetter(double scoreA, double scoreB) { return (scoreA > scoreB) ? true : false; } public String getLabel() { return "AUC-PR"; } public boolean isHigherIsBetter() { return true; } }
mit
qudou/xmlplus
example/docs/15-optimization/03/index.js
295
xmlplus("xp", function (xp, $_, t) { $_().imports({ Index: { xml: "<h1 id='index'>hello,world</h1>", map: { nofragment: true }, fun: function (sys, items, opts) { console.log(sys.index.width()); } } }); });
mit
sgarciac/spec
core/float/next_float_spec.rb
1470
require File.expand_path('../../../spec_helper', __FILE__) describe "Float#next_float" do it "returns a float the smallest possible step greater than the receiver" do barely_positive = 0.0.next_float barely_positive.should == 0.0.next_float barely_positive.should > 0.0 barely_positive.should < barely_positive.next_float midpoint = barely_positive / 2 [0.0, barely_positive].should include midpoint end it "returns Float::INFINITY for Float::INFINITY" do Float::INFINITY.next_float.should == Float::INFINITY end it "steps directly between MAX and INFINITY" do (-Float::INFINITY).next_float.should == -Float::MAX Float::MAX.next_float.should == Float::INFINITY end it "steps directly between 1.0 and 1.0 + EPSILON" do 1.0.next_float.should == 1.0 + Float::EPSILON end it "steps directly between -1.0 and -1.0 + EPSILON/2" do (-1.0).next_float.should == -1.0 + Float::EPSILON/2 end it "reverses the effect of prev_float for all Floats except INFINITY and +0.0" do num = -rand num.prev_float.next_float.should == num end it "returns negative zero when stepping upward from just below zero" do x = (-0.0).prev_float.next_float (1/x).should == -Float::INFINITY end it "gives the same result for -0.0 as for +0.0" do (-0.0).next_float.should == (0.0).next_float end it "returns NAN if NAN was the receiver" do Float::NAN.next_float.nan?.should == true end end
mit
jjcollinge/blanky
Templates/c#/Swashbuckle.SwaggerGen/Generator/SchemaRegistry.cs
8943
using System; using System.Collections.Generic; using System.Linq; using System.Reflection; using Newtonsoft.Json; using Newtonsoft.Json.Serialization; using Newtonsoft.Json.Converters; namespace Swashbuckle.SwaggerGen.Generator { public class SchemaRegistry : ISchemaRegistry { private readonly JsonSerializerSettings _jsonSerializerSettings; private readonly IContractResolver _jsonContractResolver; private readonly SchemaRegistryOptions _options; private readonly IDictionary<string, Type> _referencedTypeMap; public SchemaRegistry( JsonSerializerSettings jsonSerializerSettings, SchemaRegistryOptions options = null) { _jsonSerializerSettings = jsonSerializerSettings; _jsonContractResolver = _jsonSerializerSettings.ContractResolver ?? new DefaultContractResolver(); _options = options ?? new SchemaRegistryOptions(); _referencedTypeMap = new Dictionary<string, Type>(); Definitions = new Dictionary<string, Schema>(); } public IDictionary<string, Schema> Definitions { get; private set; } public Schema GetOrRegister(Type type) { var schema = CreateSchema(type, true); // Ensure a corresponding definition exists for all referenced types string pendingSchemaId; while ((pendingSchemaId = GetPendingSchemaIds().FirstOrDefault()) != null) { Definitions.Add(pendingSchemaId, CreateSchema(_referencedTypeMap[pendingSchemaId], false)); } return schema; } private Schema CreateSchema(Type type, bool refIfComplex) { if (_options.CustomTypeMappings.ContainsKey(type)) return _options.CustomTypeMappings[type](); var jsonContract = _jsonContractResolver.ResolveContract(type); if (jsonContract is JsonPrimitiveContract) return CreatePrimitiveSchema((JsonPrimitiveContract)jsonContract); var dictionaryContract = jsonContract as JsonDictionaryContract; if (dictionaryContract != null) return dictionaryContract.IsSelfReferencing() && refIfComplex ? 
CreateJsonReference(type) : CreateDictionarySchema(dictionaryContract); var arrayContract = jsonContract as JsonArrayContract; if (arrayContract != null) return arrayContract.IsSelfReferencing() && refIfComplex ? CreateJsonReference(type) : CreateArraySchema(arrayContract); var objectContract = jsonContract as JsonObjectContract; if (objectContract != null) return refIfComplex ? CreateJsonReference(type) : CreateObjectSchema(objectContract); // None of the above, fallback to abstract "object" return CreateSchema(typeof(object), refIfComplex); } private Schema CreatePrimitiveSchema(JsonPrimitiveContract primitiveContract) { var type = Nullable.GetUnderlyingType(primitiveContract.UnderlyingType) ?? primitiveContract.UnderlyingType; if (type.GetTypeInfo().IsEnum) return CreateEnumSchema(primitiveContract, type); if (PrimitiveTypeMap.ContainsKey(type)) return PrimitiveTypeMap[type](); // None of the above, fallback to string return new Schema { Type = "string" }; } private Schema CreateEnumSchema(JsonPrimitiveContract primitiveContract, Type type) { var stringEnumConverter = primitiveContract.Converter as StringEnumConverter ?? _jsonSerializerSettings.Converters.OfType<StringEnumConverter>().FirstOrDefault(); if (_options.DescribeAllEnumsAsStrings || stringEnumConverter != null) { var camelCase = _options.DescribeStringEnumsInCamelCase || (stringEnumConverter != null && stringEnumConverter.CamelCaseText); return new Schema { Type = "string", Enum = (camelCase) ? Enum.GetNames(type).Select(name => name.ToCamelCase()).ToArray() : Enum.GetNames(type) }; } return new Schema { Type = "integer", Format = "int32", Enum = Enum.GetValues(type).Cast<object>().ToArray() }; } private Schema CreateJsonReference(Type type) { var schemaId = _options.SchemaIdSelector(type); if (_referencedTypeMap.ContainsKey(schemaId) && _referencedTypeMap[schemaId] != type) throw new InvalidOperationException(string.Format( "Conflicting schemaIds: Duplicate schemaIds detected for types {0} and {1}. 
" + "See the config setting - \"UseFullTypeNameInSchemaIds\" for a potential workaround", type.FullName, _referencedTypeMap[schemaId].FullName)); if (!_referencedTypeMap.ContainsKey(schemaId)) _referencedTypeMap.Add(schemaId, type); return new Schema { Ref = "#/definitions/" + schemaId }; } private Schema CreateDictionarySchema(JsonDictionaryContract dictionaryContract) { var valueType = dictionaryContract.DictionaryValueType ?? typeof(object); return new Schema { Type = "object", AdditionalProperties = CreateSchema(valueType, true) }; } private Schema CreateArraySchema(JsonArrayContract arrayContract) { var itemType = arrayContract.CollectionItemType ?? typeof(object); return new Schema { Type = "array", Items = CreateSchema(itemType, true) }; } private Schema CreateObjectSchema(JsonObjectContract jsonContract) { var properties = jsonContract.Properties .Where(p => !p.Ignored) .Where(p => !(_options.IgnoreObsoleteProperties && p.IsObsolete())) .ToDictionary( prop => prop.PropertyName, prop => CreateSchema(prop.PropertyType, true).AssignValidationProperties(prop) ); var required = jsonContract.Properties.Where(prop => prop.IsRequired()) .Select(propInfo => propInfo.PropertyName) .ToList(); var schema = new Schema { Required = required.Any() ? 
required : null, // required can be null but not empty Properties = properties, Type = "object" }; var filterContext = new ModelFilterContext( jsonContract.UnderlyingType, jsonContract, this); foreach (var filter in _options.ModelFilters) { filter.Apply(schema, filterContext); } return schema; } private IEnumerable<string> GetPendingSchemaIds() { var referenced = _referencedTypeMap.Keys; var defined = Definitions.Keys; return referenced.Except(defined); } private static readonly Dictionary<Type, Func<Schema>> PrimitiveTypeMap = new Dictionary<Type, Func<Schema>> { { typeof(short), () => new Schema { Type = "integer", Format = "int32" } }, { typeof(ushort), () => new Schema { Type = "integer", Format = "int32" } }, { typeof(int), () => new Schema { Type = "integer", Format = "int32" } }, { typeof(uint), () => new Schema { Type = "integer", Format = "int32" } }, { typeof(long), () => new Schema { Type = "integer", Format = "int64" } }, { typeof(ulong), () => new Schema { Type = "integer", Format = "int64" } }, { typeof(float), () => new Schema { Type = "number", Format = "float" } }, { typeof(double), () => new Schema { Type = "number", Format = "double" } }, { typeof(decimal), () => new Schema { Type = "number", Format = "double" } }, { typeof(byte), () => new Schema { Type = "string", Format = "byte" } }, { typeof(sbyte), () => new Schema { Type = "string", Format = "byte" } }, { typeof(bool), () => new Schema { Type = "boolean" } }, { typeof(DateTime), () => new Schema { Type = "string", Format = "date-time" } }, { typeof(DateTimeOffset), () => new Schema { Type = "string", Format = "date-time" } } }; } }
mit
AndrewQuijano/SSTREU2017
CompareMobile/app/src/androidTest/java/edu/fiu/reu2017/ExampleInstrumentedTest.java
714
package edu.fiu.reu2017; import android.content.Context; import android.support.test.InstrumentationRegistry; import android.support.test.runner.AndroidJUnit4; import org.junit.Test; import org.junit.runner.RunWith; import static org.junit.Assert.*; /** * Instrumented test, which will execute on an Android device. * * @see <a href="http://d.android.com/tools/testing">Testing documentation</a> */ @RunWith(AndroidJUnit4.class) public class ExampleInstrumentedTest { @Test public void useAppContext() { // Context of the app under test. Context appContext = InstrumentationRegistry.getTargetContext(); assertEquals("edu.fiu.reu2017", appContext.getPackageName()); } }
mit
jedwards1211/frcs-notes
src/parseTripSummaries.js
4586
'use strict'; var tripStart = /^ {2}\d | {1}\d{2} |\d{3} |\d{4} /; /** * Parses data from a STAT_sum.txt file. Here is an excerpt of the format: <pre> 1 2/15/81 258.60 17 ENTRANCE DROPS, JOE'S "I LOVE MY WIFE TRAVERSE", TRICKY TRAVERSE EXCLUDED: 0.00 0 Peter Quick Keith Ortiz A1 AD1-AD3 AE1 AE1 SIDE AE9 SIDE AE10-AE9 AE13 SIDE AE15 SIDE AE20-AE11 3 3/ 6/81 2371.20 61 DOUG'S DEMISE (50 FT DROP), CHRIS CROSS, CRAWL ABOVE DROP EXCLUDED: 0.00 0 Peter Quick Chris Gerace Phil Oden Chip Hopper A13 SIDE B1-B5 B2 SIDE B3 SIDE B6-B18 B17 SIDE B19-B38 B32 SIDE BS1-BS5 C1-C18 </pre> * * @param{lines} an array of strings representing lines of the file to parse. * @returns a array with the following format (ex. parsed from above). * array[3] gets the data for trip 3 (not trip 4 as programmers might expect :). * Note that there are null values at indices 0 and 2 because there are no trips * numbered 0 or 2 in the example above. Also the date fields will be date * objects, below is the JSON.stringify()ed version. 
<pre>[ null, { "tripNum": 1, "date": "1981-02-15T06:00:00.000Z", "footage": 258.6, "numShots": 17, "name": "ENTRANCE DROPS, JOE'S \"I LOVE MY WIFE TRAVERSE\", TRICKY TRAVERSE", "excludedFootage": 0, "numExcludedShots": 0, "surveyors": [ "Peter Quick", "Keith Ortiz" ], "shots": [ "A1", "AD1-AD3", "AE1", "AE1 SIDE", "AE9 SIDE", "AE10-AE9", "AE13 SIDE", "AE15 SIDE", "AE20-AE11" ] }, null, { "tripNum": 3, "date": "1981-03-06T06:00:00.000Z", "footage": 2371.2, "numShots": 61, "name": "DOUG'S DEMISE (50 FT DROP), CHRIS CROSS, CRAWL ABOVE DROP", "excludedFootage": 0, "numExcludedShots": 0, "surveyors": [ "Peter Quick", "Chris Gerace", "Phil Oden", "Chip Hopper" ], "shots": [ "A13 SIDE", "B1-B5", "B2 SIDE", "B3 SIDE", "B6-B18", "B17 SIDE", "B19-B38", "B32 SIDE", "BS1-BS5", "C1-C18" ] } ]</pre> */ module.exports = function(lines) { if (typeof lines === 'string') lines = lines.split(/\r\n|\n\r|\r|\n/); var result = []; var i = 0; function parseTrip() { var tripNum = parseInt(lines[i].substring(0, 4)); if (tripNum >= 1000) { // There are only 3 columns reserved for the trip number, so when we // get to trips in the 1000s an extra digit will push the rest of the // line over. So delete a space after the trip number so the rest // of the line can be parsed as usual. lines[i] = lines[i].substring(0, 4).concat(lines[i].substring(5)); } var year = parseInt(lines[i].substring(11, 14)); if (year >= 100) { year += 1900; // I discovered this by accident! // Dates after 2000 have 3 digits in the file (e.g. 5/28/114), and the // extra digit pushes the rest of the line over one character. So just // delete the extra character so that the rest of the line can be parsed // as usual. 
lines[i] = lines[i].substring(0, 11).concat(lines[i].substring(12)); } var trip = { tripNum: tripNum, date: new Date(year, parseInt(lines[i].substring(5, 7)) - 1, parseInt(lines[i].substring(8, 10))), footage: parseFloat(lines[i].substring(14, 23)), numShots: parseFloat(lines[i].substring(24, 31)), name: lines[i].substring(31, 111).trim(), excludedFootage: parseFloat(lines[i].substring(120, 127)), numExcludedShots: parseInt(lines[i].substring(127, 130)), surveyors: lines[++i].trim().split(/ /), shots: [] }; while (i < lines.length - 1 && !tripStart.test(lines[++i])) { var trimmed = lines[i].trim(); if (trimmed.length) { Array.prototype.push.apply(trip.shots, trimmed.split(/\s\s+|\t+/)); } } return trip; } while (i < lines.length) { if (tripStart.test(lines[i])) { var trip = parseTrip(); result[trip.tripNum] = trip; } else { i++; } } return result; }
mit
garlab/postfix-admin
app/views/domaines.php
1693
<h1>Domaines</h1> <?php if (!empty($domaines) && count($domaines)): ?> <table class="table"> <tr> <th>Name</th> <th>Active</th> </tr> <?php foreach ($domaines as $domain): ?> <tr> <td><?=$domain['name']?></td> <td><input type="checkbox" name="<?=$domain['name']?>" value="<?=$domain['etat']?>" /></td> </tr> <?php endforeach; ?> </table> <?php else: ?> <p>No domaines yet!</p> <?php endif;?> <div class="container"> <div class="row"> <div class="center span4 well"> <legend>Create a new domain</legend> <?php if (!empty($message)): ?> <div class="alert alert-success"> <a class="close" data-dismiss="alert" href="#">×</a><?=$message?> </div> <?php endif;?> <?php if (!empty($error)): ?> <div class="alert alert-danger"> <a class="close" data-dismiss="alert" href="#">×</a><?=$error?> </div> <?php endif;?> <form role="form" id="post-domain-form" class="form-inline" method="post"> <div class="form-group"> <label for="domain-field" class="sr-only">Domain name</label> <input type="text" class="form-control" name="domain" id="domain-field" placeholder="Enter domain"> </div> <div class="checkbox"> <label> <input type="checkbox" name="etat" checked> Active </label> </div> <button type="submit" class="btn btn-primary">Create</button> </form> </div> </div> </div>
mit
gophertrain/material
web/apis/demos/example3/main.go
2713
// All material is licensed under the Apache License Version 2.0, January 2004 // http://www.apache.org/licenses/LICENSE-2.0 // Sample program to show how to create a basic CRUD based web api // for customers. package main import ( "encoding/json" "log" "net/http" "strconv" "github.com/ardanlabs/gotraining/topics/web/customer" "github.com/gorilla/pat" ) // App loads the entire API set together for use. func App() http.Handler { // Create a version of the pat router. r := pat.New() // Define the routes and order matters. r.Get("/customers/{id}", showHandler) r.Get("/customers", indexHandler) r.Post("/customers", createHandler) // Redirect requests from `/`` to `/customers`. r.Handle("/", http.RedirectHandler("/customers", http.StatusMovedPermanently)) return r } // indexHandler returns the entire list of customers in the DB. func indexHandler(res http.ResponseWriter, req *http.Request) { // Retrieve the list of customers, encode to JSON // and send the response. if err := json.NewEncoder(res).Encode(customer.All()); err != nil { http.Error(res, err.Error(), http.StatusInternalServerError) return } } // showHandler returns a single specified customer. func showHandler(res http.ResponseWriter, req *http.Request) { // Retrieve the customer id from the request. idStr := req.URL.Query().Get(":id") id, err := strconv.Atoi(idStr) if err != nil { http.Error(res, err.Error(), http.StatusBadRequest) return } // Retreive that customer from the DB. c, err := customer.Find(id) if err != nil { http.Error(res, err.Error(), http.StatusNotFound) return } // Encode the customer to JSON and send the response. if err := json.NewEncoder(res).Encode(c); err != nil { http.Error(res, err.Error(), http.StatusInternalServerError) return } } // createHandler adds new customers to the DB. func createHandler(res http.ResponseWriter, req *http.Request) { // Create a customer value. var c customer.Customer // Encode the customer document received into the customer value. 
err := json.NewDecoder(req.Body).Decode(&c) if err != nil { http.Error(res, err.Error(), http.StatusInternalServerError) return } // Save the customer in the DB. c.ID, err = customer.Save(c) if err != nil { http.Error(res, err.Error(), http.StatusInternalServerError) return } // Encode the customer to JSON and send the response. b, err := json.Marshal(&c) if err != nil { http.Error(res, err.Error(), http.StatusInternalServerError) return } res.WriteHeader(http.StatusCreated) res.Write(b) } func main() { // Start the http server to handle the request for // both versions of the API. log.Fatal(http.ListenAndServe(":3000", App())) }
mit
supperbowen/bw-wechat-mgr
app_console/controllers/defaultController.js
212
/** * @module controllers/defaultController */ async function index (options) { console.log('Hello world defaultController & index action with options: ' +JSON.stringify(options)); }; export {index};
mit
Justineo/postcss-sort-style-rules
dist/index.js
3266
'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; var _postcss = require('postcss'); var _postcss2 = _interopRequireDefault(_postcss); var _specificity = require('specificity'); var _specificity2 = _interopRequireDefault(_specificity); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } var SCOPE_RULES = ['media', 'supports']; function isScope(name) { if (typeof name !== 'string') { if (name.type !== 'atrule') { return false; } name = name.name; } return SCOPE_RULES.indexOf(_postcss2.default.vendor.unprefixed(name)) !== -1; } function compare(s1, s2) { return s1.reduce(function (prev, current, i) { if (prev !== 0) { return prev; } return current - s2[i]; }, 0); } function compareRange(r1, r2) { if (compare(r1.min, r2.max) > 0) { return 1; } else if (compare(r2.min, r1.max) > 0) { return -1; } return 0; } var MAX = Number.POSITIVE_INFINITY; var DEFAULT_RANGE = { max: [0, 0, 0, 0], min: [MAX, MAX, MAX, MAX] }; function reduceRange(prev, current) { if (compare(prev.min, current) > 0) { prev.min = current; } if (compare(current, prev.max) > 0) { prev.max = current; } return prev; } function reduceRanges(prev, current) { if (compare(prev.min, current.min) > 0) { prev.min = current.min; } if (compare(current.max, prev.max) > 0) { prev.max = current.max; } return prev; } /** * Get specificity range of a style rule or a scope */ function getRange(node) { if (isScope(node)) { return node.nodes.map(getRange).reduce(reduceRanges); } else if (node.type === 'rule') { return _specificity2.default.calculate(node.selector).map(function (result) { return result.specificity.split(',').map(function (v) { return Number(v); }); }).reduce(reduceRange, _extends({}, 
DEFAULT_RANGE)); } return null; } /** * Sort style rules inside a scope node (root / @media / @supports) */ function sortScope(scope) { var rules = []; scope.each(function (node) { /* skip progress in @keyframes */ if (node.type === 'rule' && node.selector.match(/^(?:from|to)$|%$/)) { return; } /* calculate range for rules and scopes */ if (node.type === 'rule' || isScope(node)) { rules.push(_extends(getRange(node), { node: node })); } /* sort inside scopes */ if (isScope(node)) { sortScope(node); } }); var sorted = rules.sort(compareRange); sorted.forEach(function (rule, i) { if (i > 0) { rule.node.moveAfter(sorted[i - 1].node); } }); } exports.default = _postcss2.default.plugin('postcss-sort-style-rules', function () { return sortScope; }); module.exports = exports['default'];
mit
shipshapecode/ember-3d-nav
app/components/nav-trigger.js
63
export { default } from 'ember-3d-nav/components/nav-trigger';
mit
AlexMog/LibNet
doc/html/search/files_63.js
86
var searchData= [ ['condvar_2ehh',['CondVar.hh',['../_cond_var_8hh.html',1,'']]] ];
mit
Morteuille/projectmanager
src/Projectmanager/HomeBundle/Entity/User.php
2093
<?php namespace Projectmanager\HomeBundle\Entity; use Doctrine\ORM\Mapping as ORM; /** * @ORM\Entity */ class User { //Attributes /** * @ORM\Column(name="id", type="integer") * @ORM\Id */ var $id; // type : int /** * @ORM\Column(name="User_FName", type="string", length=255) */ var $User_FName; // type : string /** * @ORM\Column(name="User_LName", type="string", length=255) */ var $User_LName; // type : string /** * @ORM\Column(name="User_Mail", type="string", length=255) */ var $User_Mail; // type : string /** * @ORM\Column(name="User_Phone_Office", type="string", length=255) */ var $User_Phone_Office; // type : string /** * @ORM\Column(name="User_Phone_Mobile", type="string", length=255) */ var $User_Phone_Mobile; // type : string /** * @ORM\Column(name="User_Service", type="string", length=255) */ var $User_Service; // type : string //Operations public function getUser_FName() { return $this->User_FName; } public function setUser_FName($User_FName) { $this->User_FName = $User_FName; } public function getUser_LName() { return $this->User_LName; } public function setUser_LName($User_LName) { $this->User_LName = $User_LName; } public function getUser_Mail() { return $this->User_Mail; } public function setUser_Mail($User_Mail) { $this->User_Mail = $User_Mail; } public function getUser_Phone_Office() { return $this->User_Phone_Office; } public function setUser_Phone_Office($User_Phone_Office) { $this->User_Phone_Office = $User_Phone_Office; } public function getUser_Phone_Mobile() { return $this->User_Phone_Mobile; } public function setUser_Phone_Mobile($User_Phone_Mobile) { $this->User_Phone_Mobile = $User_Phone_Mobile; } public function getUser_Service() { return $this->User_Service; } public function setUser_Service($User_Service) { $this->User_Service = $User_Service; } } // End Class User /** * @ORM\Entity */ class Manager extends User { //Attributes //Operations } // End Class Manager ?>
mit
jobinesh/jet-examples
node-jet1.2.0-mongo-app/public/js/libs/oj/v1.2.0/resources/nls/en-AU/localeElements.js
6494
define({main:{"en-AU":{identity:{version:{_cldrVersion:"24",_number:"$Revision: 9287 $"},generation:{_date:"$Date: 2013-08-28 21:32:04 -0500 (Wed, 28 Aug 2013) $"},language:"en",territory:"001"},dates:{calendars:{gregorian:{months:{format:{abbreviated:{1:"Jan",2:"Feb",3:"Mar",4:"Apr",5:"May",6:"Jun",7:"Jul",8:"Aug",9:"Sep",10:"Oct",11:"Nov",12:"Dec"},narrow:{1:"J",2:"F",3:"M",4:"A",5:"M",6:"J",7:"J",8:"A",9:"S",10:"O",11:"N",12:"D"},wide:{1:"January",2:"February",3:"March",4:"April",5:"May",6:"June", 7:"July",8:"August",9:"September",10:"October",11:"November",12:"December"}},"stand-alone":{abbreviated:{1:"Jan",2:"Feb",3:"Mar",4:"Apr",5:"May",6:"Jun",7:"Jul",8:"Aug",9:"Sep",10:"Oct",11:"Nov",12:"Dec"},narrow:{1:"J",2:"F",3:"M",4:"A",5:"M",6:"J",7:"J",8:"A",9:"S",10:"O",11:"N",12:"D"},wide:{1:"January",2:"February",3:"March",4:"April",5:"May",6:"June",7:"July",8:"August",9:"September",10:"October",11:"November",12:"December"}}},days:{format:{abbreviated:{sun:"Sun",mon:"Mon",tue:"Tue",wed:"Wed", thu:"Thu",fri:"Fri",sat:"Sat"},narrow:{sun:"S",mon:"M",tue:"T",wed:"W",thu:"T",fri:"F",sat:"S"},wide:{sun:"Sunday",mon:"Monday",tue:"Tuesday",wed:"Wednesday",thu:"Thursday",fri:"Friday",sat:"Saturday"}},"stand-alone":{abbreviated:{sun:"Sun",mon:"Mon",tue:"Tue",wed:"Wed",thu:"Thu",fri:"Fri",sat:"Sat"},narrow:{sun:"S",mon:"M",tue:"T",wed:"W",thu:"T",fri:"F",sat:"S"},wide:{sun:"Sunday",mon:"Monday",tue:"Tuesday",wed:"Wednesday",thu:"Thursday",fri:"Friday",sat:"Saturday"}}},dayPeriods:{format:{wide:{am:"am", pm:"pm"}}},eras:{eraAbbr:{0:"BC",1:"AD"}},dateFormats:{full:"EEEE, d MMMM y","long":"d MMMM y",medium:"d MMM y","short":"d/MM/y"},timeFormats:{full:"h:mm:ss a zzzz","long":"h:mm:ss a z",medium:"h:mm:ss a","short":"h:mm a"},dateTimeFormats:{full:"{1} {0}","long":"{1} {0}",medium:"{1} {0}","short":"{1} {0}",availableFormats:{d:"d",Ed:"E d",Ehm:"E h:mm a",EHm:"E HH:mm",Ehms:"E h:mm:ss a",EHms:"E HH:mm:ss",Gy:"y G",GyMMM:"MMM y G",GyMMMd:"d MMM y G",GyMMMEd:"E, d MMM y G",h:"h 
a",H:"HH",hm:"h:mm a",Hm:"HH:mm", hms:"h:mm:ss a",Hms:"HH:mm:ss",M:"LL",Md:"dd/MM",MEd:"E dd/MM",MMdd:"dd/MM",MMM:"LLL",MMMd:"d MMM",MMMEd:"E d MMM",MMMMd:"d MMMM",ms:"mm:ss",y:"y",yM:"MM/y",yMd:"d/M/y",yMEd:"E, d/M/y",yMMM:"MMM y",yMMMd:"d MMM y",yMMMEd:"E, d MMM y",yMMMM:"MMMM y",yQQQ:"QQQ y",yQQQQ:"QQQQ y"}}}},fields:{era:{displayName:"Era"},year:{displayName:"Year","relative-type--1":"Last year","relative-type-0":"This year","relative-type-1":"Next year"},month:{displayName:"Month","relative-type--1":"Last month","relative-type-0":"This month", "relative-type-1":"Next month"},week:{displayName:"Week","relative-type--1":"Last week","relative-type-0":"This week","relative-type-1":"Next week"},day:{displayName:"Day","relative-type--1":"Yesterday","relative-type-0":"Today","relative-type-1":"Tomorrow"},weekday:{displayName:"Day of the Week"},dayperiod:{displayName:"am/pm"},hour:{displayName:"Hour"},minute:{displayName:"Minute"},second:{displayName:"Second"},zone:{displayName:"Time Zone"}}},numbers:{defaultNumberingSystem:"latn",otherNumberingSystems:{"native":"latn"}, "symbols-numberSystem-latn":{decimal:".",group:",",list:";",percentSign:"%",plusSign:"+",minusSign:"-",exponential:"E",perMille:"‰",infinity:"∞",nan:"NaN"},"decimalFormats-numberSystem-latn":{standard:"#,##0.###","long":{decimalFormat:{"1000-count-one":"0 thousand","1000-count-other":"0 thousand","10000-count-one":"00 thousand","10000-count-other":"00 thousand","100000-count-one":"000 thousand","100000-count-other":"000 thousand","1000000-count-one":"0 million","1000000-count-other":"0 million","10000000-count-one":"00 million", "10000000-count-other":"00 million","100000000-count-one":"000 million","100000000-count-other":"000 million","1000000000-count-one":"0 billion","1000000000-count-other":"0 billion","10000000000-count-one":"00 billion","10000000000-count-other":"00 billion","100000000000-count-one":"000 billion","100000000000-count-other":"000 billion","1000000000000-count-one":"0 
trillion","1000000000000-count-other":"0 trillion","10000000000000-count-one":"00 trillion","10000000000000-count-other":"00 trillion", "100000000000000-count-one":"000 trillion","100000000000000-count-other":"000 trillion"}},"short":{decimalFormat:{"1000-count-one":"0K","1000-count-other":"0K","10000-count-one":"00K","10000-count-other":"00K","100000-count-one":"000K","100000-count-other":"000K","1000000-count-one":"0M","1000000-count-other":"0M","10000000-count-one":"00M","10000000-count-other":"00M","100000000-count-one":"000M","100000000-count-other":"000M","1000000000-count-one":"0B","1000000000-count-other":"0B","10000000000-count-one":"00B", "10000000000-count-other":"00B","100000000000-count-one":"000B","100000000000-count-other":"000B","1000000000000-count-one":"0T","1000000000000-count-other":"0T","10000000000000-count-one":"00T","10000000000000-count-other":"00T","100000000000000-count-one":"000T","100000000000000-count-other":"000T"}}},"percentFormats-numberSystem-latn":{standard:"#,##0%"},"currencyFormats-numberSystem-latn":{standard:"¤#,##0.00","unitPattern-count-one":"{0} {1}","unitPattern-count-other":"{0} {1}"},currencies:{AUD:{displayName:"Australian Dollar", symbol:"$"},BRL:{displayName:"Brazilian Real",symbol:"R$"},CAD:{displayName:"Canadian Dollar",symbol:"CA$"},CHF:{displayName:"Swiss Franc",symbol:"CHF"},CNY:{displayName:"Chinese Yuan",symbol:"CN¥"},CZK:{displayName:"Czech Republic Koruna",symbol:"CZK"},DKK:{displayName:"Danish Krone",symbol:"DKK"},EUR:{displayName:"Euro",symbol:"€"},GBP:{displayName:"British Pound",symbol:"£"},HKD:{displayName:"Hong Kong Dollar",symbol:"HK$"},HUF:{displayName:"Hungarian Forint",symbol:"HUF"},IDR:{displayName:"Indonesian Rupiah", symbol:"IDR"},INR:{displayName:"Indian Rupee",symbol:"₹"},JPY:{displayName:"Japanese Yen",symbol:"¥"},KRW:{displayName:"South Korean Won",symbol:"₩"},LTL:{displayName:"Lithuanian Litas",symbol:"LTL"},LVL:{displayName:"Latvian Lats",symbol:"LVL"},MXN:{displayName:"Mexican 
Peso",symbol:"MX$"},NOK:{displayName:"Norwegian Krone",symbol:"NOK"},PLN:{displayName:"Polish Zloty",symbol:"PLN"},RUB:{displayName:"Russian Rouble",symbol:"RUB"},SAR:{displayName:"Saudi Riyal",symbol:"SAR"},SEK:{displayName:"Swedish Krona", symbol:"SEK"},THB:{displayName:"Thai Baht",symbol:"฿"},TRY:{displayName:"Turkish Lira",symbol:"TRY"},TWD:{displayName:"New Taiwan Dollar",symbol:"NT$"},USD:{displayName:"US Dollar",symbol:"US$"},ZAR:{displayName:"South African Rand",symbol:"ZAR"}}}}}});
mit
kpocza/thriot
Service/Misc/Thriot.TestHelpers/InMemoryStorage/InMemoryCloudStorageClientFactory.cs
370
using Thriot.Framework.Azure.DataAccess;
using Thriot.Framework.Azure.TableOperations;

namespace Thriot.TestHelpers.InMemoryStorage
{
    /// <summary>
    /// Test double for <see cref="ICloudStorageClientFactory"/> that hands out
    /// in-memory table-entity operations instead of a real Azure storage client.
    /// </summary>
    public class InMemoryCloudStorageClientFactory : ICloudStorageClientFactory
    {
        /// <summary>
        /// Creates a table-entity operation backed purely by memory.
        /// NOTE(review): a fresh <see cref="InMemoryTableEntityOperations"/> is
        /// constructed on every call, so state is not shared between calls —
        /// confirm callers do not expect a shared store from this factory.
        /// </summary>
        public ITableEntityOperation GetTableEntityOperation()
        {
            return new InMemoryTableEntityOperations();
        }
    }
}
mit
nico01f/z-pec
ZimbraServer/src/java/com/zimbra/cs/service/SpnegoAuthServlet.java
3840
/* * ***** BEGIN LICENSE BLOCK ***** * Zimbra Collaboration Suite Server * Copyright (C) 2011 Zimbra, Inc. * * The contents of this file are subject to the Zimbra Public License * Version 1.3 ("License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * http://www.zimbra.com/license. * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. * ***** END LICENSE BLOCK ***** */ package com.zimbra.cs.service; import java.io.IOException; import java.security.Principal; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.zimbra.common.service.ServiceException; import com.zimbra.common.util.ZimbraLog; import com.zimbra.cs.account.AuthToken; import com.zimbra.cs.account.AccountServiceException.AuthFailedServiceException; import com.zimbra.cs.account.Provisioning; import com.zimbra.cs.account.auth.AuthContext; import com.zimbra.cs.service.authenticator.SSOAuthenticator.ZimbraPrincipal; public class SpnegoAuthServlet extends SSOServlet { @Override public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { ZimbraLog.clearContext(); addRemoteIpToLoggingContext(req); addUAToLoggingContext(req); boolean isAdminRequest = false; boolean isFromZCO = false; try { isAdminRequest = isOnAdminPort(req); isFromZCO = isFromZCO(req); Principal principal = req.getUserPrincipal(); if (principal == null) { throw AuthFailedServiceException.AUTH_FAILED("no principal"); } if (!(principal instanceof ZimbraPrincipal)) { throw AuthFailedServiceException.AUTH_FAILED(principal.getName(), "not ZimbraPrincipal", (Throwable)null); } ZimbraPrincipal zimbraPrincipal = (ZimbraPrincipal)principal; AuthToken authToken = authorize(req, AuthContext.Protocol.spnego, zimbraPrincipal, isAdminRequest); if (isFromZCO) { 
setAuthTokenCookieAndReturn(req, resp, authToken); } else { setAuthTokenCookieAndRedirect(req, resp, zimbraPrincipal.getAccount(), authToken); } } catch (ServiceException e) { if (e instanceof AuthFailedServiceException) { AuthFailedServiceException afe = (AuthFailedServiceException)e; ZimbraLog.account.info("spnego auth failed: " + afe.getMessage() + afe.getReason(", %s")); } else { ZimbraLog.account.info("spnego auth failed: " + e.getMessage()); } ZimbraLog.account.debug("spnego auth failed", e); if (isFromZCO) { resp.sendError(HttpServletResponse.SC_FORBIDDEN, e.getMessage()); } else { try { redirectToErrorPage(req, resp, isAdminRequest, Provisioning.getInstance().getConfig().getSpnegoAuthErrorURL()); } catch (ServiceException se) { ZimbraLog.account.info("failed to redirect to error page: " + se.getMessage()); resp.sendError(HttpServletResponse.SC_FORBIDDEN, e.getMessage()); } } } } @Override public void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { doGet(req, resp); } @Override protected boolean redirectToRelativeURL() { return true; } }
mit
teetech/programming_interview_exposed-solved_problems
Chapter 4/Null or Cycle.java
281
/**
 * Detects a cycle in the linked list using Floyd's tortoise-and-hare scheme:
 * one pointer advances a single node per step while the other advances two,
 * so the two references can only ever coincide if the list loops back on
 * itself.
 *
 * @return {@code true} if the list starting at {@code head} contains a cycle,
 *         {@code false} for an empty or properly null-terminated list
 */
public boolean hasCycle(){
    if (head == null) {
        return false;
    }
    Node runner = head.next; // advances two nodes per iteration
    Node walker = head;      // advances one node per iteration
    while (runner != null && runner.next != null && walker != null) {
        if (runner == walker) {
            // The fast pointer lapped the slow one: a cycle must exist.
            return true;
        }
        runner = runner.next.next;
        walker = walker.next;
    }
    // The fast pointer reached the end of the list, so it cannot be cyclic.
    return false;
}
mit
l33tdaima/l33tdaima
p189m/rotate.js
951
/**
 * @param {number[]} nums
 * @param {number} k
 * @return {void} Do not return anything, modify nums in-place instead.
 */
var rotate = function (nums, k) {
  const n = nums.length;
  k = k % n;
  if (k === 0 || k === n) return;
  // Reverse nums[lo..hi] in place using an explicit temporary, which is
  // easier to audit than a simultaneous destructuring swap.
  const reverse = (lo, hi) => {
    while (lo < hi) {
      const tmp = nums[lo];
      nums[lo] = nums[hi];
      nums[hi] = tmp;
      lo += 1;
      hi -= 1;
    }
  };
  // Classic three-reversal right rotation:
  // flip the last k elements, flip the first n-k, then flip the whole array.
  reverse(n - k, n - 1);
  reverse(0, n - k - 1);
  reverse(0, n - 1);
};
// TEST
[
  [[1, 2, 3, 4, 5, 6, 7], 0, [1, 2, 3, 4, 5, 6, 7]],
  [[1, 2, 3, 4, 5, 6, 7], 1, [7, 1, 2, 3, 4, 5, 6]],
  [[1, 2, 3, 4, 5, 6, 7], 2, [6, 7, 1, 2, 3, 4, 5]],
  [[1, 2, 3, 4, 5, 6, 7], 3, [5, 6, 7, 1, 2, 3, 4]],
  [[1, 2, 3, 4, 5, 6, 7], 11, [4, 5, 6, 7, 1, 2, 3]],
  [[-1, -100, 3, 99], 2, [3, 99, -1, -100]],
].forEach(([nums, k, expected]) => {
  console.log('Rotate', nums, 'to the right by', k, 'steps ->');
  rotate(nums, k);
  console.log(' ', nums);
  for (let i = 0; i < nums.length; ++i) console.assert(nums[i] === expected[i]);
});
mit
mathiasbynens/unicode-data
6.0.0/blocks/Number-Forms-code-points.js
647
// All code points in the Number Forms block as per Unicode v6.0.0: [ 0x2150, 0x2151, 0x2152, 0x2153, 0x2154, 0x2155, 0x2156, 0x2157, 0x2158, 0x2159, 0x215A, 0x215B, 0x215C, 0x215D, 0x215E, 0x215F, 0x2160, 0x2161, 0x2162, 0x2163, 0x2164, 0x2165, 0x2166, 0x2167, 0x2168, 0x2169, 0x216A, 0x216B, 0x216C, 0x216D, 0x216E, 0x216F, 0x2170, 0x2171, 0x2172, 0x2173, 0x2174, 0x2175, 0x2176, 0x2177, 0x2178, 0x2179, 0x217A, 0x217B, 0x217C, 0x217D, 0x217E, 0x217F, 0x2180, 0x2181, 0x2182, 0x2183, 0x2184, 0x2185, 0x2186, 0x2187, 0x2188, 0x2189, 0x218A, 0x218B, 0x218C, 0x218D, 0x218E, 0x218F ];
mit
DazBoot/KamcordChatBot
python/src/dazbot.py
3027
import time from config import * from kamcordutils import * from messages import MessageManager USERNAME_INDEX = 0 MESSAGE_INDEX = 1 def compareMessages( firstMessage, secondMessage ): return firstMessage[ USERNAME_INDEX ] == secondMessage[ USERNAME_INDEX ] and firstMessage[ MESSAGE_INDEX ] == secondMessage[ MESSAGE_INDEX ] class DazBot( object ): def __init__( self, streamUrl ): #Create the driver self.driver = webdriver.Firefox() self.driver.get( streamUrl ) #Give the driver a chance to connect time.sleep( 2 ) self.authUsers = [] self.commands = {} self.messageManager = MessageManager( getMessages( self.driver ) ) self.messageLog = [] self.lastMessageScrape = [] def addAuthorizedUser( self, username ): if username not in self.authUsers: self.authUsers.append( username ) def addCommand( self, command, message ): if command not in self.commands.keys(): self.commands[ command ] = message def parseMessage( self, message ): if message.message in self.commands.keys() and message.username in self.authUsers: sendMessage( self.driver, self.commands[ message.message ] ) def findMessageMatchPoint( self, scrapedMessages ): #Find find the index to current message match point, start by iterating backwards through the log for logIdx in range( len( self.messageLog ) - 1, -1, -1 ): #Compare against the messages in the last grab for msgIdx in range( 0, len( scrapedMessages ) ): #If the username and message are the same, we have a tentative match if( compareMessages( self.messageLog[ logIdx ], scrapedMessages[ msgIdx ] ) ): return msgIdx return 0 def checkForMessages( self ): newMessages = self.messageManager.processMessages( getMessages( self.driver ) ) for msg in newMessages: self.parseMessage( msg ) if __name__ == "__main__": #Create the bot dazBot = DazBot( "https://www.kamcord.com/live/evolution590/chat" ) #Add all of the users and commands dazBot.addAuthorizedUser( "evolution590" ) dazBot.addAuthorizedUser( "DazBoot" ) dazBot.addAuthorizedUser( "Gravithon" ) 
dazBot.addCommand( "!test", "This is a test command!" ) dazBot.addCommand( "!commands", "GET OUT OF HERE!" ) #Connect and login time.sleep( 1 ) #Wait 1 second for the page to load before we continue sendMessage( dazBot.driver, "Test message from DazBot" ) time.sleep( 1 ) #Wait 1 second for the login prompt before we continue login( dazBot.driver, USERNAME, PASSWORD ) time.sleep( 1 ) #Wait 1 second for login to complete before we continue while( True ): dazBot.checkForMessages() time.sleep( 0.5 )
mit
georgiwe/TelerikAcademyExams
Web Services - 2014-09-23/Exam.Data/Repositories/IRepository.cs
387
namespace Exam.Data.Repositories
{
    using System.Linq;

    /// <summary>
    /// Generic data-access abstraction over a set of
    /// <typeparamref name="TEntity"/> instances.
    /// </summary>
    /// <typeparam name="TEntity">The persisted entity type.</typeparam>
    public interface IRepository<TEntity>
        where TEntity : class
    {
        /// <summary>Returns a queryable over all entities (deferred execution via IQueryable).</summary>
        IQueryable<TEntity> All();

        /// <summary>Retrieves a single entity by its key; behavior for a missing key depends on the implementation — confirm whether null is returned.</summary>
        TEntity Find(object id);

        /// <summary>Registers a new entity with the repository.</summary>
        void Add(TEntity entity);

        /// <summary>Registers the given entity as modified.</summary>
        void Update(TEntity entity);

        /// <summary>Removes the given entity and returns it.</summary>
        TEntity Delete(TEntity entity);

        /// <summary>Removes the entity with the given key and returns it.</summary>
        TEntity Delete(object id);

        /// <summary>Commits pending changes to the underlying store and returns a count (presumably affected entries — verify against the implementation).</summary>
        int SaveChanges();
    }
}
mit
plouc/nivo
packages/colors/src/schemes/diverging.ts
1421
// Re-exports d3-scale-chromatic's diverging palettes under nivo-friendly
// names, in two flavors: discrete schemes (arrays of k colors) and
// continuous interpolators (t in [0, 1] -> color).
import {
    schemeBrBG,
    interpolateBrBG,
    schemePRGn,
    interpolatePRGn,
    schemePiYG,
    interpolatePiYG,
    schemePuOr,
    interpolatePuOr,
    schemeRdBu,
    interpolateRdBu,
    schemeRdGy,
    interpolateRdGy,
    schemeRdYlBu,
    interpolateRdYlBu,
    schemeRdYlGn,
    interpolateRdYlGn,
    schemeSpectral,
    interpolateSpectral,
} from 'd3-scale-chromatic'

// Diverging color schemes support a size k ranging from 3 to 11
export const divergingColorSchemes = {
    brown_blueGreen: schemeBrBG,
    purpleRed_green: schemePRGn,
    pink_yellowGreen: schemePiYG,
    purple_orange: schemePuOr,
    red_blue: schemeRdBu,
    red_grey: schemeRdGy,
    red_yellow_blue: schemeRdYlBu,
    red_yellow_green: schemeRdYlGn,
    spectral: schemeSpectral,
}

// Union of the keys above, e.g. 'red_blue' | 'spectral' | ...
export type DivergingColorSchemeId = keyof typeof divergingColorSchemes

// Runtime list of the scheme ids (cast needed because Object.keys is string[]).
export const divergingColorSchemeIds = Object.keys(
    divergingColorSchemes
) as DivergingColorSchemeId[]

// Continuous counterparts of the schemes above; keys intentionally mirror
// divergingColorSchemes so the two maps stay interchangeable by id.
export const divergingColorInterpolators = {
    brown_blueGreen: interpolateBrBG,
    purpleRed_green: interpolatePRGn,
    pink_yellowGreen: interpolatePiYG,
    purple_orange: interpolatePuOr,
    red_blue: interpolateRdBu,
    red_grey: interpolateRdGy,
    red_yellow_blue: interpolateRdYlBu,
    red_yellow_green: interpolateRdYlGn,
    spectral: interpolateSpectral,
}

// Union of the interpolator keys above.
export type DivergingColorInterpolatorId = keyof typeof divergingColorInterpolators
mit
debatanu-thakur/music-search
webpack.config.js
3596
require('babel-polyfill'); const path = require('path'); const CopyWebpackPlugin = require('copy-webpack-plugin'); const webpack = require('webpack'); const ExtractTextPlugin = require('extract-text-webpack-plugin'); const HtmlWebpackPlugin = require('html-webpack-plugin'); const DashboardPlugin = require('webpack-dashboard/plugin'); const HappyPack = require('happypack'); const happyThreadCount = 4; const config = { context: path.resolve(__dirname, './src'), entry: { app: './bootstrap.js', }, output: { path: path.resolve(__dirname, './dist'), filename: '[name].bundle.js', }, resolve: { modules: [ 'node_modules', ], }, devtool: 'eval-source-map', devServer: { contentBase: path.resolve(__dirname, './src'), port: 8700, inline: true, hot: true, watchContentBase: true, open: true, }, plugins: [ new DashboardPlugin(), new ExtractTextPlugin({ filename: '[name].css', disable: false, allChunks: true, }), new HtmlWebpackPlugin({ filename: 'index.html', template: 'index.html', }), new webpack.HotModuleReplacementPlugin(), new webpack.optimize.CommonsChunkPlugin({ name: 'vendor', minChunks: function(mod, count) { // Don't include things under '/src' folder return mod.resource && mod.resource .indexOf(path.resolve(__dirname, 'src')) === -1; }, }), new webpack.ProvidePlugin({ '$': 'jquery', 'jQuery': 'jquery', 'window.jQuery': 'jquery', }), new HappyPack({ id: 'eslint', loaders: [ {loader: 'eslint-loader'}, ], threads: happyThreadCount, }), new HappyPack({ id: 'babel', loaders: [ {loader: 'babel-loader', options: {presets: ['es2015']}, }, ], threads: happyThreadCount, }), new CopyWebpackPlugin([ {from: 'assets', to: 'assets'}, ]), ], module: { rules: [ { test: /\.json$/, loader: 'json-loader', }, { test: /\.html$/, loader: 'html-loader', }, { test: /\.js$/, exclude: /node_modules/, enforce: 'pre', use: [{loader: 'happypack/loader?id=eslint'}], }, { test: /\.js/, exclude: /node_modules/, use: [{loader: 'happypack/loader?id=babel'}], }, { test: /\.scss$/, use: 
ExtractTextPlugin.extract( { fallback: 'style-loader', loader: [{loader: 'css-loader'}, {loader: 'sass-loader', options: {modules: true}}], }), }, { test: /\.(jpg|png|gif|svg)$/, use: [ { loader: 'url-loader', query: { limit: 2000, name: '[name].[ext]', }, }, ], }, { test: /\.(ico|woff|eot|woff2|ttf)$/, use: [ { loader: 'url-loader', query: { limit: 1, name: '[name].[ext]', }, }, ], }, ], }, }; module.exports = config;
mit
osulp/oe-ng2-seed
src/client/app/shared/components/share_link/share.link.component.ts
12471
import {Component, Input, Output, EventEmitter} from '@angular/core'; import 'rxjs/Rx'; declare var $: any; declare var toastr: any; @Component({ moduleId: module.id, selector: 'share-link', templateUrl: 'share.link.component.html', styleUrls: ['share.link.component.css'] }) export class ShareLinkComponent { @Input() renderTo: any;//Coming from detail or explore @Output() onDownloadClick = new EventEmitter(); showShare: boolean = false; isMobile: boolean = $(window).width() < 770; fileName: string = ''; downloadUri: string = ''; url_api_key: string = 'AIzaSyDwjtLPJ9fvJ1dhAtguCCKijs-ZIEe1aX8'; print() { console.log('print?', window); window.print(); } shareHandler(shareType: any) { //this._bitly.insertUrl(window.location.href).subscribe((result: any) => { // console.log('shortened url!', result); //}); if (shareType === 'copy') { let url = window.location.href; this.copyTextToClipboard(url); } else { var shareScope = this; $.ajax({ url: 'https://www.googleapis.com/urlshortener/v1/url?key=' + this.url_api_key, type: 'POST', contentType: 'application/json; charset=utf-8', data: '{ longUrl: "' + window.location.href + '"}', success: function (response: any) { console.log('url shortened!', response); shareScope.processShareRequest(shareType, response.id); }, error: function (err: any) { console.log('failed to get short url'); shareScope.processShareRequest(shareType); } }); } } processShareRequest(shareType: any, url?: any) { url = url ? 
url : encodeURI(window.location.href); switch (shareType) { case 'email': let body = encodeURIComponent('Check out this tool!\r\n\r\n' + url); window.location.href = 'mailto:?Subject=Communities Reporter Tool!&body=' + body; break; case 'facebook': //$('meta[name=title]').attr('content', 'Just checking'); window.open('https://www.facebook.com/sharer.php?u=' + encodeURI(window.location.href), '_blank'); break; case 'google': window.open('https://plus.google.com/share?url=' + url, '_blank'); break; case 'linkedin': window.open('https://www.linkedin.com/shareArticle?mini=true&amp;url=' + url, '_blank'); break; case 'twitter': window.open('https://twitter.com/share?url=' + url + ';text=Communities%20Reporter%20Tool&amp;hashtags=communitiesreportertool', '_blank'); break; case 'pinterest': var e = document.createElement('script'); e.setAttribute('type', 'text/javascript'); e.setAttribute('charset', 'UTF-8'); e.setAttribute('src', 'https://assets.pinterest.com/js/pinmarklet.js?r=' + Math.random() * 99999999); document.body.appendChild(e); break; default: break; } } downloadClickHandler() { console.log('download click,share'); toastr['info']('Pulling data for download.', 'Please wait...'); this.onDownloadClick.emit(true); } ConvertToCSV(objArray: any, years: any[], batch?: boolean, isLast?: boolean) { console.log('data to convert to csv', objArray); var data = objArray.Data; var Metadata = objArray.Metadata; var reportYears = objArray.Years.map((year: any) => year.Year); var str = ''; var row = ''; var line = ''; var counter = 0; var columns: any; var colsToKeep = ['community', 'Variable', 'geoid', 'geoType']; //Metadata for indicator str += Metadata[0].Dashboard_Chart_Title !== null ? Metadata[0].Dashboard_Chart_Title : Metadata[0].Variable; str += '\r\n'; str += Metadata[0]['Y-Axis'] !== null ? Metadata[0]['Y-Axis'].replace('$', 'Dollars ') + '\r\n' : ''; str += Metadata[0].Description_v4 !== null ? 
Metadata[0].Description_v4.replace(/\<br\/>/g, '') + '\r\n' : ''; str += Metadata[0].Formula !== null ? Metadata[0].Formula : ''; str += '\r\r\n'; console.log('data to convert to csv', reportYears,years); if (data.length > 0) { data.some((row: any) => { console.log('data row', row,Object.keys(row)); columns = Object.keys(row) .sort(this.sortAlphaNumeric) .filter((colsA: any) => { if (reportYears.length > 0) { return reportYears.indexOf(colsA .replace('_MOE', '') .replace('_D', '') .replace('_N', '') .replace('_MOE_D', '') .replace('_MOE_N', '') ) !== -1 || colsToKeep.indexOf(colsA) !== -1; } else { return true; } }) .filter((colsB: any) => { if (years.length > 0) { return years.indexOf(colsB .replace('_MOE', '') .replace('_D', '') .replace('_N', '') .replace('_MOE_D', '') .replace('_MOE_N', '') ) !== -1 || colsToKeep.indexOf(colsB) !== -1; } else { return true; } }); return counter === 0; }); columns.forEach((column: any) => { //table column headers row += (column === 'Variable' ? 'indicator' : column) + ','; }); row = row.slice(0, -1); str += row + '\r\n'; data.forEach((row: any) => { line = ''; columns.forEach((key: any) => { line += line !== '' ? ',' : ''; console.log('row key', row, key); if (row[key]) { let val = row[key].toString(); if (val !== null) { if (val.match(/^[-+]?[1-9]\.[0-9]+e[-]?[1-9][0-9]*$/)) { let precision = this.getPrecision(val); val = parseFloat((+val).toFixed(precision)); } } line += val === null ? '' : val.indexOf(',') !== -1 ? '\"' + val + '\"' : val; } }); str += line + '\r\n'; }); let showDateTimeDownload = batch ? 
batch && isLast : true; if (showDateTimeDownload) { var currentDate = new Date(); var day = currentDate.getDate(); var month = currentDate.getMonth() + 1; var year = currentDate.getFullYear(); str += '\r\n\Downloaded from the Communities Reporter Tool on ' + month + '/' + day + '/' + year + '\r\n'; str += window.location.href; } else { str += '\r\n'; str += '*****************************************************************************************\r\n'; } return str; } else { return ''; } } getPrecision(sval: any) { var arr = new Array(); // Get the exponent after 'e', make it absolute. arr = sval.split('e'); arr = arr[0].split('.'); var precision = arr[1].length; return parseInt(precision); } sortAlphaNumeric(a: any, b: any) { var aA = a.replace(/[^a-zA-Z]/g, '').replace('MOE', ''); var bA = b.replace(/[^a-zA-Z]/g, '').replace('MOE', ''); if (a === 'community') { return -1; } else if (aA === bA) { var aN = parseInt(a.replace(/[^0-9]/g, ''), 10); var bN = parseInt(b.replace(/[^0-9]/g, ''), 10); return aN === bN ? 0 : aN > bN ? 1 : -1; } else { return aA > bA ? -1 : 1; } } copyTextToClipboard(text: any) { var textArea = document.createElement('textarea'); // // *** This styling is an extra step which is likely not required. *** // // Why is it here? To ensure: // 1. the element is able to have focus and selection. // 2. if element was to flash render it has minimal visual impact. // 3. less flakyness with selection and copying which **might** occur if // the textarea element is not visible. // // The likelihood is the element won't even render, not even a flash, // so some of these are just precautions. However in IE the element // is visible whilst the popup box asking the user for permission for // the web page to copy to the clipboard. // // Place in top-left corner of screen regardless of scroll position. textArea.style.position = 'fixed'; textArea.style.top = '0'; textArea.style.left = '0'; // Ensure it has a small width and height. 
Setting to 1px / 1em // doesn't work as this gives a negative w/h on some browsers. textArea.style.width = '2em'; textArea.style.height = '2em'; // We don't need padding, reducing the size if it does flash render. textArea.style.padding = '0'; // Clean up any borders. textArea.style.border = 'none'; textArea.style.outline = 'none'; textArea.style.boxShadow = 'none'; // Avoid flash of white box if rendered for any reason. textArea.style.background = 'transparent'; textArea.value = text; document.body.appendChild(textArea); textArea.select(); try { var successful = document.execCommand('copy'); var msg = successful ? 'successful' : 'unsuccessful'; console.log('Copying text command was ' + msg); } catch (err) { console.log('Oops, unable to copy'); } document.body.removeChild(textArea); } download(JSONData: any, years: any, places: any[], indicator: string, batch?: boolean) { //console.log('data to convert to csv', JSONData, years, places); var placeNames = places.map((p: any) => p.Name).toString().replace(/\,/g, '').replace(/\ /g, ''); var csvData = batch ? JSONData : this.ConvertToCSV(JSONData, years); //var a = document.createElement('a'); //a.setAttribute('style', 'display:none;'); //document.body.appendChild(a); //var blob = new Blob([csvData], { type: 'text/csv' }); //var url = window.URL.createObjectURL(blob); //a.href = url; //a.id = 'crt_download' + Math.random(); //a.download = (batch ? 'CRTDownload' : indicator.replace(/\ /g, '')) + placeNames + '.csv'; //a.click(); //toastr.clear(); //document.body.removeChild(a); let filename = (batch ? 
'CRTDownload' : indicator.replace(/\ /g, '')) + placeNames + '.csv'; var blob = new Blob([csvData], { type: 'text/csv;charset=utf-8;' }); if (window.navigator.msSaveOrOpenBlob) { // IE hack; see https://msdn.microsoft.com/en-us/library/ie/hh779016.aspx window.navigator.msSaveBlob(blob, filename); } else { var a = window.document.createElement('a'); a.href = window.URL.createObjectURL(blob); a.download = filename; document.body.appendChild(a); a.click(); // IE: "Access is denied"; see: https://connect.microsoft.com/IE/feedback/details/797361/ie-10-treats-blob-url-as-cross-origin-and-denies-access toastr.clear(); document.body.removeChild(a); } } }
mit
mbouclas/mcms-node-framework
App/projectServiceProvider.js
1861
module.exports = (function(App){ var express = require('express'); var miniApp = express(); var path = require('path'); var Command = App.Command(App); var lo = require('lodash'); function projectServiceProvider(){ this.packageName = 'project'; this.services = {}; this.controllers = {}; this.viewsDir = __dirname + '/views'; if (App.CLI){ var commandFolder = path.join(__dirname , 'Commands/'); Command.registerCommand([ ]); return; } App.frontPageLayout = require(App.Config.baseDir + 'App/Storage/frontPageLayout.json'); App.Controllers[this.packageName] = App.Helpers.services.loadService(__dirname + '/Controllers',true,this); App.viewEngine.registerTemplates(this.viewsDir, miniApp); miniApp.set('views', this.viewsDir); App.viewEngine.registerFilterDir(__dirname + '/viewFilters/' + App.Config.view.default); App.Services['mcmsNodeMenus'].Menu.addToCache(function(err,results){ }); miniApp.use(function(req,res,next){ var locale = req.session.locale || App.Config.app.locale; res.locals.Lang = App.Lang; res.locals.Config = App.Config; res.locals.Translations = { userPanel : App.Lang.translations[locale].userPanel }; res.locals.lo = lo; res.locals.Menu = App.Cache.Menu; next(); }); App.server.use(miniApp); require('./routes')(App, miniApp,this); } function ajaxUser(user){ if (!user){ return {}; } var ret = lo.clone(user); delete ret.username; delete ret.uid; delete ret.userClass; return ret; } return new projectServiceProvider(); });
mit
KaySchneider/meanio4
lib/core_modules/server/ExpressEngine.jssave.js
6667
/** * patched only the line 63 to 74 * the original is inside the comment. Mean Core Version 4.4 * @type {exports} */ var express = require('express'), session = require('express-session'), mongoStore = require('connect-mongo')(session), cookieParser = require('cookie-parser'), expressValidator = require('express-validator'), bodyParser = require('body-parser'), methodOverride = require('method-override'), http = require('http'), https = require('https'), fs = require('fs'), ServerEngine = require('./engine'), Grid = require('gridfs-stream'), errorHandler = require('errorhandler'), passport = require('passport'); function ExpressEngine(){ ServerEngine.call(this); this.app = null; this.db = null; this.mean = null; } ExpressEngine.prototype = Object.create(ServerEngine,{constructor:{ value: ExpressEngine, configurable: false, writable: false, enumerable: false }}); ExpressEngine.prototype.destroy = function(){ this.mean = null; this.db = null; this.app = null; ServerEngine.prototype.destroy.call(this); }; ExpressEngine.prototype.name = function(){ return 'express'; }; ExpressEngine.prototype.initApp = function(){ var config = this.mean.config.clean; this.app.use(function(req,res,next){ res.setHeader('X-Powered-By','Empplan-Server'); next(); }); // The cookieParser should be above session this.app.use(cookieParser()); // Request body parsing middleware should be above methodOverride this.app.use(expressValidator()); /** * TODO: the mean.io people set here the init for json and urlendoced without any limit * so it falls back to the limit. * When you try to override this inside the expresss settings. There is every request an instance * with an small limit. 
* Because, we add than an new Middleware with the bodyParser * this.app.use(bodyParser.json()); this.app.use(bodyParser.urlencoded({ extended: true })); **/ if(typeof(config.bodyParser) !== 'undefined') { var bconfig = config.bodyParser; this.app.use(bodyParser.json(bconfig.json)); this.app.use(bodyParser.urlencoded(bconfig.urlencoded)); this.app.use(bodyParser.raw(bconfig.raw)); } else { //TODO:clear the code this.app.use(bodyParser.json()); this.app.use(bodyParser.urlencoded({ extended: true })); } this.app.use(methodOverride()); // Express/Mongo session storage this.app.use(session({ secret: config.sessionSecret, store: new mongoStore({ db: this.db.connection.db, collection: config.sessionCollection }), cookie: config.sessionCookie, name: config.sessionName, resave: true, saveUninitialized: true })); this.app.use(passport.initialize()); this.app.use(passport.session()); this.mean.register('passport',passport); require(process.cwd() + '/config/express')(this.app, this.db); return this.app; }; ExpressEngine.prototype.beginBootstrap = function(meanioinstance, database){ this.mean = meanioinstance; this.db = database.connection; var config = meanioinstance.config.clean; // Express settings var app = express(); app.useStatic = function(a,b){ if('undefined' === typeof b){ this.use(express.static(a)); }else{ this.use(a,express.static(b)); } }; this.app = app; // Register app dependency; meanioinstance.register('app', this.initApp.bind(this)); var gfs = new Grid(this.db.connection.db, this.db.mongo); function themeHandler(req, res) { res.setHeader('content-type', 'text/css'); gfs.files.findOne({ filename: 'theme.css' }, function(err, file) { if (!file) { fs.createReadStream(config.root + '/bower_components/bootstrap/dist/css/bootstrap.css').pipe(res); } else { // streaming to gridfs var readstream = gfs.createReadStream({ filename: 'theme.css' }); //error handling, e.g. 
file does not exist readstream.on('error', function(err) { console.log('An error occurred!', err.message); throw err; }); readstream.pipe(res); } }); } // We override this file to allow us to swap themes // We keep the same public path so we can make use of the bootstrap assets app.get('/bower_components/bootstrap/dist/css/bootstrap.css', themeHandler); // Listen on http.port (or port as fallback for old configs) var httpServer = http.createServer(app); meanioinstance.register('http', httpServer); httpServer.listen(config.http ? config.http.port : config.port, config.hostname); if (config.https && config.https.port) { var httpsOptions = { key: fs.readFileSync(config.https.ssl.key), cert: fs.readFileSync(config.https.ssl.cert) }; var httpsServer = https.createServer(httpsOptions, app); meanioinstance.register('https', httpsServer); httpsServer.listen(config.https.port); } meanioinstance.name = config.app.name; meanioinstance.app = app; meanioinstance.menus = new (meanioinstance.Menus)(); }; function finalRouteHandler(req, res, next) { if (!this.template) return next(); this.template(req, res, next); } function NotFoundHandler(err, req, res, next) { // Treat as 404 if (~err.message.indexOf('not found')) return next(); // Log it console.error(err.stack); // Error page res.status(500).render('500', { error: err.stack }); } function FourOFourHandler(req, res) { res.status(404).render('404', { url: req.originalUrl, error: 'Not found' }); } ExpressEngine.prototype.endBootstrap = function(callback){ // We are going to catch everything else here this.app.route('*').get(finalRouteHandler.bind(this)); // Assume "not found" in the error msgs is a 404. this is somewhat // silly, but valid, you can do whatever you like, set properties, // use instanceof etc. 
this.app.use(NotFoundHandler); // Assume 404 since no middleware responded this.app.use(FourOFourHandler); // Error handler - has to be last if (process.env.NODE_ENV === 'development') { this.app.use(errorHandler()); } callback(this); }; module.exports = ExpressEngine;
mit
convoyinc/apollo-client
src/optimistic-data/store.ts
1986
import { ApolloAction, isMutationInitAction, isMutationResultAction, isMutationErrorAction, } from '../actions'; import { data, NormalizedCache, } from '../data/store'; import { getDataWithOptimisticResults, Store, } from '../store'; import assign = require('lodash.assign'); import pick = require('lodash.pick'); // a stack of patches of new or changed documents export type OptimisticStore = { mutationId: string, data: NormalizedCache, }[]; const optimisticDefaultState = []; export function optimistic( previousState = optimisticDefaultState, action, store, config ): OptimisticStore { if (isMutationInitAction(action) && action.optimisticResponse) { const fakeMutationResultAction = { type: 'APOLLO_MUTATION_RESULT', result: { data: action.optimisticResponse }, mutationId: action.mutationId, resultBehaviors: action.resultBehaviors, } as ApolloAction; const fakeStore = assign({}, store, { optimistic: previousState }) as Store; const optimisticData = getDataWithOptimisticResults(fakeStore); const fakeDataResultState = data( optimisticData, fakeMutationResultAction, store.queries, store.mutations, config ); const changedKeys = Object.keys(fakeDataResultState).filter( key => optimisticData[key] !== fakeDataResultState[key]); const patch = pick(fakeDataResultState, changedKeys); const optimisticState = { data: patch, mutationId: action.mutationId, }; const newState = [...previousState, optimisticState]; return newState; } else if ((isMutationErrorAction(action) || isMutationResultAction(action)) && previousState.some(change => change.mutationId === action.mutationId)) { // throw away optimistic changes of that particular mutation const newState = previousState.filter( (change) => change.mutationId !== action.mutationId); return newState; } return previousState; }
mit
yushroom/FishEngine
Engine/Include/FishEngine/Generated/Enum_WrapMode.hpp
1884
#pragma once #include <ReflectEnum.hpp> #include <WrapMode.hpp> namespace FishEngine { /************************************************** * FishEngine::WrapMode **************************************************/ // enum count template<> constexpr int EnumCount<FishEngine::WrapMode>() { return 5; } // string array static const char* WrapModeStrings[] = { "Once", "Loop", "PingPong", "Default", "ClampForever" }; // cstring array template<> inline constexpr const char** EnumToCStringArray<FishEngine::WrapMode>() { return WrapModeStrings; } // index to enum template<> inline FishEngine::WrapMode ToEnum<FishEngine::WrapMode>(const int index) { switch (index) { case 0: return FishEngine::WrapMode::Once; break; case 1: return FishEngine::WrapMode::Loop; break; case 2: return FishEngine::WrapMode::PingPong; break; case 3: return FishEngine::WrapMode::Default; break; case 4: return FishEngine::WrapMode::ClampForever; break; default: abort(); break; } } // enum to index template<> inline int EnumToIndex<FishEngine::WrapMode>(FishEngine::WrapMode e) { switch (e) { case FishEngine::WrapMode::Once: return 0; break; case FishEngine::WrapMode::Loop: return 1; break; case FishEngine::WrapMode::PingPong: return 2; break; case FishEngine::WrapMode::Default: return 3; break; case FishEngine::WrapMode::ClampForever: return 4; break; default: abort(); break; } } // string to enum template<> inline FishEngine::WrapMode ToEnum<FishEngine::WrapMode>(const std::string& s) { if (s == "Once") return FishEngine::WrapMode::Once; if (s == "Loop") return FishEngine::WrapMode::Loop; if (s == "PingPong") return FishEngine::WrapMode::PingPong; if (s == "Default") return FishEngine::WrapMode::Default; if (s == "ClampForever") return FishEngine::WrapMode::ClampForever; abort(); } } // namespace FishEngine
mit
joshlevy89/the-book-thing
app/src/components/MyTrade.js
1881
import React, { Component } from 'react'; import Book from './Book'; import { connect } from 'react-redux'; import { reject_trade, accept_trade } from '../actions'; import { Button } from 'react-bootstrap'; class MyTrade extends Component { render() { const { mytrade, mybooks, reject_trade, accept_trade, tradeType } = this.props; var firstItem = mytrade.trade.trade_info.firstItem; var secondItem = mytrade.trade.trade_info.secondItem; // always list book in your library first... var isFirstItemInLibrary = mybooks.filter(book=> { return book._id===firstItem }).length > 0; var firstShow; var secondShow; if (isFirstItemInLibrary){ firstShow = firstItem; secondShow = secondItem; } else { firstShow = secondItem; secondShow = firstItem; } return ( <div> <Book key={firstShow} entryId={firstShow} overlayType={'standard'} {...this.props}/> <img style={{'text-align':'center'}} src='https://image.freepik.com/free-icon/exchange-arrows_318-42193.png' width='20px' height='20px'/> <Book key={secondShow} entryId={secondShow} overlayType={'standard'} {...this.props}/> {tradeType==='offer' ? <span> <Button bsStyle="success" onClick={()=>reject_trade(mytrade)}>Delete</Button> <Button bsStyle="danger" onClick={()=>accept_trade(mytrade)}>Accept</Button> </span> :<span></span>} </div> ) } } function mapStateToProps(state){ return { mybooks: state.user.mybooks, books: state.books } } MyTrade = connect( mapStateToProps, { reject_trade, accept_trade } )(MyTrade) export default MyTrade
mit
devxkh/FrankE
src/include/XEngine/Controller/WindowManager.hpp
1042
#ifndef __WINDOWMANAGER_HPP__ #define __WINDOWMANAGER_HPP__ // //#include <sfml/Window/Window.hpp> //#include <sfml/System/String.hpp> #include <memory> namespace XE { class Scene; class Controller; class XEngine; //class WindowManager //{ //public: // WindowManager(); // ~WindowManager(); // //void UpdateViews(); // sf::Window* createWindow(sf::VideoMode mode, const sf::String& title, sf::Uint32 style = sf::Style::Default); // sf::Window* createWindow(sf::WindowHandle hwnd); // // sf::Window* getWindow(sf::Uint8 id) { return _window; } // //void createWorkspace(Controller& controller); // //inline sf::Window* getWindow() { return mWindow; } //protected: //private: // //obsolete // //void createViewport(Controller& controller, Ogre::RenderWindow* renderWindow = nullptr, Ogre::RenderTarget* renderTarget = nullptr); // //Ogre::TexturePtr mRtt_texture; // XEngine* mEngine; // sf::Window* _window; // //Ogre::RenderWindow* mRenderWindow; //}; } // namespace XE #endif // __WINDOWMANAGER_HPP__
mit
stijnster/appsignal-ruby
lib/appsignal/hooks/puma.rb
916
module Appsignal class Hooks class PumaHook < Appsignal::Hooks::Hook register :puma def dependencies_present? defined?(::Puma) && ::Puma.respond_to?(:cli_config) && ::Puma.cli_config end def install ::Puma.cli_config.options[:before_worker_boot] ||= [] ::Puma.cli_config.options[:before_worker_boot] << Proc.new do |id| Appsignal.forked end ::Puma.cli_config.options[:before_worker_shutdown] ||= [] ::Puma.cli_config.options[:before_worker_shutdown] << Proc.new do |id| Appsignal.stop('puma before_worker_shutdown') end ::Puma::Cluster.class_eval do alias stop_workers_without_appsignal stop_workers def stop_workers Appsignal.stop('puma cluster') stop_workers_without_appsignal end end end end end end
mit
fpommerening/PostSharpSamples
UiManipulation/FP.PostSharpSamples.UI.Controls/MainForm.cs
594
using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Forms; namespace FP.PostSharpSamples.UI.Controls { public partial class MainForm : Form { public MainForm() { InitializeComponent(); } private void tsmiOpen_Click(object sender, EventArgs e) { UserForm frm = new UserForm(); frm.MdiParent = this; frm.Show(); } } }
mit
MuteG/gpmagic
GPMagicBase/Model/Database/CardAbilitie.cs
1937
using System; using System.Collections.Generic; using System.Text; namespace GPSoft.Games.GPMagic.GPMagicBase.Model.Database { public sealed class CardAbilitie : AbstractTableInstance { public CardAbilitie() { this.tableName = "ListAbilities"; } /// <summary> /// 新生成一个本实例对应的表结构实例 /// </summary> /// <returns></returns> public override object NewDataInstance() { return new ListAbilities(); } } /// <summary> /// 卡牌异能 /// </summary> public sealed class ListAbilities { int abilitiesID; string abilitiesName = string.Empty; string reminderText = string.Empty; /// <summary> /// 获取或者设置异能说明文字 /// </summary> public string ReminderText { get { return reminderText; } set { reminderText = value; } } /// <summary> /// 获取或者设置异能名称 /// </summary> [ColumnInfo(IsDisplayKeyWord = true)] public string AbilitiesName { get { return abilitiesName; } set { abilitiesName = value; } } private string m_EnglishName = string.Empty; /// <summary> /// 获取或者设置异能英文名称 /// </summary> public string EnglishName { get { return m_EnglishName; } set { m_EnglishName = value; } } /// <summary> /// 获取或者设置异能编号(主键、自增) /// </summary> [ColumnInfo(IsPrimaryKey = true, IsAutoIncrement = true)] public int AbilitiesID { get { return abilitiesID; } set { abilitiesID = value; } } } }
mit
vikerman/angular
packages/common/locales/fa-AF.ts
2230
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ // THIS CODE IS GENERATED - DO NOT MODIFY // See angular/tools/gulp-tasks/cldr/extract.js const u = undefined; function plural(n: number): number { let i = Math.floor(Math.abs(n)); if (i === 0 || n === 1) return 1; return 5; } export default [ 'fa-AF', [['ق', 'ب'], ['ق.ظ.', 'ب.ظ.'], ['قبل\u200cازظهر', 'بعدازظهر']], u, [ ['ی', 'د', 'س', 'چ', 'پ', 'ج', 'ش'], [ 'یکشنبه', 'دوشنبه', 'سه\u200cشنبه', 'چهارشنبه', 'پنجشنبه', 'جمعه', 'شنبه' ], u, ['۱ش', '۲ش', '۳ش', '۴ش', '۵ش', 'ج', 'ش'] ], u, [ ['ج', 'ف', 'م', 'ا', 'م', 'ج', 'ج', 'ا', 'س', 'ا', 'ن', 'د'], [ 'جنو', 'فبروری', 'مارچ', 'اپریل', 'می', 'جون', 'جول', 'اگست', 'سپتمبر', 'اکتوبر', 'نومبر', 'دسم' ], [ 'جنوری', 'فبروری', 'مارچ', 'اپریل', 'می', 'جون', 'جولای', 'اگست', 'سپتمبر', 'اکتوبر', 'نومبر', 'دسمبر' ] ], [ ['ج', 'ف', 'م', 'ا', 'م', 'ج', 'ج', 'ا', 'س', 'ا', 'ن', 'د'], [ 'جنوری', 'فبروری', 'مارچ', 'اپریل', 'می', 'جون', 'جولای', 'اگست', 'سپتمبر', 'اکتوبر', 'نومبر', 'دسمبر' ], u ], [['ق', 'م'], ['ق.م.', 'م.'], ['قبل از میلاد', 'میلادی']], 6, [4, 5], ['y/M/d', 'd MMM y', 'd MMMM y', 'EEEE d MMMM y'], ['H:mm', 'H:mm:ss', 'H:mm:ss (z)', 'H:mm:ss (zzzz)'], ['{1}،\u200f {0}', u, '{1}، ساعت {0}', u], ['.', ',', ';', '%', '\u200e+', '\u200e−', 'E', '×', '‰', '∞', 'ناعدد', ':'], ['#,##0.###', '#,##0%', '¤ #,##0.00', '#E0'], '؋', 'افغانی افغانستان', { 'AFN': ['؋'], 'CAD': ['$CA', '$'], 'CNY': ['¥CN', '¥'], 'HKD': ['$HK', '$'], 'IRR': ['ریال'], 'MXN': ['$MX', '$'], 'NZD': ['$NZ', '$'], 'THB': ['฿'], 'XCD': ['$EC', '$'] }, 'rtl', plural ];
mit
alexandrucucu/symfony-blog
src/AppBundle/Form/PostType.php
4483
<?php namespace AppBundle\Form; use Symfony\Component\Form\AbstractType; use Symfony\Component\Form\FormBuilderInterface; use Symfony\Component\OptionsResolver\OptionsResolver; use Symfony\Component\Form\Extension\Core\Type\TextType; use Symfony\Component\Form\Extension\Core\Type\TextareaType; use Symfony\Component\Form\Extension\Core\Type\ChoiceType; use Symfony\Component\Form\Extension\Core\Type\FileType; use Symfony\Component\Form\Extension\Core\Type\DateTimeType; use Symfony\Component\Form\Extension\Core\Type\CheckboxType; use Symfony\Bridge\Doctrine\Form\Type\EntityType; use Doctrine\ORM\EntityRepository; //use AppBundle\Repository\CategoryRepository; //use AppBundle\Entity\Category; class PostType extends AbstractType { /** * {@inheritdoc} */ public function buildForm(FormBuilderInterface $builder, array $options) { $builder ->add('title', TextType::class, array( 'label' => 'Titulo:', 'required' => 'required' )) ->add('content', TextareaType::class, array( 'label' => 'Contenido:', 'required' => 'required', 'attr' => array('cols' => '10', 'rows' => '10') )) ->add('status', ChoiceType::class, array( 'label' => 'Estado:', 'required' => 'required', 'choices' => array( 'Publicado' => 'public', 'Privado' => 'private', 'Borrador' => 'draft' ) )) ->add('image', FileType::class, array( 'label' => 'Imagen destacada:', 'data_class' => null, 'required' => false )) ->add('createdAt', DateTimeType::class, array( 'label' => 'Fecha:', 'required' => 'required', 'widget' => 'single_text', // 'time_widget' => 'single_text', 'format' => 'dd/MM/yyyy HH:mm:ss', // 'model_timezone' => 'Europe/Madrid', // 'view_timezone' => 'Europe/Madrid', // 'attr' => ['class' => 'datetimepicker'], 'attr' => array( 'class' => 'datetimepicker' ), 'html5' => false )) ->add('comments_enabled', CheckboxType::class, array( 'label' => 'Comentarios habilitados', 'required' => false )) ->add('category', EntityType::class, array( 'class' => 'AppBundle:Category', 'query_builder' => function(EntityRepository 
$repository) { return $repository->createQueryBuilder('c')->orderBy('c.name', 'ASC'); }, // 'class' => Category::class, // 'query_builder' => function(CategoryRepository $repo) { // return $repo->createAlphabeticalQueryBuilder(); // }, 'label' => 'Categorias:', // 'choice_label' => 'name' 'placeholder' => 'Elige una categoría', 'empty_data' => null, 'required' => false )) ->add('tags', EntityType::class, array( 'class' => 'AppBundle:Tag', 'query_builder' => function(EntityRepository $repository) { return $repository->createQueryBuilder('t')->orderBy('t.id', 'DESC'); }, 'label' => 'Etiquetas:', 'choice_label' => 'name', 'multiple' => true, // 'expanded' => true, 'attr' => array( 'class' => 'tags' ), 'required' => false )) ->add('newTags', TextType::class, array( 'mapped' => false, 'required' => false, 'label' => 'Nuevas etiquetas:', 'attr' => array('placeholder' => 'etiqueta1, etiqueta2, etiqueta3') )) ; } /** * {@inheritdoc} */ public function configureOptions(OptionsResolver $resolver) { $resolver->setDefaults(array( 'data_class' => 'AppBundle\Entity\Post' )); } /** * {@inheritdoc} */ public function getBlockPrefix() { return 'appbundle_post'; } }
mit
GCallVietNam/GCALL-2-Android
GCALL2/app/src/main/java/vn/gcall/gcall2/DataStruct/CallLog.java
1783
package vn.gcall.gcall2.DataStruct; /** * Created by This PC on 28/06/2016. * Data struct describes Call log entity */ public class CallLog { private String group; private String dateCreated; private String duration; private String from; private String status; private String objectID; private String groupID; public CallLog(String date, String lenght, String f,String to, String stt,String gid){ dateCreated=date; duration=lenght; from=f; status=stt; group=to; objectID=""; groupID=gid; } public CallLog(String date,String f,String to,String objID,String gid){ dateCreated=date; duration="0 secs"; from=f; status="unsolved"; group=to; objectID=objID; groupID=gid; } public String getGroupID() { return groupID; } public String getObjectID() { return objectID; } public void setObjectID(String objectID) { this.objectID = objectID; } public String getDuration() { return duration; } public String getDateCreated() { return dateCreated; } public String getFrom() { return from; } public String getGroup() { return group; } public String getStatus() { return status; } public void setDateCreated(String dateCreated) { this.dateCreated = dateCreated; } public void setFrom(String from) { this.from = from; } public void setGroup(String group) { this.group = group; } public void setStatus(String status) { this.status = status; } public void setDuration(String duration) { this.duration = duration; } }
mit
mayugechief/srsg
app/controllers/concerns/cms/node_filter/view_cell.rb
1006
# coding: utf-8 module Cms::NodeFilter::ViewCell extend ActiveSupport::Concern include Cms::RssFilter included do helper ApplicationHelper before_action :prepend_current_view_path before_action :inherit_variables before_action :set_item end module ClassMethods def model(cls) self.model_class = cls if cls end end private def prepend_current_view_path prepend_view_path "app/cells/#{controller_path}" end def inherit_variables controller.instance_variables.select {|m| m =~ /^@[a-z]/ }.each do |name| next if instance_variable_defined?(name) instance_variable_set name, controller.instance_variable_get(name) end end def set_model @model = self.class.model_class controller.instance_variable_set :@model, @model end def set_item @cur_node = @cur_node.becomes_with_route end def redirect_to(*args) controller.redirect_to(*args) end end
mit
Freshwood/matterbridge
src/main/scala/com/freshsoft/matterbridge/client/codinglove/CodingLoveIntegration.scala
3467
package com.freshsoft.matterbridge.client.codinglove import java.util.UUID import akka.event.Logging import akka.http.scaladsl.Http import akka.http.scaladsl.model.headers.Location import akka.http.scaladsl.model.{HttpRequest, HttpResponse, StatusCodes} import akka.util.ByteString import com.freshsoft.matterbridge.client.IMatterBridgeResult import model.MatterBridgeEntities.SlashResponse import com.freshsoft.matterbridge.server.{CodingLoveActorService, MatterBridgeContext} import com.freshsoft.matterbridge.util.MatterBridgeConfig import model.{CodingLoveEntity, SlashCommandRequest} import net.ruippeixotog.scalascraper.browser.JsoupBrowser import net.ruippeixotog.scalascraper.dsl.DSL.Extract._ import net.ruippeixotog.scalascraper.dsl.DSL._ import scala.concurrent.Future /** * The matter bridge client which is raising the integrations */ object CodingLoveIntegration extends IMatterBridgeResult with MatterBridgeConfig with MatterBridgeContext with CodingLoveActorService { private val log = Logging.getLogger(system, this) private val randomUrl = "http://thecodinglove.com/random" private lazy val browser = JsoupBrowser() override def getResult(request: SlashCommandRequest): Future[Option[SlashResponse]] = getDataFromWebsite(randomUrl, request) /** * Get the response from thecodinglove web page * * @param uri The uri we are calling * @param request The SlashRequest to work with * @return Future as Option from SlashResponse */ private def getDataFromWebsite(uri: String, request: SlashCommandRequest): Future[Option[SlashResponse]] = { Http().singleRequest(HttpRequest(uri = uri)).flatMap { case HttpResponse(StatusCodes.OK, _, entity, _) => entity.dataBytes.runFold(ByteString(""))(_ ++ _) map { response => buildSlashResponse((response.decodeString("UTF-8"), request)) } case HttpResponse(StatusCodes.Found, headers, _, _) => val result: Future[Option[SlashResponse]] = headers.find(_.isInstanceOf[Location]) map { case header: Location => getDataFromWebsite(header.uri.toString, 
request) } getOrElse Future.successful(None) result } } private def buildSlashResponse : PartialFunction[(String, SlashCommandRequest), Option[SlashResponse]] = { case (response, _) if response.isEmpty => log.info(s"Got no result from coding love"); None case (response, x) if response.nonEmpty => log.debug(response) val codingLoveEntity = getCodingLoveResponseContent(response) persistCodingLoveResult(codingLoveEntity) Some( SlashResponse( codingLoveResponseType, s"${codingLoveEntity.name}\n${codingLoveEntity.gifUrl}\nSearched for ${x.text}", List())) } private def persistCodingLoveResult(codingLoveEntity: CodingLoveEntity): Future[Boolean] = codingLoveService.add(codingLoveEntity.name, codingLoveEntity.gifUrl) /** * Filter the content to fit our needs * * @param htmlContent The web result to retrieve the information * @return The response message as String */ private def getCodingLoveResponseContent(htmlContent: String): CodingLoveEntity = { val doc = browser.parseString(htmlContent) val text = doc >> element("div div h3") val gif = doc >> element("div div p img") CodingLoveEntity(UUID.randomUUID(), text.innerHtml, gif.attr("src"), None, None) } }
mit
bmdeveloper/bmdeveloper.github.io
gulpfile.js
4246
var gulp = require('gulp'); var jshint = require('gulp-jshint'); var concat = require('gulp-concat'); var rename = require('gulp-rename'); var uglify = require('gulp-uglify'); var less = require('gulp-less'); var path = require('path'); var minifyCSS = require('gulp-minify-css'); var replace = require('gulp-replace'); var removeEmptyLines = require('gulp-remove-empty-lines'); var sourceFolder = 'src/'; var appFolder = 'src/app/'; var cssFolder = 'src/styles/'; var buildFolder = 'dist/'; var buildJS = '<script src="dist/js/all.min.js"></script>'; var DevJS = '<script src="src/app/core/app.js"></script>\n\t<script src="src/app/photo-details-view/photo-details.js"></script>\n\t<script src="src/app/photo-list-view/photo-list.js"></script>\n\t<script src="src/app/core/services.js"></script>'; var devCSS = cssFolder + 'main.css'; var buildCSS = buildFolder + 'styles/main.min.css'; //lint combine minify js gulp.task('js-linting-combining-minifying', function () { return gulp.src(appFolder + '**/*.js') .pipe(jshint()) .pipe(jshint.reporter('default')) .pipe(concat('all.min.js')) .pipe(uglify()) .pipe(gulp.dest(buildFolder + 'js')); }); //lint dev js gulp.task('js-linting-dev', function () { return gulp.src(appFolder + '**/*.js') .pipe(jshint()) .pipe(jshint.reporter('default')); }); //compile minify less gulp.task('less-compiling-minifying', function () { return gulp.src(cssFolder + '*.less') .pipe(less()) .pipe(rename({ suffix: ".min" })) .pipe(minifyCSS()) .pipe(gulp.dest(buildFolder + 'styles')); }); //compile less for dev environment gulp.task('less-compiling-dev', function () { return gulp.src(cssFolder + '*.less') .pipe(less()) .pipe(gulp.dest(cssFolder)); }); //copy markup to build folder environment gulp.task('markup-copy', function () { return gulp.src(appFolder + '**/*.html', { base: appFolder }) .pipe(gulp.dest(buildFolder)); }); gulp.task('image-copy', function () { return gulp.src(sourceFolder + 'images/**/*.*', { base: appFolder }) .pipe(gulp.dest(buildFolder 
+ 'images')); }); //copy ext js to build folder gulp.task('external-js-copy', function () { return gulp.src(sourceFolder + 'js/**/*.js', { base: appFolder }) .pipe(gulp.dest(buildFolder + "js")); }); //replace all build links to dev links in index gulp.task('replace-dev-main', function () { gulp.src(['index.html']) .pipe(replace(buildCSS, devCSS)) .pipe(replace(buildJS, DevJS)) .pipe(replace(buildFolder, sourceFolder)) .pipe(gulp.dest('./')); }); //replace all dev links to build links in index gulp.task('replace-build-main', function () { return gulp.src(['index.html']) .pipe(replace(devCSS, buildCSS)) .pipe(replace('<script src="src/app/core/app.js"></script>', buildJS)) .pipe(replace('<script src="src/app/photo-details-view/photo-details.js"></script>', '')) .pipe(replace('<script src="src/app/photo-list-view/photo-list.js"></script>', '')) .pipe(replace('<script src="src/app/core/services.js"></script>', '')) .pipe(replace(sourceFolder, buildFolder)) .pipe(removeEmptyLines()) .pipe(gulp.dest('./')); }); //replace all build links to dev links in views gulp.task('replace-dev-viewsource', function () { return gulp.src([appFolder + '**/*.js', '!' + appFolder + 'core/']) .pipe(replace(buildFolder, appFolder)) .pipe(gulp.dest(appFolder)); }); //replace all dev links to build links in views gulp.task('replace-build-viewsource', function () { return gulp.src([appFolder + '**/*.js', '!' + appFolder + 'core/']) .pipe(replace(appFolder, buildFolder)) .pipe(gulp.dest(appFolder)); }); gulp.task('watch', function () { gulp.watch(cssFolder + '*.less', ['less-compiling-dev'], function () { }); gulp.watch(appFolder + '**/*.js', ['js-linting-dev'], function () { }); }); gulp.task('build', ['replace-build-viewsource', 'js-linting-combining-minifying', 'less-compiling-minifying', 'markup-copy', 'external-js-copy', 'image-copy', 'replace-build-main']); gulp.task('dev', ['less-compiling-dev', 'replace-dev-viewsource', 'replace-dev-main', 'watch']);
mit
bootstrap-vue/bootstrap-vue
docs/content.js
3704
import { importAll, parseVersion, parseFullVersion } from '~/utils' import { version, dependencies, devDependencies, description } from '~/../package.json' const componentsContext = require.context('~/../src/components/', true, /package.json/) export const components = importAll(componentsContext) const directivesContext = require.context('~/../src/directives/', true, /package.json/) export const directives = importAll(directivesContext) const iconsContext = require.context('~/../src/icons', false, /package.json/) const icons = importAll(iconsContext) || {} // Since there a lot of icons, we only return `BIcon` and `BIconstack` component, plus // one extra example icon component which we modify the icon name to be `BIcon{IconName}` // We sort the array to ensure `BIcon` appears first icons[''].components = icons[''].components .filter(c => c.component === 'BIconBlank' || !/^BIcon[A-Z]/.test(c.component)) .sort((a, b) => (a.component < b.component ? -1 : a.component > b.component ? 1 : 0)) .map(c => { c = { ...c } if (c.component === 'BIconBlank') { c.component = 'BIcon{IconName}' // We add a special `srcComponent` to grab the prop `$options` data from c.srcComponent = 'BIconBlank' } return c }) export { icons } const referenceContext = require.context('~/markdown/reference', true, /meta.json/) export const reference = importAll(referenceContext) export const nav = [ { title: 'Getting Started', base: '', exact: true }, { title: 'Components', base: 'components/', pages: components, description: 'BootstrapVue components and component group plugins.' }, { title: 'Directives', base: 'directives/', pages: directives, description: 'BootstrapVue directives and directive group plugins.' }, { title: 'Icons', base: 'icons', version: '2.3.0', description: 'BootstrapVue icons.' }, { title: 'Reference', base: 'reference/', pages: reference, description: 'BootstrapVue and Bootstrap reference, and additional resources documentation.' 
} ] // RegExp to grab the minor version from a full version const minorRE = /^(\d+\.\d+)(\..+)$/ // RegExp to grab the major version from a full version const majorRE = /^(\d+)(\.\d+\..+)$/ export const bootstrapVersion = parseVersion(dependencies.bootstrap) export const bootstrapVersionMinor = bootstrapVersion.replace(minorRE, '$1') export const bootstrapVersionMajor = bootstrapVersion.replace(majorRE, '$1') export const bootstrapIconsVersion = parseFullVersion(devDependencies['bootstrap-icons']) export const bootstrapIconsVersionMinor = bootstrapIconsVersion.replace(minorRE, '$1') export const bootstrapIconsVersionMajor = bootstrapIconsVersion.replace(majorRE, '$1') export const bootstrapIconsCount = 1200 export const popperVersion = parseVersion(dependencies['popper.js']) export const popperVersionMinor = popperVersion.replace(minorRE, '$1') export const popperVersionMajor = popperVersion.replace(majorRE, '$1') export const portalVueVersion = parseVersion(dependencies['portal-vue']) export const portalVueVersionMinor = portalVueVersion.replace(minorRE, '$1') export const portalVueVersionMajor = portalVueVersion.replace(majorRE, '$1') export const nuxtVersion = parseVersion(devDependencies.nuxt) export const nuxtVersionMinor = nuxtVersion.replace(minorRE, '$1') export const nuxtVersionMajor = nuxtVersion.replace(majorRE, '$1') export const vueVersion = parseVersion(devDependencies.vue) export const vueVersionMinor = vueVersion.replace(minorRE, '$1') export const vueVersionMajor = vueVersion.replace(majorRE, '$1') export const bvDescription = description export { version }
mit
EastsidePreparatorySchool/Ephemera
SpaceCritters/src/org/eastsideprep/spacecritters/gamelogic/AlienContainer.java
17940
/* * This work is licensed under a Creative Commons Attribution-NonCommercial 3.0 United States License. * For more information go to http://creativecommons.org/licenses/by-nc/3.0/us/ */ package org.eastsideprep.spacecritters.gamelogic; import org.eastsideprep.spacecritters.gameengineinterfaces.AlienSpec; import org.eastsideprep.spacecritters.alieninterfaces.*; import org.eastsideprep.spacecritters.gameengineinterfaces.GameVisualizer; import java.lang.reflect.Constructor; import java.util.HashMap; import org.eastsideprep.spacecritters.orbit.DummyTrajectory; import org.eastsideprep.spacecritters.orbit.Orbitable; import org.eastsideprep.spacecritters.orbit.Trajectory; /** * * @author guberti */ public class AlienContainer { private static int currentID = 0; public final String domainName; public final String packageName; public final String className; public String fullName; public String speciesName; public AlienSpecies species; public final Constructor<?> constructor; public Alien alien; public AlienComplex calien; public final ContextImplementation ctx; public final SpaceGrid grid; public int alienHashCode; public int speciesID; public Action.ActionCode currentActionCode; public double currentActionPower; public String currentActionMessage; public Action currentAction; public boolean listening; Planet planet; double tech; double energy; public HashMap<String, Integer> secrets; boolean participatedInAction; public Position p = new Position(0, 0); public Position nextP; public WorldVector nextWP; public String outgoingMessage; public double outgoingPower; int turnsInSafeZone; boolean isComplex; Trajectory trajectory; public boolean updated = true; // Declare stats here // // Heads up: This constructs an AlienContainer and contained Alien // public AlienContainer(SpaceGrid sg, GameVisualizer vis, int x, int y, String alienDomainName, String alienPackageName, String alienClassName, Constructor<?> cns, AlienSpecies as, double energy, double tech, int parent, String 
message, Trajectory trajectory) throws InstantiationException { //[Q] this.domainName = alienDomainName; this.packageName = alienPackageName; this.className = alienClassName; this.constructor = cns; this.species = as; this.energy = energy; this.tech = tech; this.ctx = new ContextImplementation(this, vis); this.grid = sg; this.listening = false; this.secrets = new HashMap<>(); this.speciesID = as.speciesID; this.planet = null; Alien a = null; // if position = (0,0) assign random position in safe zone if (x == 0 && y == 0) { this.p.x = ctx.getRandomInt(Constants.safeZoneRadius + 1); this.p.x *= (ctx.getRandomInt(2) == 0 ? 1 : -1); this.p.y = ctx.getRandomInt(Constants.safeZoneRadius + 1); this.p.y *= (ctx.getRandomInt(2) == 0 ? 1 : -1); } else { this.p.x = x; this.p.y = y; } this.alienHashCode = 0; // construct and initialize alien try { a = (Alien) cns.newInstance(); this.alien = a; this.alienHashCode = ++currentID; } catch (Throwable t) { this.alien = null; grid.gridDebugErr("ac: Error constructing Alien"); t.printStackTrace(System.out); throw new InstantiationException(); } try { if (a instanceof AlienComplex) { initComplex(a, trajectory); ((AlienComplex) a).initComplex(this.ctx, this.alienHashCode, parent, message); } else { a.init(this.ctx, this.alienHashCode, parent, message); } } catch (UnsupportedOperationException e) { // let this go } fullName = this.getFullName(); speciesName = this.getFullSpeciesName(); } // class-related helpers public void initComplex(Alien a, Trajectory trajectory) throws InstantiationException { calien = (AlienComplex) a; isComplex = true; if (trajectory == null) { grid.gridDebugErr("ac: No trajectory or focus given"); throw new InstantiationException(); } if (trajectory instanceof DummyTrajectory) { this.trajectory = new Trajectory( trajectory.currentFocus, //focus from the dummy trajectory (grid.rand.nextDouble() * 2 + 10) * Constants.deltaX, //semi-latus rectum 0.01,//grid.rand.nextDouble() / 10 + 0.1, //Eccentricity 
grid.rand.nextInt(2) == 0 ? -1 : 1, //signum Vector2.normalizeAngle(grid.rand.nextDouble() * 2 * Math.PI), //rotation grid); } else { this.trajectory = trajectory.clone(); } p = new Position(this.trajectory.getWorldPositionAtTime(0)); } public double getMass() { return Constants.alienMass; } public String getFullSpeciesName() { if (speciesName == null) { speciesName = domainName + ":" + packageName + ":" + className; } return speciesName; } public String getFullName() { if (fullName == null) { fullName = getFullSpeciesName() + "(" + Integer.toHexString(alien.hashCode()).toUpperCase() + ")"; } return fullName; } public AlienSpec getFullAlienSpec() { return new AlienSpec(this.domainName, this.packageName, this.className, this.species.speciesID, this.alienHashCode, this.p.round().x, this.p.round().y, this.tech, this.energy, this.fullName, this.speciesName, this.currentActionPower); } /*public AlienSpec getSimpleAlienSpec() { return new AlienSpec(this.domainName, this.packageName, this.className, this.species.speciesID, this.fullName, this.speciesName); }*/ public AlienSpecies getAlienSpecies() { if (this.species == null) { assert false; species = new AlienSpecies(this.domainName, this.packageName, this.className, species.speciesID, this.p.round().x, this.p.round().y); } return species; } public String toStringExpensive() { return getFullName() + ": " + "X:" + (p.x) + " Y:" + (p.y) + " E:" + (energy) + " T:" + (tech) + " r:" + ((int) Math.floor(Math.hypot(p.x, p.y))); } public void processResults() { try { this.alien.processResults(); } catch (UnsupportedOperationException e) { } } public void move() throws NotEnoughTechException { // if on planet, ignore move if (this.planet != null) { return; } if (isComplex) { movecomplex(); } else { movestandard(); } } public void movecomplex() throws NotEnoughTechException { /* FIND DELTAV */ updated = true; double tolerance = 10; // percent trajectory.conic.updateStateVectors(grid.getTime()); WorldVector deltaV; try { deltaV = 
calien.getAccelerate(); } catch (UnsupportedOperationException e) { deltaV = null; } // don't scale deltav by mass. we will fix accounting some other time. // if (deltaV != null) { // deltaV = new WorldVector(deltaV.scale(1f / getMass())); // } if (deltaV != null && deltaV.x == 0 && deltaV.y == 0) { deltaV = null; //if there is no acceleration, don't do anything } WorldVector oldWP = nextWP; nextWP = trajectory.getWorldPositionAtTime(grid.getTime()); nextP = new Position(nextWP); // debug: // calculate apparent velocity from move distance // if (oldWP != null) { // Vector2 d = nextWP.subtract(oldWP).scale(1 / Constants.deltaT); // System.out.println("apparent alien velocity: " + d + ", mag:" + d.magnitude()); // } /* CHARGE ALIEN FOR DELTAV */ //aliens cannot change their trajectory if they are enot in the game limits if (GridDisk.isValidPoint(nextP.round())) { if (deltaV != null) { double m = deltaV.magnitude(); if (m < Constants.maxDeltaV(tech)) { this.energy -= Constants.accelerationCost(m); } else { deltaV = null; } } } else if (!trajectory.isBound()) { System.out.println("Floated into the abyss"); kill("Floated into the abyss"); return; } if (deltaV != null) { // alter the old trajectory Vector2 v = trajectory.getVelocityAtTime(grid.getTime()); trajectory.accelerate(deltaV, grid.getTime()); Vector2 v2 = trajectory.getVelocityAtTime(grid.getTime()).subtract(deltaV); if ((v.x / v2.x) > (1.0 + tolerance / 100.0) || (v.x / v2.x) < (1.0 - tolerance / 100.0) || (v.y / v2.y) > (1.0 + tolerance / 100.0) || (v.y / v2.y) < (1.0 - tolerance / 100.0)) { System.out.println("Accelerate: adjusted entry/exit velocities do not match within tolerance values"); System.out.println(" exit velocity: " + v + " mag " + v.magnitude()); System.out.println(" entry velocity: " + v2 + " mag " + v2.magnitude()); } // call shell visualizer // just make sure we don't blow up the alien because of an exception in the shell try { grid.vis.showAcceleration(alienHashCode, nextP, deltaV); } catch 
(Exception e) { grid.vis.debugOut("Unhandled exception in showAcceleration(): " + e); } } /* DETERMINE CURRENT FOCUS */ Orbitable focus = findFocus(); /* FINALLY, COMPUTE NEW TRAJECTORY */ if (focus != trajectory.currentFocus) { //make a new trajectory if the focus has changed System.out.print("Changing focus "); if (focus instanceof Planet) { System.out.print("to planet "); System.out.println(((Planet) focus).className); } else if (focus instanceof Star) { System.out.print("to star "); System.out.println(((Star) focus).className); } Vector2 v = trajectory.getVelocityAtTime(grid.getTime()); trajectory.setFocus(focus); Vector2 v2 = trajectory.getVelocityAtTime(grid.getTime()); if (Math.abs(v.x / v2.x) > (1.0 + tolerance / 100.0) || Math.abs(v.x / v2.x) < (1.0 - tolerance / 100.0) || Math.abs(v.y / v2.y) > (1.0 + tolerance / 100.0) || Math.abs(v.y / v2.y) < (1.0 - tolerance / 100.0)) { System.out.println("Focus change: entry/exit velocities do not match within tolerance values"); System.out.println(" exit velocity: " + v + " mag " + v.magnitude()); System.out.println(" entry velocity: " + v2 + " mag " + v2.magnitude()); } } } public Orbitable findFocus() { Orbitable focus = trajectory.currentFocus; boolean altered = false; //if orbiting a planet and within that planet's hill sphere, you're staying there //if not, enter the parent star's orbit if (focus instanceof Planet) { System.out.println("hill radius: " + focus.hillRadius()); if (focus.position(grid.getTime()).subtract(nextP).magnitude() <= focus.hillRadius()) { return focus; } else { focus = ((Planet) focus).trajectory.currentFocus; } } //parent must be a star if code gets here double F = focus.mass() / focus.position(grid.getTime()).subtract(nextP).magnitude(); //for each star //if it exerts more force on you than your parent star, it becomes your new parent for (InternalSpaceObject iso : grid.objects) { if (iso instanceof Star && iso != focus) { if (iso.mass() / 
iso.position(grid.getTime()).subtract(nextP).magnitude() > F) { focus = iso; altered = true; F = focus.mass() / focus.position(grid.getTime()).subtract(nextP).magnitude(); } } } //for each planet orbiting your parent star //if you're in their hill sphere, they are your new parent //TODO: does not account for overlapping hill spheres for (InternalSpaceObject iso : grid.objects) { if (iso instanceof Planet && iso.trajectory.currentFocus == focus) { if (iso.position(grid.getTime()).subtract(nextP).magnitude() <= iso.hillRadius()) { focus = iso; altered = true; } } } return focus; } public void movestandard() throws NotEnoughTechException { //[Q] // Whether the move goes off the board will be determined by the grid Vector2 direction = null; try { direction = alien.getMove(); } catch (UnsupportedOperationException e) { // we'll let that go direction = new Direction(0, 0); } checkMove(direction); // Throws an exception if illegal // we want to contain aliens in the 250 sphere, so apply the "cosmic drift" direction = this.containMove(p.x, p.y, direction); nextP = new Position(p.add(direction)); this.energy -= direction.magnitude() * Constants.standardMoveCost; } // this does the actual checking private void checkMove(Vector2 direction) throws NotEnoughTechException { //[Q] int moveLength = (int) direction.magnitude(); // let one x one moves go if (Math.abs(direction.x) <= 1 && Math.abs(direction.y) <= 1) { return; } // If the move is farther than the alien has the power to move if (moveLength > tech) { debugErr("Illegal move(" + moveLength + "): " + (direction.x) + "," + (direction.y) + " tech " + tech); throw new NotEnoughTechException(); } // If the move is farther than the alien has the tech to move if (moveLength > tech) { debugErr("Illegal move: " + (direction.x) + "," + (direction.y) + " tech " + tech); throw new NotEnoughTechException(); } } public Direction containMove(double x, double y, Vector2 dir) { //[Q] double dxi, dyi; dxi = dir.x; dyi = dir.y; if (x + dxi 
> Constants.width / 2) { dxi = Constants.width / 2 - x; } if (x + dxi < -Constants.width / 2) { dxi = -Constants.width / 2 - x; } if (y + dyi > Constants.width / 2) { dyi = Constants.width / 2 - y; } if (y + dyi < -Constants.width / 2) { dyi = -Constants.width / 2 - y; } return new Direction(dxi, dyi); } // easy way to kill an alien public void kill(String s) { debugOut(s + " with T:" + (Math.round(tech * 10) / 10) + " and E:" + (Math.round(energy * 10) / 10) + " during turn:" + grid.currentTurn ); energy = 0; } // checked action private final static Action NULLACTION = new Action(Action.ActionCode.Gain); public void getAction() throws NotEnoughEnergyException, UnknownActionException { Action a = alien.getAction(); if (a == null) { a = NULLACTION; } this.currentAction = a; this.currentActionCode = a.code; this.currentActionPower = a.power; this.currentActionMessage = a.message; switch (a.code) { case None: case Gain: break; case Research: if (tech >= energy) { // If the tech can't be researched due to lack of energy debugOut("AC: Research violation with " + ctx.getStateString()); throw new NotEnoughEnergyException(); } break; case Spawn: if (a.power + ctx.getSpawningCost() > energy) { debugOut("AC: Spawn violation with P:" + a.power + " " + ctx.getStateString()); throw new NotEnoughEnergyException(); } break; case Fight: if (energy < (a.power + ctx.getFightingCost())) { debugOut("AC: Fight violation with P:" + a.power + " " + ctx.getStateString()); throw new NotEnoughEnergyException(); } // limit fight power by tech if (a.power > tech) { this.currentActionPower = tech; } //TODO: Perform necessary checks break; default: debugOut("AC: Checking action, unknown: " + a.code.toString()); throw new UnknownActionException(); } } // this debugOut is not sensitive to chatter control public void debugOut(String s) { ctx.vis.debugOut("Alien " + getFullName() + ": " + s); } public void debugErr(String s) { ctx.vis.debugErr("Alien " + getFullName() + ": " + s); } }
mit
golderboy/risk2
pages/user/rpt_007.php
4475
<?php
// Build the date-range filter clause when both dates were posted.
// SECURITY FIX: the original interpolated raw $_POST values straight into the
// SQL string (injection risk); escape them with the legacy mysql_* API first.
if (isset($_POST['date_start']) && isset($_POST['date_end'])) {
    $date_start = mysql_real_escape_string($_POST['date_start']);
    $date_end   = mysql_real_escape_string($_POST['date_end']);
    $text_sql   = " AND risk_datetime Between '$date_start 00:00:00' and '$date_end 23:59:59' ";
    $date_show  = "ระหว่างวันที่ ".$func->format_date($_POST['date_start'])." ถึง ".$func->format_date($_POST['date_end']);
}
?>
<?php
// Load every category name from sys_category.
// $text_name is 1-based; after the loop $num_x equals (category count + 1).
$sql_x  = "select cat_id,cat_name from sys_category ";
$res_x  = mysql_query($sql_x);
$text_x = " ";
$num_x  = 1;
while ($rec_x = mysql_fetch_array($res_x)) {
    if ($rec_x['cat_name'] != "") {
        $text_x .= "'";
        $text_x .= $rec_x['cat_name'];
        $text_x .= "',";
        $text_name[$num_x] = $rec_x['cat_name'];
        $num_x++;
    }
}
// Count risks per category id ($n is a loop-generated integer, safe to embed),
// optionally restricted to the posted date range.
for ($n = 1; $n < $num_x; $n++) {
    $sql[$n] = "select cat_id from tb_risk where cat_id = '$n' ";
    if (isset($_POST['date_start']) && isset($_POST['date_end'])) {
        $sql[$n] .= $text_sql;
    }
    $count[$n]  = $func->getRows($sql[$n]);
    $text_y[$n] = "$count[$n]";
}
?>
<?php
// Build the Morris.js data series: one {X : ' name ' , Y : count} object per
// category, concatenated into a JS object-literal list.
$text_data = "";
for ($y = 1; $y < $num_x; $y++) {
    $text_data .= "{";
    $text_data .= "X : ' ";
    $text_data .= $text_name[$y];
    $text_data .= " ' ,";
    $text_data .= "Y : ".$text_y[$y]." },";
}
?>
<div id="page-wrapper">
    <div class="row">
        <div class="col-lg-12">
            <h1 class="page-header">กราฟรายงาน แยกหมวดความเสี่ยง </h1>
            <?php
            if (isset($_POST['date_start']) && isset($_POST['date_end'])) {
                // BUG FIX: in the original the closing parenthesis was misplaced --
                // "</h3>" was concatenated INSIDE the format_date() argument
                // (format_date($_POST['date_end']."</h3>")), so the second date was
                // formatted from a corrupted string and the <h3> was never closed.
                echo "<h3>ระหว่างวันที่ ".$func->format_date($_POST['date_start'])." - ".$func->format_date($_POST['date_end'])."</h3>";
            }
            ?>
        </div>
        <!-- /.col-lg-12 -->
    </div>
    <div class="row">
        <div class="col-lg-12">
            <h3>กำหนดช่วงวันที่</h3>
        </div>
        <div class="panel-body">
            <form action="?page=<?php echo sha1('rpt_007') ?>" method="post" name="frm" id="frm" autocomplete="off">
                <div class="col-sm-4 ">
                    <div class="form-group">
                        <label >ตั้งแต่วันที่</label>
                        <input type="date" name="date_start" id="date_start" class="form-control" required="">
                    </div>
                </div>
                <div class="col-sm-4 ">
                    <div class="form-group">
                        <label >ถึงวันที่</label>
                        <input type="date" name="date_end" id="date_end" class="form-control" required="">
                    </div>
                </div>
                <div class="col-sm-3">
                    <div class="form-group">
                        <label >&zwj;</label>
                        <input type="hidden" id="report" name="report" value="1">
                        <input type="submit" name="submit" id="submit" class="form-control btn-success " value="ค้นหา">
                    </div>
                </div>
            </form>
        </div>
    </div>
    <?php if (!isset($_POST['date_start']) && !isset($_POST['date_end'])) { ?>
    <div class="row">
        <div class="col-sm-4 col-md-12">
            <div class="panel panel-red">
                <div class="panel-heading">
                    แจ้งเตือน
                </div>
                <div class="panel-body">
                    <!-- FIX: original inline CSS was invalid ("font-size: 20" without a
                         unit, "font-style: bold" instead of font-weight) and ignored by
                         browsers; corrected to the evidently intended styling. -->
                    <p style="font-size: 20px;font-weight: bold;"> กรุณาเลือกช่วงวันที่</p>
                </div>
            </div>
        </div>
    </div>
    <?php } ?>
    <div class="row">
        <div class="col-lg-12">
            <div class="panel-body">
                <div id="bar-example"></div>
                <?php
                if (isset($_POST['date_start']) && isset($_POST['date_end'])) {
                    echo "<button id='print' >PDF</button>";
                }
                ?>
            </div>
        </div>
    </div>
    <script type="text/javascript">
    Morris.Bar({
        element: 'bar-example',
        data: [ <?php echo $text_data; ?> ],
        xkey: 'X',
        ykeys: ['Y',],
        labels: ['หมวด',]
    });
    // NOTE(review): this handler calls the global window.print(), not the
    // printMe() helper below -- printMe() appears to be dead code; confirm
    // which behavior is intended before removing either.
    $('#print').click(function () {
        print();
    });
    // This will render SVG only as PDF and download
    function printMe() {
        xepOnline.Formatter.Format('bar-example', {render:'download', srctype:'svg'});
    }
    </script>
mit
Chamberlab/datadeck-nwjs
src/vue-ui/components/channel/index.js
207
import Grid from './grid'; import List from './list'; import Menu from './menu'; import Plot from './plot'; import Specs from './specs'; export default { Grid, List, Menu, Plot, Specs };
mit
requtize/query-builder
src/QueryBuilder/Container.php
556
<?php /** * Copyright (c) 2017 by Adam Banaszkiewicz * * @license MIT License * @copyright Copyright (c) 2017, Adam Banaszkiewicz * @link https://github.com/requtize/query-builder */ namespace Requtize\QueryBuilder; class Container { /** * @var Container */ protected $connections = []; public function addConnection($name, $connection) { $this->connections[$name] = $connection; return $this; } public function getConnection($name) { return $this->connections[$name]; } }
mit
pbnjay/clustering
distancemap_test.go
3710
package clustering

import "testing"

// TestDistanceMapClustering1 exercises a DistanceMapClusterSet built from a
// single pair ("a"-"b" at distance 0): construction, cluster/item enumeration
// with different start offsets, Distance lookup, and a manual Merge.
func TestDistanceMapClustering1(t *testing.T) {
	d := NewDistanceMapClusterSet(nil)
	if d == nil {
		t.Errorf("could not create empty DistanceMapClusterSet")
	}
	d = NewDistanceMapClusterSet(DistanceMap{"a": {"b": 0.0}})
	if d == nil {
		t.Errorf("could not create 2-node DistanceMapClusterSet")
	}
	// Each item starts in its own singleton cluster.
	if d.Count() != 2 {
		t.Errorf("2-node DistanceMapClusterSet doesn't start with 2 clusters")
	}
	// n1 counts clusters visited, n2 counts items visited.
	n1, n2 := 0, 0
	d.EachCluster(-1, func(cluster int) {
		n1++
		d.EachItem(cluster, func(x ClusterItem) {
			n2++
		})
	})
	if n1 != 2 || n2 != 2 {
		t.Errorf("2-node DistanceMapClusterSet didn't enumerate 2 clusters w/start=-1")
	}
	// start=0 skips cluster 0, so only one cluster/item should be seen.
	n1, n2 = 0, 0
	d.EachCluster(0, func(cluster int) {
		n1++
		d.EachItem(cluster, func(x ClusterItem) {
			n2++
		})
	})
	if n1 != 1 || n2 != 1 {
		t.Errorf("2-node DistanceMapClusterSet didn't enumerate 1 clusters w/start=0")
	}
	// start=1 skips everything.
	n1 = 0
	d.EachCluster(1, func(cluster int) {
		n1++
	})
	if n1 != 0 {
		t.Errorf("2-node DistanceMapClusterSet didn't enumerate 0 clusters w/start=1")
	}
	if d.Distance(0, 1, "a", "b") != 0.0 {
		t.Errorf("2-node DistanceMapClusterSet gave wrong distance")
	}
	// Merging the two singletons must leave one cluster holding both items.
	d.Merge(0, 1)
	if d.Count() != 1 {
		t.Errorf("after Merge(0,1), 2-node DistanceMapClusterSet isn't 1 cluster")
	}
	n1, n2 = 0, 0
	d.EachCluster(-1, func(cluster int) {
		n1++
		d.EachItem(cluster, func(x ClusterItem) {
			n2++
		})
	})
	if n1 != 1 || n2 != 2 {
		t.Errorf("after Merge(0,1), 2-node DistanceMapClusterSet isn't 1 cluster with 2 items")
	}
}

// TestDistanceMapClustering2 clusters three items that are all at distance 0;
// with threshold 1.0 and complete linkage they must collapse to one cluster.
func TestDistanceMapClustering2(t *testing.T) {
	d := NewDistanceMapClusterSet(DistanceMap{"a": {"b": 0.0, "c": 0.0}})
	if d == nil {
		t.Errorf("could not create 3-node DistanceMapClusterSet")
	}
	if d.Count() != 3 {
		t.Errorf("3-node DistanceMapClusterSet doesn't start with 3 clusters")
	}
	n1, n2 := 0, 0
	d.EachCluster(-1, func(cluster int) {
		n1++
		d.EachItem(cluster, func(x ClusterItem) {
			n2++
		})
	})
	if n1 != 3 || n2 != 3 {
		t.Errorf("3-node DistanceMapClusterSet didn't enumerate 3 clusters w/start=-1")
	}
	Cluster(d, Threshold(1.0), CompleteLinkage())
	if d.Count() != 1 {
		t.Errorf("after clustering, 3-node DistanceMapClusterSet isn't 1 cluster")
	}
	n1, n2 = 0, 0
	d.EachCluster(-1, func(cluster int) {
		n1++
		d.EachItem(cluster, func(x ClusterItem) {
			n2++
		})
	})
	if n1 != 1 || n2 != 3 {
		t.Errorf("after clustering, 3-node DistanceMapClusterSet isn't 1 cluster with 3 items")
	}
}

// TestDistanceMapClustering3 clusters five items with mixed distances; with
// threshold 0.4 and complete linkage the expected outcome is two clusters of
// sizes 2 and 3 (in either order).
func TestDistanceMapClustering3(t *testing.T) {
	d := NewDistanceMapClusterSet(DistanceMap{
		"a": {"b": 0.0, "c": 0.0, "d": 1.0, "e": 0.4},
		"b": {"c": 0.1, "d": 0.9, "e": 0.4},
		"c": {"d": 0.9, "e": 0.2},
		"d": {"e": 0.1},
	})
	if d == nil {
		t.Errorf("could not create 5-node DistanceMapClusterSet")
	}
	if d.Count() != 5 {
		t.Errorf("5-node DistanceMapClusterSet doesn't start with 5 clusters")
	}
	n1, n2 := 0, 0
	d.EachCluster(-1, func(cluster int) {
		n1++
		d.EachItem(cluster, func(x ClusterItem) {
			n2++
		})
	})
	if n1 != 5 || n2 != 5 {
		t.Errorf("5-node DistanceMapClusterSet didn't enumerate 5 clusters w/start=-1")
	}
	Cluster(d, Threshold(0.4), CompleteLinkage())
	if d.Count() != 2 {
		t.Errorf("after clustering, 5-node DistanceMapClusterSet isn't 2 clusters")
	}
	// c0/c1 count items landing in cluster 0 and cluster 1 respectively.
	n1, n2 = 0, 0
	c0, c1 := 0, 0
	d.EachCluster(-1, func(cluster int) {
		n1++
		d.EachItem(cluster, func(x ClusterItem) {
			n2++
			if cluster == 0 {
				c0++
			}
			if cluster == 1 {
				c1++
			}
		})
	})
	if n1 != 2 || n2 != 5 {
		t.Errorf("after clustering, 5-node DistanceMapClusterSet isn't 2 clusters with 5 items")
	}
	// Accept either split order: one cluster of 2 and one of 3, totalling 5.
	if (c0 < 2 || c0 > 3) || (c1 < 2 || c1 > 3) || (c0+c1) != 5 {
		t.Errorf("after clustering, 5-node DistanceMapClusterSet should be 2,3")
	}
}
mit
danisio/MVC-Project
Source/Web/MySurveys.Web/Areas/Surveys/ViewModels/Creating/PossibleAnswerViewModel.cs
867
namespace MySurveys.Web.Areas.Surveys.ViewModels.Creating { using System.ComponentModel.DataAnnotations; using System.Web.Mvc; using AutoMapper; using Models; using MvcTemplate.Web.Infrastructure.Mapping; public class PossibleAnswerViewModel : IMapFrom<PossibleAnswer>, IHaveCustomMappings { [HiddenInput(DisplayValue = false)] public int Id { get; set; } [Display(Name = "Possible answer")] [StringLength(200), MinLength(2)] public string Content { get; set; } public int QuestionId { get; set; } public void CreateMappings(IMapperConfiguration configuration) { configuration.CreateMap<PossibleAnswer, PossibleAnswerViewModel>() .ForMember(s => s.QuestionId, opt => opt.MapFrom(u => u.Question.Id)) .ReverseMap(); } } }
mit
bcvsolutions/CzechIdMng
Realization/backend/core/core-impl/src/main/java/eu/bcvsolutions/idm/core/model/repository/IdmAutomaticRoleAttributeRuleRequestRepository.java
443
package eu.bcvsolutions.idm.core.model.repository; import eu.bcvsolutions.idm.core.api.repository.AbstractEntityRepository; import eu.bcvsolutions.idm.core.model.entity.IdmAutomaticRoleAttributeRuleRequest; /** * Repository of rules for automatic role attribute request * * @author svandav * */ public interface IdmAutomaticRoleAttributeRuleRequestRepository extends AbstractEntityRepository<IdmAutomaticRoleAttributeRuleRequest> { }
mit
abjerner/Skybrud.Social.BitBucket
src/Skybrud.Social.BitBucket/Models/Users/BitBucketUserEmailsCollection.cs
2770
using System;
using Newtonsoft.Json.Linq;
using Skybrud.Essentials.Json.Extensions;

namespace Skybrud.Social.BitBucket.Models.Users {

    /// <summary>
    /// Class describing a paginated collection of the email addresses of a BitBucket user.
    /// (The original summary said "a BitBucket user", which did not match this type.)
    /// </summary>
    public class BitBucketUserEmailsCollection : BitBucketObject {

        #region Properties

        /// <summary>
        /// Gets the maximum amount of email addresses listed on each page.
        /// </summary>
        public int PageLength { get; private set; }

        /// <summary>
        /// Gets an array of email addresses on the returned page.
        /// </summary>
        public BitBucketUserEmail[] Values { get; private set; }

        /// <summary>
        /// Gets the page number.
        /// </summary>
        public int Page { get; private set; }

        /// <summary>
        /// Gets the total amount of email addresses added by the user.
        /// </summary>
        public int Size { get; private set; }

        /// <summary>
        /// Gets the URL for the previous page if not the first page, otherwise <code>null</code>.
        /// </summary>
        public string Previous { get; private set; }

        /// <summary>
        /// Gets whether there is a previous page.
        /// </summary>
        public bool HasPrevious {
            get { return !String.IsNullOrWhiteSpace(Previous); }
        }

        /// <summary>
        /// Gets the URL for the next page if there are more pages.
        /// </summary>
        public string Next { get; private set; }

        /// <summary>
        /// Gets whether there is a next page.
        /// </summary>
        public bool HasNext {
            get { return !String.IsNullOrWhiteSpace(Next); }
        }

        #endregion

        #region Constructors

        // Populates the collection from the raw JSON object returned by the
        // BitBucket API; "previous"/"next" are absent (null) on edge pages.
        private BitBucketUserEmailsCollection(JObject obj) : base(obj) {
            PageLength = obj.GetInt32("pagelen");
            Values = obj.GetArray("values", BitBucketUserEmail.Parse);
            Page = obj.GetInt32("page");
            Size = obj.GetInt32("size");
            Previous = obj.GetString("previous");
            Next = obj.GetString("next");
        }

        #endregion

        #region Static methods

        /// <summary>
        /// Parses the specified <code>obj</code> into an instance of <see cref="BitBucketUserEmailsCollection"/>.
        /// </summary>
        /// <param name="obj">The instance of <see cref="JObject"/> to be parsed.</param>
        /// <returns>Returns an instance of <see cref="BitBucketUserEmailsCollection"/>.</returns>
        public static BitBucketUserEmailsCollection Parse(JObject obj) {
            return obj == null ? null : new BitBucketUserEmailsCollection(obj);
        }

        #endregion

    }

}
mit
CptFight/immofficev2
application/controllers/Profile.php
189
<?php defined('BASEPATH') OR exit('No direct script access allowed'); class Profile extends MY_Controller { public function index() { $this->load->view('template', $this->data); } }
mit
wellingWilliam/prometheus
remote/src/main/java/com/prometheus/service/RemoteAuthenticationService.java
2788
package com.prometheus.service; import static org.springframework.http.HttpMethod.GET; import static org.springframework.http.HttpMethod.POST; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.http.ResponseEntity; import org.springframework.messaging.simp.stomp.StompSessionHandlerAdapter; import org.springframework.stereotype.Service; import org.springframework.web.client.HttpClientErrorException; import org.springframework.web.client.RestTemplate; import com.prometheus.factory.HttpEntityFactory; import com.prometheus.request.AuthenticationRequest; import com.prometheus.request.RegistrationRequest; import com.prometheus.request.VerifyRequest; import com.prometheus.response.AuthenticationResponse; import com.prometheus.response.GenericResponse; import com.prometheus.security.JwtUserDetails; @Service public class RemoteAuthenticationService extends StompSessionHandlerAdapter { private static final String PROTOCOL_STRING = "http://"; @Autowired private RestTemplate restTemplate; @Value("${prometheus.auth.name}") private String authName; @Value("${prometheus.auth.path}") private String authPath; @Value("${prometheus.token}") private String token; @Value("${prometheus.refresh}") private String refresh; @Value("${prometheus.verify}") private String verify; @Value("${prometheus.register}") private String register; @Value("${prometheus.user}") private String user; public ResponseEntity<GenericResponse> verify(VerifyRequest verifyRequest) { return restTemplate.exchange(PROTOCOL_STRING + authName + authPath + verify, POST, HttpEntityFactory.requestWithBody(verifyRequest), GenericResponse.class); } public ResponseEntity<GenericResponse> register(RegistrationRequest registrationRequest) { return restTemplate.exchange(PROTOCOL_STRING + authName + authPath + register, POST, HttpEntityFactory.requestWithBody(registrationRequest), GenericResponse.class); } public 
ResponseEntity<AuthenticationResponse> token(AuthenticationRequest authenticationRequest) { return restTemplate.exchange(PROTOCOL_STRING + authName + authPath + token, POST, HttpEntityFactory.requestWithBody(authenticationRequest), AuthenticationResponse.class); } public ResponseEntity<AuthenticationResponse> refresh(String token) { return restTemplate.exchange(PROTOCOL_STRING + authName + authPath + refresh, GET, HttpEntityFactory.requestWithAuthentication(token), AuthenticationResponse.class); } public ResponseEntity<JwtUserDetails> user(String token) throws HttpClientErrorException { return restTemplate.exchange(PROTOCOL_STRING + authName + authPath + user, GET, HttpEntityFactory.requestWithAuthentication(token), JwtUserDetails.class); } }
mit
thejandroman/jira_dependency_visualizer
spec/unit/jira_dependency_visualizer_spec.rb
161
require 'spec_helper' describe JiraDependencyVisualizer do it 'has a version number' do expect(JiraDependencyVisualizer::VERSION).not_to be nil end end
mit
bound1ess/toolset
spec/spec_helper.rb
124
require "simplecov" require "simplecov-json" SimpleCov.formatters = [SimpleCov::Formatter::JSONFormatter] SimpleCov.start
mit
Brunomm/br_boleto
test/br_boleto/retorno/base_test.rb
1939
require 'test_helper' describe BrBoleto::Retorno::Base do subject { BrBoleto::Retorno::Base.new('') } before do BrBoleto::Retorno::Base.any_instance.stubs(:read_file!) end describe "#initialize" do it "Se passar um arquivo por parâmetro deve setar o valor em @file e chamar o metodo read_file!" do BrBoleto::Retorno::Base.any_instance.expects(:read_file!) retorno = BrBoleto::Retorno::Base.new('file/path.ret') retorno.instance_variable_get(:@file).must_equal 'file/path.ret' end it "quando instanciar um retorno deve setar os pagamentos com um array" do retorno = BrBoleto::Retorno::Base.new('file/path.ret') retorno.instance_variable_get(:@pagamentos).must_be_kind_of Array end it "se não passar o caminho de um arquivo por parâmetro não deve tentar ler o arquivo" do BrBoleto::Retorno::Base.any_instance.expects(:read_file!).never retorno = BrBoleto::Retorno::Base.new('') retorno.instance_variable_get(:@file).must_equal '' end end it "deve validar que o file é obrigatório" do subject.instance_variable_set(:@file, '') subject.valid? subject.errors.messages[:file].must_include( I18n.t("errors.messages.blank") ) subject.instance_variable_set(:@file, 'something') subject.valid? subject.errors.messages[:file].must_be_nil end describe "#pagamentos" do it "mesmo se setar um valor normal deve retornar um array" do subject.pagamentos = 'valor_normal' subject.pagamentos.must_equal(['valor_normal']) end it "se setar valores em Matriz deve retornar um array" do subject.pagamentos = [['valor_1'],'valor_2'] subject.pagamentos.must_equal(['valor_1','valor_2']) end it "deve ser possível utilizar o operador << para incrementar valores aos pagamentos" do subject.pagamentos << 'valor_1' subject.pagamentos.must_equal(['valor_1']) subject.pagamentos << 'valor_2' subject.pagamentos.must_equal(['valor_1','valor_2']) end end end
mit
weaondara/BungeePerms
src/main/java/net/alpenblock/bungeeperms/platform/bungee/BungeeNotifier.java
7386
/* * Copyright (C) 2020 wea_ondara * * BungeePerms is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * BungeePerms is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package net.alpenblock.bungeeperms.platform.bungee; import java.util.UUID; import lombok.RequiredArgsConstructor; import net.alpenblock.bungeeperms.BungeePerms; import net.alpenblock.bungeeperms.Group; import net.alpenblock.bungeeperms.Statics; import net.alpenblock.bungeeperms.User; import net.alpenblock.bungeeperms.platform.NetworkNotifier; import net.alpenblock.bungeeperms.platform.proxy.NetworkType; import net.alpenblock.bungeeperms.platform.proxy.ProxyConfig; import net.md_5.bungee.api.ProxyServer; import net.md_5.bungee.api.config.ServerInfo; import net.md_5.bungee.api.connection.ProxiedPlayer; @RequiredArgsConstructor public class BungeeNotifier implements NetworkNotifier { private final ProxyConfig config; @Override public void deleteUser(User u, String origin) { if (config.isUseUUIDs()) { sendPM(u.getUUID(), "deleteUser;" + u.getUUID(), origin); } else { sendPM(u.getName(), "deleteUser;" + u.getName(), origin); } } @Override public void deleteGroup(Group g, String origin) { sendPMAll("deleteGroup;" + g.getName(), origin); } @Override public void reloadUser(User u, String origin) { if (config.isUseUUIDs()) { sendPM(u.getUUID(), "reloadUser;" + u.getUUID(), origin); } else { sendPM(u.getName(), "reloadUser;" + u.getName(), origin); } } @Override public void reloadGroup(Group g, String origin) { 
sendPMAll("reloadGroup;" + g.getName(), origin); } @Override public void reloadUsers(String origin) { sendPMAll("reloadUsers", origin); } @Override public void reloadGroups(String origin) { sendPMAll("reloadGroups", origin); } @Override public void reloadAll(String origin) { sendPMAll("reloadall", origin); } public void sendUUIDAndPlayer(String name, UUID uuid) { if (config.isUseUUIDs()) { sendPM(uuid, "uuidcheck;" + name + ";" + uuid, null); } } //bukkit-bungeeperms reload information functions private void sendPM(String player, String msg, String origin) { //if standalone no network messages if (config.getNetworkType() == NetworkType.Standalone) { return; } ProxiedPlayer pp = ProxyServer.getInstance().getPlayer(player); if (pp != null && pp.getServer() != null) { //ignore servers not in config and netork type is server dependend if (config.getNetworkType() == NetworkType.ServerDependend && !Statics.listContains(config.getNetworkServers(), pp.getServer().getInfo().getName())) { return; } if (config.getNetworkType() == NetworkType.ServerDependendBlacklist && Statics.listContains(config.getNetworkServers(), pp.getServer().getInfo().getName())) { return; } //no feedback loop if (origin != null && pp.getServer().getInfo().getName().equalsIgnoreCase(origin)) { return; } //send message pp.getServer().getInfo().sendData(BungeePerms.CHANNEL, msg.getBytes()); sendConfig(pp.getServer().getInfo()); } } private void sendPM(UUID player, String msg, String origin) { //if standalone no network messages if (config.getNetworkType() == NetworkType.Standalone) { return; } ProxiedPlayer pp = ProxyServer.getInstance().getPlayer(player); if (pp != null && pp.getServer() != null) { //ignore servers not in config and netork type is server dependend if (config.getNetworkType() == NetworkType.ServerDependend && !Statics.listContains(config.getNetworkServers(), pp.getServer().getInfo().getName())) { return; } if (config.getNetworkType() == NetworkType.ServerDependendBlacklist && 
Statics.listContains(config.getNetworkServers(), pp.getServer().getInfo().getName())) { return; } //no feedback loop if (origin != null && pp.getServer().getInfo().getName().equalsIgnoreCase(origin)) { return; } //send message pp.getServer().getInfo().sendData(BungeePerms.CHANNEL, msg.getBytes()); sendConfig(pp.getServer().getInfo()); } } private void sendPMAll(String msg, String origin) { //if standalone no network messages if (config.getNetworkType() == NetworkType.Standalone) { return; } for (ServerInfo si : ProxyServer.getInstance().getConfig().getServers().values()) { //ignore servers not in config and netork type is server dependend if (config.getNetworkType() == NetworkType.ServerDependend && !Statics.listContains(config.getNetworkServers(), si.getName())) { return; } if (config.getNetworkType() == NetworkType.ServerDependendBlacklist && Statics.listContains(config.getNetworkServers(), si.getName())) { return; } //no feedback loop if (origin != null && si.getName().equalsIgnoreCase(origin)) { continue; } //send message si.sendData(BungeePerms.CHANNEL, msg.getBytes()); sendConfig(si); } } private long lastConfigUpdate = 0; private void sendConfig(ServerInfo info) { synchronized (this) { long now = System.currentTimeMillis(); if (lastConfigUpdate + 5 * 60 * 1000 < now) { lastConfigUpdate = now; info.sendData(BungeePerms.CHANNEL, ("configcheck" + ";" + info.getName() + ";" + config.getBackendType() + ";" + config.isUseUUIDs() + ";" + config.getResolvingMode() + ";" + config.isGroupPermission() + ";" + config.isUseRegexPerms()).getBytes()); } } } }
mit
zporky/langs-and-paradigms
projects/G4R8AJ/C#/ProgNyelvekBead/States/SignedStart.cs
760
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace ProgNyelvekBead.States { class SignedStart : AbstractState { public SignedStart(String word, String curVal) : base(word, curVal) { } protected override void calculateNextState(Char curChar) { if(curChar == '-' || curChar == '+' ) { this.remWord = this.remWord.Remove(0, 1); this.followingState = new UnsignedStart(this.remWord,this.curVal); } else { this.followingState = new UnsignedStart(this.remWord,this.curVal); } this.isStepped = true; } } }
mit
biplav/bakbak
app/controllers/bakbaks.js
2209
var userUtil = require(__dirname + '/user') , mongoose = require('mongoose') , ApplicationModel = mongoose.model('ApplicationModel') , everyauth = require('everyauth'); //create app created step and use it. exports.index = function(req,res) { userUtil.userById(req,function(user){ ApplicationModel.findOne({adminId:user._id},function(err,application) { if(err || application == null) { res.redirect('/application/create'); } else { if(application.bakbak == null) { res.render('bakbak_list', {user:user, create:false, createOrEdit:false, application:application, success_message:req.session.success_message, current:'bakbak' }); } } }); }); } exports.create = function(req,res) { userUtil.userById(req,function(user){ ApplicationModel.findOne({adminId:user._id},function(err,application) { if(err || application == null) { res.redirect('/application/create'); } else { var bakbak = {}; if(req.param('id') != null) { if(application.bakbaks) { for(var i=0;i<application.bakbaks.length;i++) { console.log(req.param('id')); console.log(application.bakbaks[i].id.toString()); if(application.bakbaks[i].id.toString() == req.param('id')) { bakbak = application.bakbaks[i]; console.log(application.bakbaks[i]); break; } } } } res.render('bakbak_create', {user:user, application:application, success_message:req.session.success_message, bakbak:bakbak, current:'bakbak' }); } }); }); } exports.upsert = function(req, res){ userUtil.userById(req,function(user){ ApplicationModel.findOne({adminId:user._id},function(err,application) { if(err || application == null) { res.redirect('/application/create'); } else { application.upsertBakBak(req.body,function(err,cb){ if(err) { res.render('bakbak_create', {user:user, create:false, createOrEdit:false, application:application, success_message:req.session.success_message, current:'bakbak' }); } else { res.redirect('/application/create'); } }); } }); }); };
mit
alextse/tutorials-node-js
00-hello-world/server.js
219
var http = require("http"); http.createServer(function(req,res) { res.writeHead(200, {"Content-Type": "text/plain"}); res.end("Hello World\n"); }).listen(1337, "127.0.0.1"); console.log("Server running at port 1337");
mit
tantalor/emend
app/test/homepage_test.py
645
# -*- coding: utf-8 -*- import unittest from urllib import urlencode from mocks import mock_handler from emend import stubs class TestHomepage(unittest.TestCase): def setUp(self): stubs.all() def test_unicode_suggest(self): original = u"the
original design" request = '/?%s' % urlencode(dict(original=original.encode('utf8'))) # mock handler import handlers.default handler = mock_handler(page=handlers.default, request=request) # execute handlers try: handler.get() except UnicodeEncodeError: self.fail('failed to encode unicode') if __name__ == "__main__": unittest.main()
mit
jbmlaird/DiscogsBrowser
app/src/main/java/bj/vinylbrowser/home/HomeContract.java
1448
package bj.vinylbrowser.home; import android.app.Activity; import android.support.v7.widget.Toolbar; import com.mikepenz.materialdrawer.Drawer; import java.util.List; import bj.vinylbrowser.model.listing.Listing; import bj.vinylbrowser.model.order.Order; import io.reactivex.Single; /** * Created by j on 18/02/2017. */ public interface HomeContract { interface View { void showLoading(boolean b); Activity getActivity(); void retryHistory(); void retryRecommendations(); // RecyclerView gets detached upon adding the NavigationDrawer void setupRecyclerView(); void displayOrder(String id); void displayOrdersActivity(String username); void displayListingsActivity(String username); void displayListing(String listingId, String username, String title, String s, String username1); void displayError(boolean b); void retry(); void displayRelease(String releaseName, String id); void learnMore(); void setDrawer(Drawer buildNavigationDrawer); } interface Presenter { void connectAndBuildNavigationDrawer(Toolbar toolbar); void buildViewedReleases(); void retry(); Single<List<Order>> fetchOrders(); Single<List<Listing>> fetchSelling(); void buildRecommendations(); void showLoadingRecommendations(boolean isLoading); } }
mit
JonkiPro/REST-Web-Services
web/src/main/java/com/web/web/security/package-info.java
71
/** * Extension of Spring Security. */ package com.web.web.security;
mit
kekeh/vsdatatable
js/directive/vsdttablebodyrow.js
2776
/** * @ngdoc object * @name tableBodyRow * @description tableBodyRow directive handles row clicks done by user. It also hover the row in case defined in * the configuration. */ vsdt.directive('tableBodyRow', ['vsdtServ', function (vsdtServ) { return { restrict: 'A', scope: false, link: function (scope, element, attrs) { scope.rowClicked = function (event, data) { if (checkEvent(event)) { var oper = scope.config.ROW_SELECT; var idx = scope.selectedRows.indexOf(data); if (scope.options.row.selection === 1 && vsdtServ.isEqual(idx, -1)) { if (scope.selectedRows.length > 0) { scope.notifyRowSelect(scope.config.ROW_DESELECT, scope.selectedRows[0]); } scope.selectedRows[0] = data; } else if (scope.options.row.selection === 1 && !vsdtServ.isEqual(idx, -1)) { scope.selectedRows.splice(0, 1); oper = scope.config.ROW_DESELECT; } else if (scope.options.row.selection === 2 && vsdtServ.isEqual(idx, -1)) { scope.selectedRows.push(data); } else if (scope.options.row.selection === 2 && !vsdtServ.isEqual(idx, -1)) { scope.selectedRows.splice(idx, 1); oper = scope.config.ROW_DESELECT; } scope.notifyRowSelect(oper, data); } }; scope.isRowSelected = function (data) { return !vsdtServ.isEqual(scope.selectedRows.indexOf(data), -1); }; function checkEvent(event) { return (vsdtServ.isEqual(event.which, 1) || vsdtServ.isEqual(event.which, 13)) && (scope.options.row.selection === 1 || scope.options.row.selection === 2); } function onMouseEnter() { element.addClass('hoverRow'); } function onMouseLeave() { element.removeClass('hoverRow'); } scope.$on('$destroy', function () { if (scope.options.row.hover) { element.off('mouseenter', onMouseEnter); element.off('mouseleave', onMouseLeave); } }); function init() { if (scope.options.row.hover) { element.on('mouseenter', onMouseEnter); element.on('mouseleave', onMouseLeave); } } init(); } }; }]);
mit
ivargrimstad/dukes-microprofile
dukes-kumuluz/src/main/java/eu/agilejava/dukes/RootResource.java
1506
/* * The MIT License * * Copyright 2015 Ivar Grimstad (ivar.grimstad@gmail.com). * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package eu.agilejava.dukes; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.core.Response; /** * * @author Ivar Grimstad (ivar.grimstad@gmail.com) */ @Path("/") public class RootResource { @GET public Response info() { return Response.ok("Welcome to the Hello Kumuluz Microservice!").build(); } }
mit
End of preview (truncated to 100 rows)

No dataset card yet

New: Create and edit this dataset card directly on the website!

Contribute a Dataset Card
Add dataset card
Evaluate models HF Leaderboard