code: string, length 2 to 1.05M
repo_name: string, length 4 to 116
path: string, length 4 to 991
language: string, 9 classes
license: string, 15 classes
size: int32, 2 to 1.05M
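Each row that follows pairs one source file (code) with its repo_name, path, language, license, and size in bytes. As a rough sketch of how a dump with this schema might be consumed, the Python snippet below loads it with the Hugging Face datasets library and filters on the metadata columns; the dataset id "org/code-dump" is a placeholder, not the actual name of this dataset.

# Minimal sketch, assuming a Hugging Face dataset with the columns listed above.
# "org/code-dump" is a hypothetical dataset id; substitute the real one.
from datasets import load_dataset

ds = load_dataset("org/code-dump", split="train")

# Keep only ISC-licensed JavaScript files smaller than 1 KB.
small_js = ds.filter(
    lambda row: row["language"] == "JavaScript"
    and row["license"] == "isc"
    and row["size"] < 1024
)

for row in small_js.select(range(3)):
    print(row["repo_name"], row["path"], row["size"])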
'use strict';

var clear          = require('es5-ext/array/#/clear')
  , eIndexOf       = require('es5-ext/array/#/e-index-of')
  , setPrototypeOf = require('es5-ext/object/set-prototype-of')
  , callable       = require('es5-ext/object/valid-callable')
  , d              = require('d')
  , ee             = require('event-emitter')
  , Symbol         = require('es6-symbol')
  , iterator       = require('es6-iterator/valid-iterable')
  , forOf          = require('es6-iterator/for-of')
  , Iterator       = require('./lib/iterator')
  , isNative       = require('./is-native-implemented')

  , call = Function.prototype.call, defineProperty = Object.defineProperty
  , SetPoly, getValues;

module.exports = SetPoly = function (/*iterable*/) {
  var iterable = arguments[0];
  if (!(this instanceof SetPoly)) return new SetPoly(iterable);
  if (this.__setData__ !== undefined) {
    throw new TypeError(this + " cannot be reinitialized");
  }
  if (iterable != null) iterator(iterable);
  defineProperty(this, '__setData__', d('c', []));
  if (!iterable) return;
  forOf(iterable, function (value) {
    if (eIndexOf.call(this, value) !== -1) return;
    this.push(value);
  }, this.__setData__);
};

if (isNative) {
  if (setPrototypeOf) setPrototypeOf(SetPoly, Set);
  SetPoly.prototype = Object.create(Set.prototype, {
    constructor: d(SetPoly)
  });
}

ee(Object.defineProperties(SetPoly.prototype, {
  add: d(function (value) {
    if (this.has(value)) return this;
    this.emit('_add', this.__setData__.push(value) - 1, value);
    return this;
  }),
  clear: d(function () {
    if (!this.__setData__.length) return;
    clear.call(this.__setData__);
    this.emit('_clear');
  }),
  delete: d(function (value) {
    var index = eIndexOf.call(this.__setData__, value);
    if (index === -1) return false;
    this.__setData__.splice(index, 1);
    this.emit('_delete', index, value);
    return true;
  }),
  entries: d(function () { return new Iterator(this, 'key+value'); }),
  forEach: d(function (cb/*, thisArg*/) {
    var thisArg = arguments[1], iterator, result, value;
    callable(cb);
    iterator = this.values();
    result = iterator._next();
    while (result !== undefined) {
      value = iterator._resolve(result);
      call.call(cb, thisArg, value, value, this);
      result = iterator._next();
    }
  }),
  has: d(function (value) {
    return (eIndexOf.call(this.__setData__, value) !== -1);
  }),
  keys: d(getValues = function () { return this.values(); }),
  size: d.gs(function () { return this.__setData__.length; }),
  values: d(function () { return new Iterator(this); }),
  toString: d(function () { return '[object Set]'; })
}));
defineProperty(SetPoly.prototype, Symbol.iterator, d(getValues));
defineProperty(SetPoly.prototype, Symbol.toStringTag, d('c', 'Set'));
Socratacom/socrata-europe
wp-content/themes/sage/node_modules/asset-builder/node_modules/main-bower-files/node_modules/vinyl-fs/node_modules/glob-stream/node_modules/unique-stream/node_modules/es6-set/polyfill.js
JavaScript
gpl-2.0
2,730
'use strict';

const TYPE = Symbol.for('type');

class Data {
  constructor(options) {
    // File details
    this.filepath = options.filepath;

    // Type
    this[TYPE] = 'data';

    // Data
    Object.assign(this, options.data);
  }
}

module.exports = Data;
mshick/velvet
core/classes/data.js
JavaScript
isc
266
package sodium

// #cgo pkg-config: libsodium
// #include <stdlib.h>
// #include <sodium.h>
import "C"

func RuntimeHasNeon() bool {
    return C.sodium_runtime_has_neon() != 0
}

func RuntimeHasSse2() bool {
    return C.sodium_runtime_has_sse2() != 0
}

func RuntimeHasSse3() bool {
    return C.sodium_runtime_has_sse3() != 0
}
GoKillers/libsodium-go
sodium/runtime.go
GO
isc
322
function collectWithWildcard(test) {
  test.expect(4);

  var api_server = new Test_ApiServer(function handler(request, callback) {
    var url = request.url;

    switch (url) {
      case '/accounts?username=chariz*':
        let account = new Model_Account({
          username: 'charizard'
        });

        return void callback(null, [
          account.redact()
        ]);

      default:
        let error = new Error('Invalid url: ' + url);

        return void callback(error);
    }
  });

  var parameters = {
    username: 'chariz*'
  };

  function handler(error, results) {
    test.equals(error, null);
    test.equals(results.length, 1);

    var account = results[0];

    test.equals(account.get('username'), 'charizard');
    test.equals(account.get('type'), Enum_AccountTypes.MEMBER);

    api_server.destroy();
    test.done();
  }

  Resource_Accounts.collect(parameters, handler);
}

module.exports = {
  collectWithWildcard
};
burninggarden/burninggarden
test/unit/resource/accounts.js
JavaScript
isc
886
<?php

interface Container
{
    /**
     * Checks if a $x exists.
     *
     * @param unknown $x
     *
     * @return boolean
     */
    function contains($x);
}
guide42/php-immutable
src/base.php
PHP
isc
165
angular.module('appTesting').service("LoginLocalStorage", function () {
  "use strict";

  var STORE_NAME = "login";

  var setUser = function setUser(user) {
    localStorage.setItem(STORE_NAME, JSON.stringify(user));
  }

  var getUser = function getUser() {
    var storedTasks = localStorage.getItem(STORE_NAME);
    if (storedTasks) {
      return JSON.parse(storedTasks);
    }
    return {};
  }

  return {
    setUser: setUser,
    getUser: getUser
  }
});
pikachumetal/cursoangular05
app/loginModule/services/localstorage.js
JavaScript
isc
515
/* eslint-disable no-console */
const buildData = require('./build_data');
const buildSrc = require('./build_src');
const buildCSS = require('./build_css');

let _currBuild = null;

// if called directly, do the thing.
buildAll();

function buildAll() {
  if (_currBuild) return _currBuild;

  return _currBuild = Promise.resolve()
    .then(() => buildCSS())
    .then(() => buildData())
    .then(() => buildSrc())
    .then(() => _currBuild = null)
    .catch((err) => {
      console.error(err);
      _currBuild = null;
      process.exit(1);
    });
}

module.exports = buildAll;
kartta-labs/iD
build.js
JavaScript
isc
591
function LetterProps(o, sw, sc, fc, m, p) {
  this.o = o;
  this.sw = sw;
  this.sc = sc;
  this.fc = fc;
  this.m = m;
  this.p = p;
  this._mdf = {
    o: true,
    sw: !!sw,
    sc: !!sc,
    fc: !!fc,
    m: true,
    p: true,
  };
}

LetterProps.prototype.update = function (o, sw, sc, fc, m, p) {
  this._mdf.o = false;
  this._mdf.sw = false;
  this._mdf.sc = false;
  this._mdf.fc = false;
  this._mdf.m = false;
  this._mdf.p = false;
  var updated = false;

  if (this.o !== o) {
    this.o = o;
    this._mdf.o = true;
    updated = true;
  }
  if (this.sw !== sw) {
    this.sw = sw;
    this._mdf.sw = true;
    updated = true;
  }
  if (this.sc !== sc) {
    this.sc = sc;
    this._mdf.sc = true;
    updated = true;
  }
  if (this.fc !== fc) {
    this.fc = fc;
    this._mdf.fc = true;
    updated = true;
  }
  if (this.m !== m) {
    this.m = m;
    this._mdf.m = true;
    updated = true;
  }
  if (p.length && (this.p[0] !== p[0] || this.p[1] !== p[1] || this.p[4] !== p[4] || this.p[5] !== p[5] || this.p[12] !== p[12] || this.p[13] !== p[13])) {
    this.p = p;
    this._mdf.p = true;
    updated = true;
  }
  return updated;
};
damienmortini/dlib
node_modules/lottie-web/player/js/utils/text/LetterProps.js
JavaScript
isc
1,212
System.register(["angular2/test_lib", "angular2/src/test_lib/test_bed", "angular2/src/core/annotations_impl/annotations", "angular2/src/core/annotations_impl/view", "angular2/src/core/compiler/dynamic_component_loader", "angular2/src/core/compiler/element_ref", "angular2/src/directives/if", "angular2/src/render/dom/direct_dom_renderer", "angular2/src/dom/dom_adapter"], function($__export) { "use strict"; var AsyncTestCompleter, beforeEach, ddescribe, xdescribe, describe, el, dispatchEvent, expect, iit, inject, beforeEachBindings, it, xit, TestBed, Component, View, DynamicComponentLoader, ElementRef, If, DirectDomRenderer, DOM, ImperativeViewComponentUsingNgComponent, ChildComp, DynamicallyCreatedComponentService, DynamicComp, DynamicallyCreatedCmp, DynamicallyLoaded, DynamicallyLoaded2, DynamicallyLoadedWithHostProps, Location, MyComp; function main() { describe('DynamicComponentLoader', function() { describe("loading into existing location", (function() { it('should work', inject([TestBed, AsyncTestCompleter], (function(tb, async) { tb.overrideView(MyComp, new View({ template: '<dynamic-comp #dynamic></dynamic-comp>', directives: [DynamicComp] })); tb.createView(MyComp).then((function(view) { var dynamicComponent = view.rawView.locals.get("dynamic"); expect(dynamicComponent).toBeAnInstanceOf(DynamicComp); dynamicComponent.done.then((function(_) { view.detectChanges(); expect(view.rootNodes).toHaveText('hello'); async.done(); })); })); }))); it('should inject dependencies of the dynamically-loaded component', inject([TestBed, AsyncTestCompleter], (function(tb, async) { tb.overrideView(MyComp, new View({ template: '<dynamic-comp #dynamic></dynamic-comp>', directives: [DynamicComp] })); tb.createView(MyComp).then((function(view) { var dynamicComponent = view.rawView.locals.get("dynamic"); dynamicComponent.done.then((function(ref) { expect(ref.instance.dynamicallyCreatedComponentService).toBeAnInstanceOf(DynamicallyCreatedComponentService); async.done(); })); })); }))); it('should allow to destroy and create them via viewcontainer directives', inject([TestBed, AsyncTestCompleter], (function(tb, async) { tb.overrideView(MyComp, new View({ template: '<div><dynamic-comp #dynamic template="if: ctxBoolProp"></dynamic-comp></div>', directives: [DynamicComp, If] })); tb.createView(MyComp).then((function(view) { view.context.ctxBoolProp = true; view.detectChanges(); var dynamicComponent = view.rawView.viewContainers[0].views[0].locals.get("dynamic"); dynamicComponent.done.then((function(_) { view.detectChanges(); expect(view.rootNodes).toHaveText('hello'); view.context.ctxBoolProp = false; view.detectChanges(); expect(view.rawView.viewContainers[0].views.length).toBe(0); expect(view.rootNodes).toHaveText(''); view.context.ctxBoolProp = true; view.detectChanges(); var dynamicComponent = view.rawView.viewContainers[0].views[0].locals.get("dynamic"); return dynamicComponent.done; })).then((function(_) { view.detectChanges(); expect(view.rootNodes).toHaveText('hello'); async.done(); })); })); }))); })); describe("loading next to an existing location", (function() { it('should work', inject([DynamicComponentLoader, TestBed, AsyncTestCompleter], (function(loader, tb, async) { tb.overrideView(MyComp, new View({ template: '<div><location #loc></location></div>', directives: [Location] })); tb.createView(MyComp).then((function(view) { var location = view.rawView.locals.get("loc"); loader.loadNextToExistingLocation(DynamicallyLoaded, location.elementRef).then((function(ref) { 
expect(view.rootNodes).toHaveText("Location;DynamicallyLoaded;"); async.done(); })); })); }))); it('should return a disposable component ref', inject([DynamicComponentLoader, TestBed, AsyncTestCompleter], (function(loader, tb, async) { tb.overrideView(MyComp, new View({ template: '<div><location #loc></location></div>', directives: [Location] })); tb.createView(MyComp).then((function(view) { var location = view.rawView.locals.get("loc"); loader.loadNextToExistingLocation(DynamicallyLoaded, location.elementRef).then((function(ref) { loader.loadNextToExistingLocation(DynamicallyLoaded2, location.elementRef).then((function(ref2) { expect(view.rootNodes).toHaveText("Location;DynamicallyLoaded;DynamicallyLoaded2;"); ref2.dispose(); expect(view.rootNodes).toHaveText("Location;DynamicallyLoaded;"); async.done(); })); })); })); }))); it('should update host properties', inject([DynamicComponentLoader, TestBed, AsyncTestCompleter], (function(loader, tb, async) { tb.overrideView(MyComp, new View({ template: '<div><location #loc></location></div>', directives: [Location] })); tb.createView(MyComp).then((function(view) { var location = view.rawView.locals.get("loc"); loader.loadNextToExistingLocation(DynamicallyLoadedWithHostProps, location.elementRef).then((function(ref) { ref.instance.id = "new value"; view.detectChanges(); var newlyInsertedElement = DOM.childNodesAsList(view.rootNodes[0])[1]; expect(newlyInsertedElement.id).toEqual("new value"); async.done(); })); })); }))); })); describe('loading into a new location', (function() { it('should allow to create, update and destroy components', inject([TestBed, AsyncTestCompleter], (function(tb, async) { tb.overrideView(MyComp, new View({ template: '<imp-ng-cmp #impview></imp-ng-cmp>', directives: [ImperativeViewComponentUsingNgComponent] })); tb.createView(MyComp).then((function(view) { var userViewComponent = view.rawView.locals.get("impview"); userViewComponent.done.then((function(childComponentRef) { view.detectChanges(); expect(view.rootNodes).toHaveText('hello'); childComponentRef.instance.ctxProp = 'new'; view.detectChanges(); expect(view.rootNodes).toHaveText('new'); childComponentRef.dispose(); expect(view.rootNodes).toHaveText(''); async.done(); })); })); }))); })); }); } $__export("main", main); return { setters: [function($__m) { AsyncTestCompleter = $__m.AsyncTestCompleter; beforeEach = $__m.beforeEach; ddescribe = $__m.ddescribe; xdescribe = $__m.xdescribe; describe = $__m.describe; el = $__m.el; dispatchEvent = $__m.dispatchEvent; expect = $__m.expect; iit = $__m.iit; inject = $__m.inject; beforeEachBindings = $__m.beforeEachBindings; it = $__m.it; xit = $__m.xit; }, function($__m) { TestBed = $__m.TestBed; }, function($__m) { Component = $__m.Component; }, function($__m) { View = $__m.View; }, function($__m) { DynamicComponentLoader = $__m.DynamicComponentLoader; }, function($__m) { ElementRef = $__m.ElementRef; }, function($__m) { If = $__m.If; }, function($__m) { DirectDomRenderer = $__m.DirectDomRenderer; }, function($__m) { DOM = $__m.DOM; }], execute: function() { ImperativeViewComponentUsingNgComponent = (function() { var ImperativeViewComponentUsingNgComponent = function ImperativeViewComponentUsingNgComponent(self, dynamicComponentLoader, renderer) { var div = el('<div></div>'); renderer.setImperativeComponentRootNodes(self.parentView.render, self.boundElementIndex, [div]); this.done = dynamicComponentLoader.loadIntoNewLocation(ChildComp, self, div, null); }; return 
($traceurRuntime.createClass)(ImperativeViewComponentUsingNgComponent, {}, {}); }()); Object.defineProperty(ImperativeViewComponentUsingNgComponent, "annotations", {get: function() { return [new Component({selector: 'imp-ng-cmp'}), new View({renderer: 'imp-ng-cmp-renderer'})]; }}); Object.defineProperty(ImperativeViewComponentUsingNgComponent, "parameters", {get: function() { return [[ElementRef], [DynamicComponentLoader], [DirectDomRenderer]]; }}); ChildComp = (function() { var ChildComp = function ChildComp() { this.ctxProp = 'hello'; }; return ($traceurRuntime.createClass)(ChildComp, {}, {}); }()); Object.defineProperty(ChildComp, "annotations", {get: function() { return [new Component({selector: 'child-cmp'}), new View({template: '{{ctxProp}}'})]; }}); DynamicallyCreatedComponentService = (function() { var DynamicallyCreatedComponentService = function DynamicallyCreatedComponentService() { ; }; return ($traceurRuntime.createClass)(DynamicallyCreatedComponentService, {}, {}); }()); DynamicComp = (function() { var DynamicComp = function DynamicComp(loader, location) { this.done = loader.loadIntoExistingLocation(DynamicallyCreatedCmp, location); }; return ($traceurRuntime.createClass)(DynamicComp, {}, {}); }()); Object.defineProperty(DynamicComp, "annotations", {get: function() { return [new Component({selector: 'dynamic-comp'})]; }}); Object.defineProperty(DynamicComp, "parameters", {get: function() { return [[DynamicComponentLoader], [ElementRef]]; }}); DynamicallyCreatedCmp = (function() { var DynamicallyCreatedCmp = function DynamicallyCreatedCmp(a) { this.greeting = "hello"; this.dynamicallyCreatedComponentService = a; }; return ($traceurRuntime.createClass)(DynamicallyCreatedCmp, {}, {}); }()); Object.defineProperty(DynamicallyCreatedCmp, "annotations", {get: function() { return [new Component({ selector: 'hello-cmp', injectables: [DynamicallyCreatedComponentService] }), new View({template: "{{greeting}}"})]; }}); Object.defineProperty(DynamicallyCreatedCmp, "parameters", {get: function() { return [[DynamicallyCreatedComponentService]]; }}); DynamicallyLoaded = (function() { var DynamicallyLoaded = function DynamicallyLoaded() { ; }; return ($traceurRuntime.createClass)(DynamicallyLoaded, {}, {}); }()); Object.defineProperty(DynamicallyLoaded, "annotations", {get: function() { return [new Component({selector: 'dummy'}), new View({template: "DynamicallyLoaded;"})]; }}); DynamicallyLoaded2 = (function() { var DynamicallyLoaded2 = function DynamicallyLoaded2() { ; }; return ($traceurRuntime.createClass)(DynamicallyLoaded2, {}, {}); }()); Object.defineProperty(DynamicallyLoaded2, "annotations", {get: function() { return [new Component({selector: 'dummy'}), new View({template: "DynamicallyLoaded2;"})]; }}); DynamicallyLoadedWithHostProps = (function() { var DynamicallyLoadedWithHostProps = function DynamicallyLoadedWithHostProps() { this.id = "default"; }; return ($traceurRuntime.createClass)(DynamicallyLoadedWithHostProps, {}, {}); }()); Object.defineProperty(DynamicallyLoadedWithHostProps, "annotations", {get: function() { return [new Component({ selector: 'dummy', hostProperties: {'id': 'id'} }), new View({template: "DynamicallyLoadedWithHostProps;"})]; }}); Location = (function() { var Location = function Location(elementRef) { this.elementRef = elementRef; }; return ($traceurRuntime.createClass)(Location, {}, {}); }()); Object.defineProperty(Location, "annotations", {get: function() { return [new Component({selector: 'location'}), new View({template: "Location;"})]; }}); 
Object.defineProperty(Location, "parameters", {get: function() { return [[ElementRef]]; }}); MyComp = (function() { var MyComp = function MyComp() { this.ctxBoolProp = false; }; return ($traceurRuntime.createClass)(MyComp, {}, {}); }()); Object.defineProperty(MyComp, "annotations", {get: function() { return [new Component({selector: 'my-comp'}), new View({directives: []})]; }}); } }; }); //# sourceMappingURL=dynamic_component_loader_spec.es6.map //# sourceMappingURL=./dynamic_component_loader_spec.js.map
denzp/pacman
application/lib/angular2/test/core/compiler/dynamic_component_loader_spec.js
JavaScript
isc
13,760
/* * DISTRHO Plugin Framework (DPF) * Copyright (C) 2012-2014 Filipe Coelho <falktx@falktx.com> * * Permission to use, copy, modify, and/or distribute this software for any purpose with * or without fee is hereby granted, provided that the above copyright notice and this * permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD * TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN * NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER * IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ #include "DistrhoPluginInternal.hpp" #include "lv2/atom.h" #include "lv2/buf-size.h" #include "lv2/data-access.h" #include "lv2/instance-access.h" #include "lv2/midi.h" #include "lv2/options.h" #include "lv2/port-props.h" #include "lv2/resize-port.h" #include "lv2/state.h" #include "lv2/time.h" #include "lv2/ui.h" #include "lv2/units.h" #include "lv2/urid.h" #include "lv2/worker.h" #include "lv2/lv2_kxstudio_properties.h" #include "lv2/lv2_programs.h" #include <fstream> #include <iostream> #ifndef DISTRHO_PLUGIN_URI # error DISTRHO_PLUGIN_URI undefined! #endif #ifndef DISTRHO_PLUGIN_MINIMUM_BUFFER_SIZE # define DISTRHO_PLUGIN_MINIMUM_BUFFER_SIZE 2048 #endif #define DISTRHO_LV2_USE_EVENTS_IN (DISTRHO_PLUGIN_HAS_MIDI_INPUT || DISTRHO_PLUGIN_WANT_TIMEPOS || (DISTRHO_PLUGIN_WANT_STATE && DISTRHO_PLUGIN_HAS_UI)) #define DISTRHO_LV2_USE_EVENTS_OUT (DISTRHO_PLUGIN_HAS_MIDI_OUTPUT || (DISTRHO_PLUGIN_WANT_STATE && DISTRHO_PLUGIN_HAS_UI)) // ----------------------------------------------------------------------- DISTRHO_PLUGIN_EXPORT void lv2_generate_ttl(const char* const basename) { USE_NAMESPACE_DISTRHO // Dummy plugin to get data from d_lastBufferSize = 512; d_lastSampleRate = 44100.0; PluginExporter plugin; d_lastBufferSize = 0; d_lastSampleRate = 0.0; d_string pluginDLL(basename); d_string pluginTTL(pluginDLL + ".ttl"); // --------------------------------------------- { std::cout << "Writing manifest.ttl..."; std::cout.flush(); std::fstream manifestFile("manifest.ttl", std::ios::out); d_string manifestString; manifestString += "@prefix lv2: <" LV2_CORE_PREFIX "> .\n"; manifestString += "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n"; #if DISTRHO_PLUGIN_HAS_UI manifestString += "@prefix ui: <" LV2_UI_PREFIX "> .\n"; #endif manifestString += "\n"; manifestString += "<" DISTRHO_PLUGIN_URI ">\n"; manifestString += " a lv2:Plugin ;\n"; manifestString += " lv2:binary <" + pluginDLL + "." DISTRHO_DLL_EXTENSION "> ;\n"; manifestString += " rdfs:seeAlso <" + pluginTTL + "> .\n"; manifestString += "\n"; #if DISTRHO_PLUGIN_HAS_UI manifestString += "<" DISTRHO_UI_URI ">\n"; # if DISTRHO_OS_HAIKU manifestString += " a ui:BeUI ;\n"; # elif DISTRHO_OS_MAC manifestString += " a ui:CocoaUI ;\n"; # elif DISTRHO_OS_WINDOWS manifestString += " a ui:WindowsUI ;\n"; # else manifestString += " a ui:X11UI ;\n"; # endif # if ! DISTRHO_PLUGIN_WANT_DIRECT_ACCESS d_string pluginUI(pluginDLL); pluginUI.truncate(pluginDLL.rfind("_dsp")); pluginUI += "_ui"; manifestString += " ui:binary <" + pluginUI + "." DISTRHO_DLL_EXTENSION "> ;\n"; # else manifestString += " ui:binary <" + pluginDLL + "." 
DISTRHO_DLL_EXTENSION "> ;\n"; #endif manifestString += " lv2:extensionData ui:idleInterface ,\n"; # if DISTRHO_PLUGIN_WANT_PROGRAMS manifestString += " ui:showInterface ,\n"; manifestString += " <" LV2_PROGRAMS__Interface "> ;\n"; # else manifestString += " ui:showInterface ;\n"; # endif manifestString += " lv2:optionalFeature ui:noUserResize ,\n"; manifestString += " ui:resize ,\n"; manifestString += " ui:touch ;\n"; # if DISTRHO_PLUGIN_WANT_DIRECT_ACCESS manifestString += " lv2:requiredFeature <" LV2_DATA_ACCESS_URI "> ,\n"; manifestString += " <" LV2_INSTANCE_ACCESS_URI "> ,\n"; manifestString += " <" LV2_OPTIONS__options "> ,\n"; # else manifestString += " lv2:requiredFeature <" LV2_OPTIONS__options "> ,\n"; # endif manifestString += " <" LV2_URID__map "> .\n"; #endif manifestFile << manifestString << std::endl; manifestFile.close(); std::cout << " done!" << std::endl; } // --------------------------------------------- { std::cout << "Writing " << pluginTTL << "..."; std::cout.flush(); std::fstream pluginFile(pluginTTL, std::ios::out); d_string pluginString; // header #if DISTRHO_LV2_USE_EVENTS_IN pluginString += "@prefix atom: <" LV2_ATOM_PREFIX "> .\n"; #endif pluginString += "@prefix doap: <http://usefulinc.com/ns/doap#> .\n"; pluginString += "@prefix foaf: <http://xmlns.com/foaf/0.1/> .\n"; pluginString += "@prefix lv2: <" LV2_CORE_PREFIX "> .\n"; pluginString += "@prefix rsz: <" LV2_RESIZE_PORT_PREFIX "> .\n"; #if DISTRHO_PLUGIN_HAS_UI pluginString += "@prefix ui: <" LV2_UI_PREFIX "> .\n"; #endif pluginString += "@prefix unit: <" LV2_UNITS_PREFIX "> .\n"; pluginString += "\n"; // plugin pluginString += "<" DISTRHO_PLUGIN_URI ">\n"; #if DISTRHO_PLUGIN_IS_SYNTH pluginString += " a lv2:InstrumentPlugin, lv2:Plugin ;\n"; #else pluginString += " a lv2:Plugin ;\n"; #endif pluginString += "\n"; // extensionData pluginString += " lv2:extensionData <" LV2_STATE__interface "> "; #if DISTRHO_PLUGIN_WANT_STATE pluginString += ",\n <" LV2_OPTIONS__interface "> "; pluginString += ",\n <" LV2_WORKER__interface "> "; #endif #if DISTRHO_PLUGIN_WANT_PROGRAMS pluginString += ",\n <" LV2_PROGRAMS__Interface "> "; #endif pluginString += ";\n\n"; // optionalFeatures #if DISTRHO_PLUGIN_IS_RT_SAFE pluginString += " lv2:optionalFeature <" LV2_CORE__hardRTCapable "> ,\n"; pluginString += " <" LV2_BUF_SIZE__boundedBlockLength "> ;\n"; #else pluginString += " lv2:optionalFeature <" LV2_BUF_SIZE__boundedBlockLength "> ;\n"; #endif pluginString += "\n"; // requiredFeatures pluginString += " lv2:requiredFeature <" LV2_OPTIONS__options "> "; pluginString += ",\n <" LV2_URID__map "> "; #if DISTRHO_PLUGIN_WANT_STATE pluginString += ",\n <" LV2_WORKER__schedule "> "; #endif pluginString += ";\n\n"; // UI #if DISTRHO_PLUGIN_HAS_UI pluginString += " ui:ui <" DISTRHO_UI_URI "> ;\n"; pluginString += "\n"; #endif { uint32_t portIndex = 0; #if DISTRHO_PLUGIN_NUM_INPUTS > 0 for (uint32_t i=0; i < DISTRHO_PLUGIN_NUM_INPUTS; ++i, ++portIndex) { if (i == 0) pluginString += " lv2:port [\n"; else pluginString += " [\n"; pluginString += " a lv2:InputPort, lv2:AudioPort ;\n"; pluginString += " lv2:index " + d_string(portIndex) + " ;\n"; pluginString += " lv2:symbol \"lv2_audio_in_" + d_string(i+1) + "\" ;\n"; pluginString += " lv2:name \"Audio Input " + d_string(i+1) + "\" ;\n"; if (i+1 == DISTRHO_PLUGIN_NUM_INPUTS) pluginString += " ] ;\n\n"; else pluginString += " ] ,\n"; } pluginString += "\n"; #endif #if DISTRHO_PLUGIN_NUM_OUTPUTS > 0 for (uint32_t i=0; i < DISTRHO_PLUGIN_NUM_OUTPUTS; ++i, ++portIndex) { if (i == 0) 
pluginString += " lv2:port [\n"; else pluginString += " [\n"; pluginString += " a lv2:OutputPort, lv2:AudioPort ;\n"; pluginString += " lv2:index " + d_string(portIndex) + " ;\n"; pluginString += " lv2:symbol \"lv2_audio_out_" + d_string(i+1) + "\" ;\n"; pluginString += " lv2:name \"Audio Output " + d_string(i+1) + "\" ;\n"; if (i+1 == DISTRHO_PLUGIN_NUM_OUTPUTS) pluginString += " ] ;\n\n"; else pluginString += " ] ,\n"; } pluginString += "\n"; #endif #if DISTRHO_LV2_USE_EVENTS_IN pluginString += " lv2:port [\n"; pluginString += " a lv2:InputPort, atom:AtomPort ;\n"; pluginString += " lv2:index " + d_string(portIndex) + " ;\n"; pluginString += " lv2:name \"Events Input\" ;\n"; pluginString += " lv2:symbol \"lv2_events_in\" ;\n"; pluginString += " rsz:minimumSize " + d_string(DISTRHO_PLUGIN_MINIMUM_BUFFER_SIZE) + " ;\n"; pluginString += " atom:bufferType atom:Sequence ;\n"; # if (DISTRHO_PLUGIN_WANT_STATE && DISTRHO_PLUGIN_HAS_UI) pluginString += " atom:supports <" LV2_ATOM__String "> ;\n"; # endif # if DISTRHO_PLUGIN_HAS_MIDI_INPUT pluginString += " atom:supports <" LV2_MIDI__MidiEvent "> ;\n"; # endif # if DISTRHO_PLUGIN_WANT_TIMEPOS pluginString += " atom:supports <" LV2_TIME__Position "> ;\n"; # endif pluginString += " ] ;\n\n"; ++portIndex; #endif #if DISTRHO_LV2_USE_EVENTS_OUT pluginString += " lv2:port [\n"; pluginString += " a lv2:OutputPort, atom:AtomPort ;\n"; pluginString += " lv2:index " + d_string(portIndex) + " ;\n"; pluginString += " lv2:name \"Events Output\" ;\n"; pluginString += " lv2:symbol \"lv2_events_out\" ;\n"; pluginString += " rsz:minimumSize " + d_string(DISTRHO_PLUGIN_MINIMUM_BUFFER_SIZE) + " ;\n"; pluginString += " atom:bufferType atom:Sequence ;\n"; # if (DISTRHO_PLUGIN_WANT_STATE && DISTRHO_PLUGIN_HAS_UI) pluginString += " atom:supports <" LV2_ATOM__String "> ;\n"; # endif # if DISTRHO_PLUGIN_HAS_MIDI_OUTPUT pluginString += " atom:supports <" LV2_MIDI__MidiEvent "> ;\n"; # endif pluginString += " ] ;\n\n"; ++portIndex; #endif #if DISTRHO_PLUGIN_WANT_LATENCY pluginString += " lv2:port [\n"; pluginString += " a lv2:OutputPort, lv2:ControlPort ;\n"; pluginString += " lv2:index " + d_string(portIndex) + " ;\n"; pluginString += " lv2:name \"Latency\" ;\n"; pluginString += " lv2:symbol \"lv2_latency\" ;\n"; pluginString += " lv2:designation lv2:latency ;\n"; pluginString += " lv2:portProperty lv2:reportsLatency, lv2:integer ;\n"; pluginString += " ] ;\n\n"; ++portIndex; #endif for (uint32_t i=0, count=plugin.getParameterCount(); i < count; ++i, ++portIndex) { if (i == 0) pluginString += " lv2:port [\n"; else pluginString += " [\n"; if (plugin.isParameterOutput(i)) pluginString += " a lv2:OutputPort, lv2:ControlPort ;\n"; else pluginString += " a lv2:InputPort, lv2:ControlPort ;\n"; pluginString += " lv2:index " + d_string(portIndex) + " ;\n"; pluginString += " lv2:name \"" + plugin.getParameterName(i) + "\" ;\n"; // symbol { d_string symbol(plugin.getParameterSymbol(i)); if (symbol.isEmpty()) symbol = "lv2_port_" + d_string(portIndex-1); pluginString += " lv2:symbol \"" + symbol + "\" ;\n"; } // ranges { const ParameterRanges& ranges(plugin.getParameterRanges(i)); if (plugin.getParameterHints(i) & kParameterIsInteger) { pluginString += " lv2:default " + d_string(int(plugin.getParameterValue(i))) + " ;\n"; pluginString += " lv2:minimum " + d_string(int(ranges.min)) + " ;\n"; pluginString += " lv2:maximum " + d_string(int(ranges.max)) + " ;\n"; } else { pluginString += " lv2:default " + d_string(plugin.getParameterValue(i)) + " ;\n"; pluginString += " lv2:minimum " + 
d_string(ranges.min) + " ;\n"; pluginString += " lv2:maximum " + d_string(ranges.max) + " ;\n"; } } // unit { const d_string& unit(plugin.getParameterUnit(i)); if (! unit.isEmpty()) { if (unit == "db" || unit == "dB") { pluginString += " unit:unit unit:db ;\n"; } else if (unit == "hz" || unit == "Hz") { pluginString += " unit:unit unit:hz ;\n"; } else if (unit == "khz" || unit == "kHz") { pluginString += " unit:unit unit:khz ;\n"; } else if (unit == "mhz" || unit == "mHz") { pluginString += " unit:unit unit:mhz ;\n"; } else if (unit == "%") { pluginString += " unit:unit unit:pc ;\n"; } else { pluginString += " unit:unit [\n"; pluginString += " a unit:Unit ;\n"; pluginString += " unit:name \"" + unit + "\" ;\n"; pluginString += " unit:symbol \"" + unit + "\" ;\n"; pluginString += " unit:render \"%f " + unit + "\" ;\n"; pluginString += " ] ;\n"; } } } // hints { const uint32_t hints(plugin.getParameterHints(i)); if (hints & kParameterIsBoolean) pluginString += " lv2:portProperty lv2:toggled ;\n"; if (hints & kParameterIsInteger) pluginString += " lv2:portProperty lv2:integer ;\n"; if (hints & kParameterIsLogarithmic) pluginString += " lv2:portProperty <" LV2_PORT_PROPS__logarithmic "> ;\n"; if ((hints & kParameterIsAutomable) == 0 && ! plugin.isParameterOutput(i)) { pluginString += " lv2:portProperty <" LV2_PORT_PROPS__expensive "> ,\n"; pluginString += " <" LV2_KXSTUDIO_PROPERTIES__NonAutomable "> ;\n"; } } if (i+1 == count) pluginString += " ] ;\n\n"; else pluginString += " ] ,\n"; } } pluginString += " doap:name \"" + d_string(plugin.getName()) + "\" ;\n"; pluginString += " doap:maintainer [ foaf:name \"" + d_string(plugin.getMaker()) + "\" ] .\n"; pluginFile << pluginString << std::endl; pluginFile.close(); std::cout << " done!" << std::endl; } }
DanielAeolusLaude/DPF-NTK
distrho/src/DistrhoPluginLV2export.cpp
C++
isc
16,790
// The following are instance methods and variables var Note = Class.create({ initialize: function(id, is_new, raw_body) { if (Note.debug) { console.debug("Note#initialize (id=%d)", id) } this.id = id this.is_new = is_new this.document_observers = []; // Cache the elements this.elements = { box: $('note-box-' + this.id), corner: $('note-corner-' + this.id), body: $('note-body-' + this.id), image: $('image') } // Cache the dimensions this.fullsize = { left: this.elements.box.offsetLeft, top: this.elements.box.offsetTop, width: this.elements.box.clientWidth, height: this.elements.box.clientHeight } // Store the original values (in case the user clicks Cancel) this.old = { raw_body: raw_body, formatted_body: this.elements.body.innerHTML } for (p in this.fullsize) { this.old[p] = this.fullsize[p] } // Make the note translucent if (is_new) { this.elements.box.setOpacity(0.2) } else { this.elements.box.setOpacity(0.5) } if (is_new && raw_body == '') { this.bodyfit = true this.elements.body.style.height = "100px" } // Attach the event listeners this.elements.box.observe("mousedown", this.dragStart.bindAsEventListener(this)) this.elements.box.observe("mouseout", this.bodyHideTimer.bindAsEventListener(this)) this.elements.box.observe("mouseover", this.bodyShow.bindAsEventListener(this)) this.elements.corner.observe("mousedown", this.resizeStart.bindAsEventListener(this)) this.elements.body.observe("mouseover", this.bodyShow.bindAsEventListener(this)) this.elements.body.observe("mouseout", this.bodyHideTimer.bindAsEventListener(this)) this.elements.body.observe("click", this.showEditBox.bindAsEventListener(this)) this.adjustScale() }, // Returns the raw text value of this note textValue: function() { if (Note.debug) { console.debug("Note#textValue (id=%d)", this.id) } return this.old.raw_body.strip() }, // Removes the edit box hideEditBox: function(e) { if (Note.debug) { console.debug("Note#hideEditBox (id=%d)", this.id) } var editBox = $('edit-box') if (editBox != null) { var boxid = editBox.noteid $("edit-box").stopObserving() $("note-save-" + boxid).stopObserving() $("note-cancel-" + boxid).stopObserving() $("note-remove-" + boxid).stopObserving() $("note-history-" + boxid).stopObserving() $("edit-box").remove() } }, // Shows the edit box showEditBox: function(e) { if (Note.debug) { console.debug("Note#showEditBox (id=%d)", this.id) } this.hideEditBox(e) var insertionPosition = Note.getInsertionPosition() var top = insertionPosition[0] var left = insertionPosition[1] var html = "" html += '<div id="edit-box" style="top: '+top+'px; left: '+left+'px; position: absolute; visibility: visible; z-index: 100; background: white; border: 1px solid black; padding: 12px;">' html += '<form onsubmit="return false;" style="padding: 0; margin: 0;">' html += '<textarea rows="7" id="edit-box-text" style="width: 350px; margin: 2px 2px 12px 2px;">' + this.textValue() + '</textarea>' html += '<input type="submit" value="Save" name="save" id="note-save-' + this.id + '">' html += '<input type="submit" value="Cancel" name="cancel" id="note-cancel-' + this.id + '">' html += '<input type="submit" value="Remove" name="remove" id="note-remove-' + this.id + '">' html += '<input type="submit" value="History" name="history" id="note-history-' + this.id + '">' html += '</form>' html += '</div>' $("note-container").insert({bottom: html}) $('edit-box').noteid = this.id $("edit-box").observe("mousedown", this.editDragStart.bindAsEventListener(this)) $("note-save-" + this.id).observe("click", this.save.bindAsEventListener(this)) 
$("note-cancel-" + this.id).observe("click", this.cancel.bindAsEventListener(this)) $("note-remove-" + this.id).observe("click", this.remove.bindAsEventListener(this)) $("note-history-" + this.id).observe("click", this.history.bindAsEventListener(this)) $("edit-box-text").focus() }, // Shows the body text for the note bodyShow: function(e) { if (Note.debug) { console.debug("Note#bodyShow (id=%d)", this.id) } if (this.dragging) { return } if (this.hideTimer) { clearTimeout(this.hideTimer) this.hideTimer = null } if (Note.noteShowingBody == this) { return } if (Note.noteShowingBody) { Note.noteShowingBody.bodyHide() } Note.noteShowingBody = this if (Note.zindex >= 9) { /* don't use more than 10 layers (+1 for the body, which will always be above all notes) */ Note.zindex = 0 for (var i=0; i< Note.all.length; ++i) { Note.all[i].elements.box.style.zIndex = 0 } } this.elements.box.style.zIndex = ++Note.zindex this.elements.body.style.zIndex = 10 this.elements.body.style.top = 0 + "px" this.elements.body.style.left = 0 + "px" var dw = document.documentElement.scrollWidth this.elements.body.style.visibility = "hidden" this.elements.body.style.display = "block" if (!this.bodyfit) { this.elements.body.style.height = "auto" this.elements.body.style.minWidth = "140px" var w = null, h = null, lo = null, hi = null, x = null, last = null w = this.elements.body.offsetWidth h = this.elements.body.offsetHeight if (w/h < 1.6180339887) { /* for tall notes (lots of text), find more pleasant proportions */ lo = 140, hi = 400 do { last = w x = (lo+hi)/2 this.elements.body.style.minWidth = x + "px" w = this.elements.body.offsetWidth h = this.elements.body.offsetHeight if (w/h < 1.6180339887) lo = x else hi = x } while ((lo < hi) && (w > last)) } else if (this.elements.body.scrollWidth <= this.elements.body.clientWidth) { /* for short notes (often a single line), make the box no wider than necessary */ // scroll test necessary for Firefox lo = 20, hi = w do { x = (lo+hi)/2 this.elements.body.style.minWidth = x + "px" if (this.elements.body.offsetHeight > h) lo = x else hi = x } while ((hi - lo) > 4) if (this.elements.body.offsetHeight > h) this.elements.body.style.minWidth = hi + "px" } if (Prototype.Browser.IE) { // IE7 adds scrollbars if the box is too small, obscuring the text if (this.elements.body.offsetHeight < 35) { this.elements.body.style.minHeight = "35px" } if (this.elements.body.offsetWidth < 47) { this.elements.body.style.minWidth = "47px" } } this.bodyfit = true } this.elements.body.style.top = (this.elements.box.offsetTop + this.elements.box.clientHeight + 5) + "px" // keep the box within the document's width var l = 0, e = this.elements.box do { l += e.offsetLeft } while (e = e.offsetParent) l += this.elements.body.offsetWidth + 10 - dw if (l > 0) this.elements.body.style.left = this.elements.box.offsetLeft - l + "px" else this.elements.body.style.left = this.elements.box.offsetLeft + "px" this.elements.body.style.visibility = "visible" }, // Creates a timer that will hide the body text for the note bodyHideTimer: function(e) { if (Note.debug) { console.debug("Note#bodyHideTimer (id=%d)", this.id) } this.hideTimer = setTimeout(this.bodyHide.bindAsEventListener(this), 250) }, // Hides the body text for the note bodyHide: function(e) { if (Note.debug) { console.debug("Note#bodyHide (id=%d)", this.id) } this.elements.body.hide() if (Note.noteShowingBody == this) { Note.noteShowingBody = null } }, addDocumentObserver: function(name, func) { document.observe(name, func); 
this.document_observers.push([name, func]); }, clearDocumentObservers: function(name, handler) { for(var i = 0; i < this.document_observers.length; ++i) { var observer = this.document_observers[i]; document.stopObserving(observer[0], observer[1]); } this.document_observers = []; }, // Start dragging the note dragStart: function(e) { if (Note.debug) { console.debug("Note#dragStart (id=%d)", this.id) } this.addDocumentObserver("mousemove", this.drag.bindAsEventListener(this)) this.addDocumentObserver("mouseup", this.dragStop.bindAsEventListener(this)) this.addDocumentObserver("selectstart", function() {return false}) this.cursorStartX = e.pointerX() this.cursorStartY = e.pointerY() this.boxStartX = this.elements.box.offsetLeft this.boxStartY = this.elements.box.offsetTop this.boundsX = new ClipRange(5, this.elements.image.clientWidth - this.elements.box.clientWidth - 5) this.boundsY = new ClipRange(5, this.elements.image.clientHeight - this.elements.box.clientHeight - 5) this.dragging = true this.bodyHide() }, // Stop dragging the note dragStop: function(e) { if (Note.debug) { console.debug("Note#dragStop (id=%d)", this.id) } this.clearDocumentObservers() this.cursorStartX = null this.cursorStartY = null this.boxStartX = null this.boxStartY = null this.boundsX = null this.boundsY = null this.dragging = false this.bodyShow() }, ratio: function() { return this.elements.image.width / this.elements.image.getAttribute("large_width") // var ratio = this.elements.image.width / this.elements.image.getAttribute("large_width") // if (this.elements.image.scale_factor != null) // ratio *= this.elements.image.scale_factor; // return ratio }, // Scale the notes for when the image gets resized adjustScale: function() { if (Note.debug) { console.debug("Note#adjustScale (id=%d)", this.id) } var ratio = this.ratio() for (p in this.fullsize) { this.elements.box.style[p] = this.fullsize[p] * ratio + 'px' } }, // Update the note's position as it gets dragged drag: function(e) { var left = this.boxStartX + e.pointerX() - this.cursorStartX var top = this.boxStartY + e.pointerY() - this.cursorStartY left = this.boundsX.clip(left) top = this.boundsY.clip(top) this.elements.box.style.left = left + 'px' this.elements.box.style.top = top + 'px' var ratio = this.ratio() this.fullsize.left = left / ratio this.fullsize.top = top / ratio e.stop() }, // Start dragging the edit box editDragStart: function(e) { if (Note.debug) { console.debug("Note#editDragStart (id=%d)", this.id) } var node = e.element().nodeName if (node != 'FORM' && node != 'DIV') { return } this.addDocumentObserver("mousemove", this.editDrag.bindAsEventListener(this)) this.addDocumentObserver("mouseup", this.editDragStop.bindAsEventListener(this)) this.addDocumentObserver("selectstart", function() {return false}) this.elements.editBox = $('edit-box'); this.cursorStartX = e.pointerX() this.cursorStartY = e.pointerY() this.editStartX = this.elements.editBox.offsetLeft this.editStartY = this.elements.editBox.offsetTop this.dragging = true }, // Stop dragging the edit box editDragStop: function(e) { if (Note.debug) { console.debug("Note#editDragStop (id=%d)", this.id) } this.clearDocumentObservers() this.cursorStartX = null this.cursorStartY = null this.editStartX = null this.editStartY = null this.dragging = false }, // Update the edit box's position as it gets dragged editDrag: function(e) { var left = this.editStartX + e.pointerX() - this.cursorStartX var top = this.editStartY + e.pointerY() - this.cursorStartY this.elements.editBox.style.left = left + 
'px' this.elements.editBox.style.top = top + 'px' e.stop() }, // Start resizing the note resizeStart: function(e) { if (Note.debug) { console.debug("Note#resizeStart (id=%d)", this.id) } this.cursorStartX = e.pointerX() this.cursorStartY = e.pointerY() this.boxStartWidth = this.elements.box.clientWidth this.boxStartHeight = this.elements.box.clientHeight this.boxStartX = this.elements.box.offsetLeft this.boxStartY = this.elements.box.offsetTop this.boundsX = new ClipRange(10, this.elements.image.clientWidth - this.boxStartX - 5) this.boundsY = new ClipRange(10, this.elements.image.clientHeight - this.boxStartY - 5) this.dragging = true this.clearDocumentObservers() this.addDocumentObserver("mousemove", this.resize.bindAsEventListener(this)) this.addDocumentObserver("mouseup", this.resizeStop.bindAsEventListener(this)) e.stop() this.bodyHide() }, // Stop resizing teh note resizeStop: function(e) { if (Note.debug) { console.debug("Note#resizeStop (id=%d)", this.id) } this.clearDocumentObservers() this.boxCursorStartX = null this.boxCursorStartY = null this.boxStartWidth = null this.boxStartHeight = null this.boxStartX = null this.boxStartY = null this.boundsX = null this.boundsY = null this.dragging = false e.stop() }, // Update the note's dimensions as it gets resized resize: function(e) { var width = this.boxStartWidth + e.pointerX() - this.cursorStartX var height = this.boxStartHeight + e.pointerY() - this.cursorStartY width = this.boundsX.clip(width) height = this.boundsY.clip(height) this.elements.box.style.width = width + "px" this.elements.box.style.height = height + "px" var ratio = this.ratio() this.fullsize.width = width / ratio this.fullsize.height = height / ratio e.stop() }, // Save the note to the database save: function(e) { if (Note.debug) { console.debug("Note#save (id=%d)", this.id) } var note = this for (p in this.fullsize) { this.old[p] = this.fullsize[p] } this.old.raw_body = $('edit-box-text').value this.old.formatted_body = this.textValue() // FIXME: this is not quite how the note will look (filtered elems, <tn>...). 
the user won't input a <script> that only damages him, but it might be nice to "preview" the <tn> here this.elements.body.update(this.textValue()) this.hideEditBox(e) this.bodyHide() this.bodyfit = false var params = { "id": this.id, "note[x]": this.old.left, "note[y]": this.old.top, "note[width]": this.old.width, "note[height]": this.old.height, "note[body]": this.old.raw_body } if (this.is_new) { params["note[post_id]"] = Note.post_id } notice("Saving note...") new Ajax.Request('/note/update.json', { parameters: params, onComplete: function(resp) { var resp = resp.responseJSON if (resp.success) { notice("Note saved") var note = Note.find(resp.old_id) if (resp.old_id < 0) { note.is_new = false note.id = resp.new_id note.elements.box.id = 'note-box-' + note.id note.elements.body.id = 'note-body-' + note.id note.elements.corner.id = 'note-corner-' + note.id } note.elements.body.innerHTML = resp.formatted_body note.elements.box.setOpacity(0.5) note.elements.box.removeClassName('unsaved') } else { notice("Error: " + resp.reason) note.elements.box.addClassName('unsaved') } } }) e.stop() }, // Revert the note to the last saved state cancel: function(e) { if (Note.debug) { console.debug("Note#cancel (id=%d)", this.id) } this.hideEditBox(e) this.bodyHide() var ratio = this.ratio() for (p in this.fullsize) { this.fullsize[p] = this.old[p] this.elements.box.style[p] = this.fullsize[p] * ratio + 'px' } this.elements.body.innerHTML = this.old.formatted_body e.stop() }, // Remove all references to the note from the page removeCleanup: function() { if (Note.debug) { console.debug("Note#removeCleanup (id=%d)", this.id) } this.elements.box.remove() this.elements.body.remove() var allTemp = [] for (i=0; i<Note.all.length; ++i) { if (Note.all[i].id != this.id) { allTemp.push(Note.all[i]) } } Note.all = allTemp Note.updateNoteCount() }, // Removes a note from the database remove: function(e) { if (Note.debug) { console.debug("Note#remove (id=%d)", this.id) } this.hideEditBox(e) this.bodyHide() this_note = this if (this.is_new) { this.removeCleanup() notice("Note removed") } else { notice("Removing note...") new Ajax.Request('/note/update.json', { parameters: { "id": this.id, "note[is_active]": "0" }, onComplete: function(resp) { var resp = resp.responseJSON if (resp.success) { notice("Note removed") this_note.removeCleanup() } else { notice("Error: " + resp.reason) } } }) } e.stop() }, // Redirect to the note's history history: function(e) { if (Note.debug) { console.debug("Note#history (id=%d)", this.id) } this.hideEditBox(e) if (this.is_new) { notice("This note has no history") } else { location.href = '/history?search=notes:' + this.id } e.stop() } }) // The following are class methods and variables Object.extend(Note, { zindex: 0, counter: -1, all: [], display: true, debug: false, // Show all notes show: function() { if (Note.debug) { console.debug("Note.show") } $("note-container").show() }, // Hide all notes hide: function() { if (Note.debug) { console.debug("Note.hide") } $("note-container").hide() }, // Find a note instance based on the id number find: function(id) { if (Note.debug) { console.debug("Note.find") } for (var i=0; i<Note.all.size(); ++i) { if (Note.all[i].id == id) { return Note.all[i] } } return null }, // Toggle the display of all notes toggle: function() { if (Note.debug) { console.debug("Note.toggle") } if (Note.display) { Note.hide() Note.display = false } else { Note.show() Note.display = true } }, // Update the text displaying the number of notes a post has updateNoteCount: 
function() { if (Note.debug) { console.debug("Note.updateNoteCount") } if (Note.all.length > 0) { var label = "" if (Note.all.length == 1) label = "note" else label = "notes" $('note-count').innerHTML = "This post has <a href=\"/note/history?post_id=" + Note.post_id + "\">" + Note.all.length + " " + label + "</a>" } else { $('note-count').innerHTML = "" } }, // Create a new note create: function() { if (Note.debug) { console.debug("Note.create") } Note.show() var insertion_position = Note.getInsertionPosition() var top = insertion_position[0] var left = insertion_position[1] var html = '' html += '<div class="note-box unsaved" style="width: 150px; height: 150px; ' html += 'top: ' + top + 'px; ' html += 'left: ' + left + 'px;" ' html += 'id="note-box-' + Note.counter + '">' html += '<div class="note-corner" id="note-corner-' + Note.counter + '"></div>' html += '</div>' html += '<div class="note-body" title="Click to edit" id="note-body-' + Note.counter + '"></div>' $("note-container").insert({bottom: html}) var note = new Note(Note.counter, true, "") Note.all.push(note) Note.counter -= 1 }, // Find a suitable position to insert new notes getInsertionPosition: function() { if (Note.debug) { console.debug("Note.getInsertionPosition") } // We want to show the edit box somewhere on the screen, but not outside the image. var scroll_x = $("image").cumulativeScrollOffset()[0] var scroll_y = $("image").cumulativeScrollOffset()[1] var image_left = $("image").positionedOffset()[0] var image_top = $("image").positionedOffset()[1] var image_right = image_left + $("image").width var image_bottom = image_top + $("image").height var left = 0 var top = 0 if (scroll_x > image_left) { left = scroll_x } else { left = image_left } if (scroll_y > image_top) { top = scroll_y } else { top = image_top + 20 } if (top > image_bottom) { top = image_top + 20 } return [top, left] } })
rhaphazard/moebooru
lib/assets/javascripts/moe-legacy/notes.js
JavaScript
isc
21,067
// Copyright (c) 2021 The Decred developers // Use of this source code is governed by an ISC // license that can be found in the LICENSE file. package indexers import ( "context" "fmt" "sync" "sync/atomic" "github.com/decred/dcrd/blockchain/v4/internal/progresslog" "github.com/decred/dcrd/database/v3" "github.com/decred/dcrd/dcrutil/v4" ) // IndexNtfnType represents an index notification type. type IndexNtfnType int const ( // ConnectNtfn indicates the index notification signals a block // connected to the main chain. ConnectNtfn IndexNtfnType = iota // DisconnectNtfn indicates the index notification signals a block // disconnected from the main chain. DisconnectNtfn ) var ( // bufferSize represents the index notification buffer size. bufferSize = 128 // noPrereqs indicates no index prerequisites. noPrereqs = "none" ) // IndexNtfn represents an index notification detailing a block connection // or disconnection. type IndexNtfn struct { NtfnType IndexNtfnType Block *dcrutil.Block Parent *dcrutil.Block PrevScripts PrevScripter IsTreasuryEnabled bool Done chan bool } // IndexSubscription represents a subscription for index updates. type IndexSubscription struct { id string idx Indexer subscriber *IndexSubscriber mtx sync.Mutex // prerequisite defines the notification processing hierarchy for this // subscription. It is expected that the subscriber associated with the // prerequisite provided processes notifications before they are // delivered by this subscription to its subscriber. An empty string // indicates the subscription has no prerequisite. prerequisite string // dependent defines the index subscription that requires the subscriber // associated with this subscription to have processed incoming // notifications before it does. A nil dependency indicates the subscription // has no dependencies. dependent *IndexSubscription } // newIndexSubscription initializes a new index subscription. func newIndexSubscription(subber *IndexSubscriber, indexer Indexer, prereq string) *IndexSubscription { return &IndexSubscription{ id: indexer.Name(), idx: indexer, prerequisite: prereq, subscriber: subber, } } // stop prevents any future index updates from being delivered and // unsubscribes the associated subscription. func (s *IndexSubscription) stop() error { // If the subscription has a prerequisite, find it and remove the // subscription as a dependency. if s.prerequisite != noPrereqs { s.mtx.Lock() prereq, ok := s.subscriber.subscriptions[s.prerequisite] s.mtx.Unlock() if !ok { return fmt.Errorf("no subscription found with id %s", s.prerequisite) } prereq.mtx.Lock() prereq.dependent = nil prereq.mtx.Unlock() return nil } // If the subscription has a dependent, stop it as well. if s.dependent != nil { err := s.dependent.stop() if err != nil { return err } } // If the subscription is independent, remove it from the // index subscriber's subscriptions. s.mtx.Lock() delete(s.subscriber.subscriptions, s.id) s.mtx.Unlock() return nil } // IndexSubscriber subscribes clients for index updates. type IndexSubscriber struct { subscribers uint32 // update atomically. c chan IndexNtfn subscriptions map[string]*IndexSubscription mtx sync.Mutex ctx context.Context cancel context.CancelFunc quit chan struct{} } // NewIndexSubscriber creates a new index subscriber. It also starts the // handler for incoming index update subscriptions. 
func NewIndexSubscriber(sCtx context.Context) *IndexSubscriber { ctx, cancel := context.WithCancel(sCtx) s := &IndexSubscriber{ c: make(chan IndexNtfn, bufferSize), subscriptions: make(map[string]*IndexSubscription), ctx: ctx, cancel: cancel, quit: make(chan struct{}), } return s } // Subscribe subscribes an index for updates. The returned index subscription // has functions to retrieve a channel that produces a stream of index updates // and to stop the stream when the caller no longer wishes to receive updates. func (s *IndexSubscriber) Subscribe(index Indexer, prerequisite string) (*IndexSubscription, error) { sub := newIndexSubscription(s, index, prerequisite) // If the subscription has a prequisite, find it and set the subscription // as a dependency. if prerequisite != noPrereqs { s.mtx.Lock() prereq, ok := s.subscriptions[prerequisite] s.mtx.Unlock() if !ok { return nil, fmt.Errorf("no subscription found with id %s", prerequisite) } prereq.mtx.Lock() defer prereq.mtx.Unlock() if prereq.dependent != nil { return nil, fmt.Errorf("%s already has a dependent set: %s", prereq.id, prereq.dependent.id) } prereq.dependent = sub atomic.AddUint32(&s.subscribers, 1) return sub, nil } // If the subscription does not have a prerequisite, add it to the index // subscriber's subscriptions. s.mtx.Lock() s.subscriptions[sub.id] = sub s.mtx.Unlock() atomic.AddUint32(&s.subscribers, 1) return sub, nil } // Notify relays an index notification to subscribed indexes for processing. func (s *IndexSubscriber) Notify(ntfn *IndexNtfn) { subscribers := atomic.LoadUint32(&s.subscribers) // Only relay notifications when there are subscribed indexes // to be notified. if subscribers > 0 { select { case <-s.quit: case s.c <- *ntfn: } } } // findLowestIndexTipHeight determines the lowest index tip height among // subscribed indexes and their dependencies. func (s *IndexSubscriber) findLowestIndexTipHeight(queryer ChainQueryer) (int64, int64, error) { // Find the lowest tip height to catch up among subscribed indexes. bestHeight, _ := queryer.Best() lowestHeight := bestHeight for _, sub := range s.subscriptions { tipHeight, tipHash, err := sub.idx.Tip() if err != nil { return 0, bestHeight, err } // Ensure the index tip is on the main chain. if !queryer.MainChainHasBlock(tipHash) { return 0, bestHeight, fmt.Errorf("%s: index tip (%s) is not on the "+ "main chain", sub.idx.Name(), tipHash) } if tipHeight < lowestHeight { lowestHeight = tipHeight } // Update the lowest tip height if a dependent has a lower tip height. dependent := sub.dependent for dependent != nil { tipHeight, _, err := sub.dependent.idx.Tip() if err != nil { return 0, bestHeight, err } if tipHeight < lowestHeight { lowestHeight = tipHeight } dependent = dependent.dependent } } return lowestHeight, bestHeight, nil } // CatchUp syncs all subscribed indexes to the the main chain by connecting // blocks from after the lowest index tip to the current main chain tip. // // This should be called after all indexes have subscribed for updates. func (s *IndexSubscriber) CatchUp(ctx context.Context, db database.DB, queryer ChainQueryer) error { lowestHeight, bestHeight, err := s.findLowestIndexTipHeight(queryer) if err != nil { return err } // Nothing to do if all indexes are synced. if bestHeight == lowestHeight { return nil } // Create a progress logger for the indexing process below. 
progressLogger := progresslog.NewBlockProgressLogger("Indexed", log) // tip and need to be caught up, so log the details and loop through // each block that needs to be indexed. log.Infof("Catching up from height %d to %d", lowestHeight, bestHeight) var cachedParent *dcrutil.Block for height := lowestHeight + 1; height <= bestHeight; height++ { if interruptRequested(ctx) { return indexerError(ErrInterruptRequested, interruptMsg) } hash, err := queryer.BlockHashByHeight(height) if err != nil { return err } // Ensure the next tip hash is on the main chain. if !queryer.MainChainHasBlock(hash) { msg := fmt.Sprintf("the next block being synced to (%s) "+ "at height %d is not on the main chain", hash, height) return indexerError(ErrBlockNotOnMainChain, msg) } var parent *dcrutil.Block if cachedParent == nil && height > 0 { parentHash, err := queryer.BlockHashByHeight(height - 1) if err != nil { return err } parent, err = queryer.BlockByHash(parentHash) if err != nil { return err } } else { parent = cachedParent } child, err := queryer.BlockByHash(hash) if err != nil { return err } // Construct and send the index notification. var prevScripts PrevScripter err = db.View(func(dbTx database.Tx) error { if interruptRequested(ctx) { return indexerError(ErrInterruptRequested, interruptMsg) } prevScripts, err = queryer.PrevScripts(dbTx, child) if err != nil { return err } return nil }) if err != nil { return err } isTreasuryEnabled, err := queryer.IsTreasuryAgendaActive(parent.Hash()) if err != nil { return err } ntfn := &IndexNtfn{ NtfnType: ConnectNtfn, Block: child, Parent: parent, PrevScripts: prevScripts, IsTreasuryEnabled: isTreasuryEnabled, } // Relay the index update to subscribed indexes. for _, sub := range s.subscriptions { err := updateIndex(ctx, sub.idx, ntfn) if err != nil { s.cancel() return err } } cachedParent = child progressLogger.LogBlockHeight(child.MsgBlock(), parent.MsgBlock()) } log.Infof("Caught up to height %d", bestHeight) return nil } // Run relays index notifications to subscribed indexes. // // This should be run as a goroutine. func (s *IndexSubscriber) Run(ctx context.Context) { for { select { case ntfn := <-s.c: // Relay the index update to subscribed indexes. for _, sub := range s.subscriptions { err := updateIndex(ctx, sub.idx, &ntfn) if err != nil { log.Error(err) s.cancel() break } } if ntfn.Done != nil { close(ntfn.Done) } case <-ctx.Done(): log.Infof("Index subscriber shutting down") close(s.quit) // Stop all updates to subscribed indexes and terminate their // processes. for _, sub := range s.subscriptions { err := sub.stop() if err != nil { log.Error("unable to stop index subscription: %v", err) } } s.cancel() return } } }
decred/dcrd
blockchain/indexers/indexsubscriber.go
GO
isc
10,267
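A minimal sketch of how the subscriber above might be wired together, assuming it lives in the same package; myIndex, db and chain are placeholder values standing in for an Indexer implementation, an open database.DB and a ChainQueryer, none of which are shown here.

func exampleIndexSetup(ctx context.Context, myIndex Indexer, db database.DB, chain ChainQueryer) error {
	// Create the subscriber and register an index with no prerequisite.
	subber := NewIndexSubscriber(ctx)
	if _, err := subber.Subscribe(myIndex, noPrereqs); err != nil {
		return err
	}

	// Sync all subscribed indexes to the current main chain tip, then start
	// relaying notifications delivered via Notify.
	if err := subber.CatchUp(ctx, db, chain); err != nil {
		return err
	}
	go subber.Run(ctx)

	// The chain would then publish updates, e.g.:
	//   subber.Notify(&IndexNtfn{NtfnType: ConnectNtfn, Block: block, Parent: parent, ...})
	return nil
}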
// Copyright (c) 2013-2015 The btcsuite developers // Copyright (c) 2015 The Decred developers // Use of this source code is governed by an ISC // license that can be found in the LICENSE file. package wire_test import ( "bytes" "io" "reflect" "testing" "time" "github.com/davecgh/go-spew/spew" "github.com/decred/dcrd/chaincfg/chainhash" "github.com/decred/dcrd/wire" "github.com/decred/dcrutil" ) // TestBlock tests the MsgBlock API. func TestBlock(t *testing.T) { pver := wire.ProtocolVersion // Test block header. bh := wire.NewBlockHeader( int32(pver), // Version &testBlock.Header.PrevBlock, // PrevHash &testBlock.Header.MerkleRoot, // MerkleRoot &testBlock.Header.StakeRoot, // StakeRoot uint16(0x0000), // VoteBits [6]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, // FinalState uint16(0x0000), // Voters uint8(0x00), // FreshStake uint8(0x00), // Revocations uint32(0), // Poolsize testBlock.Header.Bits, // Bits int64(0x0000000000000000), // Sbits uint32(1), // Height uint32(1), // Size testBlock.Header.Nonce, // Nonce [36]byte{}, // ExtraData ) // Ensure the command is expected value. wantCmd := "block" msg := wire.NewMsgBlock(bh) if cmd := msg.Command(); cmd != wantCmd { t.Errorf("NewMsgBlock: wrong command - got %v want %v", cmd, wantCmd) } // Ensure max payload is expected value for latest protocol version. // Num addresses (varInt) + max allowed addresses. wantPayload := uint32(1000000) maxPayload := msg.MaxPayloadLength(pver) if maxPayload != wantPayload { t.Errorf("MaxPayloadLength: wrong max payload length for "+ "protocol version %d - got %v, want %v", pver, maxPayload, wantPayload) } // Ensure we get the same block header data back out. if !reflect.DeepEqual(&msg.Header, bh) { t.Errorf("NewMsgBlock: wrong block header - got %v, want %v", spew.Sdump(&msg.Header), spew.Sdump(bh)) } // Ensure transactions are added properly. tx := testBlock.Transactions[0].Copy() msg.AddTransaction(tx) if !reflect.DeepEqual(msg.Transactions, testBlock.Transactions) { t.Errorf("AddTransaction: wrong transactions - got %v, want %v", spew.Sdump(msg.Transactions), spew.Sdump(testBlock.Transactions)) } // Ensure transactions are properly cleared. msg.ClearTransactions() if len(msg.Transactions) != 0 { t.Errorf("ClearTransactions: wrong transactions - got %v, want %v", len(msg.Transactions), 0) } // Ensure stake transactions are added properly. stx := testBlock.STransactions[0].Copy() msg.AddSTransaction(stx) if !reflect.DeepEqual(msg.STransactions, testBlock.STransactions) { t.Errorf("AddSTransaction: wrong transactions - got %v, want %v", spew.Sdump(msg.STransactions), spew.Sdump(testBlock.STransactions)) } // Ensure transactions are properly cleared. msg.ClearSTransactions() if len(msg.STransactions) != 0 { t.Errorf("ClearTransactions: wrong transactions - got %v, want %v", len(msg.STransactions), 0) } return } // TestBlockTxShas tests the ability to generate a slice of all transaction // hashes from a block accurately. func TestBlockTxShas(t *testing.T) { // Block 1, transaction 1 hash. hashStr := "55a25248c04dd8b6599ca2a708413c00d79ae90ce075c54e8a967a647d7e4bea" wantHash, err := chainhash.NewHashFromStr(hashStr) if err != nil { t.Errorf("NewShaHashFromStr: %v", err) return } wantShas := []chainhash.Hash{*wantHash} shas := testBlock.TxShas() if !reflect.DeepEqual(shas, wantShas) { t.Errorf("TxShas: wrong transaction hashes - got %v, want %v", spew.Sdump(shas), spew.Sdump(wantShas)) } } // TestBlockSTxShas tests the ability to generate a slice of all stake transaction // hashes from a block accurately. 
func TestBlockSTxShas(t *testing.T) { // Block 1, transaction 1 hash. hashStr := "ae208a69f3ee088d0328126e3d9bef7652b108d1904f27b166c5999233a801d4" wantHash, err := chainhash.NewHashFromStr(hashStr) if err != nil { t.Errorf("NewShaHashFromStr: %v", err) return } wantShas := []chainhash.Hash{*wantHash} shas := testBlock.STxShas() if !reflect.DeepEqual(shas, wantShas) { t.Errorf("STxShas: wrong transaction hashes - got %v, want %v", spew.Sdump(shas), spew.Sdump(wantShas)) } } // TestBlockSha tests the ability to generate the hash of a block accurately. func TestBlockSha(t *testing.T) { // Block 1 hash. hashStr := "152437dada95368c42b19febc1702939fa9c1ccdb6fd7284e5b7a19d8fe6df7a" wantHash, err := chainhash.NewHashFromStr(hashStr) if err != nil { t.Errorf("NewShaHashFromStr: %v", err) } // Ensure the hash produced is expected. blockHash := testBlock.BlockSha() if !blockHash.IsEqual(wantHash) { t.Errorf("BlockSha: wrong hash - got %v, want %v", spew.Sprint(blockHash), spew.Sprint(wantHash)) } } // TestBlockWire tests the MsgBlock wire encode and decode for various numbers // of transaction inputs and outputs and protocol versions. func TestBlockWire(t *testing.T) { tests := []struct { in *wire.MsgBlock // Message to encode out *wire.MsgBlock // Expected decoded message buf []byte // Wire encoding txLocs []wire.TxLoc // Expected transaction locations sTxLocs []wire.TxLoc // Expected stake transaction locations pver uint32 // Protocol version for wire encoding }{ // Latest protocol version. { &testBlock, &testBlock, testBlockBytes, testBlockTxLocs, testBlockSTxLocs, wire.ProtocolVersion, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { // Encode the message to wire format. var buf bytes.Buffer err := test.in.BtcEncode(&buf, test.pver) if err != nil { t.Errorf("BtcEncode #%d error %v", i, err) continue } if !bytes.Equal(buf.Bytes(), test.buf) { t.Errorf("BtcEncode #%d\n got: %s want: %s", i, spew.Sdump(buf.Bytes()), spew.Sdump(test.buf)) continue } // Decode the message from wire format. var msg wire.MsgBlock rbuf := bytes.NewReader(test.buf) err = msg.BtcDecode(rbuf, test.pver) if err != nil { t.Errorf("BtcDecode #%d error %v", i, err) continue } if !reflect.DeepEqual(&msg, test.out) { t.Errorf("BtcDecode #%d\n got: %s want: %s", i, spew.Sdump(&msg), spew.Sdump(test.out)) continue } } } // TestBlockWireErrors performs negative tests against wire encode and decode // of MsgBlock to confirm error paths work correctly. func TestBlockWireErrors(t *testing.T) { // Use protocol version 60002 specifically here instead of the latest // because the test data is using bytes encoded with that protocol // version. pver := uint32(60002) tests := []struct { in *wire.MsgBlock // Value to encode buf []byte // Wire encoding pver uint32 // Protocol version for wire encoding max int // Max size of fixed buffer to induce errors writeErr error // Expected write error readErr error // Expected read error }{ // Force error in version. {&testBlock, testBlockBytes, pver, 0, io.ErrShortWrite, io.EOF}, // 0 // Force error in prev block hash. {&testBlock, testBlockBytes, pver, 4, io.ErrShortWrite, io.EOF}, // 1 // Force error in merkle root. {&testBlock, testBlockBytes, pver, 36, io.ErrShortWrite, io.EOF}, // 2 // Force error in stake root. {&testBlock, testBlockBytes, pver, 68, io.ErrShortWrite, io.EOF}, // 3 // Force error in vote bits. {&testBlock, testBlockBytes, pver, 100, io.ErrShortWrite, io.EOF}, // 4 // Force error in finalState. 
{&testBlock, testBlockBytes, pver, 102, io.ErrShortWrite, io.EOF}, // 5 // Force error in voters. {&testBlock, testBlockBytes, pver, 108, io.ErrShortWrite, io.EOF}, // 6 // Force error in freshstake. {&testBlock, testBlockBytes, pver, 110, io.ErrShortWrite, io.EOF}, // 7 // Force error in revocations. {&testBlock, testBlockBytes, pver, 111, io.ErrShortWrite, io.EOF}, // 8 // Force error in poolsize. {&testBlock, testBlockBytes, pver, 112, io.ErrShortWrite, io.EOF}, // 9 // Force error in difficulty bits. {&testBlock, testBlockBytes, pver, 116, io.ErrShortWrite, io.EOF}, // 10 // Force error in stake difficulty bits. {&testBlock, testBlockBytes, pver, 120, io.ErrShortWrite, io.EOF}, // 11 // Force error in height. {&testBlock, testBlockBytes, pver, 128, io.ErrShortWrite, io.EOF}, // 12 // Force error in size. {&testBlock, testBlockBytes, pver, 132, io.ErrShortWrite, io.EOF}, // 13 // Force error in timestamp. {&testBlock, testBlockBytes, pver, 136, io.ErrShortWrite, io.EOF}, // 14 // Force error in nonce. {&testBlock, testBlockBytes, pver, 140, io.ErrShortWrite, io.EOF}, // 15 // Force error in tx count. {&testBlock, testBlockBytes, pver, 180, io.ErrShortWrite, io.EOF}, // 16 // Force error in tx. {&testBlock, testBlockBytes, pver, 181, io.ErrShortWrite, io.EOF}, // 17 } t.Logf("Running %d tests", len(tests)) for i, test := range tests { // Encode to wire format. w := newFixedWriter(test.max) err := test.in.BtcEncode(w, test.pver) if err != test.writeErr { t.Errorf("BtcEncode #%d wrong error got: %v, want: %v", i, err, test.writeErr) continue } // Decode from wire format. var msg wire.MsgBlock r := newFixedReader(test.max, test.buf) err = msg.BtcDecode(r, test.pver) if err != test.readErr { t.Errorf("BtcDecode #%d wrong error got: %v, want: %v", i, err, test.readErr) continue } } } // TestBlockSerialize tests MsgBlock serialize and deserialize. func TestBlockSerialize(t *testing.T) { tests := []struct { in *wire.MsgBlock // Message to encode out *wire.MsgBlock // Expected decoded message buf []byte // Serialized data txLocs []wire.TxLoc // Expected transaction locations sTxLocs []wire.TxLoc // Expected stake transaction locations }{ { &testBlock, &testBlock, testBlockBytes, testBlockTxLocs, testBlockSTxLocs, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { // Serialize the block. var buf bytes.Buffer err := test.in.Serialize(&buf) if err != nil { t.Errorf("Serialize #%d error %v", i, err) continue } if !bytes.Equal(buf.Bytes(), test.buf) { t.Errorf("Serialize #%d\n got: %s want: %s", i, spew.Sdump(buf.Bytes()), spew.Sdump(test.buf)) continue } // Deserialize the block. var block wire.MsgBlock rbuf := bytes.NewReader(test.buf) err = block.Deserialize(rbuf) if err != nil { t.Errorf("Deserialize #%d error %v", i, err) continue } if !reflect.DeepEqual(&block, test.out) { t.Errorf("Deserialize #%d\n got: %s want: %s", i, spew.Sdump(&block), spew.Sdump(test.out)) continue } // Deserialize the block while gathering transaction location // information. 
var txLocBlock wire.MsgBlock br := bytes.NewBuffer(test.buf) txLocs, sTxLocs, err := txLocBlock.DeserializeTxLoc(br) if err != nil { t.Errorf("DeserializeTxLoc #%d error %v", i, err) continue } if !reflect.DeepEqual(&txLocBlock, test.out) { t.Errorf("DeserializeTxLoc #%d\n got: %s want: %s", i, spew.Sdump(&txLocBlock), spew.Sdump(test.out)) continue } if !reflect.DeepEqual(txLocs, test.txLocs) { t.Errorf("DeserializeTxLoc #%d\n got: %s want: %s", i, spew.Sdump(txLocs), spew.Sdump(test.txLocs)) continue } if !reflect.DeepEqual(sTxLocs, test.sTxLocs) { t.Errorf("DeserializeTxLoc, sTxLocs #%d\n got: %s want: %s", i, spew.Sdump(sTxLocs), spew.Sdump(test.sTxLocs)) continue } } } // TestBlockSerializeErrors performs negative tests against wire encode and // decode of MsgBlock to confirm error paths work correctly. func TestBlockSerializeErrors(t *testing.T) { tests := []struct { in *wire.MsgBlock // Value to encode buf []byte // Serialized data max int // Max size of fixed buffer to induce errors writeErr error // Expected write error readErr error // Expected read error }{ {&testBlock, testBlockBytes, 0, io.ErrShortWrite, io.EOF}, // 0 // Force error in prev block hash. {&testBlock, testBlockBytes, 4, io.ErrShortWrite, io.EOF}, // 1 // Force error in merkle root. {&testBlock, testBlockBytes, 36, io.ErrShortWrite, io.EOF}, // 2 // Force error in stake root. {&testBlock, testBlockBytes, 68, io.ErrShortWrite, io.EOF}, // 3 // Force error in vote bits. {&testBlock, testBlockBytes, 100, io.ErrShortWrite, io.EOF}, // 4 // Force error in finalState. {&testBlock, testBlockBytes, 102, io.ErrShortWrite, io.EOF}, // 5 // Force error in voters. {&testBlock, testBlockBytes, 108, io.ErrShortWrite, io.EOF}, // 8 // Force error in freshstake. {&testBlock, testBlockBytes, 110, io.ErrShortWrite, io.EOF}, // 9 // Force error in revocations. {&testBlock, testBlockBytes, 111, io.ErrShortWrite, io.EOF}, // 10 // Force error in poolsize. {&testBlock, testBlockBytes, 112, io.ErrShortWrite, io.EOF}, // 11 // Force error in difficulty bits. {&testBlock, testBlockBytes, 116, io.ErrShortWrite, io.EOF}, // 12 // Force error in stake difficulty bits. {&testBlock, testBlockBytes, 120, io.ErrShortWrite, io.EOF}, // 13 // Force error in height. {&testBlock, testBlockBytes, 128, io.ErrShortWrite, io.EOF}, // 14 // Force error in size. {&testBlock, testBlockBytes, 132, io.ErrShortWrite, io.EOF}, // 15 // Force error in timestamp. {&testBlock, testBlockBytes, 136, io.ErrShortWrite, io.EOF}, // 16 // Force error in nonce. {&testBlock, testBlockBytes, 140, io.ErrShortWrite, io.EOF}, // 17 // Force error in tx count. {&testBlock, testBlockBytes, 180, io.ErrShortWrite, io.EOF}, // 18 // Force error in tx. {&testBlock, testBlockBytes, 181, io.ErrShortWrite, io.EOF}, // 19 } t.Logf("Running %d tests", len(tests)) for i, test := range tests { // Serialize the block. w := newFixedWriter(test.max) err := test.in.Serialize(w) if err != test.writeErr { t.Errorf("Serialize #%d wrong error got: %v, want: %v", i, err, test.writeErr) continue } // Deserialize the block. 
var block wire.MsgBlock r := newFixedReader(test.max, test.buf) err = block.Deserialize(r) if err != test.readErr { t.Errorf("Deserialize #%d wrong error got: %v, want: %v", i, err, test.readErr) continue } var txLocBlock wire.MsgBlock br := bytes.NewBuffer(test.buf[0:test.max]) _, _, err = txLocBlock.DeserializeTxLoc(br) if err != test.readErr { t.Errorf("DeserializeTxLoc #%d wrong error got: %v, want: %v", i, err, test.readErr) continue } } } // TestBlockOverflowErrors performs tests to ensure deserializing blocks which // are intentionally crafted to use large values for the number of transactions // are handled properly. This could otherwise potentially be used as an attack // vector. func TestBlockOverflowErrors(t *testing.T) { // Use protocol version 70001 specifically here instead of the latest // protocol version because the test data is using bytes encoded with // that version. pver := uint32(1) tests := []struct { buf []byte // Wire encoding pver uint32 // Protocol version for wire encoding err error // Expected error }{ // Block that claims to have ~uint64(0) transactions. { []byte{ 0x01, 0x00, 0x00, 0x00, // Version 1 0x6f, 0xe2, 0x8c, 0x0a, 0xb6, 0xf1, 0xb3, 0x72, 0xc1, 0xa6, 0xa2, 0x46, 0xae, 0x63, 0xf7, 0x4f, 0x93, 0x1e, 0x83, 0x65, 0xe1, 0x5a, 0x08, 0x9c, 0x68, 0xd6, 0x19, 0x00, 0x00, 0x00, 0x00, 0x00, // PrevBlock 0x98, 0x20, 0x51, 0xfd, 0x1e, 0x4b, 0xa7, 0x44, 0xbb, 0xbe, 0x68, 0x0e, 0x1f, 0xee, 0x14, 0x67, 0x7b, 0xa1, 0xa3, 0xc3, 0x54, 0x0b, 0xf7, 0xb1, 0xcd, 0xb6, 0x06, 0xe8, 0x57, 0x23, 0x3e, 0x0e, // MerkleRoot 0x98, 0x20, 0x51, 0xfd, 0x1e, 0x4b, 0xa7, 0x44, 0xbb, 0xbe, 0x68, 0x0e, 0x1f, 0xee, 0x14, 0x67, 0x7b, 0xa1, 0xa3, 0xc3, 0x54, 0x0b, 0xf7, 0xb1, 0xcd, 0xb6, 0x06, 0xe8, 0x57, 0x23, 0x3e, 0x0e, // StakeRoot 0x00, 0x00, // VoteBits 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // FinalState 0x00, 0x00, // Voters 0x00, // FreshStake 0x00, // Revocations 0x00, 0x00, 0x00, 0x00, // Poolsize 0xff, 0xff, 0x00, 0x1d, // Bits 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // SBits 0x01, 0x00, 0x00, 0x00, // Height 0x01, 0x00, 0x00, 0x00, // Size 0x61, 0xbc, 0x66, 0x49, // Timestamp 0x01, 0xe3, 0x62, 0x99, // Nonce 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // ExtraData 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, // TxnCount }, pver, &wire.MessageError{}, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { // Decode from wire format. var msg wire.MsgBlock r := bytes.NewReader(test.buf) err := msg.BtcDecode(r, test.pver) if reflect.TypeOf(err) != reflect.TypeOf(test.err) { t.Errorf("BtcDecode #%d wrong error got: %v, want: %v", i, err, reflect.TypeOf(test.err)) continue } // Deserialize from wire format. r = bytes.NewReader(test.buf) err = msg.Deserialize(r) if reflect.TypeOf(err) != reflect.TypeOf(test.err) { t.Errorf("Deserialize #%d wrong error got: %v, want: %v", i, err, reflect.TypeOf(test.err)) continue } // Deserialize with transaction location info from wire format. br := bytes.NewBuffer(test.buf) _, _, err = msg.DeserializeTxLoc(br) if reflect.TypeOf(err) != reflect.TypeOf(test.err) { t.Errorf("DeserializeTxLoc #%d wrong error got: %v, "+ "want: %v", i, err, reflect.TypeOf(test.err)) continue } } } // TestBlockSerializeSize performs tests to ensure the serialize size for // various blocks is accurate. 
func TestBlockSerializeSize(t *testing.T) { // Block with no transactions. noTxBlock := wire.NewMsgBlock(&testBlock.Header) tests := []struct { in *wire.MsgBlock // Block to encode size int // Expected serialized size }{ // Block with no transactions (header + 2x numtx) {noTxBlock, 182}, // First block in the mainnet block chain. {&testBlock, len(testBlockBytes)}, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { serializedSize := test.in.SerializeSize() if serializedSize != test.size { t.Errorf("MsgBlock.SerializeSize: #%d got: %d, want: "+ "%d", i, serializedSize, test.size) continue } } } // testBlock is a basic normative block that is used throughout tests. var testBlock = wire.MsgBlock{ Header: wire.BlockHeader{ Version: 1, PrevBlock: chainhash.Hash([chainhash.HashSize]byte{ // Make go vet happy. 0x6f, 0xe2, 0x8c, 0x0a, 0xb6, 0xf1, 0xb3, 0x72, 0xc1, 0xa6, 0xa2, 0x46, 0xae, 0x63, 0xf7, 0x4f, 0x93, 0x1e, 0x83, 0x65, 0xe1, 0x5a, 0x08, 0x9c, 0x68, 0xd6, 0x19, 0x00, 0x00, 0x00, 0x00, 0x00, }), MerkleRoot: chainhash.Hash([chainhash.HashSize]byte{ // Make go vet happy. 0x98, 0x20, 0x51, 0xfd, 0x1e, 0x4b, 0xa7, 0x44, 0xbb, 0xbe, 0x68, 0x0e, 0x1f, 0xee, 0x14, 0x67, 0x7b, 0xa1, 0xa3, 0xc3, 0x54, 0x0b, 0xf7, 0xb1, 0xcd, 0xb6, 0x06, 0xe8, 0x57, 0x23, 0x3e, 0x0e, }), StakeRoot: chainhash.Hash([chainhash.HashSize]byte{ // Make go vet happy. 0x98, 0x20, 0x51, 0xfd, 0x1e, 0x4b, 0xa7, 0x44, 0xbb, 0xbe, 0x68, 0x0e, 0x1f, 0xee, 0x14, 0x67, 0x7b, 0xa1, 0xa3, 0xc3, 0x54, 0x0b, 0xf7, 0xb1, 0xcd, 0xb6, 0x06, 0xe8, 0x57, 0x23, 0x3e, 0x0e, }), VoteBits: uint16(0x0000), FinalState: [6]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, Voters: uint16(0x0000), FreshStake: uint8(0x00), Revocations: uint8(0x00), PoolSize: uint32(0x00000000), // Poolsize Bits: 0x1d00ffff, // 486604799 SBits: int64(0x0000000000000000), Height: uint32(1), Size: uint32(1), Timestamp: time.Unix(0x4966bc61, 0), // 2009-01-08 20:54:25 -0600 CST Nonce: 0x9962e301, // 2573394689 ExtraData: [36]byte{}, }, Transactions: []*wire.MsgTx{ { Version: 1, TxIn: []*wire.TxIn{ { PreviousOutPoint: wire.OutPoint{ Hash: chainhash.Hash{}, Index: 0xffffffff, Tree: dcrutil.TxTreeRegular, }, Sequence: 0xffffffff, ValueIn: 0x1616161616161616, BlockHeight: 0x17171717, BlockIndex: 0x18181818, SignatureScript: []byte{ 0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0xf2, }, }, }, TxOut: []*wire.TxOut{ { Value: 0x3333333333333333, Version: 0x9898, PkScript: []byte{ 0x41, // OP_DATA_65 0x04, 0x96, 0xb5, 0x38, 0xe8, 0x53, 0x51, 0x9c, 0x72, 0x6a, 0x2c, 0x91, 0xe6, 0x1e, 0xc1, 0x16, 0x00, 0xae, 0x13, 0x90, 0x81, 0x3a, 0x62, 0x7c, 0x66, 0xfb, 0x8b, 0xe7, 0x94, 0x7b, 0xe6, 0x3c, 0x52, 0xda, 0x75, 0x89, 0x37, 0x95, 0x15, 0xd4, 0xe0, 0xa6, 0x04, 0xf8, 0x14, 0x17, 0x81, 0xe6, 0x22, 0x94, 0x72, 0x11, 0x66, 0xbf, 0x62, 0x1e, 0x73, 0xa8, 0x2c, 0xbf, 0x23, 0x42, 0xc8, 0x58, 0xee, // 65-byte signature 0xac, // OP_CHECKSIG }, }, }, LockTime: 0x11111111, Expiry: 0x22222222, }, }, STransactions: []*wire.MsgTx{ { Version: 1, TxIn: []*wire.TxIn{ { PreviousOutPoint: wire.OutPoint{ Hash: chainhash.Hash{}, Index: 0xffffffff, Tree: dcrutil.TxTreeStake, }, Sequence: 0xffffffff, ValueIn: 0x1313131313131313, BlockHeight: 0x14141414, BlockIndex: 0x15151515, SignatureScript: []byte{ 0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0xf2, }, }, }, TxOut: []*wire.TxOut{ { Value: 0x3333333333333333, Version: 0x1212, PkScript: []byte{ 0x41, // OP_DATA_65 0x04, 0x96, 0xb5, 0x38, 0xe8, 0x53, 0x51, 0x9c, 0x72, 0x6a, 0x2c, 0x91, 0xe6, 0x1e, 0xc1, 0x16, 0x00, 0xae, 0x13, 0x90, 0x81, 0x3a, 0x62, 0x7c, 0x66, 0xfb, 
0x8b, 0xe7, 0x94, 0x7b, 0xe6, 0x3c, 0x52, 0xda, 0x75, 0x89, 0x37, 0x95, 0x15, 0xd4, 0xe0, 0xa6, 0x04, 0xf8, 0x14, 0x17, 0x81, 0xe6, 0x22, 0x94, 0x72, 0x11, 0x66, 0xbf, 0x62, 0x1e, 0x73, 0xa8, 0x2c, 0xbf, 0x23, 0x42, 0xc8, 0x58, 0xee, // 65-byte signature 0xac, // OP_CHECKSIG }, }, }, LockTime: 0x11111111, Expiry: 0x22222222, }, }, } // testBlockBytes is the serialized bytes for the above test block (testBlock). var testBlockBytes = []byte{ // Begin block header 0x01, 0x00, 0x00, 0x00, // Version 1 [0] 0x6f, 0xe2, 0x8c, 0x0a, 0xb6, 0xf1, 0xb3, 0x72, 0xc1, 0xa6, 0xa2, 0x46, 0xae, 0x63, 0xf7, 0x4f, 0x93, 0x1e, 0x83, 0x65, 0xe1, 0x5a, 0x08, 0x9c, 0x68, 0xd6, 0x19, 0x00, 0x00, 0x00, 0x00, 0x00, // PrevBlock [4] 0x98, 0x20, 0x51, 0xfd, 0x1e, 0x4b, 0xa7, 0x44, 0xbb, 0xbe, 0x68, 0x0e, 0x1f, 0xee, 0x14, 0x67, 0x7b, 0xa1, 0xa3, 0xc3, 0x54, 0x0b, 0xf7, 0xb1, 0xcd, 0xb6, 0x06, 0xe8, 0x57, 0x23, 0x3e, 0x0e, // MerkleRoot [36] 0x98, 0x20, 0x51, 0xfd, 0x1e, 0x4b, 0xa7, 0x44, 0xbb, 0xbe, 0x68, 0x0e, 0x1f, 0xee, 0x14, 0x67, 0x7b, 0xa1, 0xa3, 0xc3, 0x54, 0x0b, 0xf7, 0xb1, 0xcd, 0xb6, 0x06, 0xe8, 0x57, 0x23, 0x3e, 0x0e, // StakeRoot [68] 0x00, 0x00, // VoteBits [100] 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // FinalState [102] 0x00, 0x00, // Voters [108] 0x00, // FreshStake [110] 0x00, // Revocations [111] 0x00, 0x00, 0x00, 0x00, // Poolsize [112] 0xff, 0xff, 0x00, 0x1d, // Bits [116] 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // SBits [120] 0x01, 0x00, 0x00, 0x00, // Height [128] 0x01, 0x00, 0x00, 0x00, // Size [132] 0x61, 0xbc, 0x66, 0x49, // Timestamp [136] 0x01, 0xe3, 0x62, 0x99, // Nonce [140] 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // ExtraData [144] 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Announce number of txs 0x01, // TxnCount [180] // Begin bogus normal txs 0x01, 0x00, 0x00, 0x00, // Version [181] 0x01, // Varint for number of transaction inputs [185] 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Previous output hash [186] 0xff, 0xff, 0xff, 0xff, // Prevous output index [218] 0x00, // Previous output tree [222] 0xff, 0xff, 0xff, 0xff, // Sequence [223] 0x01, // Varint for number of transaction outputs [227] 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, // Transaction amount [228] 0x98, 0x98, // Script version 0x43, // Varint for length of pk script 0x41, // OP_DATA_65 0x04, 0x96, 0xb5, 0x38, 0xe8, 0x53, 0x51, 0x9c, 0x72, 0x6a, 0x2c, 0x91, 0xe6, 0x1e, 0xc1, 0x16, 0x00, 0xae, 0x13, 0x90, 0x81, 0x3a, 0x62, 0x7c, 0x66, 0xfb, 0x8b, 0xe7, 0x94, 0x7b, 0xe6, 0x3c, 0x52, 0xda, 0x75, 0x89, 0x37, 0x95, 0x15, 0xd4, 0xe0, 0xa6, 0x04, 0xf8, 0x14, 0x17, 0x81, 0xe6, 0x22, 0x94, 0x72, 0x11, 0x66, 0xbf, 0x62, 0x1e, 0x73, 0xa8, 0x2c, 0xbf, 0x23, 0x42, 0xc8, 0x58, 0xee, // 65-byte signature 0xac, // OP_CHECKSIG 0x11, 0x11, 0x11, 0x11, // Lock time 0x22, 0x22, 0x22, 0x22, // Expiry 0x01, // Varint for number of signatures 0x16, 0x16, 0x16, 0x16, 0x16, 0x16, 0x16, 0x16, // ValueIn 0x17, 0x17, 0x17, 0x17, // BlockHeight 0x18, 0x18, 0x18, 0x18, // BlockIndex 0x07, // SigScript length 0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0xf2, // Signature script (coinbase) // Announce number of stake txs 0x01, // TxnCount for stake tx // Begin bogus stake txs 0x01, 0x00, 0x00, 0x00, // Version 0x01, // Varint for number of transaction inputs 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Previous output hash 0xff, 0xff, 0xff, 0xff, // Prevous output index 0x01, // Previous output tree 0xff, 0xff, 0xff, 0xff, // Sequence 0x01, // Varint for number of transaction outputs 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, // Transaction amount 0x12, 0x12, // Script version 0x43, // Varint for length of pk script 0x41, // OP_DATA_65 0x04, 0x96, 0xb5, 0x38, 0xe8, 0x53, 0x51, 0x9c, 0x72, 0x6a, 0x2c, 0x91, 0xe6, 0x1e, 0xc1, 0x16, 0x00, 0xae, 0x13, 0x90, 0x81, 0x3a, 0x62, 0x7c, 0x66, 0xfb, 0x8b, 0xe7, 0x94, 0x7b, 0xe6, 0x3c, 0x52, 0xda, 0x75, 0x89, 0x37, 0x95, 0x15, 0xd4, 0xe0, 0xa6, 0x04, 0xf8, 0x14, 0x17, 0x81, 0xe6, 0x22, 0x94, 0x72, 0x11, 0x66, 0xbf, 0x62, 0x1e, 0x73, 0xa8, 0x2c, 0xbf, 0x23, 0x42, 0xc8, 0x58, 0xee, // 65-byte signature 0xac, // OP_CHECKSIG 0x11, 0x11, 0x11, 0x11, // Lock time 0x22, 0x22, 0x22, 0x22, // Expiry 0x01, // Varint for number of signatures 0x13, 0x13, 0x13, 0x13, 0x13, 0x13, 0x13, 0x13, // ValueIn 0x14, 0x14, 0x14, 0x14, // BlockHeight 0x15, 0x15, 0x15, 0x15, // BlockIndex 0x07, // SigScript length 0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0xf2, // Signature script (coinbase) } // Transaction location information for the test block transactions. var testBlockTxLocs = []wire.TxLoc{ {TxStart: 181, TxLen: 158}, } // Transaction location information for the test block stake transactions. var testBlockSTxLocs = []wire.TxLoc{ {TxStart: 340, TxLen: 158}, }
Dirbaio/btcd
wire/msgblock_test.go
GO
isc
28,011
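The tests above revolve around a serialize/deserialize round trip; the following is a compact sketch of that flow, using only MsgBlock methods the tests already exercise (the blk argument is assumed to be a fully populated block such as testBlock).

func exampleRoundTrip(blk *wire.MsgBlock) error {
	// Serialize the block to its raw byte representation.
	var buf bytes.Buffer
	if err := blk.Serialize(&buf); err != nil {
		return err
	}

	// Deserialize those bytes back into a fresh MsgBlock.
	var decoded wire.MsgBlock
	if err := decoded.Deserialize(bytes.NewReader(buf.Bytes())); err != nil {
		return err
	}

	// Hashes used throughout the tests: the block hash plus the regular and
	// stake transaction hash slices.
	_ = decoded.BlockSha()
	_ = decoded.TxShas()
	_ = decoded.STxShas()
	return nil
}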
describe('dJSON', function () { 'use strict'; var chai = require('chai'); var expect = chai.expect; var dJSON = require('../lib/dJSON'); var path = 'x.y["q.{r}"].z'; var obj; beforeEach(function () { obj = { x: { y: { 'q.{r}': { z: 635 }, q: { r: { z: 1 } } } }, 'x-y': 5, falsy: false }; }); it('gets a value from an object with a path containing properties which contain a period', function () { expect(dJSON.get(obj, path)).to.equal(635); expect(dJSON.get(obj, 'x.y.q.r.z')).to.equal(1); }); it('sets a value from an object with a path containing properties which contain a period', function () { dJSON.set(obj, path, 17771); expect(dJSON.get(obj, path)).to.equal(17771); expect(dJSON.get(obj, 'x.y.q.r.z')).to.equal(1); }); it('will return undefined when requesting a property with a dash directly', function () { expect(dJSON.get(obj, 'x-y')).to.be.undefined; }); it('will return the proper value when requesting a property with a dash by square bracket notation', function () { expect(dJSON.get(obj, '["x-y"]')).to.equal(5); }); it('returns a value that is falsy', function () { expect(dJSON.get(obj, 'falsy')).to.equal(false); }); it('sets a value that is falsy', function () { dJSON.set(obj, 'new', false); expect(dJSON.get(obj, 'new')).to.equal(false); }); it('uses an empty object as default for the value in the set method', function () { var newObj = {}; dJSON.set(newObj, 'foo.bar.lorem'); expect(newObj).to.deep.equal({ foo: { bar: { lorem: {} } } }); }); it('does not create an object when a path exists as empty string', function () { var newObj = { nestedObject: { anArray: [ 'i have a value', '' ] } }; var newPath = 'nestedObject.anArray[1]'; dJSON.set(newObj, newPath, 17771); expect(newObj).to.deep.equal({ nestedObject: { anArray: [ 'i have a value', 17771 ] } }); }); it('creates an object from a path with a left curly brace', function () { var newObj = {}; dJSON.set(newObj, path.replace('}', ''), 'foo'); expect(newObj).to.be.deep.equal({ x: { y: { 'q.{r': { z: 'foo' } } } }); }); it('creates an object from a path with a right curly brace', function () { var newObj = {}; dJSON.set(newObj, path.replace('{', ''), 'foo'); expect(newObj).to.be.deep.equal({ x: { y: { 'q.r}': { z: 'foo' } } } }); }); it('creates an object from a path with curly braces', function () { var newObj = {}; dJSON.set(newObj, path, 'foo'); expect(newObj).to.be.deep.equal({ x: { y: { 'q.{r}': { z: 'foo' } } } }); }); it('creates an object from a path without curly braces', function () { var newObj = {}; dJSON.set(newObj, path.replace('{', '').replace('}', ''), 'foo'); expect(newObj).to.be.deep.equal({ x: { y: { 'q.r': { z: 'foo' } } } }); }); });
bsander/dJSON
test/dJSON.spec.js
JavaScript
isc
3,391
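A short usage sketch distilled from the spec above; the require path mirrors the one the spec uses and the object literal is illustrative only.

'use strict';

var dJSON = require('../lib/dJSON');

var obj = { x: { y: { 'q.{r}': { z: 635 } } }, 'x-y': 5 };

dJSON.get(obj, 'x.y["q.{r}"].z'); // 635 -- keys containing periods use bracket notation
dJSON.get(obj, '["x-y"]');        // 5   -- keys containing dashes need bracket notation too
dJSON.set(obj, 'a.b.c', 42);      // intermediate objects are created as needed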
package main import ( "testing" ) func TestParseHdbSignatureRow(t *testing.T) { signature := new(signature) sample := "e11c2aff804ca144a3e49c42d6ac5783:1006:Exploit.CVE_2012_0779" sig := parseHdbSignatureRow(sample, signature) if sig.Size != 1006 { t.Fatal("Error parsing HDB or HSB signature length") } if signature.SigHash != "e11c2aff804ca144a3e49c42d6ac5783" { t.Fatal("Error parsing HDB or HSB signature hash") } }
sec51/clamav-yara
hdb_signatures_test.go
GO
isc
436
/** * The MIT License Copyright (c) 2015 Teal Cube Games * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and * associated documentation files (the "Software"), to deal in the Software without restriction, * including without limitation the rights to use, copy, modify, merge, publish, distribute, * sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or * substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT * NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package land.face.strife.managers; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import land.face.strife.StrifePlugin; import land.face.strife.data.champion.Champion; import land.face.strife.data.champion.LifeSkillType; import org.bukkit.entity.Player; public class CombatStatusManager { private final StrifePlugin plugin; private final Map<Player, Integer> tickMap = new ConcurrentHashMap<>(); private static final int SECONDS_TILL_EXPIRY = 8; public CombatStatusManager(StrifePlugin plugin) { this.plugin = plugin; } public boolean isInCombat(Player player) { return tickMap.containsKey(player); } public void addPlayer(Player player) { tickMap.put(player, SECONDS_TILL_EXPIRY); } public void tickCombat() { for (Player player : tickMap.keySet()) { if (!player.isOnline() || !player.isValid()) { tickMap.remove(player); continue; } int ticksLeft = tickMap.get(player); if (ticksLeft < 1) { doExitCombat(player); tickMap.remove(player); continue; } tickMap.put(player, ticksLeft - 1); } } public void doExitCombat(Player player) { if (!tickMap.containsKey(player)) { return; } Champion champion = plugin.getChampionManager().getChampion(player); if (champion.getDetailsContainer().getExpValues() == null) { return; } for (LifeSkillType type : champion.getDetailsContainer().getExpValues().keySet()) { plugin.getSkillExperienceManager().addExperience(player, type, champion.getDetailsContainer().getExpValues().get(type), false, false); } champion.getDetailsContainer().clearAll(); } }
TealCube/strife
src/main/java/land/face/strife/managers/CombatStatusManager.java
Java
isc
2,828
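tickCombat decrements each tracked player by one per call, so with SECONDS_TILL_EXPIRY at 8 it presumably runs on a once-per-second timer. A hedged sketch of that wiring with the Bukkit scheduler; the onEnable placement and the damage-event hook are assumptions, not shown in the class above.

// Inside StrifePlugin#onEnable(); 20 server ticks is roughly one second.
CombatStatusManager combatStatusManager = new CombatStatusManager(this);
Bukkit.getScheduler().runTaskTimer(this, combatStatusManager::tickCombat, 20L, 20L);

// In a damage listener (hypothetical), flag the attacker as in combat so
// doExitCombat fires once the 8 second window lapses without a refresh.
combatStatusManager.addPlayer(attacker);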
import itertools

from django import forms
from django.core.exceptions import ValidationError
from django.core.validators import validate_slug
from django.db import models
from django.utils import simplejson as json
from django.utils.text import capfirst
from django.utils.translation import ugettext_lazy as _

from philo.forms.fields import JSONFormField
from philo.utils.registry import RegistryIterator
from philo.validators import TemplateValidator, json_validator

#from philo.models.fields.entities import *


class TemplateField(models.TextField):
	"""A :class:`TextField` which is validated with a :class:`.TemplateValidator`. ``allow``, ``disallow``, and ``secure`` will be passed into the validator's construction."""
	def __init__(self, allow=None, disallow=None, secure=True, *args, **kwargs):
		super(TemplateField, self).__init__(*args, **kwargs)
		self.validators.append(TemplateValidator(allow, disallow, secure))


class JSONDescriptor(object):
	def __init__(self, field):
		self.field = field
	
	def __get__(self, instance, owner):
		if instance is None:
			raise AttributeError # ?
		
		if self.field.name not in instance.__dict__:
			json_string = getattr(instance, self.field.attname)
			instance.__dict__[self.field.name] = json.loads(json_string)
		
		return instance.__dict__[self.field.name]
	
	def __set__(self, instance, value):
		instance.__dict__[self.field.name] = value
		setattr(instance, self.field.attname, json.dumps(value))
	
	def __delete__(self, instance):
		del(instance.__dict__[self.field.name])
		setattr(instance, self.field.attname, json.dumps(None))


class JSONField(models.TextField):
	"""A :class:`TextField` which stores its value on the model instance as a python object and stores its value in the database as JSON. Validated with :func:`.json_validator`."""
	default_validators = [json_validator]
	
	def get_attname(self):
		return "%s_json" % self.name
	
	def contribute_to_class(self, cls, name):
		super(JSONField, self).contribute_to_class(cls, name)
		setattr(cls, name, JSONDescriptor(self))
		models.signals.pre_init.connect(self.fix_init_kwarg, sender=cls)
	
	def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs):
		# Anything passed in as self.name is assumed to come from a serializer and
		# will be treated as a json string.
		if self.name in kwargs:
			value = kwargs.pop(self.name)
			
			# Hack to handle the xml serializer's handling of "null"
			if value is None:
				value = 'null'
			
			kwargs[self.attname] = value
	
	def formfield(self, *args, **kwargs):
		kwargs["form_class"] = JSONFormField
		return super(JSONField, self).formfield(*args, **kwargs)


class SlugMultipleChoiceField(models.Field):
	"""Stores a selection of multiple items with unique slugs in the form of a comma-separated list. Also knows how to correctly handle :class:`RegistryIterator`\ s passed in as choices."""
	__metaclass__ = models.SubfieldBase
	description = _("Comma-separated slug field")
	
	def get_internal_type(self):
		return "TextField"
	
	def to_python(self, value):
		if not value:
			return []
		
		if isinstance(value, list):
			return value
		
		return value.split(',')
	
	def get_prep_value(self, value):
		return ','.join(value)
	
	def formfield(self, **kwargs):
		# This is necessary because django hard-codes TypedChoiceField for things with choices.
defaults = { 'widget': forms.CheckboxSelectMultiple, 'choices': self.get_choices(include_blank=False), 'label': capfirst(self.verbose_name), 'required': not self.blank, 'help_text': self.help_text } if self.has_default(): if callable(self.default): defaults['initial'] = self.default defaults['show_hidden_initial'] = True else: defaults['initial'] = self.get_default() for k in kwargs.keys(): if k not in ('coerce', 'empty_value', 'choices', 'required', 'widget', 'label', 'initial', 'help_text', 'error_messages', 'show_hidden_initial'): del kwargs[k] defaults.update(kwargs) form_class = forms.TypedMultipleChoiceField return form_class(**defaults) def validate(self, value, model_instance): invalid_values = [] for val in value: try: validate_slug(val) except ValidationError: invalid_values.append(val) if invalid_values: # should really make a custom message. raise ValidationError(self.error_messages['invalid_choice'] % invalid_values) def _get_choices(self): if isinstance(self._choices, RegistryIterator): return self._choices.copy() elif hasattr(self._choices, 'next'): choices, self._choices = itertools.tee(self._choices) return choices else: return self._choices choices = property(_get_choices) try: from south.modelsinspector import add_introspection_rules except ImportError: pass else: add_introspection_rules([], ["^philo\.models\.fields\.SlugMultipleChoiceField"]) add_introspection_rules([], ["^philo\.models\.fields\.TemplateField"]) add_introspection_rules([], ["^philo\.models\.fields\.JSONField"])
ithinksw/philo
philo/models/fields/__init__.py
Python
isc
4,971
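A small sketch of these fields declared on a model; Widget and its choice values are hypothetical and only illustrate how the three field classes above would be used.

from django.db import models

from philo.models.fields import JSONField, SlugMultipleChoiceField, TemplateField


class Widget(models.Model):
	# Hypothetical model demonstrating the custom fields defined above.
	template = TemplateField(blank=True)    # validated by TemplateValidator
	settings = JSONField(blank=True)        # python object on the instance, JSON text in the DB
	flags = SlugMultipleChoiceField(
		blank=True,
		choices=(('alpha', 'Alpha'), ('beta', 'Beta')),  # stored as a comma-separated list
	)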
<?php /** * Time Controller * * @package Argentum * @author Argentum Team * @copyright (c) 2008 Argentum Team * @license http://www.argentuminvoice.com/license.txt */ class Time_Controller extends Website_Controller { /** * Creates a new time block on a ticket */ public function add($ticket_id) { $time = new Time_Model(); $time->ticket_id = $ticket_id; if ( ! $_POST) // Display the form { $this->template->body = new View('admin/time/add'); $this->template->body->errors = ''; $this->template->body->time = $time; } else { $time->set_fields($this->input->post()); $time->user_id = $_SESSION['auth_user']->id; try { $time->save(); if ($this->input->post('ticket_complete')) { $ticket = new Ticket_Model($time->ticket_id); $ticket->complete= TRUE; $ticket->close_date = time(); $ticket->save(); Event::run('argentum.ticket_close', $ticket); } Event::run('argentum.ticket_time', $time); url::redirect('ticket/'.($time->ticket->complete ? 'closed' : 'active').'/'.$time->ticket->project->id); } catch (Kohana_User_Exception $e) { $this->template->body = new View('admin/time/add'); $this->template->body->time = $time; $this->template->body->errors = $e; $this->template->body->set($this->input->post()); } } } /** * Deletes a time item for a ticket */ public function delete() { $time = new Time_Model($this->input->post('id')); $time->delete(); url::redirect('ticket/view/'.$time->ticket->id); } }
la5942/argentum-invoice
application/controllers/admin/time.php
PHP
isc
1,554
var fusepm = require('./fusepm'); module.exports = fixunoproj; function fixunoproj () { var fn = fusepm.local_unoproj("."); fusepm.read_unoproj(fn).then(function (obj) { var inc = []; if (obj.Includes) { var re = /\//; for (var i=0; i<obj.Includes.length;i++) { if (obj.Includes[i] === '*') { inc.push('./*.ux'); inc.push('./*.uno'); inc.push('./*.uxl'); } else if (!obj.Includes[i].match(re)) { inc.push('./' + obj.Includes[i]); } else { inc.push(obj.Includes[i]); } } } else { inc = ['./*.ux', './*.uno', './*.uxl']; } if (!obj.Version) { obj.Version = "0.0.0"; } obj.Includes = inc; fusepm.save_unoproj(fn, obj); }).catch(function (e) { console.log(e); }); }
bolav/fusepm
lib/fixunoproj.js
JavaScript
isc
752
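For reference, a sketch of what the Includes normalization above does to a project file; the filename MainView.ux is made up and the require path is illustrative.

// Before: { "Includes": ["*", "MainView.ux"], ... }
// After:  { "Version": "0.0.0",
//           "Includes": ["./*.ux", "./*.uno", "./*.uxl", "./MainView.ux"], ... }
var fixunoproj = require('./lib/fixunoproj');
fixunoproj(); // rewrites the .unoproj found in the current directory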
import mod437 from './mod437'; var value=mod437+1; export default value;
MirekSz/webpack-es6-ts
app/mods/mod438.js
JavaScript
isc
73
const defaults = { base_css: true, // the base dark theme css inline_youtube: true, // makes youtube videos play inline the chat collapse_onebox: true, // can collapse collapse_onebox_default: false, // default option for collapse pause_youtube_on_collapse: true, // default option for pausing youtube on collapse user_color_bars: true, // show colored bars above users message blocks fish_spinner: true, // fish spinner is best spinner inline_imgur: true, // inlines webm,gifv,mp4 content from imgur visualize_hex: true, // underlines hex codes with their colour values syntax_highlight_code: true, // guess at language and highlight the code blocks emoji_translator: true, // emoji translator for INPUT area code_mode_editor: true, // uses CodeMirror for your code inputs better_image_uploads: true // use the drag & drop and paste api for image uploads }; const fileLocations = { inline_youtube: ['js/inline_youtube.js'], collapse_onebox: ['js/collapse_onebox.js'], user_color_bars: ['js/user_color_bars.js'], fish_spinner: ['js/fish_spinner.js'], inline_imgur: ['js/inline_imgur.js'], visualize_hex: ['js/visualize_hex.js'], better_image_uploads: ['js/better_image_uploads.js'], syntax_highlight_code: ['js/highlight.js', 'js/syntax_highlight_code.js'], emoji_translator: ['js/emojidata.js', 'js/emoji_translator.js'], code_mode_editor: ['CodeMirror/js/codemirror.js', 'CodeMirror/mode/cmake/cmake.js', 'CodeMirror/mode/cobol/cobol.js', 'CodeMirror/mode/coffeescript/coffeescript.js', 'CodeMirror/mode/commonlisp/commonlisp.js', 'CodeMirror/mode/css/css.js', 'CodeMirror/mode/dart/dart.js', 'CodeMirror/mode/go/go.js', 'CodeMirror/mode/groovy/groovy.js', 'CodeMirror/mode/haml/haml.js', 'CodeMirror/mode/haskell/haskell.js', 'CodeMirror/mode/htmlembedded/htmlembedded.js', 'CodeMirror/mode/htmlmixed/htmlmixed.js', 'CodeMirror/mode/jade/jade.js', 'CodeMirror/mode/javascript/javascript.js', 'CodeMirror/mode/lua/lua.js', 'CodeMirror/mode/markdown/markdown.js', 'CodeMirror/mode/mathematica/mathematica.js', 'CodeMirror/mode/nginx/nginx.js', 'CodeMirror/mode/pascal/pascal.js', 'CodeMirror/mode/perl/perl.js', 'CodeMirror/mode/php/php.js', 'CodeMirror/mode/puppet/puppet.js', 'CodeMirror/mode/python/python.js', 'CodeMirror/mode/ruby/ruby.js', 'CodeMirror/mode/sass/sass.js', 'CodeMirror/mode/scheme/scheme.js', 'CodeMirror/mode/shell/shell.js' , 'CodeMirror/mode/sql/sql.js', 'CodeMirror/mode/swift/swift.js', 'CodeMirror/mode/twig/twig.js', 'CodeMirror/mode/vb/vb.js', 'CodeMirror/mode/vbscript/vbscript.js', 'CodeMirror/mode/vhdl/vhdl.js', 'CodeMirror/mode/vue/vue.js', 'CodeMirror/mode/xml/xml.js', 'CodeMirror/mode/xquery/xquery.js', 'CodeMirror/mode/yaml/yaml.js', 'js/code_mode_editor.js'] }; // right now I assume order is correct because I'm a terrible person. make an order array or base it on File Locations and make that an array // inject the observer and the utils always. then initialize the options. injector([{type: 'js', location: 'js/observer.js'},{type: 'js', location: 'js/utils.js'}], _ => chrome.storage.sync.get(defaults, init)); function init(options) { // inject the options for the plugins themselves. const opts = document.createElement('script'); opts.textContent = ` const options = ${JSON.stringify(options)}; `; document.body.appendChild(opts); // now load the plugins. 
const loading = []; if( !options.base_css ) { document.documentElement.classList.add('nocss'); } delete options.base_css; for( const key of Object.keys(options) ) { if( !options[key] || !( key in fileLocations)) continue; for( const location of fileLocations[key] ) { const [,type] = location.split('.'); loading.push({location, type}); } } injector(loading, _ => { const drai = document.createElement('script'); drai.textContent = ` if( document.readyState === 'complete' ) { DOMObserver.drain(); } else { window.onload = _ => DOMObserver.drain(); } `; document.body.appendChild(drai); }); } function injector([first, ...rest], cb) { if( !first ) return cb(); if( first.type === 'js' ) { injectJS(first.location, _ => injector(rest, cb)); } else { injectCSS(first.location, _ => injector(rest, cb)); } } function injectCSS(file, cb) { const elm = document.createElement('link'); elm.rel = 'stylesheet'; elm.type = 'text/css'; elm.href = chrome.extension.getURL(file); elm.onload = cb; document.head.appendChild(elm); } function injectJS(file, cb) { const elm = document.createElement('script'); elm.type = 'text/javascript'; elm.src = chrome.extension.getURL(file); elm.onload = cb; document.body.appendChild(elm); }
rlemon/se-chat-dark-theme-plus
script.js
JavaScript
isc
4,901
'use strict'; const expect = require('expect.js'); const http = require('http'); const express = require('express'); const linkCheck = require('../'); describe('link-check', function () { this.timeout(2500);//increase timeout to enable 429 retry tests let baseUrl; let laterCustomRetryCounter; before(function (done) { const app = express(); app.head('/nohead', function (req, res) { res.sendStatus(405); // method not allowed }); app.get('/nohead', function (req, res) { res.sendStatus(200); }); app.get('/foo/redirect', function (req, res) { res.redirect('/foo/bar'); }); app.get('/foo/bar', function (req, res) { res.json({foo:'bar'}); }); app.get('/loop', function (req, res) { res.redirect('/loop'); }); app.get('/hang', function (req, res) { // no reply }); app.get('/notfound', function (req, res) { res.sendStatus(404); }); app.get('/basic-auth', function (req, res) { if (req.headers["authorization"] === "Basic Zm9vOmJhcg==") { return res.sendStatus(200); } res.sendStatus(401); }); // prevent first header try to be a hit app.head('/later-custom-retry-count', function (req, res) { res.sendStatus(405); // method not allowed }); app.get('/later-custom-retry-count', function (req, res) { laterCustomRetryCounter++; if(laterCustomRetryCounter === parseInt(req.query.successNumber)) { res.sendStatus(200); }else{ res.setHeader('retry-after', 1); res.sendStatus(429); } }); // prevent first header try to be a hit app.head('/later-standard-header', function (req, res) { res.sendStatus(405); // method not allowed }); var stdRetried = false; var stdFirstTry = 0; app.get('/later', function (req, res) { var isRetryDelayExpired = stdFirstTry + 1000 < Date.now(); if(!stdRetried || !isRetryDelayExpired){ stdFirstTry = Date.now(); stdRetried = true; res.setHeader('retry-after', 1); res.sendStatus(429); }else{ res.sendStatus(200); } }); // prevent first header try to be a hit app.head('/later-no-header', function (req, res) { res.sendStatus(405); // method not allowed }); var stdNoHeadRetried = false; var stdNoHeadFirstTry = 0; app.get('/later-no-header', function (req, res) { var minTime = stdNoHeadFirstTry + 1000; var maxTime = minTime + 100; var now = Date.now(); var isRetryDelayExpired = minTime < now && now < maxTime; if(!stdNoHeadRetried || !isRetryDelayExpired){ stdNoHeadFirstTry = Date.now(); stdNoHeadRetried = true; res.sendStatus(429); }else{ res.sendStatus(200); } }); // prevent first header try to be a hit app.head('/later-non-standard-header', function (req, res) { res.sendStatus(405); // method not allowed }); var nonStdRetried = false; var nonStdFirstTry = 0; app.get('/later-non-standard-header', function (req, res) { var isRetryDelayExpired = nonStdFirstTry + 1000 < Date.now(); if(!nonStdRetried || !isRetryDelayExpired){ nonStdFirstTry = Date.now(); nonStdRetried = true; res.setHeader('retry-after', '1s'); res.sendStatus(429); }else { res.sendStatus(200); } }); app.get(encodeURI('/url_with_unicode–'), function (req, res) { res.sendStatus(200); }); app.get('/url_with_special_chars\\(\\)\\+', function (req, res) { res.sendStatus(200); }); const server = http.createServer(app); server.listen(0 /* random open port */, 'localhost', function serverListen(err) { if (err) { done(err); return; } baseUrl = 'http://' + server.address().address + ':' + server.address().port; done(); }); }); it('should find that a valid link is alive', function (done) { linkCheck(baseUrl + '/foo/bar', function (err, result) { expect(err).to.be(null); expect(result.link).to.be(baseUrl + '/foo/bar'); 
expect(result.status).to.be('alive'); expect(result.statusCode).to.be(200); expect(result.err).to.be(null); done(); }); }); it('should find that a valid external link with basic authentication is alive', function (done) { linkCheck(baseUrl + '/basic-auth', { headers: { 'Authorization': 'Basic Zm9vOmJhcg==' }, }, function (err, result) { expect(err).to.be(null); expect(result.status).to.be('alive'); expect(result.statusCode).to.be(200); expect(result.err).to.be(null); done(); }); }); it('should find that a valid relative link is alive', function (done) { linkCheck('/foo/bar', { baseUrl: baseUrl }, function (err, result) { expect(err).to.be(null); expect(result.link).to.be('/foo/bar'); expect(result.status).to.be('alive'); expect(result.statusCode).to.be(200); expect(result.err).to.be(null); done(); }); }); it('should find that an invalid link is dead', function (done) { linkCheck(baseUrl + '/foo/dead', function (err, result) { expect(err).to.be(null); expect(result.link).to.be(baseUrl + '/foo/dead'); expect(result.status).to.be('dead'); expect(result.statusCode).to.be(404); expect(result.err).to.be(null); done(); }); }); it('should find that an invalid relative link is dead', function (done) { linkCheck('/foo/dead', { baseUrl: baseUrl }, function (err, result) { expect(err).to.be(null); expect(result.link).to.be('/foo/dead'); expect(result.status).to.be('dead'); expect(result.statusCode).to.be(404); expect(result.err).to.be(null); done(); }); }); it('should report no DNS entry as a dead link (http)', function (done) { linkCheck('http://example.example.example.com/', function (err, result) { expect(err).to.be(null); expect(result.link).to.be('http://example.example.example.com/'); expect(result.status).to.be('dead'); expect(result.statusCode).to.be(0); expect(result.err.code).to.be('ENOTFOUND'); done(); }); }); it('should report no DNS entry as a dead link (https)', function (done) { const badLink = 'https://githuuuub.com/tcort/link-check'; linkCheck(badLink, function (err, result) { expect(err).to.be(null); expect(result.link).to.be(badLink); expect(result.status).to.be('dead'); expect(result.statusCode).to.be(0); expect(result.err.code).to.contain('ENOTFOUND'); done(); }); }); it('should timeout if there is no response', function (done) { linkCheck(baseUrl + '/hang', { timeout: '100ms' }, function (err, result) { expect(err).to.be(null); expect(result.link).to.be(baseUrl + '/hang'); expect(result.status).to.be('dead'); expect(result.statusCode).to.be(0); expect(result.err.code).to.be('ECONNRESET'); done(); }); }); it('should try GET if HEAD fails', function (done) { linkCheck(baseUrl + '/nohead', function (err, result) { expect(err).to.be(null); expect(result.link).to.be(baseUrl + '/nohead'); expect(result.status).to.be('alive'); expect(result.statusCode).to.be(200); expect(result.err).to.be(null); done(); }); }); it('should handle redirects', function (done) { linkCheck(baseUrl + '/foo/redirect', function (err, result) { expect(err).to.be(null); expect(result.link).to.be(baseUrl + '/foo/redirect'); expect(result.status).to.be('alive'); expect(result.statusCode).to.be(200); expect(result.err).to.be(null); done(); }); }); it('should handle valid mailto', function (done) { linkCheck('mailto:linuxgeek@gmail.com', function (err, result) { expect(err).to.be(null); expect(result.link).to.be('mailto:linuxgeek@gmail.com'); expect(result.status).to.be('alive'); done(); }); }); it('should handle valid mailto with encoded characters in address', function (done) { 
linkCheck('mailto:foo%20bar@example.org', function (err, result) { expect(err).to.be(null); expect(result.link).to.be('mailto:foo%20bar@example.org'); expect(result.status).to.be('alive'); done(); }); }); it('should handle valid mailto containing hfields', function (done) { linkCheck('mailto:linuxgeek@gmail.com?subject=caf%C3%A9', function (err, result) { expect(err).to.be(null); expect(result.link).to.be('mailto:linuxgeek@gmail.com?subject=caf%C3%A9'); expect(result.status).to.be('alive'); done(); }); }); it('should handle invalid mailto', function (done) { linkCheck('mailto:foo@@bar@@baz', function (err, result) { expect(err).to.be(null); expect(result.link).to.be('mailto:foo@@bar@@baz'); expect(result.status).to.be('dead'); done(); }); }); it('should handle file protocol', function(done) { linkCheck('fixtures/file.md', { baseUrl: 'file://' + __dirname }, function(err, result) { expect(err).to.be(null); expect(result.err).to.be(null); expect(result.status).to.be('alive'); done() }); }); it('should handle file protocol with fragment', function(done) { linkCheck('fixtures/file.md#section-1', { baseUrl: 'file://' + __dirname }, function(err, result) { expect(err).to.be(null); expect(result.err).to.be(null); expect(result.status).to.be('alive'); done() }); }); it('should handle file protocol with query', function(done) { linkCheck('fixtures/file.md?foo=bar', { baseUrl: 'file://' + __dirname }, function(err, result) { expect(err).to.be(null); expect(result.err).to.be(null); expect(result.status).to.be('alive'); done() }); }); it('should handle file path containing spaces', function(done) { linkCheck('fixtures/s p a c e/A.md', { baseUrl: 'file://' + __dirname }, function(err, result) { expect(err).to.be(null); expect(result.err).to.be(null); expect(result.status).to.be('alive'); done() }); }); it('should handle baseUrl containing spaces', function(done) { linkCheck('A.md', { baseUrl: 'file://' + __dirname + '/fixtures/s p a c e'}, function(err, result) { expect(err).to.be(null); expect(result.err).to.be(null); expect(result.status).to.be('alive'); done() }); }); it('should handle file protocol and invalid files', function(done) { linkCheck('fixtures/missing.md', { baseUrl: 'file://' + __dirname }, function(err, result) { expect(err).to.be(null); expect(result.err.code).to.be('ENOENT'); expect(result.status).to.be('dead'); done() }); }); it('should ignore file protocol on absolute links', function(done) { linkCheck(baseUrl + '/foo/bar', { baseUrl: 'file://' }, function(err, result) { expect(err).to.be(null); expect(result.link).to.be(baseUrl + '/foo/bar'); expect(result.status).to.be('alive'); expect(result.statusCode).to.be(200); expect(result.err).to.be(null); done() }); }); it('should ignore file protocol on fragment links', function(done) { linkCheck('#foobar', { baseUrl: 'file://' }, function(err, result) { expect(err).to.be(null); expect(result.link).to.be('#foobar'); done() }); }); it('should callback with an error on unsupported protocol', function (done) { linkCheck('gopher://gopher/0/v2/vstat', function (err, result) { expect(result).to.be(null); expect(err).to.be.an(Error); done(); }); }); it('should handle redirect loops', function (done) { linkCheck(baseUrl + '/loop', function (err, result) { expect(err).to.be(null); expect(result.link).to.be(baseUrl + '/loop'); expect(result.status).to.be('dead'); expect(result.statusCode).to.be(0); expect(result.err.message).to.contain('Max redirects reached'); done(); }); }); it('should honour response codes in opts.aliveStatusCodes[]', function 
(done) { linkCheck(baseUrl + '/notfound', { aliveStatusCodes: [ 404, 200 ] }, function (err, result) { expect(err).to.be(null); expect(result.link).to.be(baseUrl + '/notfound'); expect(result.status).to.be('alive'); expect(result.statusCode).to.be(404); done(); }); }); it('should honour regexps in opts.aliveStatusCodes[]', function (done) { linkCheck(baseUrl + '/notfound', { aliveStatusCodes: [ 200, /^[45][0-9]{2}$/ ] }, function (err, result) { expect(err).to.be(null); expect(result.link).to.be(baseUrl + '/notfound'); expect(result.status).to.be('alive'); expect(result.statusCode).to.be(404); done(); }); }); it('should honour opts.aliveStatusCodes[]', function (done) { linkCheck(baseUrl + '/notfound', { aliveStatusCodes: [ 200 ] }, function (err, result) { expect(err).to.be(null); expect(result.link).to.be(baseUrl + '/notfound'); expect(result.status).to.be('dead'); expect(result.statusCode).to.be(404); done(); }); }); it('should retry after the provided delay on HTTP 429 with standard header', function (done) { linkCheck(baseUrl + '/later', { retryOn429: true }, function (err, result) { expect(err).to.be(null); expect(result.err).to.be(null); expect(result.link).to.be(baseUrl + '/later'); expect(result.status).to.be('alive'); expect(result.statusCode).to.be(200); done(); }); }); it('should retry after the provided delay on HTTP 429 with non standard header, and return a warning', function (done) { linkCheck(baseUrl + '/later-non-standard-header', { retryOn429: true }, function (err, result) { expect(err).to.be(null); expect(result.err).not.to.be(null) expect(result.err).to.contain("Server returned a non standard \'retry-after\' header."); expect(result.link).to.be(baseUrl + '/later-non-standard-header'); expect(result.status).to.be('alive'); expect(result.statusCode).to.be(200); done(); }); }); it('should retry after 1s delay on HTTP 429 without header', function (done) { linkCheck(baseUrl + '/later-no-header', { retryOn429: true, fallbackRetryDelay: '1s' }, function (err, result) { expect(err).to.be(null); expect(result.err).to.be(null); expect(result.link).to.be(baseUrl + '/later-no-header'); expect(result.status).to.be('alive'); expect(result.statusCode).to.be(200); done(); }); }); // 2 is default retry so test with custom 3 it('should retry 3 times for 429 status codes', function(done) { laterCustomRetryCounter = 0; linkCheck(baseUrl + '/later-custom-retry-count?successNumber=3', { retryOn429: true, retryCount: 3 }, function(err, result) { expect(err).to.be(null); expect(result.err).to.be(null); expect(result.status).to.be('alive'); expect(result.statusCode).to.be(200); done(); }); }); // See issue #23 it('should handle non URL encoded unicode chars in URLs', function(done) { //last char is EN DASH linkCheck(baseUrl + '/url_with_unicode–', function(err, result) { expect(err).to.be(null); expect(result.err).to.be(null); expect(result.status).to.be('alive'); expect(result.statusCode).to.be(200); done(); }); }); // See issues #34 and #40 it('should not URL encode already encoded characters', function(done) { linkCheck(baseUrl + '/url_with_special_chars%28%29%2B', function(err, result) { expect(err).to.be(null); expect(result.err).to.be(null); expect(result.status).to.be('alive'); expect(result.statusCode).to.be(200); done(); }); }); });
tcort/link-check
test/link-check.test.js
JavaScript
isc
18,014
function daysLeftThisWeek (date) {
  return 6 - date.getDay()
}

module.exports = daysLeftThisWeek
akileez/toolz
src/date/daysLeftThisWeek.js
JavaScript
isc
99
import hashlib import json import logging import os import subprocess import sys import time from collections import defaultdict from shutil import copy from shutil import copyfile from shutil import copystat from shutil import copytree from tempfile import mkdtemp import boto3 import botocore import yaml import sys from .helpers import archive from .helpers import get_environment_variable_value from .helpers import LambdaContext from .helpers import mkdir from .helpers import read from .helpers import timestamp ARN_PREFIXES = { "cn-north-1": "aws-cn", "cn-northwest-1": "aws-cn", "us-gov-west-1": "aws-us-gov", } log = logging.getLogger(__name__) def load_source(module_name, module_path): """Loads a python module from the path of the corresponding file.""" if sys.version_info[0] == 3 and sys.version_info[1] >= 5: import importlib.util spec = importlib.util.spec_from_file_location(module_name, module_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) elif sys.version_info[0] == 3 and sys.version_info[1] < 5: import importlib.machinery loader = importlib.machinery.SourceFileLoader(module_name, module_path) module = loader.load_module() return module def cleanup_old_versions( src, keep_last_versions, config_file="config.yaml", profile_name=None, ): """Deletes old deployed versions of the function in AWS Lambda. Won't delete $Latest and any aliased version :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param int keep_last_versions: The number of recent versions to keep and not delete """ if keep_last_versions <= 0: print("Won't delete all versions. Please do this manually") else: path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) profile_name = cfg.get("profile") aws_access_key_id = cfg.get("aws_access_key_id") aws_secret_access_key = cfg.get("aws_secret_access_key") client = get_client( "lambda", profile_name, aws_access_key_id, aws_secret_access_key, cfg.get("region"), ) response = client.list_versions_by_function( FunctionName=cfg.get("function_name"), ) versions = response.get("Versions") if len(response.get("Versions")) < keep_last_versions: print("Nothing to delete. (Too few versions published)") else: version_numbers = [ elem.get("Version") for elem in versions[1:-keep_last_versions] ] for version_number in version_numbers: try: client.delete_function( FunctionName=cfg.get("function_name"), Qualifier=version_number, ) except botocore.exceptions.ClientError as e: print(f"Skipping Version {version_number}: {e}") def deploy( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, preserve_vpc=False, ): """Deploys a new function to AWS Lambda. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. 
path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) existing_config = get_function_config(cfg) if existing_config: update_function( cfg, path_to_zip_file, existing_config, preserve_vpc=preserve_vpc ) else: create_function(cfg, path_to_zip_file) def deploy_s3( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, preserve_vpc=False, ): """Deploys a new function via AWS S3. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) use_s3 = True s3_file = upload_s3(cfg, path_to_zip_file, use_s3) existing_config = get_function_config(cfg) if existing_config: update_function( cfg, path_to_zip_file, existing_config, use_s3=use_s3, s3_file=s3_file, preserve_vpc=preserve_vpc, ) else: create_function(cfg, path_to_zip_file, use_s3=use_s3, s3_file=s3_file) def upload( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, ): """Uploads a new function to AWS S3. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) upload_s3(cfg, path_to_zip_file) def invoke( src, event_file="event.json", config_file="config.yaml", profile_name=None, verbose=False, ): """Simulates a call to your function. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str alt_event: An optional argument to override which event file to use. :param bool verbose: Whether to print out verbose details. """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Set AWS_PROFILE environment variable based on `--profile` option. if profile_name: os.environ["AWS_PROFILE"] = profile_name # Load environment variables from the config file into the actual # environment. env_vars = cfg.get("environment_variables") if env_vars: for key, value in env_vars.items(): os.environ[key] = get_environment_variable_value(value) # Load and parse event file. 
path_to_event_file = os.path.join(src, event_file) event = read(path_to_event_file, loader=json.loads) # Tweak to allow module to import local modules try: sys.path.index(src) except ValueError: sys.path.append(src) handler = cfg.get("handler") # Inspect the handler string (<module>.<function name>) and translate it # into a function we can execute. fn = get_callable_handler_function(src, handler) timeout = cfg.get("timeout") if timeout: context = LambdaContext(cfg.get("function_name"), timeout) else: context = LambdaContext(cfg.get("function_name")) start = time.time() results = fn(event, context) end = time.time() print("{0}".format(results)) if verbose: print( "\nexecution time: {:.8f}s\nfunction execution " "timeout: {:2}s".format(end - start, cfg.get("timeout", 15)) ) def init(src, minimal=False): """Copies template files to a given directory. :param str src: The path to output the template lambda project files. :param bool minimal: Minimal possible template files (excludes event.json). """ templates_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), "project_templates", ) for filename in os.listdir(templates_path): if (minimal and filename == "event.json") or filename.endswith(".pyc"): continue dest_path = os.path.join(templates_path, filename) if not os.path.isdir(dest_path): copy(dest_path, src) def build( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, ): """Builds the file bundle. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Get the absolute path to the output directory and create it if it doesn't # already exist. dist_directory = cfg.get("dist_directory", "dist") path_to_dist = os.path.join(src, dist_directory) mkdir(path_to_dist) # Combine the name of the Lambda function with the current timestamp to use # for the output filename. function_name = cfg.get("function_name") output_filename = "{0}-{1}.zip".format(timestamp(), function_name) path_to_temp = mkdtemp(prefix="aws-lambda") pip_install_to_target( path_to_temp, requirements=requirements, local_package=local_package, ) # Hack for Zope. if "zope" in os.listdir(path_to_temp): print( "Zope packages detected; fixing Zope package paths to " "make them importable.", ) # Touch. with open(os.path.join(path_to_temp, "zope/__init__.py"), "wb"): pass # Gracefully handle whether ".zip" was included in the filename or not. output_filename = ( "{0}.zip".format(output_filename) if not output_filename.endswith(".zip") else output_filename ) # Allow definition of source code directories we want to build into our # zipped package. 
build_config = defaultdict(**cfg.get("build", {})) build_source_directories = build_config.get("source_directories", "") build_source_directories = ( build_source_directories if build_source_directories is not None else "" ) source_directories = [ d.strip() for d in build_source_directories.split(",") ] files = [] for filename in os.listdir(src): if os.path.isfile(filename): if filename == ".DS_Store": continue if filename == config_file: continue print("Bundling: %r" % filename) files.append(os.path.join(src, filename)) elif os.path.isdir(filename) and filename in source_directories: print("Bundling directory: %r" % filename) files.append(os.path.join(src, filename)) # "cd" into `temp_path` directory. os.chdir(path_to_temp) for f in files: if os.path.isfile(f): _, filename = os.path.split(f) # Copy handler file into root of the packages folder. copyfile(f, os.path.join(path_to_temp, filename)) copystat(f, os.path.join(path_to_temp, filename)) elif os.path.isdir(f): src_path_length = len(src) + 1 destination_folder = os.path.join( path_to_temp, f[src_path_length:] ) copytree(f, destination_folder) # Zip them together into a single file. # TODO: Delete temp directory created once the archive has been compiled. path_to_zip_file = archive("./", path_to_dist, output_filename) return path_to_zip_file def get_callable_handler_function(src, handler): """Translate a string of the form "module.function" into a callable function. :param str src: The path to your Lambda project containing a valid handler file. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ # "cd" into `src` directory. os.chdir(src) module_name, function_name = handler.split(".") filename = get_handler_filename(handler) path_to_module_file = os.path.join(src, filename) module = load_source(module_name, path_to_module_file) return getattr(module, function_name) def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name) def _install_packages(path, packages): """Install all packages listed to the target directory. Ignores any package that includes Python itself and python-lambda as well since its only needed for deploying and not running the code :param str path: Path to copy installed pip packages to. :param list packages: A list of packages to be installed via pip. """ def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist) filtered_packages = filter(_filter_blacklist, packages) for package in filtered_packages: if package.startswith("-e "): package = package.replace("-e ", "") print("Installing {package}".format(package=package)) subprocess.check_call( [ sys.executable, "-m", "pip", "install", package, "-t", path, "--ignore-installed", ] ) print( "Install directory contents are now: {directory}".format( directory=os.listdir(path) ) ) def pip_install_to_target(path, requirements=None, local_package=None): """For a given active virtualenv, gather all installed pip packages then copy (re-install) them to the path provided. :param str path: Path to copy installed pip packages to. :param str requirements: If set, only the packages in the supplied requirements file are installed. If not set then installs all packages found via pip freeze. 
:param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ packages = [] if not requirements: print("Gathering pip packages") pkgStr = subprocess.check_output( [sys.executable, "-m", "pip", "freeze"] ) packages.extend(pkgStr.decode("utf-8").splitlines()) else: if os.path.exists(requirements): print("Gathering requirement packages") data = read(requirements) packages.extend(data.splitlines()) if not packages: print("No dependency packages installed!") if local_package is not None: if not isinstance(local_package, (list, tuple)): local_package = [local_package] for l_package in local_package: packages.append(l_package) _install_packages(path, packages) def get_role_name(region, account_id, role): """Shortcut to insert the `account_id` and `role` into the iam string.""" prefix = ARN_PREFIXES.get(region, "aws") return "arn:{0}:iam::{1}:role/{2}".format(prefix, account_id, role) def get_account_id( profile_name, aws_access_key_id, aws_secret_access_key, region=None, ): """Query STS for a users' account_id""" client = get_client( "sts", profile_name, aws_access_key_id, aws_secret_access_key, region, ) return client.get_caller_identity().get("Account") def get_client( client, profile_name, aws_access_key_id, aws_secret_access_key, region=None, ): """Shortcut for getting an initialized instance of the boto3 client.""" boto3.setup_default_session( profile_name=profile_name, aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=region, ) return boto3.client(client) def create_function(cfg, path_to_zip_file, use_s3=False, s3_file=None): """Register and upload a function to AWS Lambda.""" print("Creating your new Lambda function") byte_stream = read(path_to_zip_file, binary_file=True) profile_name = cfg.get("profile") aws_access_key_id = cfg.get("aws_access_key_id") aws_secret_access_key = cfg.get("aws_secret_access_key") account_id = get_account_id( profile_name, aws_access_key_id, aws_secret_access_key, cfg.get("region",), ) role = get_role_name( cfg.get("region"), account_id, cfg.get("role", "lambda_basic_execution"), ) client = get_client( "lambda", profile_name, aws_access_key_id, aws_secret_access_key, cfg.get("region"), ) # Do we prefer development variable over config? 
buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name") func_name = os.environ.get("LAMBDA_FUNCTION_NAME") or cfg.get( "function_name" ) print("Creating lambda function with name: {}".format(func_name)) if use_s3: kwargs = { "FunctionName": func_name, "Runtime": cfg.get("runtime", "python2.7"), "Role": role, "Handler": cfg.get("handler"), "Code": { "S3Bucket": "{}".format(buck_name), "S3Key": "{}".format(s3_file), }, "Description": cfg.get("description", ""), "Timeout": cfg.get("timeout", 15), "MemorySize": cfg.get("memory_size", 512), "VpcConfig": { "SubnetIds": cfg.get("subnet_ids", []), "SecurityGroupIds": cfg.get("security_group_ids", []), }, "Publish": True, } else: kwargs = { "FunctionName": func_name, "Runtime": cfg.get("runtime", "python2.7"), "Role": role, "Handler": cfg.get("handler"), "Code": {"ZipFile": byte_stream}, "Description": cfg.get("description", ""), "Timeout": cfg.get("timeout", 15), "MemorySize": cfg.get("memory_size", 512), "VpcConfig": { "SubnetIds": cfg.get("subnet_ids", []), "SecurityGroupIds": cfg.get("security_group_ids", []), }, "Publish": True, } if "tags" in cfg: kwargs.update( Tags={key: str(value) for key, value in cfg.get("tags").items()} ) if "environment_variables" in cfg: kwargs.update( Environment={ "Variables": { key: get_environment_variable_value(value) for key, value in cfg.get("environment_variables").items() }, }, ) client.create_function(**kwargs) concurrency = get_concurrency(cfg) if concurrency > 0: client.put_function_concurrency( FunctionName=func_name, ReservedConcurrentExecutions=concurrency ) def update_function( cfg, path_to_zip_file, existing_cfg, use_s3=False, s3_file=None, preserve_vpc=False, ): """Updates the code of an existing Lambda function""" print("Updating your Lambda function") byte_stream = read(path_to_zip_file, binary_file=True) profile_name = cfg.get("profile") aws_access_key_id = cfg.get("aws_access_key_id") aws_secret_access_key = cfg.get("aws_secret_access_key") account_id = get_account_id( profile_name, aws_access_key_id, aws_secret_access_key, cfg.get("region",), ) role = get_role_name( cfg.get("region"), account_id, cfg.get("role", "lambda_basic_execution"), ) client = get_client( "lambda", profile_name, aws_access_key_id, aws_secret_access_key, cfg.get("region"), ) # Do we prefer development variable over config? 
buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name") if use_s3: client.update_function_code( FunctionName=cfg.get("function_name"), S3Bucket="{}".format(buck_name), S3Key="{}".format(s3_file), Publish=True, ) else: client.update_function_code( FunctionName=cfg.get("function_name"), ZipFile=byte_stream, Publish=True, ) kwargs = { "FunctionName": cfg.get("function_name"), "Role": role, "Runtime": cfg.get("runtime"), "Handler": cfg.get("handler"), "Description": cfg.get("description", ""), "Timeout": cfg.get("timeout", 15), "MemorySize": cfg.get("memory_size", 512), } if preserve_vpc: kwargs["VpcConfig"] = existing_cfg.get("Configuration", {}).get( "VpcConfig" ) if kwargs["VpcConfig"] is None: kwargs["VpcConfig"] = { "SubnetIds": cfg.get("subnet_ids", []), "SecurityGroupIds": cfg.get("security_group_ids", []), } else: del kwargs["VpcConfig"]["VpcId"] else: kwargs["VpcConfig"] = { "SubnetIds": cfg.get("subnet_ids", []), "SecurityGroupIds": cfg.get("security_group_ids", []), } if "environment_variables" in cfg: kwargs.update( Environment={ "Variables": { key: str(get_environment_variable_value(value)) for key, value in cfg.get("environment_variables").items() }, }, ) ret = client.update_function_configuration(**kwargs) concurrency = get_concurrency(cfg) if concurrency > 0: client.put_function_concurrency( FunctionName=cfg.get("function_name"), ReservedConcurrentExecutions=concurrency, ) elif "Concurrency" in existing_cfg: client.delete_function_concurrency( FunctionName=cfg.get("function_name") ) if "tags" in cfg: tags = {key: str(value) for key, value in cfg.get("tags").items()} if tags != existing_cfg.get("Tags"): if existing_cfg.get("Tags"): client.untag_resource( Resource=ret["FunctionArn"], TagKeys=list(existing_cfg["Tags"].keys()), ) client.tag_resource(Resource=ret["FunctionArn"], Tags=tags) def upload_s3(cfg, path_to_zip_file, *use_s3): """Upload a function to AWS S3.""" print("Uploading your new Lambda function") profile_name = cfg.get("profile") aws_access_key_id = cfg.get("aws_access_key_id") aws_secret_access_key = cfg.get("aws_secret_access_key") client = get_client( "s3", profile_name, aws_access_key_id, aws_secret_access_key, cfg.get("region"), ) byte_stream = b"" with open(path_to_zip_file, mode="rb") as fh: byte_stream = fh.read() s3_key_prefix = cfg.get("s3_key_prefix", "/dist") checksum = hashlib.new("md5", byte_stream).hexdigest() timestamp = str(time.time()) filename = "{prefix}{checksum}-{ts}.zip".format( prefix=s3_key_prefix, checksum=checksum, ts=timestamp, ) # Do we prefer development variable over config? 
buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name") func_name = os.environ.get("LAMBDA_FUNCTION_NAME") or cfg.get( "function_name" ) kwargs = { "Bucket": "{}".format(buck_name), "Key": "{}".format(filename), "Body": byte_stream, } client.put_object(**kwargs) print("Finished uploading {} to S3 bucket {}".format(func_name, buck_name)) if use_s3: return filename def get_function_config(cfg): """Check whether a function exists or not and return its config""" function_name = cfg.get("function_name") profile_name = cfg.get("profile") aws_access_key_id = cfg.get("aws_access_key_id") aws_secret_access_key = cfg.get("aws_secret_access_key") client = get_client( "lambda", profile_name, aws_access_key_id, aws_secret_access_key, cfg.get("region"), ) try: return client.get_function(FunctionName=function_name) except client.exceptions.ResourceNotFoundException as e: if "Function not found" in str(e): return False def get_concurrency(cfg): """Return the Reserved Concurrent Executions if present in the config""" concurrency = int(cfg.get("concurrency", 0)) return max(0, concurrency) def read_cfg(path_to_config_file, profile_name): cfg = read(path_to_config_file, loader=yaml.full_load) if profile_name is not None: cfg["profile"] = profile_name elif "AWS_PROFILE" in os.environ: cfg["profile"] = os.environ["AWS_PROFILE"] return cfg
nficano/python-lambda
aws_lambda/aws_lambda.py
Python
isc
26,779
async function test(object) {
  for (var key in object) {
    await key;
  }
}
marten-de-vries/kneden
test/fixtures/for-in/actual.js
JavaScript
isc
79
