Dataset schema (one record per row):

  hash     string, length 40 (fixed)
  diff     string, length 131 to 114k
  message  string, length 7 to 980
  project  string, length 5 to 67
  split    string, 1 distinct value
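Each record below follows this schema in order: hash, diff, message, project, split. Assuming the dump comes from a Hugging Face `datasets` export (an assumption; the dataset path below is a hypothetical placeholder, not the real name), rows like these could be loaded with a sketch such as:

```python
# Minimal loading sketch, assuming a Hugging Face `datasets` export.
# "user/commit-diffs" is a hypothetical placeholder, not the real dataset name.
from datasets import load_dataset

ds = load_dataset("user/commit-diffs", split="train")  # the schema lists a single split
for row in ds.select(range(3)):
    print(row["hash"], row["project"])
    print(row["message"][:80])  # messages run from 7 to 980 characters
```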
b4fcdc48d6dc2ba7fa17924abe374a77c806b787
diff --git a/src/JMS/Serializer/Handler/FormErrorHandler.php b/src/JMS/Serializer/Handler/FormErrorHandler.php index <HASH>..<HASH> 100644 --- a/src/JMS/Serializer/Handler/FormErrorHandler.php +++ b/src/JMS/Serializer/Handler/FormErrorHandler.php @@ -21,7 +21,7 @@ namespace JMS\Serializer\Handler; use JMS\Serializer\YamlSerializationVisitor; use JMS\Serializer\JsonSerializationVisitor; use JMS\Serializer\GraphNavigator; -use JMS\Serializer\GenericSerializationVisitor; +use JMS\Serializer\VisitorInterface; use Symfony\Component\Form\Form; use Symfony\Component\Form\FormError; use Symfony\Component\Translation\TranslatorInterface; @@ -125,7 +125,7 @@ class FormErrorHandler implements SubscribingHandlerInterface return $this->translator->trans($error->getMessageTemplate(), $error->getMessageParameters(), 'validators'); } - private function convertFormToArray(GenericSerializationVisitor $visitor, Form $data) + private function convertFormToArray(VisitorInterface $visitor, Form $data) { $isRoot = null === $visitor->getRoot();
do not depend on the generic serialization visitor
schmittjoh_serializer
train
5fef7176f06ef4f665641dfc0deea47a566b90e0
diff --git a/sprd/view/ProductViewerClass.js b/sprd/view/ProductViewerClass.js index <HASH>..<HASH> 100644 --- a/sprd/view/ProductViewerClass.js +++ b/sprd/view/ProductViewerClass.js @@ -331,7 +331,9 @@ define(["js/ui/View", "js/core/Bus", "sprd/manager/ProductManager", "sprd/data/I self.$stage.set('height', '100%'); }, 200); - this.$.productManager.setTextForConfiguration(this.$.textArea.$.value, this.$.selectedConfiguration); + if(this.$.selectedConfiguration){ + this.$.productManager.setTextForConfiguration(this.$.textArea.$.value, this.$.selectedConfiguration); + } self.$.textArea.set('opacity', 0); }
DEV-<I> - Uncaught Error: Configuration is not a TextConfiguration
spreadshirt_rAppid.js-sprd
train
1e098e3916cd81d4832f8ee2dcb8d35e50d242d1
diff --git a/scriptcwl/library.py b/scriptcwl/library.py index <HASH>..<HASH> 100644 --- a/scriptcwl/library.py +++ b/scriptcwl/library.py @@ -157,6 +157,8 @@ def sort_loading_order(step_files): workflows_with_subworkflows.append(f) else: workflows.append(f) + else: + workflows.append(f) else: tools.append(f) return tools + workflows + workflows_with_subworkflows
Fix loading of workflows

Workflows were added to the list of steps to load only if the workflow has a requirements section. Workflows should also be loaded if they don't have a requirements section; this commit fixes that.
NLeSC_scriptcwl
train
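The scriptcwl fix above boils down to giving the "workflow without a requirements section" case its own branch. A minimal sketch of the corrected categorization, with hypothetical predicate helpers standing in for scriptcwl's actual file inspection:

```python
# Sketch only: is_workflow / has_requirements / has_subworkflows are
# hypothetical stand-ins for scriptcwl's real checks.
def sort_loading_order(step_files, is_workflow, has_requirements, has_subworkflows):
    tools, workflows, workflows_with_sub = [], [], []
    for f in step_files:
        if not is_workflow(f):
            tools.append(f)
        elif has_requirements(f) and has_subworkflows(f):
            workflows_with_sub.append(f)
        else:
            # Previously this branch only existed under has_requirements(f),
            # so workflows without a requirements section were dropped.
            workflows.append(f)
    # Load tools first, then plain workflows, then workflows with subworkflows.
    return tools + workflows + workflows_with_sub
```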
ee0644b1885a6f1de35d330daea0b3b1faee60c4
diff --git a/httpbin/core.py b/httpbin/core.py index <HASH>..<HASH> 100644 --- a/httpbin/core.py +++ b/httpbin/core.py @@ -12,10 +12,7 @@ import json import os import time -import newrelic.agent - from flask import Flask, Response, request, render_template, redirect, jsonify, make_response -from raven.contrib.flask import Sentry from werkzeug.datastructures import WWWAuthenticate from . import filters @@ -36,10 +33,6 @@ ENV_COOKIES = ( app = Flask(__name__) -# Setup error collection -sentry = Sentry(app) -newrelic.agent.initialize() - # ------ # Routes # ------ diff --git a/httpbin/helpers.py b/httpbin/helpers.py index <HASH>..<HASH> 100644 --- a/httpbin/helpers.py +++ b/httpbin/helpers.py @@ -8,7 +8,7 @@ This module provides helper functions for httpbin. """ import base64 -import simplejson as json +import json from hashlib import md5 from werkzeug.http import parse_authorization_header diff --git a/httpbin/runner.py b/httpbin/runner.py index <HASH>..<HASH> 100755 --- a/httpbin/runner.py +++ b/httpbin/runner.py @@ -5,7 +5,7 @@ httpbin.runner ~~~~~~~~~~~~~~ This module serves as a command-line runner for httpbin, powered by -gunicorn. +gevent. """ diff --git a/requirements.txt b/requirements.txt index <HASH>..<HASH> 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,15 +2,9 @@ Flask==0.8 Flask-Script==0.3.1 Jinja2==2.5.5 Werkzeug==0.8.1 -argparse==1.2.1 -blinker==1.2 decorator==3.3.2 distribute==0.6.15 eventlet==0.9.16 greenlet==0.3.1 gunicorn==0.13.4 -newrelic==1.2.1.265 -raven==1.9.3 -simplejson==2.4.0 -wsgiref==0.1.2 gevent \ No newline at end of file diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -20,11 +20,8 @@ deps = [ 'Flask-Script==0.3.1', 'Jinja2==2.5.5', 'Werkzeug==0.8.1', - 'argparse==1.2.1', 'distribute==0.6.15', - 'wsgiref==0.1.2', 'decorator==3.3.2', - 'omnijson==0.1.2', 'gevent' ]
Removed unused and/or undocumented dependencies. Fixes issue #<I>
postmanlabs_httpbin
train
5620936b2d6dd2ece0bcf61edf7cf41a75abb180
diff --git a/tests/test_anyio.py b/tests/test_anyio.py index <HASH>..<HASH> 100644 --- a/tests/test_anyio.py +++ b/tests/test_anyio.py @@ -5,8 +5,9 @@ from os import path from functools import partial from pathlib import Path +import h11 import pytest -from anyio import create_task_group, open_file +from anyio import create_task_group, open_file, EndOfStream from overly import ( Server, ssl_socket_wrapper, @@ -27,6 +28,7 @@ from overly import ( ) import asks +from asks.request_object import RequestProcessor from asks.errors import TooManyRedirects, BadStatus, RequestTimeout pytestmark = pytest.mark.anyio @@ -594,3 +596,28 @@ async def test_session_unknown_kwargs(): session = asks.Session("https://httpbin.org/get") await session.request("GET", ko=7, foo=0, bar=3, shite=3) pytest.fail("Passing unknown kwargs does not raise TypeError") + + +async def test_recv_event_anyio2_end_of_stream(): + class MockH11Connection: + def __init__(self): + self.data = None + def next_event(self): + if self.data == b"": + return h11.PAUSED + else: + return h11.NEED_DATA + def receive_data(self, data): + self.data = data + + class MockSock: + def receive(self): + raise EndOfStream + + req = RequestProcessor(None, "get", "toot-toot", None) + req.sock = MockSock() + + h11_connection = MockH11Connection() + event = await req._recv_event(h11_connection) + assert event is h11.PAUSED + assert h11_connection.data == b""
Implement a test for EndOfStream
theelous3_asks
train
3a905ef1bfd21363c05bafaaf83161e38c213aba
diff --git a/backend/geomajas-impl/src/main/java/org/geomajas/spring/DependencyCheckPostProcessor.java b/backend/geomajas-impl/src/main/java/org/geomajas/spring/DependencyCheckPostProcessor.java index <HASH>..<HASH> 100644 --- a/backend/geomajas-impl/src/main/java/org/geomajas/spring/DependencyCheckPostProcessor.java +++ b/backend/geomajas-impl/src/main/java/org/geomajas/spring/DependencyCheckPostProcessor.java @@ -31,6 +31,7 @@ import org.springframework.stereotype.Component; import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Map; /** @@ -43,26 +44,27 @@ import java.util.Map; public class DependencyCheckPostProcessor { @Autowired(required = false) - protected Map<String, PluginInfo> declaredPlugins; + protected Map<String, PluginInfo> contextDeclaredPlugins; @PostConstruct public void checkPluginDependencies() { - if (null == declaredPlugins) { + if (null == contextDeclaredPlugins) { return; } + + List<PluginInfo> declaredPlugins = new ArrayList<PluginInfo>(); // remove unfiltered plugin metadata (needed for eclipse !) - for (Map.Entry<String, PluginInfo> entry : - new ArrayList<Map.Entry<String, PluginInfo>>(declaredPlugins.entrySet())) { + for (Map.Entry<String, PluginInfo> entry : contextDeclaredPlugins.entrySet()) { String version = entry.getValue().getVersion().getVersion(); - if (null != version && version.startsWith("$")) { - declaredPlugins.remove(entry.getKey()); + if (null == version || !version.startsWith("$")) { + declaredPlugins.add(entry.getValue()); } } // start by going through all plug-ins to build a map of versions for plug-in keys // includes verification that each key is only used once Map<String, String> versions = new HashMap<String, String>(); - for (PluginInfo plugin : declaredPlugins.values()) { + for (PluginInfo plugin : declaredPlugins) { String name = plugin.getVersion().getName(); String version = plugin.getVersion().getVersion(); // check for multiple plugin with same name but different versions (duplicates allowed for jar+source dep) @@ -80,7 +82,7 @@ public class DependencyCheckPostProcessor { // Check dependencies StringBuffer message = new StringBuffer(); String backendVersion = versions.get("Geomajas"); - for (PluginInfo plugin : declaredPlugins.values()) { + for (PluginInfo plugin : declaredPlugins) { String name = plugin.getVersion().getName(); message.append(checkVersion(name, "Geomajas back-end", plugin.getBackendVersion(), backendVersion)); for (PluginVersionInfo dependency : plugin.getDependencies()) {
GBE-<I> fix unfiltered check, no ConcurrentModificationException
geomajas_geomajas-project-server
train
98e0b8867ed22c1653a209f351fc2c3419463a99
diff --git a/src/heywood/manager.py b/src/heywood/manager.py index <HASH>..<HASH> 100644 --- a/src/heywood/manager.py +++ b/src/heywood/manager.py @@ -64,6 +64,9 @@ class BaseProcess(object): os.setsid() def spawn(self): + if self.process: + return + self.process = Popen(parse_command(self.command), stdin=dev_null, stdout=PIPE, stderr=STDOUT, preexec_fn=self.set_process_group)
Protect against double-spawning children.
doptio_heywood
train
1e159063af98c174f268e70d064aa1d21bc9bd63
diff --git a/src/sap.ui.core/src/sap/ui/Device.js b/src/sap.ui.core/src/sap/ui/Device.js index <HASH>..<HASH> 100644 --- a/src/sap.ui.core/src/sap/ui/Device.js +++ b/src/sap.ui.core/src/sap/ui/Device.js @@ -295,16 +295,18 @@ if (typeof window.sap.ui !== "object") { if (pf.indexOf("Win") != -1 ) { // userAgent in windows 7 contains: windows NT 6.1 // userAgent in windows 8 contains: windows NT 6.2 or higher - // TODO: update this after windows 9 is released - var rVersion = /windows NT 6.(\d)/i; + // userAgent since windows 10: Windows NT 10[...] + var rVersion = /Windows NT (\d+).(\d)/i; var uaResult = userAgent.match(rVersion); var sVersionStr = ""; - if (uaResult) { + if (uaResult[1] == "6") { if (uaResult[1] == 1) { sVersionStr = "7"; } else if (uaResult[1] > 1) { sVersionStr = "8"; } + } else { + sVersionStr = uaResult[1]; } return {"name": OS.WINDOWS, "versionStr": sVersionStr}; } else if (pf.indexOf("Mac") != -1) {
[FIX] sap.ui.Device: OS detection - support for future Windows versions

Windows versions that do not start with "Windows NT 6." are now recognized, thus supporting Windows <I>.

Change-Id: I<I>a<I>aef<I>a7b1ae<I>e<I>f0a<I>f6a
SAP_openui5
train
ccb8e00e7424dc7e060fd448f78f53bcdd50bb67
diff --git a/lib/travis/client/artifact.rb b/lib/travis/client/artifact.rb index <HASH>..<HASH> 100644 --- a/lib/travis/client/artifact.rb +++ b/lib/travis/client/artifact.rb @@ -1,3 +1,4 @@ +# encoding: utf-8 require 'travis/client' module Travis @@ -9,8 +10,13 @@ module Travis # @!parse attr_reader :job has :job + def encoded_body + return body unless body.respond_to? :encode + body.encode 'utf-8' + end + def colorized_body - attributes['colorized_body'] ||= body.gsub(/[^[:print:]\e\n]/, '') + attributes['colorized_body'] ||= encoded_body.gsub(/[^[:print:]\e\n]/, '') end def clean_body
try to fix encoding issues with body, see #<I>
travis-ci_travis.rb
train
e4937bca0c8d02beeb33a2c46a101a721847c960
diff --git a/api/src/main/java/org/jfrog/artifactory/client/RepositoryHandle.java b/api/src/main/java/org/jfrog/artifactory/client/RepositoryHandle.java index <HASH>..<HASH> 100644 --- a/api/src/main/java/org/jfrog/artifactory/client/RepositoryHandle.java +++ b/api/src/main/java/org/jfrog/artifactory/client/RepositoryHandle.java @@ -33,4 +33,7 @@ public interface RepositoryHandle { DownloadableArtifact download(String path); Set<ItemPermission> effectivePermissions(); -} \ No newline at end of file + + boolean isFolder(String path); +} + diff --git a/services/src/main/groovy/org/jfrog/artifactory/client/impl/RepositoryHandleImpl.groovy b/services/src/main/groovy/org/jfrog/artifactory/client/impl/RepositoryHandleImpl.groovy index <HASH>..<HASH> 100644 --- a/services/src/main/groovy/org/jfrog/artifactory/client/impl/RepositoryHandleImpl.groovy +++ b/services/src/main/groovy/org/jfrog/artifactory/client/impl/RepositoryHandleImpl.groovy @@ -80,4 +80,12 @@ class RepositoryHandleImpl implements RepositoryHandle { Set<ItemPermission> effectivePermissions() { this.folder('').effectivePermissions() } + + boolean isFolder(String path) { + String itemInfoJson = artifactory.get("/api/storage/${repoKey}/${path}", ContentType.JSON, String) + JsonSlurper slurper = new JsonSlurper() + def itemInfo = slurper.parseText(itemInfoJson) + return itemInfo.children != null; + } + } diff --git a/services/src/test/java/org/jfrog/artifactory/client/RepositoryTests.java b/services/src/test/java/org/jfrog/artifactory/client/RepositoryTests.java index <HASH>..<HASH> 100644 --- a/services/src/test/java/org/jfrog/artifactory/client/RepositoryTests.java +++ b/services/src/test/java/org/jfrog/artifactory/client/RepositoryTests.java @@ -195,4 +195,13 @@ public class RepositoryTests extends ArtifactoryTestsBase { assertTrue(libsReleases.getKeyPair() == null || libsReleases.getKeyPair().isEmpty()); assertTrue(libsReleases.getRepoLayoutRef() == null || libsReleases.getRepoLayoutRef().isEmpty()); } + + @Test(dependsOnMethods = "testCreate") + public void testRepositoryIsFolder() throws IOException { + try { + assertTrue(artifactory.repository(NEW_LOCAL).isFolder("myFolder")); + } catch (Exception e) { + assertTrue(e.getMessage().contains("Internal Server Error")); + } + } }
[RTFACT-<I>] Add isFolder method to RepositoryHandle.
jfrog_artifactory-client-java
train
fc489d29d69af4e62e20dce6aff8714021ee56e3
diff --git a/lxd/daemon.go b/lxd/daemon.go index <HASH>..<HASH> 100644 --- a/lxd/daemon.go +++ b/lxd/daemon.go @@ -798,7 +798,7 @@ func (d *Daemon) Init() error { /* Read the uid/gid allocation */ d.IdmapSet, err = shared.DefaultIdmapSet() if err != nil { - shared.LogWarn("Error reading default idmap", log.Ctx{"err": err.Error()}) + shared.LogWarn("Error reading default uid/gid map", log.Ctx{"err": err.Error()}) shared.LogWarnf("Only privileged containers will be able to run") d.IdmapSet = nil } else { @@ -810,24 +810,30 @@ func (d *Daemon) Init() error { } } - shared.LogInfof("Configured LXD uid/gid map:") - for _, lxcmap := range d.IdmapSet.Idmap { - suffix := "" + if len(d.IdmapSet.Idmap) == 0 { + shared.LogWarnf("No available uid/gid map could be found") + shared.LogWarnf("Only privileged containers will be able to run") + d.IdmapSet = nil + } else { + shared.LogInfof("Configured LXD uid/gid map:") + for _, lxcmap := range d.IdmapSet.Idmap { + suffix := "" - if lxcmap.Usable() != nil { - suffix = " (unusable)" - } + if lxcmap.Usable() != nil { + suffix = " (unusable)" + } - for _, lxcEntry := range lxcmap.ToLxcString() { - shared.LogInfof(" - %s%s", strings.TrimRight(lxcEntry, "\n"), suffix) + for _, lxcEntry := range lxcmap.ToLxcString() { + shared.LogInfof(" - %s%s", strings.TrimRight(lxcEntry, "\n"), suffix) + } } - } - err = d.IdmapSet.Usable() - if err != nil { - shared.LogWarnf("One or more uid/gid map entry isn't usable (typically due to nesting)") - shared.LogWarnf("Only privileged containers will be able to run") - d.IdmapSet = nil + err = d.IdmapSet.Usable() + if err != nil { + shared.LogWarnf("One or more uid/gid map entry isn't usable (typically due to nesting)") + shared.LogWarnf("Only privileged containers will be able to run") + d.IdmapSet = nil + } } } diff --git a/shared/idmapset_linux.go b/shared/idmapset_linux.go index <HASH>..<HASH> 100644 --- a/shared/idmapset_linux.go +++ b/shared/idmapset_linux.go @@ -658,10 +658,83 @@ func DefaultIdmapSet() (*IdmapSet, error) { // NOTE: Remove once LXD can deal with multiple shadow maps break } - } else { - // Fallback map + + return idmapset, nil + } + + // No shadow available, figure out a default map + kernelMap, err := CurrentIdmapSet() + if err != nil { + // Hardcoded fallback map e := IdmapEntry{Isuid: true, Isgid: true, Nsid: 0, Hostid: 1000000, Maprange: 1000000000} idmapset.Idmap = Extend(idmapset.Idmap, e) + return idmapset, nil + } + + // Look for mapped ranges + kernelRanges, err := kernelMap.ValidRanges() + if err != nil { + return nil, err + } + + // Find a suitable uid range + for _, entry := range kernelRanges { + // We only care about uids right now + if !entry.Isuid { + continue + } + + // We want a map that's separate from the system's own POSIX allocation + if entry.Endid < 100000 { + continue + } + + // Don't use the first 65536 ids + if entry.Startid < 100000 { + entry.Startid = 100000 + } + + // Check if we have enough ids + if entry.Endid-entry.Startid < 65536 { + continue + } + + // Add the map + e := IdmapEntry{Isuid: true, Isgid: false, Nsid: 0, Hostid: entry.Startid, Maprange: entry.Endid - entry.Startid + 1} + idmapset.Idmap = Extend(idmapset.Idmap, e) + + // NOTE: Remove once LXD can deal with multiple shadow maps + break + } + + // Find a suitable gid range + for _, entry := range kernelRanges { + // We only care about gids right now + if !entry.Isgid { + continue + } + + // We want a map that's separate from the system's own POSIX allocation + if entry.Endid < 100000 { + continue + } + + // Don't use the first 65536 ids + if entry.Startid < 100000 { + entry.Startid = 100000 + } + + // Check if we have enough ids + if entry.Endid-entry.Startid < 65536 { + continue + } + + // Add the map + e := IdmapEntry{Isuid: false, Isgid: true, Nsid: 0, Hostid: entry.Startid, Maprange: entry.Endid - entry.Startid + 1} + idmapset.Idmap = Extend(idmapset.Idmap, e) + + // NOTE: Remove once LXD can deal with multiple shadow maps + break } return idmapset, nil
idmap: Make more of an effort to find a default
lxc_lxd
train
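The selection logic added in the lxd diff above is the same for uids and gids: skip kernel ranges that sit inside the system's own POSIX allocation, clamp the range start to 100000, and demand at least a full 65536-id block. A hedged Python distillation of that scan (an illustration only, not LXD's Go implementation; tuples stand in for the Go IdmapEntry fields):

```python
# Illustration of the default idmap range selection; each tuple is
# (Startid, Endid, is_uid), standing in for the Go IdmapEntry fields.
def pick_range(kernel_ranges, want_uid=True):
    for start, end, is_uid in kernel_ranges:
        if is_uid != want_uid:
            continue                # only look at the id type we want
        if end < 100000:
            continue                # stays inside the system's POSIX allocation
        start = max(start, 100000)  # don't use the first ids
        if end - start < 65536:
            continue                # not enough ids for a full map
        return start, end - start + 1  # host id and map range
    return None                     # caller falls back to privileged-only

print(pick_range([(0, 65535, True), (100000, 1000000, True)]))  # (100000, 900001)
```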
22978beba01d4d87fb0576482c8c941e2cfc4e30
diff --git a/Controller/ExerciseController.php b/Controller/ExerciseController.php index <HASH>..<HASH> 100755 --- a/Controller/ExerciseController.php +++ b/Controller/ExerciseController.php @@ -876,7 +876,7 @@ class ExerciseController extends Controller //To record response $exerciseSer = $this->container->get('ujm.exercise_services'); - $ip = $exerciseSer->getIP(); + $ip = $exerciseSer->getIP($request); $interactionToValidatedID = $request->get('interactionToValidated'); $response = $this->getDoctrine() ->getManager() diff --git a/Services/classes/exerciseServices.php b/Services/classes/exerciseServices.php index <HASH>..<HASH> 100755 --- a/Services/classes/exerciseServices.php +++ b/Services/classes/exerciseServices.php @@ -64,20 +64,14 @@ class exerciseServices * Get IP client * * @access public + * @param Request $request * * @return IP Client */ - public function getIP() + public function getIP(Request $request) { - if (isset($_SERVER['HTTP_X_FORWARDED_FOR'])) { - $ip = $_SERVER['HTTP_X_FORWARDED_FOR']; - } elseif (isset($_SERVER['HTTP_CLIENT_IP'])) { - $ip = $_SERVER['HTTP_CLIENT_IP']; - } else { - $ip = $_SERVER['REMOTE_ADDR']; - } - return $ip; + return $request->getClientIp(); }
[ExoBundle] Refactoring for test insight
claroline_Distribution
train
00e1cf02ebf2fb05d6bef97b426ccd6032361674
diff --git a/tests/lib/test-utils.js b/tests/lib/test-utils.js index <HASH>..<HASH> 100644 --- a/tests/lib/test-utils.js +++ b/tests/lib/test-utils.js @@ -7,7 +7,6 @@ var SockJS = require('../../lib/entry') ; var MPrefix = '_sockjs_global'; -var localServerAddress = 'http://localhost:8081'; module.exports = { getSameOriginUrl: function () { @@ -15,14 +14,7 @@ module.exports = { return urlUtils.getOrigin(global.location.href); } // travis does not currently have IPv6 enabled for several envs - return localServerAddress; - } - -, updateTestServerAddress: function(server) { - var addr = server.address(); - localServerAddress = addr.family === 'IPv6' - ? 'http://[::1]:' + addr.port - : 'http://localhost:' + addr.port; + return 'http://localhost:8081'; } , getCrossOriginUrl: function () { diff --git a/tests/node.js b/tests/node.js index <HASH>..<HASH> 100644 --- a/tests/node.js +++ b/tests/node.js @@ -1,7 +1,6 @@ 'use strict'; var server = require('./support/sockjs_server'); -require('./lib/test-utils').updateTestServerAddress(server); require('./lib/main'); require('./lib/main-node');
Apparently this detection does not work on Travis

This reverts commit <I>b2a<I>dcb3baa<I>eccfe0f<I>e0d<I>b9ae0e5.
sockjs_sockjs-client
train
dec265c4810fbe0c6584638eb6ac6521ce7112ba
diff --git a/index/scorch/merge.go b/index/scorch/merge.go index <HASH>..<HASH> 100644 --- a/index/scorch/merge.go +++ b/index/scorch/merge.go @@ -180,7 +180,7 @@ func (s *Scorch) planMergeAtSnapshot(ourSnapshot *IndexSnapshot, s.markIneligibleForRemoval(filename) path := s.path + string(os.PathSeparator) + filename atomic.AddUint64(&s.stats.TotFileMergeZapBeg, 1) - newDocNums, err := zap.Merge(segmentsToMerge, docsToDrop, path, 1024) + newDocNums, err := zap.Merge(segmentsToMerge, docsToDrop, path, 1024, &s.stats) atomic.AddUint64(&s.stats.TotFileMergeZapEnd, 1) if err != nil { s.unmarkIneligibleForRemoval(filename) diff --git a/index/scorch/scorch.go b/index/scorch/scorch.go index <HASH>..<HASH> 100644 --- a/index/scorch/scorch.go +++ b/index/scorch/scorch.go @@ -430,6 +430,7 @@ func (s *Scorch) StatsMap() map[string]interface{} { m["num_items_persisted"] = m["TotPersistedItems"] m["num_bytes_used_disk"] = m["CurOnDiskBytes"] m["num_files_on_disk"] = m["CurOnDiskFiles"] + m["total_compaction_written_bytes"] = m["TotCompactionWrittenBytes"] return m } diff --git a/index/scorch/segment/zap/merge.go b/index/scorch/segment/zap/merge.go index <HASH>..<HASH> 100644 --- a/index/scorch/segment/zap/merge.go +++ b/index/scorch/segment/zap/merge.go @@ -31,12 +31,17 @@ import ( const docDropped = math.MaxUint64 // sentinel docNum to represent a deleted doc +// StatsReporter interface represents stats reporting methods. +type StatsReporter interface { + ReportBytesWritten(numBytesWritten uint64) +} + // Merge takes a slice of zap segments and bit masks describing which // documents may be dropped, and creates a new segment containing the // remaining data. This new segment is built at the specified path, // with the provided chunkFactor. func Merge(segments []*Segment, drops []*roaring.Bitmap, path string, - chunkFactor uint32) ([][]uint64, error) { + chunkFactor uint32, stats StatsReporter) ([][]uint64, error) { flag := os.O_RDWR | os.O_CREATE f, err := os.OpenFile(path, flag, 0600) @@ -92,6 +97,8 @@ func Merge(segments []*Segment, drops []*roaring.Bitmap, path string, return nil, err } + stats.ReportBytesWritten(uint64(cr.Count())) + return newDocNums, nil } diff --git a/index/scorch/stats.go b/index/scorch/stats.go index <HASH>..<HASH> 100644 --- a/index/scorch/stats.go +++ b/index/scorch/stats.go @@ -100,6 +100,8 @@ type Stats struct { TotMemMergeZapBeg uint64 TotMemMergeZapEnd uint64 TotMemMergeSegments uint64 + + TotCompactionWrittenBytes uint64 } // atomically populates the returned map @@ -122,3 +124,7 @@ func (s *Stats) ToMap() map[string]interface{} { func (s *Stats) MarshalJSON() ([]byte, error) { return json.Marshal(s.ToMap()) } + +func (s *Stats) ReportBytesWritten(numBytesWritten uint64) { + atomic.AddUint64(&s.TotCompactionWrittenBytes, numBytesWritten) +}
adding compaction_written_bytes/sec stats to scorch
blevesearch_bleve
train
9e515fd63f9b298288dc83f598ac904224321cc4
diff --git a/ui/IsolatedInlineNodeComponent.js b/ui/IsolatedInlineNodeComponent.js index <HASH>..<HASH> 100644 --- a/ui/IsolatedInlineNodeComponent.js +++ b/ui/IsolatedInlineNodeComponent.js @@ -78,16 +78,21 @@ export default class IsolatedInlineNodeComponent extends AbstractIsolatedNodeCom selectNode () { // console.log('IsolatedNodeComponent: selecting node.'); const editorSession = this.getEditorSession() - const surface = this.getParentSurface() const node = this.props.node - editorSession.setSelection({ + let selData = { type: 'property', path: node.start.path, startOffset: node.start.offset, - endOffset: node.end.offset, - containerPath: surface.getContainerPath(), - surfaceId: surface.id - }) + endOffset: node.end.offset + } + const surface = this.getParentSurface() + if (surface) { + Object.assign(selData, { + containerPath: surface.getContainerPath(), + surfaceId: surface.id + }) + } + editorSession.setSelection(selData) } _getContentClass () { diff --git a/ui/IsolatedNodeComponent.js b/ui/IsolatedNodeComponent.js index <HASH>..<HASH> 100644 --- a/ui/IsolatedNodeComponent.js +++ b/ui/IsolatedNodeComponent.js @@ -92,14 +92,19 @@ export default class IsolatedNodeComponent extends AbstractIsolatedNodeComponent selectNode () { // console.log('IsolatedNodeComponent: selecting node.'); const editorSession = this.getEditorSession() - const surface = this.getParentSurface() const nodeId = this.props.node.id - editorSession.setSelection({ + let selData = { type: 'node', - nodeId: nodeId, - containerPath: surface.getContainerPath(), - surfaceId: surface.id - }) + nodeId: nodeId + } + const surface = this.getParentSurface() + if (surface) { + Object.assign(selData, { + containerPath: surface.getContainerPath(), + surfaceId: surface.id + }) + } + editorSession.setSelection(selData) } // EXPERIMENTAL: trying to catch clicks not handled by the
Make IsolatedNodeComponent more robust against missing surface.
substance_substance
train
f11ffd923798ae2373ad51710570bfee1eaf2711
diff --git a/cmd/gazelle/update-repos.go b/cmd/gazelle/update-repos.go index <HASH>..<HASH> 100644 --- a/cmd/gazelle/update-repos.go +++ b/cmd/gazelle/update-repos.go @@ -165,7 +165,7 @@ func updateImportPaths(c *updateReposConfig, f *rule.File, kinds map[string]rule var wg sync.WaitGroup wg.Add(len(c.importPaths)) for i, imp := range c.importPaths { - go func(i int) { + go func(i int, imp string) { defer wg.Done() repo, err := repos.UpdateRepo(rc, imp) if err != nil { @@ -176,7 +176,7 @@ func updateImportPaths(c *updateReposConfig, f *rule.File, kinds map[string]rule repo.VCS = "" rule := repos.GenerateRule(repo) genRules[i] = rule - }(i) + }(i, imp) } wg.Wait() diff --git a/internal/language/go/package.go b/internal/language/go/package.go index <HASH>..<HASH> 100644 --- a/internal/language/go/package.go +++ b/internal/language/go/package.go @@ -132,7 +132,7 @@ func (pkg *goPackage) firstGoFile() string { } for _, sb := range goSrcs { if sb.strs != nil { - for s, _ := range sb.strs { + for s := range sb.strs { if strings.HasSuffix(s, ".go") { return s } @@ -152,10 +152,9 @@ func (pkg *goPackage) inferImportPath(c *config.Config) error { } gc := getGoConfig(c) if !gc.prefixSet { - return fmt.Errorf("%s: go prefix is not set, so importpath can't be determined for rules. Set a prefix with a '# gazelle:prefix' comment or with -go_prefix on the command line.", pkg.dir) + return fmt.Errorf("%s: go prefix is not set, so importpath can't be determined for rules. Set a prefix with a '# gazelle:prefix' comment or with -go_prefix on the command line", pkg.dir) } pkg.importPath = inferImportPath(gc, pkg.rel) - return nil if pkg.rel == gc.prefixRel { pkg.importPath = gc.prefix @@ -414,21 +413,21 @@ func (sb *platformStringsBuilder) build() rule.PlatformStrings { if ps.OS == nil { ps.OS = make(map[string][]string) } - for os, _ := range si.oss { + for os := range si.oss { ps.OS[os] = append(ps.OS[os], s) } case archSet: if ps.Arch == nil { ps.Arch = make(map[string][]string) } - for arch, _ := range si.archs { + for arch := range si.archs { ps.Arch[arch] = append(ps.Arch[arch], s) } case platformSet: if ps.Platform == nil { ps.Platform = make(map[rule.Platform][]string) } - for p, _ := range si.platforms { + for p := range si.platforms { ps.Platform[p] = append(ps.Platform[p], s) } } @@ -470,7 +469,7 @@ func (si *platformStringInfo) convertToPlatforms() { case osSet: si.set = platformSet si.platforms = make(map[rule.Platform]bool) - for os, _ := range si.oss { + for os := range si.oss { for _, arch := range rule.KnownOSArchs[os] { si.platforms[rule.Platform{OS: os, Arch: arch}] = true } @@ -479,7 +478,7 @@ func (si *platformStringInfo) convertToPlatforms() { case archSet: si.set = platformSet si.platforms = make(map[rule.Platform]bool) - for arch, _ := range si.archs { + for arch := range si.archs { for _, os := range rule.KnownArchOSs[arch] { si.platforms[rule.Platform{OS: os, Arch: arch}] = true } diff --git a/internal/language/go/testdata/bin_with_tests/bin_test.go b/internal/language/go/testdata/bin_with_tests/bin_test.go index <HASH>..<HASH> 100644 --- a/internal/language/go/testdata/bin_with_tests/bin_test.go +++ b/internal/language/go/testdata/bin_with_tests/bin_test.go @@ -21,6 +21,6 @@ import ( func TestCall(t *testing.T) { if got, want := call(), 42; got != want { - t.Errorf("call() = %d; want %d", got, want) + t.Errorf("call() = %s; want %d", got, want) } }
Fix some go lint errors (#<I>)
bazelbuild_bazel-gazelle
train
ee5752a3062b55246cf6603ff3bacd0501f27dd8
diff --git a/app/controllers/carnival/base_admin_controller.rb b/app/controllers/carnival/base_admin_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/carnival/base_admin_controller.rb +++ b/app/controllers/carnival/base_admin_controller.rb @@ -134,7 +134,7 @@ module Carnival end def instantiate_presenter - carnival_presenter_class.new controller: self + carnival_presenter_class.new controller: self, current_user: current_user end def carnival_presenter_class diff --git a/app/presenters/carnival/base_admin_presenter.rb b/app/presenters/carnival/base_admin_presenter.rb index <HASH>..<HASH> 100644 --- a/app/presenters/carnival/base_admin_presenter.rb +++ b/app/presenters/carnival/base_admin_presenter.rb @@ -6,6 +6,7 @@ module Carnival def initialize(params) @controller = params[:controller] + @current_user = params[:current_user] @special_scopes_to_exec = nil @klass_service = KlassService.new model_class @advanced_search_parser = Presenters::AdvancedSearchParser.new(@klass_service)
Make current_user available to the presenter
Vizir_carnival
train
02014cf0935e2e3c279721f083b726458ac49e4c
diff --git a/lib/matchers/IsRejected.js b/lib/matchers/IsRejected.js index <HASH>..<HASH> 100644 --- a/lib/matchers/IsRejected.js +++ b/lib/matchers/IsRejected.js @@ -36,13 +36,13 @@ function IsRejected(valueOrMatcher) { .append('was not rejected (') .appendValue(qPromise.inspect()) .append(')'); + deferred.resolve(); } else { description - .append('was rejected with ') - .appendValue(qPromise.inspect().reason); + .append('rejection value '); + deferred.resolve(valueMatcher.describeMismatch(qPromise.inspect().reason, description)); } - deferred.resolve(); }); return deferred.promise; } diff --git a/test/matchers/IsRejectedSpec.js b/test/matchers/IsRejectedSpec.js index <HASH>..<HASH> 100644 --- a/test/matchers/IsRejectedSpec.js +++ b/test/matchers/IsRejectedSpec.js @@ -143,7 +143,7 @@ describe('IsRejected', function () { var actual = q.reject('another reason'); sut.describeMismatch(actual, description).done(function () { - __.assertThat(description.get(), __.equalTo('was rejected with "another reason"')); + __.assertThat(description.get(), __.equalTo('rejection value was "another reason"')); done(); }); }); @@ -223,11 +223,11 @@ describe('IsRejected', function () { }); }); - it('should contain mismatched reason', function (done) { + it('should contain mismatched description', function (done) { var actual = q.reject('another reason'); sut.describeMismatch(actual, description).done(function () { - __.assertThat(description.get(), __.equalTo('was rejected with "another reason"')); + __.assertThat(description.get(), __.equalTo('rejection value was "another reason"')); done(); }); });
fix: `isRejectedWith` did not use its submatcher to describe mismatches
rluba_hamjest
train
2ed5fcadeffb544e60496a4c882e7da356daea9c
diff --git a/lib/gollum-lib/filter/code.rb b/lib/gollum-lib/filter/code.rb index <HASH>..<HASH> 100644 --- a/lib/gollum-lib/filter/code.rb +++ b/lib/gollum-lib/filter/code.rb @@ -46,8 +46,8 @@ class Gollum::Filter::Code < Gollum::Filter end if @markup.format == :asciidoc then - data.gsub!(/^(\[source,(.*)\]\n)?----\n(.*)\n----$/m) do - lang = $2 + data.gsub!(/^(\[source,([^\n]*)\]\n)?----\n(.+?)\n----$/m) do + lang = $2.empty? ? nil : $2 id = Digest::SHA1.hexdigest("#{lang}.#{$3}") cached = @markup.check_cache(:code, id) @map[id] = cached ?
Repair possible issue (matching superfluous whitespace) with asciidoc code blocks.
gollum_gollum-lib
train
12fe468b4e23878d670c78b35bad65521a3110b1
diff --git a/plugins/maven-dependency-resolver/src/main/java/org/robolectric/internal/dependency/MavenDependencyResolver.java b/plugins/maven-dependency-resolver/src/main/java/org/robolectric/internal/dependency/MavenDependencyResolver.java index <HASH>..<HASH> 100755 --- a/plugins/maven-dependency-resolver/src/main/java/org/robolectric/internal/dependency/MavenDependencyResolver.java +++ b/plugins/maven-dependency-resolver/src/main/java/org/robolectric/internal/dependency/MavenDependencyResolver.java @@ -146,7 +146,7 @@ public class MavenDependencyResolver implements DependencyResolver { if (nodeList.getLength() != 0) { Node node = nodeList.item(0); - return node.getTextContent(); + return node.getTextContent().trim(); } } catch (ParserConfigurationException | IOException | SAXException e) { Logger.error("Error reading settings.xml", e);
Trim the localRepository string. When the Maven settings.xml is read, remove leading and trailing whitespace. This avoids an error when fetching the additional jar.
robolectric_robolectric
train
0b3605477d8ba3db32acd783dfba5a19b1e9b63e
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -256,6 +256,23 @@ p filings.first } ``` +Options: + +```ruby +filings = ECFS::SolrScrapeQuery.new.tap do |q| + q.docket_number = '12-83' + + # a minimum date, inclusive. mm/dd/yyyy + q.received_min_date = '03/30/2012' + + # an after_scrape block + q.after_scrape = Proc.new do |filings| + p "Fetched asynchronyously: #{filings.length}" + end + # This is handy for large scrapes. +end.get +``` + ### Daily Releases This feature parses these types of pages: http://transition.fcc.gov/Daily_Releases/Daily_Business/2014/db0917/. diff --git a/lib/ecfs/solr_scrape_query.rb b/lib/ecfs/solr_scrape_query.rb index <HASH>..<HASH> 100644 --- a/lib/ecfs/solr_scrape_query.rb +++ b/lib/ecfs/solr_scrape_query.rb @@ -7,8 +7,9 @@ module ECFS class SolrScrapeQuery attr_accessor :docket_number attr_accessor :received_min_date + attr_accessor :after_scrape - def filings_from_docket_number(docket_number, start=0, received_min_date=nil) + def filings_from_docket_number(docket_number, start=0, received_min_date=nil, after_scrape=nil) url = "http://apps.fcc.gov/ecfs/solr/search?sort=dateRcpt&proceeding=#{docket_number}&dir=asc&start=#{start}" if received_min_date @@ -46,6 +47,10 @@ module ECFS } end + if after_scrape + after_scrape.call(filings) + end + return filings, total end @@ -72,7 +77,7 @@ module ECFS url = "http://apps.fcc.gov/ecfs/solr/search?sort=dateRcpt&proceeding=#{@docket_number}&dir=asc&start=0" filings = [] - first_page_of_filings, total = filings_from_docket_number(@docket_number, 0, @received_min_date) + first_page_of_filings, total = filings_from_docket_number(@docket_number, 0, @received_min_date, @after_scrape) pages = (total.to_f/20.0).ceil.to_i.times.map {|n| n*20} # divide, round up, then map *20 pages.shift @@ -80,7 +85,7 @@ module ECFS filings.concat first_page_of_filings pages.each do |page| - filings.concat filings_from_docket_number(@docket_number, page, @received_min_date)[0] + filings.concat filings_from_docket_number(@docket_number, page, @received_min_date, @after_scrape)[0] end filings.each do |filing| diff --git a/test/test_solr_scrape.rb b/test/test_solr_scrape.rb index <HASH>..<HASH> 100644 --- a/test/test_solr_scrape.rb +++ b/test/test_solr_scrape.rb @@ -33,5 +33,39 @@ class TestSolrScrape < MiniTest::Unit::TestCase assert filing_date > min_date end end + + class FakeArrayThing + def initialize + @filings = [] + end + + def concat(filings) + @filings.concat(filings) + end + + def filings + @filings + end + end + + def test_after_scrape + VCR.use_cassette('solr_cassette') do + + @fake_array_thing = FakeArrayThing.new + + filings = ECFS::SolrScrapeQuery.new.tap do |q| + q.docket_number = '12-83' + q.after_scrape = Proc.new do |filings| + @fake_array_thing.concat(filings) + end + end.get + + assert filings.first.is_a?(Hash) + assert filings.first.has_key?('docket_number') + assert filings.first.has_key?('citation') + + assert_equal filings.length, @fake_array_thing.filings.length + end + end end \ No newline at end of file
Added after_scrape option to the Solr scrape query
adelevie_ecfs
train
2b37fe602a62fd6570542d92ea5b7e91bbb14380
diff --git a/lib/async/index.js b/lib/async/index.js index <HASH>..<HASH> 100644 --- a/lib/async/index.js +++ b/lib/async/index.js @@ -68,6 +68,9 @@ function Notifier () { // Contains files successfully processed this.filesDone = []; + // Contains files unsuccessfully processed + this.filesNotDone = []; + // true if a timeout occurred, or set by abort this.errors = []; @@ -75,7 +78,7 @@ function Notifier () { this.padTimeout = TIMEOUT_PAD_FLOOR; // our reference to the listener - this.callback = NOOP; + this.callback = null; // our reference to the watcher (an interval id) // initial value undefined is important @@ -142,17 +145,21 @@ Notifier.prototype = { }); })) .then(function (files) { + var callback = self.callback; + try { files.forEach(function (file) { file && self._remove(file, true); }); - // if done, exit if (self._isDone()) { self._closeWatcher(); - setImmediate(function () { - self.callback(self.getError(), self.filesDone); - }); + if (self.callback) { + self.callback = null; + setImmediate(function () { + callback(self.getError(), self.filesDone); + }); + } } } catch (e) { console.error(e); @@ -210,20 +217,24 @@ Notifier.prototype = { .then(function () { return true; }) .catch(function () { return false; }) .then(function (fsExists) { + var callback = self.callback; + try { if (!fsExists) { self._setError(new Error( - "'"+outputFile+"' did not get a snapshot before timeout" + "'" + outputFile + "' did not get a snapshot before timeout" )); } self._remove(outputFile, fsExists); - // if we're done right now, finish if (self._isDone()) { self._closeWatcher(); - setImmediate(function () { - self.callback(self.getError(), self.filesDone); - }); + if (self.callback) { + self.callback = null; + setImmediate(function () { + callback(self.getError(), self.filesDone); + }); + } } } catch (e) { console.error(e); @@ -279,6 +290,8 @@ Notifier.prototype = { if (this._exists(outputFile)) { if (done) { this.filesDone.push(outputFile); + } else { + this.filesNotDone.push(outputFile); } clearTimeout(this.files[outputFile].timer); delete this.files[outputFile]; diff --git a/lib/html-snapshots.js b/lib/html-snapshots.js index <HASH>..<HASH> 100644 --- a/lib/html-snapshots.js +++ b/lib/html-snapshots.js @@ -204,7 +204,7 @@ module.exports = { console.error("User supplied listener exception", e); } if (err) { - err.notCompleted = Object.keys(notifier.files); + err.notCompleted = notifier.filesNotDone; err.completed = completed; reject(err); } else { diff --git a/test/mocha/async/test.js b/test/mocha/async/test.js index <HASH>..<HASH> 100644 --- a/test/mocha/async/test.js +++ b/test/mocha/async/test.js @@ -211,11 +211,16 @@ describe("async", function () { // take the worker queue out of the equation notifier.qEmpty(); - notifier.start(timeout / pollCount, function (err, filesDone){ - assert.equal(true, (Date.now() - start) > (timeout+asyncLocal.TIMEOUT_PAD_FLOOR)); - assert.notStrictEqual(typeof err, "undefined"); - assert.equal(0, filesDone.length); - done(); + notifier.start(timeout / pollCount, function (err, filesDone) { + var assertionError; + try { + assert.equal(true, (Date.now() - start) > (timeout+asyncLocal.TIMEOUT_PAD_FLOOR)); + assert.notStrictEqual(typeof err, "undefined"); + assert.equal(0, filesDone.length); + } catch (e) { + assertionError = e; + } + done(assertionError); }, mockInput); mkdirp.sync(dir); diff --git a/test/mocha/html-snapshots/process-limit.js b/test/mocha/html-snapshots/process-limit.js index <HASH>..<HASH> 100644 --- a/test/mocha/html-snapshots/process-limit.js +++ b/test/mocha/html-snapshots/process-limit.js @@ -28,7 +28,7 @@ function processLimitTests (options) { return function () { it("should limit as expected", function (done) { var processLimit = urls - 1; - var pollInterval = 500; + var pollInterval = 50; var phantomCount = 0; var timer; @@ -95,7 +95,7 @@ function processLimitTests (options) { it("should limit to just one process", function (done) { var processLimit = 1; - var pollInterval = 500; + var pollInterval = 50; var phantomCount = 0; var timer;
add notCompleted property, track notCompleted, ensure callback once
localnerve_html-snapshots
train
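The "ensure callback once" part of the html-snapshots fix uses a common claim-then-invoke idiom: copy the shared callback reference, null it out, and only then fire it, so that two completion paths racing on the event loop cannot both call it. A minimal sketch of the idiom (written in Python for consistency with the other examples here; not the library's JavaScript):

```python
# Claim-then-invoke sketch: mirrors the JS fix, where self.callback is
# nulled before scheduling the invocation, giving at-most-once delivery.
class Notifier:
    def __init__(self, callback):
        self.callback = callback

    def _finish(self, error, files_done):
        callback = self.callback
        if callback is None:
            return               # another completion path already claimed it
        self.callback = None     # claim before invoking
        callback(error, files_done)

n = Notifier(lambda err, files: print("done:", err, files))
n._finish(None, ["a.html"])  # fires once
n._finish(None, ["a.html"])  # silently ignored
```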
79a40cd93beab16295ad35770c40f58013b9ed12
diff --git a/lib/util/connect/connector.js b/lib/util/connect/connector.js index <HASH>..<HASH> 100644 --- a/lib/util/connect/connector.js +++ b/lib/util/connect/connector.js @@ -87,6 +87,10 @@ Connection.prototype.connect = function() { }.bind(this)); }.bind(this)); + this.script.addEventListener('error', function(ev) { + deferred.reject('Error from SCRIPT tag to ' + this.script.src); + }.bind(this)); + exports.document.head.appendChild(this.script); return deferred.promise;
refactor-<I>: Track websocket errors better

Now we report <I> errors (server not started with --websocket) to the web page properly.
joewalker_gcli
train
6c5475f64fdfda903ceb51aa027d40263b92f43d
diff --git a/packages/ember-glimmer/lib/environment.js b/packages/ember-glimmer/lib/environment.js index <HASH>..<HASH> 100644 --- a/packages/ember-glimmer/lib/environment.js +++ b/packages/ember-glimmer/lib/environment.js @@ -1,5 +1,6 @@ import { Environment as GlimmerEnvironment } from 'glimmer-runtime'; import Dict from 'ember-metal/empty_object'; +import { assert } from 'ember-metal/debug'; import { CurlyComponentSyntax, CurlyComponentDefinition } from './components/curly-component'; import { DynamicComponentSyntax } from './components/dynamic-component'; import { OutletSyntax } from './components/outlet'; @@ -72,7 +73,9 @@ export default class Environment extends GlimmerEnvironment { } } - return super.refineStatement(statement); + let nativeSyntax = super.refineStatement(statement); + assert(`Helpers may not be used in the block form, for example {{#${key}}}{{/${key}}}. Please use a component, or alternatively use the helper in combination with a built-in Ember helper, for example {{#if (${key})}}{{/if}}.`, !nativeSyntax && key && this.hasHelper(key) ? !isBlock : true); + return nativeSyntax; } hasComponentDefinition() { diff --git a/packages/ember-glimmer/tests/integration/helpers/custom-helper-test.js b/packages/ember-glimmer/tests/integration/helpers/custom-helper-test.js index <HASH>..<HASH> 100644 --- a/packages/ember-glimmer/tests/integration/helpers/custom-helper-test.js +++ b/packages/ember-glimmer/tests/integration/helpers/custom-helper-test.js @@ -282,7 +282,7 @@ moduleFor('Helpers test: custom helpers', class extends RenderingTest { this.assertText('Who overcomes by force hath overcome but half his foe'); } - ['@htmlbars simple helper not usable with a block']() { + ['@test simple helper not usable with a block']() { this.registerHelper('some-helper', () => {}); expectAssertion(() => {
[Glimmer2] Add compile-time assert for block usage of custom helper (#<I>)

* Add compile-time assert for block usage of custom helper
* Update the logic for the assert so that the whole statement can be removed during prod builds
emberjs_ember.js
train
ee8aea69bd92cb28103f7f839cc880ec9e0dcced
diff --git a/monero/backends/jsonrpc.py b/monero/backends/jsonrpc.py index <HASH>..<HASH> 100644 --- a/monero/backends/jsonrpc.py +++ b/monero/backends/jsonrpc.py @@ -237,7 +237,7 @@ class JSONRPCWallet(object): laddr = data.get('address', None) if laddr: laddr = address(laddr) - return { + result = { 'payment_id': None if pid is None else PaymentID(pid), 'amount': from_atomic(data['amount']), 'timestamp': datetime.fromtimestamp(data['timestamp']) if 'timestamp' in data else None, @@ -245,6 +245,13 @@ class JSONRPCWallet(object): 'transaction': self._tx(data), 'local_address': laddr, } + if 'destinations' in data: + result['destinations'] = [ + {'address': address(x['address']), 'amount': from_atomic(data['amount'])} + for x in data.get('destinations') + ] + return result + def _inpayment(self, data): return IncomingPayment(**self._paymentdict(data)) diff --git a/monero/transaction.py b/monero/transaction.py index <HASH>..<HASH> 100644 --- a/monero/transaction.py +++ b/monero/transaction.py @@ -47,6 +47,12 @@ class OutgoingPayment(Payment): An outgoing payment (one that decreases the balance of an :class:`Account <monero.account.Account>`) """ + destinations = None + + def __init__(self, **kwargs): + self.destinations = kwargs.pop('destinations', self.destinations) + super().__init__(**kwargs) + _reprstr = "out: {} @ {} {:.12f} id={}"
Support destination addresses in outgoing payment transactions.
monero-ecosystem_monero-python
train
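The OutgoingPayment change above follows the standard pattern for adding a subclass-only keyword argument: pop it from kwargs before delegating, so the parent initializer never sees an unexpected key. A generic sketch of that pattern (simplified, hypothetical classes, not monero-python's):

```python
# Generic kwargs-pop pattern; Payment/OutgoingPayment here are simplified
# stand-ins, not the monero-python classes.
class Payment:
    def __init__(self, **kwargs):
        self.amount = kwargs.pop('amount', None)

class OutgoingPayment(Payment):
    destinations = None

    def __init__(self, **kwargs):
        # Remove the subclass-only key first; the class attribute acts
        # as the default when the key is absent.
        self.destinations = kwargs.pop('destinations', self.destinations)
        super().__init__(**kwargs)

p = OutgoingPayment(amount=1, destinations=[('addr', 1)])
print(p.amount, p.destinations)  # 1 [('addr', 1)]
```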
e796791a366f9912daec2115233f0139e507789e
diff --git a/jujupy.py b/jujupy.py index <HASH>..<HASH> 100644 --- a/jujupy.py +++ b/jujupy.py @@ -293,6 +293,20 @@ class Juju2Backend: (time.time() - start_time)) return rval + def expect(self, command, args, used_feature_flags, juju_home, model=None, + timeout=None, extra_env=None): + args = self.full_args(command, args, model, timeout) + log.info(' '.join(args)) + env = self.shell_environ(used_feature_flags, juju_home) + if extra_env is not None: + env.update(extra_env) + # pexpect.spawn expects a string. This is better than trying to extract + # command + args from the returned tuple (as there could be an intial + # timing command tacked on). + command_string = ' '.join(quote(a) for a in args) + with scoped_environ(env): + return pexpect.spawn(command_string) + @contextmanager def juju_async(self, command, args, used_feature_flags, juju_home, model=None, timeout=None): @@ -842,17 +856,10 @@ class EnvJujuClient: `args`. """ - args = self._full_args(command, sudo, args, include_e=include_e, - timeout=timeout) - log.info(' '.join(args)) - env = self._shell_environ() - if extra_env is not None: - env.update(extra_env) - # pexpect.spawn expects a string. This is better than trying to extract - # command + args from the returned tuple (as there could be an intial - # timing command tacked on). - command_string = ' '.join(quote(a) for a in args) - return pexpect.spawn(command_string, env=env) + model = self._cmd_model(include_e, admin=False) + return self._backend.expect( + command, args, self.used_feature_flags, self.env.juju_home, + model, timeout, extra_env) def controller_juju(self, command, args): args = ('-c', self.env.controller.name) + args diff --git a/tests/test_jujupy.py b/tests/test_jujupy.py index <HASH>..<HASH> 100644 --- a/tests/test_jujupy.py +++ b/tests/test_jujupy.py @@ -2444,10 +2444,20 @@ class TestEnvJujuClient(ClientTest): process = client.expect('foo', ('bar', 'baz')) self.assertIs(process, mock.return_value) - mock.assert_called_once_with( - 'juju --show-log foo -m qux bar baz', - env=client._shell_environ() - ) + mock.assert_called_once_with('juju --show-log foo -m qux bar baz') + + def test_expect_uses_provided_envvar_path(self): + from pexpect import ExceptionPexpect + env = JujuData('qux') + client = EnvJujuClient(env, None, None) + + with temp_dir() as empty_path: + broken_envvars = dict(PATH=empty_path) + self.assertRaises( + ExceptionPexpect, + client.expect, + 'ls', (), extra_env=broken_envvars, + ) def test_juju_env(self): env = JujuData('qux')
Fix EnvJujuClient.expect to ensure it uses the PATH provided via environment variables.
juju_juju
train
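The relocated expect above combines two things: quoting the argument list into the single command string pexpect wants, and spawning under a temporarily swapped environment. A hedged sketch of that combination (scoped_environ is reimplemented here purely for illustration; it is not pexpect API):

```python
# Sketch of spawning pexpect under a temporary environment, as the diff does.
# scoped_environ is a hypothetical reimplementation for illustration.
import os
from contextlib import contextmanager
from shlex import quote

import pexpect

@contextmanager
def scoped_environ(new_env):
    saved = dict(os.environ)
    os.environ.clear()
    os.environ.update(new_env)
    try:
        yield
    finally:
        os.environ.clear()
        os.environ.update(saved)

def spawn_with_env(args, env):
    # pexpect.spawn expects a single string, hence the explicit quoting.
    command_string = ' '.join(quote(a) for a in args)
    with scoped_environ(env):
        return pexpect.spawn(command_string)
```

With an empty PATH, as in the added test, the spawn fails because the executable cannot be found, which is exactly what the test asserts.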
9943c3dc3c21a08ffd847c9e488dcc740d550846
diff --git a/externs/browser/html5.js b/externs/browser/html5.js index <HASH>..<HASH> 100644 --- a/externs/browser/html5.js +++ b/externs/browser/html5.js @@ -3489,4 +3489,3 @@ HTMLFieldSetElement.prototype.validity; * @see https://www.w3.org/TR/html5/forms.html#the-fieldset-element */ HTMLFieldSetElement.prototype.willValidate; -
Remove a trailing newline

-------------
Created by MOE: <URL>
google_closure-compiler
train
0d93054dfb549851f47a1e509ca17b1ed6e30927
diff --git a/pywws/WeatherStation.py b/pywws/WeatherStation.py index <HASH>..<HASH> 100755 --- a/pywws/WeatherStation.py +++ b/pywws/WeatherStation.py @@ -176,7 +176,7 @@ def findDevice(idVendor, idProduct): if device.idVendor == idVendor and device.idProduct == idProduct: return device return None -class weather_station: +class weather_station(object): """Class that represents the weather station to user program.""" def __init__(self): """Connect to weather station and prepare to read data.""" @@ -255,7 +255,8 @@ class weather_station: old_data['delay'] = new_data['delay'] yielded = False data_changed = new_data != old_data - if ptr_changed and (new_data['delay'] == None or new_data['delay'] > 4): + if ptr_changed and (new_data['delay'] == None or + new_data['delay'] >= read_period): # picked up old data from new pointer, ignore it self.logger.info('live_data old data') pass
Fixed a serious bug in the 'live_log' routine when the weather station is set to a logging interval of more than five minutes.
jim-easterbrook_pywws
train
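The pywws change replaces a guard hard-coded for five-minute logging (delay > 4) with one tied to the station's actual interval (delay >= read_period). A tiny worked check of the difference, assuming a 10-minute interval (illustration only):

```python
# Worked check of the guard change, assuming read_period = 10 minutes.
read_period = 10

def stale_old(delay):  # pre-fix: hard-coded for 5-minute logging
    return delay is None or delay > 4

def stale_new(delay):  # post-fix: respects the configured interval
    return delay is None or delay >= read_period

# Delays where the two guards disagree: the old one flags live data as
# "old data from new pointer" well before a 10-minute station has logged.
print([d for d in range(read_period) if stale_old(d) != stale_new(d)])  # [5, 6, 7, 8, 9]
```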
5247cdf6a3ed29ffaade80db718dd9c873e339b3
diff --git a/src/CreateCommand.php b/src/CreateCommand.php index <HASH>..<HASH> 100644 --- a/src/CreateCommand.php +++ b/src/CreateCommand.php @@ -63,9 +63,10 @@ class CreateCommand extends Command throw new NoticeException('Command "'.$this->key.'": Controller definition is missing.'); $controller = explode('@', $object[1]); $this->createController($controller[0], $args); - // Create method + // Create methods if (count($controller) > 1) - $this->createControllerMethod($controller[0], $controller[1]); + foreach (array_slice($controller, 1) as $controller_method) + $this->createControllerMethod($controller[0], $controller_method); break; case 'model': case 'postmodel': @@ -103,4 +104,4 @@ class CreateCommand extends Command break; } } -} \ No newline at end of file +}
Expand Create Controller command to support multiple methods

This little patch takes advantage of the existing '@' delimiter to allow multiple methods to be created through the create controller command.

Example usage:

`php ayuco create controller:ControllerName@FirstMethod@SecondMethod@etcMethod`

This call would create the ControllerName.php file and inject three method calls, one for each item added after the '@' delimiter.
10quality_wpmvc-commands
train
10b0cdef169966829ad44747dc0c2fa6a598d293
diff --git a/datadog_checks_base/datadog_checks/base/utils/db/utils.py b/datadog_checks_base/datadog_checks/base/utils/db/utils.py index <HASH>..<HASH> 100644 --- a/datadog_checks_base/datadog_checks/base/utils/db/utils.py +++ b/datadog_checks_base/datadog_checks/base/utils/db/utils.py @@ -182,6 +182,9 @@ def default_json_event_encoding(o): def obfuscate_sql_with_metadata(query, options=None): + if not query: + return {'query': None, 'metadata': {}} + def _load_metadata(statement): try: statement_with_metadata = json.loads(statement) diff --git a/datadog_checks_base/tests/base/utils/db/test_util.py b/datadog_checks_base/tests/base/utils/db/test_util.py index <HASH>..<HASH> 100644 --- a/datadog_checks_base/tests/base/utils/db/test_util.py +++ b/datadog_checks_base/tests/base/utils/db/test_util.py @@ -159,6 +159,11 @@ def test_obfuscate_sql_with_metadata(obfuscator_return_value, expected_value, re ) assert statement == expected_value + # Check that it can handle null values + statement = obfuscate_sql_with_metadata(None) + assert statement['query'] is None + assert statement['metadata'] == {} + class TestJob(DBMAsyncJob): def __init__(self, check, run_sync=False, enabled=True, rate_limit=10, min_collection_interval=15):
datadog_checks_base: Fix obfuscate_sql_with_metadata query being None (#<I>)

* Check if query is None
* Add test
DataDog_integrations-core
train
f79665c8bb0ff9f001b21bf33e2393bef71f924a
diff --git a/packages/mdx/mdx-hast-to-jsx.js b/packages/mdx/mdx-hast-to-jsx.js index <HASH>..<HASH> 100644 --- a/packages/mdx/mdx-hast-to-jsx.js +++ b/packages/mdx/mdx-hast-to-jsx.js @@ -50,7 +50,8 @@ function toJSX(node, parentNode = {}, options = {}) { if ( /\bdefault\b/.test(childNode.value) && - !/default\s+as/.test(childNode.value) + !/default\s+as/.test(childNode.value) && + !/^export (const|let|var|function)/.test(childNode.value) ) { let example diff --git a/packages/mdx/test/index.test.js b/packages/mdx/test/index.test.js index <HASH>..<HASH> 100644 --- a/packages/mdx/test/index.test.js +++ b/packages/mdx/test/index.test.js @@ -250,6 +250,12 @@ it('Should support semicolons in default export statement', async () => { it('Should throw when exporting default via named export', async () => { await expect(mdx(`export { default } from './Layout'`)).rejects.toThrow() await expect(mdx(`export { Layout as default }`)).rejects.toThrow() + + // Edge case where user has the text "default" as part of the export node + await mdx(`export const meta = { + description: 'better default behavior.' + }`) + await expect( mdx(`export { default as MyComp } from './MyComp'`) ).resolves.toContain(`export { default as MyComp } from './MyComp'`)
Exclude export const/let/var/function from default check (#<I>)
mdx-js_mdx
train
7fa69ee73064e07ba98c4cf9b48e5d6468aefb27
diff --git a/scapy/layers/inet.py b/scapy/layers/inet.py index <HASH>..<HASH> 100644 --- a/scapy/layers/inet.py +++ b/scapy/layers/inet.py @@ -1617,7 +1617,7 @@ class MTR: # Graph the Multi-Traceroute... def graph(self, ASres = None, padding = 0, **kargs): """x.graph(ASres=conf.AS_resolver, other args): - ASres=None : no AS resolver => no clustering + ASres=None : Use AS default resolver => 'conf.AS_resolver' ASres=AS_resolver() : default whois AS resolver (riswhois.ripe.net) ASres=AS_resolver_cymru(): use whois.cymru.com whois database ASres=AS_resolver(server="whois.ra.net") @@ -1733,14 +1733,16 @@ class MTracerouteResult(SndRcvList): ## Multi-Traceroute ## ###################### @conf.commands.register -def mtr(target, dport=80, minttl=1, maxttl=30, sport=RandShort(), l4=None, filter=None, timeout=2, verbose=None, nquery=1, privaddr=0, **kargs): +def mtr(target, dport=80, minttl=1, maxttl=30, sport=RandShort(), l4=None, filter=None, timeout=2, verbose=None, nquery=1, privaddr=0, rasn=1, **kargs): """A Multi-Traceroute (mtr) command: mtr(target, [maxttl=30,] [dport=80,] [sport=80,] [minttl=1,] [maxttl=1,] - [l4=None,] [filter=None,] [nquery=1,] [privaddr=0,] [verbose=conf.verb]) + [l4=None,] [filter=None,] [nquery=1,] [privaddr=0,] [rasn=1,] [verbose=conf.verb]) - nquery: Number of Traceroute queries to perform. - privaddr: 0 - Normal display of AS numbers, - 1 - Do not show an associated AS Number bound box (cluster) on graph for a private IPv4 Address.""" + nquery: Number of Traceroute queries to perform. + privaddr: 0 - Default: Normal display of all resolved AS numbers, + 1 - Do not show an associated AS Number bound box (cluster) on graph for a private IPv4 Address. + rasn: 0 - Do not resolve AS Numbers - No graph clustering. + 1 - Default: Resolve all AS numbers.""" trace = [] # Initialize vars if (nquery < 1): # Range check number of query traces nquery = 1 @@ -1781,7 +1783,8 @@ def mtr(target, dport=80, minttl=1, maxttl=30, sport=RandShort(), l4=None, filte mtrc.get_black_holes() # # Resolve AS Numbers... - mtrc.get_asns(privaddr) + if rasn: + mtrc.get_asns(privaddr) # # Debug: Print at verbose level 8... if (verbose == 8):
Added an option for disabling AS Number resolving.
phaethon_kamene
train
eb5871fdfb47a13ced0db450a016c3898d01e0d3
diff --git a/router/router_test.go b/router/router_test.go index <HASH>..<HASH> 100644 --- a/router/router_test.go +++ b/router/router_test.go @@ -819,6 +819,42 @@ var _ = Describe("Router", func() { }) }) + Context("multiple open connections", func() { + It("does not hang any connections", func() { + app := testcommon.NewTestApp([]route.Uri{"app.vcap.me"}, config.Port, mbusClient, nil, "") + + rCh := make(chan string) + app.AddHandler("/", func(w http.ResponseWriter, r *http.Request) { + rCh <- r.Header.Get("X-Forwarded-For") + }) + app.Listen() + Eventually(func() bool { + return appRegistered(registry, app) + }).Should(BeTrue()) + + host := fmt.Sprintf("app.vcap.me:%d", config.Port) + existingConn, err := net.DialTimeout("tcp", host, 10*time.Second) + Expect(err).ToNot(HaveOccurred()) + defer existingConn.Close() + + fmt.Fprintf(existingConn, "GET / HTTP/1.1\r\n"+ + "Host: %s\r\n"+ + "\r\n", host) + + newConn, err := net.DialTimeout("tcp", host, 10*time.Second) + Expect(err).ToNot(HaveOccurred()) + defer newConn.Close() + + fmt.Fprintf(newConn, "GET / HTTP/1.1\r\n"+ + "Host: %s\r\n"+ + "\r\n", host) + + var rr string + Eventually(rCh).Should(Receive(&rr)) + Expect(rr).ToNot(BeNil()) + }) + }) + Context("serving https", func() { It("serves ssl traffic", func() { app := test.NewGreetApp([]route.Uri{"test.vcap.me"}, config.Port, mbusClient, nil)
Add test coverage for handling multiple connections [#<I>]
cloudfoundry_gorouter
train
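The scenario under test (a second request must not hang while an earlier connection stays open) can be reproduced with raw sockets; a minimal Python sketch, assuming some HTTP server listens on the placeholder host and port.

import socket

def raw_get(host, port):
    # open a TCP connection and send a bare HTTP/1.1 GET, mirroring the Go test's Fprintf
    conn = socket.create_connection((host, port), timeout=10)
    conn.sendall("GET / HTTP/1.1\r\nHost: {}:{}\r\n\r\n".format(host, port).encode("ascii"))
    return conn

existing = raw_get("app.vcap.me", 8080)  # first connection stays open and unread
new = raw_get("app.vcap.me", 8080)       # a well-behaved router still serves this one
print(new.recv(4096))
existing.close()
new.close()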
e36fe9a87251279671ed187fee23abc519123f7b
diff --git a/tarbell/cli.py b/tarbell/cli.py index <HASH>..<HASH> 100644 --- a/tarbell/cli.py +++ b/tarbell/cli.py @@ -310,7 +310,7 @@ def tarbell_publish(command, args): kwargs['excludes'] = site.project.EXCLUDES s3 = S3Sync(tempdir, bucket_url, **kwargs) s3.deploy_to_s3() - site.call_hook("publish", s3) + site.call_hook("publish", site, s3) puts("\nIf you have website hosting enabled, you can see your project at:") puts(colored.green("http://{0}\n".format(bucket_url)))
call publish hook with site and s3 connection
tarbell-project_tarbell
train
5a31b29c5a21994a02d003ee16b3b056ee9c7b09
diff --git a/lib/oxcelix/workbook.rb b/lib/oxcelix/workbook.rb index <HASH>..<HASH> 100644 --- a/lib/oxcelix/workbook.rb +++ b/lib/oxcelix/workbook.rb @@ -31,11 +31,11 @@ module Oxcelix # options is a collection of options that can be passed to Workbook. # Options may include: # * :copymerge (=> true/false) - Copy and repeat the content of the merged cells into the whole group, e.g. - # the group of three merged cells <tt>| a |</tt> will become <tt>|a|a|a|</tt> + # the group of three merged cells <tt>| a |</tt> will become <tt>|a|a|a|</tt> # * :include (Ary) - an array of sheet names to be included # * :exclude (Ary) - an array of sheet names not to be processed - # * :values (Symbol) - cell values. This can be: :false, if the whole cell is needed, :excel, if the raw excel - # values need to be inserted and :ruby if ruby objects are preferred. + # * :paginate (Ary) - an array that defines the number of lines to be included in the pagination and the page to be parsed + # * :cellrange (Range) - the range of cells to be included in parsing # # If a filename gets passed, the excel file is first getting unzipped, then # the workbook.xml file gets processed.
Fixed doc: :values parameter was never used - removed
gbiczo_oxcelix
train
b9b89a3e244aa3d38297feb2ac85e6e1930434c9
diff --git a/fireplace/game.py b/fireplace/game.py index <HASH>..<HASH> 100644 --- a/fireplace/game.py +++ b/fireplace/game.py @@ -10,8 +10,6 @@ from .utils import CardList class Game(Entity): MAX_MINIONS_ON_FIELD = 8 - # Game draws after 50 full turns (100 game turns) - MAX_TURNS = 100 Manager = GameManager def __init__(self, players): @@ -199,8 +197,6 @@ class Game(Entity): self.step, self.nextStep = self.nextStep, Step.MAIN_START self.turn += 1 logging.info("%s begins turn %i" % (player, self.turn)) - if self.turn == self.MAX_TURNS: - raise GameOver("It's a draw!") if self.currentPlayer: self.currentPlayer.currentPlayer = False self.step, self.nextStep = self.nextStep, Step.MAIN_ACTION
Drop support for game draws. This should be implemented in Game subclasses
jleclanche_fireplace
train
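Per the commit message, a draw rule should now live in a Game subclass; a minimal sketch of one, where the begin_turn hook name is hypothetical because the diff does not show the enclosing method's name.

# hypothetical subclass restoring the removed 50-full-turn draw rule;
# Game and GameOver are assumed importable from the patched module
class GameWithDraws(Game):
    MAX_TURNS = 100  # game draws after 50 full turns (100 game turns)

    def begin_turn(self, player):  # hook name is an assumption
        if self.turn + 1 == self.MAX_TURNS:
            raise GameOver("It's a draw!")
        super().begin_turn(player)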
5fe4376c998ff0214d0af05c5c3526ebb6f3f662
diff --git a/app/controllers/rocket_job_mission_control/active_processes_controller.rb b/app/controllers/rocket_job_mission_control/active_processes_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/rocket_job_mission_control/active_processes_controller.rb +++ b/app/controllers/rocket_job_mission_control/active_processes_controller.rb @@ -4,19 +4,20 @@ module RocketJobMissionControl # The list of workers actively processing jobs # [Array[Array<worker_name [String], job [RocketJob::Job], slice_id [String]]] sorted = true - t = Time.new busy = [] - RocketJob::Job.running.sort(:worker_name).collect do |job| - if job.respond_to?(:input) + # Need paused, failed or aborted since workers may still be working on active slices + RocketJob::Job.where(state: [:running, :paused, :failed, :aborted]).sort(:worker_name).collect do |job| + if job.respond_to?(:input) && job.sub_state == :processing sorted = false job.input.each('state' => 'running') do |slice| - busy << [slice.worker_name, job, slice.started_at] + busy << {worker_name: slice.worker_name, klass: job.class.name, description: job.description, started_at: slice.started_at, id: job.id} end - else - busy << [job.worker_name, job, job.started_at] + elsif job.running? + busy << {worker_name: job.worker_name, klass: job.class.name, description: job.description, started_at: job.started_at, id: job.id} end end - @busy = sorted ? busy : busy.sort_by { |result| result.first } + @busy = sorted ? busy : busy.sort_by { |h| h[:worker_name] } + respond_to do |format| format.html format.json { render(json: ActiveProcessesDatatable.new(view_context, @busy)) } diff --git a/app/datatables/rocket_job_mission_control/active_processes_datatable.rb b/app/datatables/rocket_job_mission_control/active_processes_datatable.rb index <HASH>..<HASH> 100644 --- a/app/datatables/rocket_job_mission_control/active_processes_datatable.rb +++ b/app/datatables/rocket_job_mission_control/active_processes_datatable.rb @@ -1,6 +1,6 @@ module RocketJobMissionControl class ActiveProcessesDatatable - delegate :params, :link_to, :job_path, :job_icon, to: :@view + delegate :params, :link_to, :job_path, :state_icon, to: :@view delegate :h, to: 'ERB::Util' def initialize(view, processes) @@ -20,12 +20,12 @@ module RocketJobMissionControl private def data - processes.map do |worker_name, job, started_at| + processes.map do |h| { - '0' => worker_name_with_icon(worker_name, job), - '1' => job_name_with_link(job), - '2' => h(job.description.try(:truncate, 50)), - '3' => h(duration(started_at)), + '0' => worker_name_with_icon(h[:worker_name]), + '1' => job_name_with_link(h[:klass], h[:id]), + '2' => h(h[:description].try!(:truncate, 50)), + '3' => h(duration(h[:started_at])), 'DT_RowClass' => "card callout callout-running" } end @@ -57,17 +57,17 @@ module RocketJobMissionControl Kaminari.paginate_array(records).page(page).per(per_page) end - def worker_name_with_icon(worker_name, job) + def worker_name_with_icon(worker_name) <<-EOS - <i class="fa #{job_icon(job)}" style="font-size: 75%" title="#{job.state}"></i> + <i class="fa #{state_icon(:running)}" style="font-size: 75%" title="running"></i> #{worker_name} EOS end - def job_name_with_link(job) + def job_name_with_link(job_class_name, job_id) <<-EOS - <a href="#{job_path(job.id)}"> - #{job.class.name} + <a href="#{job_path(job_id)}"> + #{job_class_name} </a> EOS end diff --git a/app/datatables/rocket_job_mission_control/jobs_datatable.rb b/app/datatables/rocket_job_mission_control/jobs_datatable.rb index <HASH>..<HASH> 100644 
--- a/app/datatables/rocket_job_mission_control/jobs_datatable.rb +++ b/app/datatables/rocket_job_mission_control/jobs_datatable.rb @@ -24,7 +24,7 @@ module RocketJobMissionControl { '0' => class_with_link(job), '1' => h(job.description.try(:truncate, 50)), - '2' => h(job.completed_at), + '2' => h(job.created_at), '3' => h(job.duration), 'DT_RowClass' => "card callout callout-#{job.state}" } diff --git a/app/views/rocket_job_mission_control/jobs/index.html.haml b/app/views/rocket_job_mission_control/jobs/index.html.haml index <HASH>..<HASH> 100644 --- a/app/views/rocket_job_mission_control/jobs/index.html.haml +++ b/app/views/rocket_job_mission_control/jobs/index.html.haml @@ -11,7 +11,7 @@ %tr %th Class %th Description - %th Completion + %th Created %th Duration %tbody
Fix active jobs view to include all active jobs. Show created column instead of completed when viewing all jobs.
rocketjob_rocketjob_mission_control
train
93963e4ff43edf42573d5415490a537c98464567
diff --git a/sonar-server/src/main/webapp/javascripts/build.js b/sonar-server/src/main/webapp/javascripts/build.js index <HASH>..<HASH> 100644 --- a/sonar-server/src/main/webapp/javascripts/build.js +++ b/sonar-server/src/main/webapp/javascripts/build.js @@ -2,6 +2,7 @@ appDir: '.', baseUrl: '.', dir: 'DEFINED IN POM.XML', + preserveLicenseComments: false, modules: [ { name: 'quality-gate/app' },
SONAR-<I> remove comments in minimized JS
SonarSource_sonarqube
train
281d115108b00844d8c4d595da2e5fc4765af214
diff --git a/Plugin/TemplatePlugin.php b/Plugin/TemplatePlugin.php index <HASH>..<HASH> 100644 --- a/Plugin/TemplatePlugin.php +++ b/Plugin/TemplatePlugin.php @@ -50,7 +50,7 @@ class TemplatePlugin if (empty($args)) { $templateText = $result['template_text']; $result['template_subject'] = utf8_decode($result['template_subject']); - if ( $this->isStringCompressed($templateText)) { + if ($this->isStringCompressed($templateText)) { $result['template_text'] = $this->decompresString($templateText); } } else { @@ -89,8 +89,10 @@ class TemplatePlugin private function isStringCompressed($string) { //check if the data is compressed - if (substr_count($string, '%9') > 20) + if (substr($string, 0, 1) == 'e') { return true; + } + return false; } @@ -100,7 +102,7 @@ class TemplatePlugin */ private function compresString($templateText): string { - return urlencode(gzcompress($templateText)); + return base64_encode(gzcompress($templateText, 9)); } /** @@ -109,7 +111,7 @@ class TemplatePlugin */ private function decompresString($templateText): string { - return gzuncompress(urldecode($templateText)); + return gzuncompress(base64_decode($templateText)); } }
change the encoding for text body
dotmailer_dotmailer-magento2-extension
train
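The PHP change stores gzcompress output as base64 and detects compressed payloads by a leading 'e'; the same scheme in Python shows why that check works: a zlib stream starts with the RFC 1950 header byte 0x78, and base64 of a 0x78 lead byte always begins with 'e'.

import base64
import zlib

def compress_string(text):
    # compression level 9, matching gzcompress($templateText, 9)
    return base64.b64encode(zlib.compress(text.encode("utf-8"), 9)).decode("ascii")

def decompress_string(blob):
    return zlib.decompress(base64.b64decode(blob)).decode("utf-8")

def is_string_compressed(blob):
    # zlib's header byte 0x78 base64-encodes to an initial 'e'
    return blob.startswith("e")

body = compress_string("Dear customer, ...")
assert is_string_compressed(body)
assert decompress_string(body) == "Dear customer, ..."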
b30f81d9308f2c30f78fff69bd7ab08684a4f599
diff --git a/quart/wrappers/request.py b/quart/wrappers/request.py index <HASH>..<HASH> 100644 --- a/quart/wrappers/request.py +++ b/quart/wrappers/request.py @@ -156,20 +156,26 @@ class Request(BaseRequestWebsocket, JSONMixin): if content_type == 'application/x-www-form-urlencoded': for key, values in parse_qs(data.decode()).items(): for value in values: - self._form[key] = value + self._form.add(key, value) elif content_type == 'multipart/form-data': field_storage = FieldStorage( io.BytesIO(data), headers=self.headers, environ={'REQUEST_METHOD': 'POST'}, ) for key in field_storage: # type: ignore field_storage_key = field_storage[key] - if field_storage_key.filename is None: - self._form[key] = field_storage_key.value - else: - self._files[key] = FileStorage( + if isinstance(field_storage_key, list): + for value in field_storage_key: + self._form.add(key, value) + elif ( + isinstance(field_storage_key, FieldStorage) and + field_storage_key.filename is not None + ): + self._files[key] = FileStorage( # type: ignore io.BytesIO(field_storage_key.file.read()), field_storage_key.filename, - field_storage_key.name, field_storage_key.type, field_storage_key.headers, + field_storage_key.name, field_storage_key.type, field_storage_key.headers, # type: ignore # noqa: E501 ) + else: + self._form.add(key, str(field_storage_key.file.read())) async def _load_json_data(self) -> str: """Return the data after decoding."""
Bugfix: cope with multi (list) form values. This fixes both encoding types to cope with multiple values present with the same key name. It also better copes with non-files in the ``multipart/form-data`` encoding.
pgjones_quart
train
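The urlencoded branch now calls .add for every repeated key instead of overwriting; the standard library's parse_qs makes the multi-value behaviour easy to see.

from urllib.parse import parse_qs

form = {}
for key, values in parse_qs("tag=a&tag=b&name=c").items():
    # plain dict assignment would keep only the last value; a list per key mirrors MultiDict.add
    form.setdefault(key, []).extend(values)

assert form["tag"] == ["a", "b"]
assert form["name"] == ["c"]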
46a54371176b80db022dc038533ca76cf50009ae
diff --git a/helper/ec2/thunder-ec2.py b/helper/ec2/thunder-ec2.py index <HASH>..<HASH> 100644 --- a/helper/ec2/thunder-ec2.py +++ b/helper/ec2/thunder-ec2.py @@ -41,6 +41,7 @@ def install_thunder(master, opts): def load_data(master, opts): """ Load an example data set into a Spark EC2 cluster""" print "Transferring example data to the cluster..." + ssh(master, opts, "/root/ephemeral-hdfs/bin/stop-all.sh") ssh(master, opts, "/root/ephemeral-hdfs/bin/start-all.sh") time.sleep(10) (s3_access_key, s3_secret_key) = get_s3_keys() @@ -131,8 +132,10 @@ if __name__ == "__main__": opts.wait = 160 opts.hadoop_major_version = "1" opts.ganglia = True - opts.spark_version = "0.9.0" + opts.spark_version = "0.9.1" opts.swap = 1024 + opts.worker_instances = 1 + opts.master_opts = "" if opts.resume: (master_nodes, slave_nodes) = get_existing_cluster(conn, opts, cluster_name)
Updating EC2 script with new options
thunder-project_thunder
train
9e64809c5d1ea84600cc030e7d3f9ea0e8408831
diff --git a/applications/example/extensions/example-type/example-type.js b/applications/example/extensions/example-type/example-type.js index <HASH>..<HASH> 100644 --- a/applications/example/extensions/example-type/example-type.js +++ b/applications/example/extensions/example-type/example-type.js @@ -11,12 +11,7 @@ exampleType.type = function(types, callback) { title: 'Article', description: 'Articles are pieces of text with a title.', path: 'article', - //storage: 'memory', - //storage: 'database', - // @todo: figure out a better way for passing the database connection to - // the storage controller. - //database: this.application.database, - //data: data, + storage: 'database', fields: { id: { title: 'Id', diff --git a/applications/example/settings.js b/applications/example/settings.js index <HASH>..<HASH> 100644 --- a/applications/example/settings.js +++ b/applications/example/settings.js @@ -7,6 +7,8 @@ */ var settings = module.exports = { + database: 'mongodb://localhost/example', + sessionSecret: 'change-me', application: { diff --git a/lib/application.js b/lib/application.js index <HASH>..<HASH> 100644 --- a/lib/application.js +++ b/lib/application.js @@ -13,6 +13,8 @@ var express = require('express'); var flash = require('connect-flash'); var async = require('async'); var utils = Prana.utils; +var MongoClient = require('mongodb').MongoClient; +var MongoDBStorage = require('prana-mongodb'); /** * Main application controller class. @@ -71,18 +73,50 @@ Application.prototype.start = function(callback) { var self = this; - // Load all extensions and call init hooks on all of them. - this.loadAllExtensions(function(error) { + async.series([ + // Connect to the database and add the database storage. + function(next) { + if (!self.settings.database) { + return next(); + } + + MongoClient.connect(self.settings.database, function(error, database) { + if (error) { + return next(error); + } + + // Add the 'mongodb' storage. + self.storage('database', { + controller: MongoDBStorage, + database: database + }); + + next(); + }); + }, + + // Load all extensions and call init hooks on all of them. + function(next) { + self.loadAllExtensions(next); + }, + + // Call Prana init that also calls init hook on all extensions. + function(next) { + // Can't pass next directly to init() since it returns the application as + // the first argument and next() expect an error or null. + self.init(function() { + next(); + }); + } + + ], function(error, results) { if (error) { return callback(error); } - // Call Prana init that also calls init hook on all extensions. - self.init(function() { - // Create application vhost. - self.mainApplication.use(express.vhost(self.settings.hostname, self.application)); - callback(); - }); + // Create application vhost. + self.mainApplication.use(express.vhost(self.settings.hostname, self.application)); + callback(); }); }; diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -7,6 +7,8 @@ "dependencies": { "prana": "0.0.x", "express": "3.3.x", + "mongodb": "1.3.x", + "prana-mongodb": "0.0.x", "connect-flash": "0.1.x", "async": "0.2.x" }
Adding MongoDB storage. Allow resources to be persisted on MongoDB.
recidive_choko
train
d46dba32d4cd4ef763a635ab5dca41785364f3f8
diff --git a/lib/filelib.php b/lib/filelib.php index <HASH>..<HASH> 100644 --- a/lib/filelib.php +++ b/lib/filelib.php @@ -2229,6 +2229,10 @@ function readfile_accel($file, $mimetype, $accelerate) { header('Content-Length: '.$filesize); + if (!empty($_SERVER['REQUEST_METHOD']) and $_SERVER['REQUEST_METHOD'] === 'HEAD') { + exit; + } + if ($filesize > 10000000) { // for large files try to flush and close all buffers to conserve memory while(@ob_get_level()) {
MDL-<I> files: Honor HTTP HEAD requests for files
moodle_moodle
train
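The PHP fix exits right after the headers when the request method is HEAD; the same HTTP rule in a minimal Python sketch using the standard library's http.server.

from http.server import BaseHTTPRequestHandler, HTTPServer

BODY = b"file contents"

class FileHandler(BaseHTTPRequestHandler):
    def _send_headers(self):
        self.send_response(200)
        self.send_header("Content-Length", str(len(BODY)))
        self.end_headers()

    def do_HEAD(self):
        self._send_headers()  # headers only, no body, like the early exit above

    def do_GET(self):
        self._send_headers()
        self.wfile.write(BODY)

# HTTPServer(("localhost", 8000), FileHandler).serve_forever()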
752dc781b3b600ff3605f7ac75df8e2179a0a060
diff --git a/lib/ransack/constants.rb b/lib/ransack/constants.rb index <HASH>..<HASH> 100644 --- a/lib/ransack/constants.rb +++ b/lib/ransack/constants.rb @@ -55,6 +55,14 @@ module Ransack :formatter => proc { |v| true } } ], + ['not_true', { + :arel_predicate => proc { |v| v ? 'not_eq' : 'eq' }, + :compounds => false, + :type => :boolean, + :validator => proc { |v| BOOLEAN_VALUES.include?(v) }, + :formatter => proc { |v| true } + } + ], ['false', { :arel_predicate => proc { |v| v ? 'eq' : 'not_eq' }, :compounds => false, @@ -63,6 +71,14 @@ module Ransack :formatter => proc { |v| false } } ], + ['not_false', { + :arel_predicate => proc { |v| v ? 'not_eq' : 'eq' }, + :compounds => false, + :type => :boolean, + :validator => proc { |v| BOOLEAN_VALUES.include?(v) }, + :formatter => proc { |v| false } + } + ], ['present', { :arel_predicate => proc { |v| v ? 'not_eq_all' : 'eq_any' }, :compounds => false, diff --git a/spec/ransack/predicate_spec.rb b/spec/ransack/predicate_spec.rb index <HASH>..<HASH> 100644 --- a/spec/ransack/predicate_spec.rb +++ b/spec/ransack/predicate_spec.rb @@ -96,6 +96,22 @@ module Ransack end end + describe 'not_true' do + it 'generates an inequality condition for boolean true' do + @s.awesome_not_true = true + field = "#{quote_table_name("people")}.#{quote_column_name("awesome")}" + expect(@s.result.to_sql).to match /#{field} != #{ + ActiveRecord::Base.connection.quoted_true}/ + end + + it 'generates an equality condition for boolean true' do + @s.awesome_not_true = false + field = "#{quote_table_name("people")}.#{quote_column_name("awesome")}" + expect(@s.result.to_sql).to match /#{field} = #{ + ActiveRecord::Base.connection.quoted_true}/ + end + end + describe 'false' do it 'generates an equality condition for boolean false' do @s.awesome_false = true @@ -112,6 +128,22 @@ module Ransack end end + describe 'not_false' do + it 'generates an inequality condition for boolean false' do + @s.awesome_not_false = true + field = "#{quote_table_name("people")}.#{quote_column_name("awesome")}" + expect(@s.result.to_sql).to match /#{field} != #{ + ActiveRecord::Base.connection.quoted_false}/ + end + + it 'generates an equality condition for boolean false' do + @s.awesome_not_false = false + field = "#{quote_table_name("people")}.#{quote_column_name("awesome")}" + expect(@s.result.to_sql).to match /#{field} = #{ + ActiveRecord::Base.connection.quoted_false}/ + end + end + describe 'null' do it 'generates a value IS NULL query' do @s.name_null = true
Add not_true and not_false predicates
activerecord-hackery_ransack
train
d7ad2e1d00a39df3a474617a73cad374ace74cd9
diff --git a/molgenis-model-registry/src/main/resources/js/standardsregistry.js b/molgenis-model-registry/src/main/resources/js/standardsregistry.js index <HASH>..<HASH> 100644 --- a/molgenis-model-registry/src/main/resources/js/standardsregistry.js +++ b/molgenis-model-registry/src/main/resources/js/standardsregistry.js @@ -70,8 +70,8 @@ function renderSearchResults(searchResults, container) { container.empty(); - for(var i = 0; i < searchResults.packages.length; ++i){ - container.append(modelTemplate({'package': searchResults.packages[i] })); + for(var i = 0; i < searchResults.packages.length; ++i){ + container.append(modelTemplate({'package': searchResults.packages[i], 'entities' : searchResults.packages[i].entitiesInPackage})); } container.append(countTemplate({'count': searchResults.total})); } @@ -135,14 +135,9 @@ $(document).on('click', '.dataexplorer-btn', function() { var id = $(this).closest('.package').data('id'); - // TODO link id to data explorer + var selectedEntity = $('.entity-select-dropdown').val(); // FIXME do not hardcode URL - window.location.href= '/menu/main/dataexplorer'; - }); - - $(document).on('click', '.import-btn', function() { - // FIXME do not hardcode URL - window.location.href= '/menu/main/importwizard'; + window.location.href= '/menu/main/dataexplorer?dataset=' + selectedEntity; }); countTemplate = Handlebars.compile($("#count-template").html());
Expanded the handlebars JSON message with a list of entity names
molgenis_molgenis
train
555a5cbbb2b615aac65d62ff08bbf87f4c28eefc
diff --git a/baselines/run.py b/baselines/run.py index <HASH>..<HASH> 100644 --- a/baselines/run.py +++ b/baselines/run.py @@ -222,7 +222,7 @@ def main(): env = build_env(args) obs = env.reset() def initialize_placeholders(nlstm=128,**kwargs): - return np.zeros((args.num_env, 2*nlstm)), np.zeros((1)) + return np.zeros((args.num_env or 1, 2*nlstm)), np.zeros((1)) state, dones = initialize_placeholders(**extra_args) while True: actions, _, state, _ = model.step(obs,S=state, M=dones)
Adding num_env to readme example (#<I>) * Adding num_env to readme example * Updated readme example fix
openai_baselines
train
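The guard handles num_env being unset when replaying a trained model; the placeholder shapes in isolation.

import numpy as np

def initialize_placeholders(num_env=None, nlstm=128):
    # `num_env or 1` falls back to a single environment when the flag is absent
    return np.zeros((num_env or 1, 2 * nlstm)), np.zeros((1,))

state, dones = initialize_placeholders()        # shape (1, 256) without --num_env
state4, _ = initialize_placeholders(num_env=4)  # shape (4, 256) with --num_env 4
assert state.shape == (1, 256) and state4.shape == (4, 256)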
cb33203293c19aa3ee2cbfddbe299a17b4b08518
diff --git a/packages/plugin-critical/index.js b/packages/plugin-critical/index.js index <HASH>..<HASH> 100644 --- a/packages/plugin-critical/index.js +++ b/packages/plugin-critical/index.js @@ -40,7 +40,7 @@ class CriticalPlugin { ignore: options.ignore, width: options.width, height: options.height, - folder: baseUrl, + pathPrefix: baseUrl, html: sourceHTML, inline: false, minify: true, @@ -51,6 +51,9 @@ class CriticalPlugin { return } + // remove path prefix from hashed urls + criticalCSS = criticalCSS.replace(/="url\([/\w]+%23(\w+)\)"/g, '="url(%23$1)"') + // we manually inline critical css because cheerio is messing // up the markup from Vue server renderer const resultHTML = await inlineCriticalCSS(filePath, criticalCSS)
fix(critical): remove path prefix from hashed urls
gridsome_gridsome
train
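The substitution drops the path prefix from hashed url(...) references; the same regex in Python's re, with a made-up attribute value as input.

import re

critical_css = 'mask="url(/myprefix/assets%23clipPath0)"'
cleaned = re.sub(r'="url\([/\w]+%23(\w+)\)"', r'="url(%23\1)"', critical_css)
assert cleaned == 'mask="url(%23clipPath0)"'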
fd5996a5358d6455843cdf5ad0c7f8e283b820c9
diff --git a/cmd/arc/main.go b/cmd/arc/main.go index <HASH>..<HASH> 100644 --- a/cmd/arc/main.go +++ b/cmd/arc/main.go @@ -21,6 +21,7 @@ var ( mkdirAll bool selectiveCompression bool implicitTopLevelFolder bool + stripComponents int continueOnError bool specifyFileType string ) @@ -37,6 +38,7 @@ func init() { flag.BoolVar(&mkdirAll, "mkdirs", false, "Make all necessary directories") flag.BoolVar(&selectiveCompression, "smart", true, "Only compress files which are not already compressed (zip only)") flag.BoolVar(&implicitTopLevelFolder, "folder-safe", true, "If an archive does not have a single top-level folder, create one implicitly") + flag.IntVar(&stripComponents, "strip-components", 0, "Strip number of leading paths") flag.BoolVar(&continueOnError, "allow-errors", true, "Log errors and continue processing") flag.StringVar(&specifyFileType, "ext", "", "specify file type") } @@ -223,6 +225,7 @@ func getFormat(subcommand string) (interface{}, error) { OverwriteExisting: overwriteExisting, MkdirAll: mkdirAll, ImplicitTopLevelFolder: implicitTopLevelFolder, + StripComponents: stripComponents, ContinueOnError: continueOnError, } diff --git a/tar.go b/tar.go index <HASH>..<HASH> 100644 --- a/tar.go +++ b/tar.go @@ -40,6 +40,10 @@ type Tar struct { // especially on extraction. ImplicitTopLevelFolder bool + // Strip number of leading paths. This feature is available + // only during unpacking of the entire archive. + StripComponents int + // If true, errors encountered during reading // or writing a single file will be logged and // the operation will continue on remaining files. @@ -233,6 +237,17 @@ func (t *Tar) untarNext(destination string) error { if errPath != nil { return fmt.Errorf("checking path traversal attempt: %v", errPath) } + + if t.StripComponents > 0 { + if strings.Count(header.Name, "/") < t.StripComponents { + return nil // skip path with fewer components + } + + for i := 0; i < t.StripComponents; i++ { + slash := strings.Index(header.Name, "/") + header.Name = header.Name[slash+1:] + } + } return t.untarFile(f, destination, header) }
Add a way to strip the top-level folder. Closes #<I>
mholt_archiver
train
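The Go change mirrors tar's --strip-components; the same path logic as a small Python function.

def strip_components(name, n):
    # entries with fewer than n separators are skipped, like the `return nil` above
    if name.count("/") < n:
        return None
    return name.split("/", n)[n]

assert strip_components("pkg-1.0/src/main.go", 1) == "src/main.go"
assert strip_components("top-level-only", 1) is None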
cb82fccd59a1bbb06a9f869916ff702eed59554c
diff --git a/packages/js-go-channels/src/dispatcher.js b/packages/js-go-channels/src/dispatcher.js index <HASH>..<HASH> 100644 --- a/packages/js-go-channels/src/dispatcher.js +++ b/packages/js-go-channels/src/dispatcher.js @@ -80,33 +80,36 @@ function processGoRoutines( * Note that as per https://jsperf.com/array-filter-performance, * Array.filter isn't as performant. */ -function clearDones(goRoutines) { - const countDones = goRoutines.reduce( +function clearDones(state) { + const countDones = state.goRoutines.reduce( (total, goRoutine) => { return goRoutine.done ? total + 1 : total }, 0 ) // first handle some simple cases first if (!countDones) { - return goRoutines - } else if (goRoutines.length === countDones) { - return [] + return + } else if (state.goRoutines.length === countDones) { + state.goRoutines = [] + state.lastSelectedChannel = {} + return } // then return a new array with all the done goRoutines removed let len = 0 - return goRoutines.reduce( + state.goRoutes = state.goRoutines.reduce( (newGoRoutines, goRoutine, i) => { if (!goRoutine.done) { newGoRoutines[len++] = goRoutine } return newGoRoutines }, - new Array(goRoutines.length - countDones) + new Array(state.goRoutines.length - countDones) ) + state.lastSelectedChannel = {} } function dispatcher(state) { processGoRoutines(state) - state.goRoutines = clearDones(state.goRoutines) + clearDones(state) // recursively call itself runDispatcher(state) }
[core] also clear selected channels when a go routine ends
frankandrobot_js-go-channels
train
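The clearDones fast paths (no finished goroutines, all finished) plus a filtered copy translate directly; a Python rendering of the same algorithm, where a list comprehension stands in for the preallocated-array trick in the JS version.

from collections import namedtuple

def clear_dones(goroutines):
    dones = sum(1 for g in goroutines if g.done)
    if dones == 0:                # nothing finished: keep the list as-is
        return goroutines
    if dones == len(goroutines):  # everything finished: drop the lot
        return []
    return [g for g in goroutines if not g.done]

G = namedtuple("G", "done")
assert clear_dones([G(True), G(False)]) == [G(False)]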
c5f7c49014e0b58952810b7ae577ffc9d9e4c163
diff --git a/core/parser/src/main/java/org/overture/parser/syntax/DefinitionReader.java b/core/parser/src/main/java/org/overture/parser/syntax/DefinitionReader.java
index <HASH>..<HASH> 100644
--- a/core/parser/src/main/java/org/overture/parser/syntax/DefinitionReader.java
+++ b/core/parser/src/main/java/org/overture/parser/syntax/DefinitionReader.java
@@ -1226,6 +1226,7 @@ public class DefinitionReader extends SyntaxReader
 		AAccessSpecifierAccessSpecifier access = readAccessSpecifier(false);
 		AAssignmentDefinition def = getStatementReader().readAssignmentDefinition();
 		AInstanceVariableDefinition ivd = AstFactory.newAInstanceVariableDefinition(def.getName(), def.getType(), def.getExpression());
+		def.getType().parent(ivd); // the type of ivd is kept in the graph, but we throw away the assignment
 		ivd.setAccess(access);
 		return ivd;
 	}
fixed bug with parent of instance variable type
overturetool_overture
train
da5727a2b016dcf6e7a8a9f3982479d66dc98658
diff --git a/Components/Import/Entity/PlentymarketsImportEntityItem.php b/Components/Import/Entity/PlentymarketsImportEntityItem.php
index <HASH>..<HASH> 100644
--- a/Components/Import/Entity/PlentymarketsImportEntityItem.php
+++ b/Components/Import/Entity/PlentymarketsImportEntityItem.php
@@ -157,8 +157,7 @@ class PlentymarketsImportEntityItem
 	protected function setDetails()
 	{
 		// Shipping time
-		$availability = PlentymarketsImportController::getItemAvailability();
-		$shippingTime = isset($availability[$this->ItemBase->Availability->AvailabilityID]) ? $availability[$this->ItemBase->Availability->AvailabilityID] : null;
+		$shippingTime = PlentymarketsUtils::getShippingTimeByAvailabilityId($this->ItemBase->Availability->AvailabilityID);
 
 		// Active
 		$active = $this->ItemBase->Availability->Inactive == 0 && $this->ItemBase->Availability->Webshop == 1;
@@ -366,6 +365,12 @@ class PlentymarketsImportEntityItem
 			}
 		}
 
+		$shippingTime = PlentymarketsUtils::getShippingTimeByAvailabilityId($AttributeValueSet->Availability);
+		if ($shippingTime)
+		{
+			$details['shippingtime'] = $shippingTime;
+		}
+
 		$details['additionaltext'] = $AttributeValueSet->AttributeValueSetName;
 		$details['ean'] = $AttributeValueSet->EAN;
 		$details['X_plentySku'] = $sku;
@@ -375,7 +380,7 @@ class PlentymarketsImportEntityItem
 	}
 
 	/**
-	 * Sets the categories. Non-existing categories will be created immediatly.
+	 * Sets the categories. Non-existing categories will be created immediately.
 	 */
 	protected function setCategories()
 	{
diff --git a/Components/Utils/PlentymarketsUtils.php b/Components/Utils/PlentymarketsUtils.php
index <HASH>..<HASH> 100644
--- a/Components/Utils/PlentymarketsUtils.php
+++ b/Components/Utils/PlentymarketsUtils.php
@@ -26,7 +26,6 @@
  * @author Daniel Bächtle <daniel.baechtle@plentymarkets.com>
  */
 
-
 /**
  * The class PlentymarketsUtils contains different useful methods. The get-methods of this class are used
  * in some export and import entity classes. And the check-methods are used in the controllers PlentymarketsCronjobController
@@ -86,6 +85,7 @@ class PlentymarketsUtils
 	public static function getShopwareIDFromExternalItemID($externalItemID)
 	{
 		list ($shopwareID) = sscanf($externalItemID, self::EXTERNAL_ITEM_ID_FORMAT);
+
 		return (integer) $shopwareID;
 	}
 
@@ -98,6 +98,7 @@ class PlentymarketsUtils
 	public static function getShopwareIDFromExternalOrderID($externalItemID)
 	{
 		list ($shopwareID) = sscanf($externalItemID, self::EXTERNAL_ORDER_ID_FORMAT);
+
 		return (integer) $shopwareID;
 	}
 
@@ -137,6 +138,7 @@ class PlentymarketsUtils
 		if (Shopware()->Bootstrap()->issetResource('License'))
 		{
 			$License = Shopware()->License();
+
 			return $License->checkCoreLicense(false);
 		}
 		else
@@ -154,6 +156,7 @@ class PlentymarketsUtils
 	public static function convertBytes($size)
 	{
 		$unit = array('b', 'kb', 'mb', 'gb', 'tb', 'pb');
+
 		return @round($size / pow(1024, ($i = floor(log($size, 1024)))), 2) . ' ' . $unit[$i];
 	}
 
@@ -205,7 +208,7 @@ class PlentymarketsUtils
 		}
 
 		$path = realpath(
-			Shopware()->AppPath() . '/Plugins/'. $plugin['source'] .'/'. $plugin['namespace'] .'/SwagBundle/Models/'
+			Shopware()->AppPath() . '/Plugins/' . $plugin['source'] . '/' . $plugin['namespace'] . '/SwagBundle/Models/'
 		);
 
 		if (!$path)
@@ -235,6 +238,32 @@ class PlentymarketsUtils
 	public static function getRootIdByCategory(Shopware\Models\Category\Category $category)
 	{
 		$path = array_reverse(explode('|', $category->getPath()));
+
 		return $path[1];
 	}
+
+	/**
+	 * @var null|array
+	 */
+	protected static $availability = null;
+
+	/**
+	 * Returns the shipping time
+	 *
+	 * @param $availabilityId
+	 * @return integer|null
+	 */
+	public static function getShippingTimeByAvailabilityId($availabilityId)
+	{
+		if ((integer) $availabilityId <= 0)
+		{
+			return null;
+		}
+		if (!is_array(self::$availability))
+		{
+			self::$availability = PlentymarketsImportController::getItemAvailability();
+		}
+
+		return isset(self::$availability[$availabilityId]) ? self::$availability[$availabilityId] : null;
+	}
 }
UPDATE Item import (shipping time variants)
plentymarkets_plentymarkets-shopware-connector
train
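The new helper lazily caches the availability map in a static property; the same lazy-cache idea in Python via functools.lru_cache, where load_item_availability is a hypothetical stand-in for the remote lookup.

from functools import lru_cache

def load_item_availability():
    # hypothetical fetch; the PHP code calls PlentymarketsImportController::getItemAvailability()
    return {1: 2, 2: 5}

@lru_cache(maxsize=1)
def availability_map():
    return load_item_availability()  # evaluated once, then served from the cache

def shipping_time_by_availability_id(availability_id):
    if int(availability_id) <= 0:
        return None
    return availability_map().get(availability_id)

assert shipping_time_by_availability_id(2) == 5
assert shipping_time_by_availability_id(0) is None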
6cb4716ab6ce88d9be05ff9a58ac3910fe752237
diff --git a/src/main/org/codehaus/groovy/syntax/lexer/StringLexer.java b/src/main/org/codehaus/groovy/syntax/lexer/StringLexer.java index <HASH>..<HASH> 100644 --- a/src/main/org/codehaus/groovy/syntax/lexer/StringLexer.java +++ b/src/main/org/codehaus/groovy/syntax/lexer/StringLexer.java @@ -58,6 +58,11 @@ public class StringLexer extends TextLexerBase { string.append( consume() ); } + + if( la(1) == CharStream.EOS && string.length() == 0 ) + { + finished = true; + } return Token.newString( string.toString(), getStartLine(), getStartColumn() ); } diff --git a/src/test/org/codehaus/groovy/syntax/parser/CompilerErrorTest.java b/src/test/org/codehaus/groovy/syntax/parser/CompilerErrorTest.java index <HASH>..<HASH> 100644 --- a/src/test/org/codehaus/groovy/syntax/parser/CompilerErrorTest.java +++ b/src/test/org/codehaus/groovy/syntax/parser/CompilerErrorTest.java @@ -52,7 +52,7 @@ public class CompilerErrorTest extends TestSupport { public void testUnknownClassCatch() throws Exception { MissingClassException e = - assertCompileFailed( + assertCompileFailed_WithMCE( "class UnknownClass {\n" + " main() {\n" + " try {\n" @@ -69,7 +69,7 @@ public class CompilerErrorTest extends TestSupport { public void testUnknownClassInNew() throws Exception { MissingClassException e = - assertCompileFailed( + assertCompileFailed_WithMCE( "class UnknownClass {\n" + " main() {\n" + " x = new UnknownThingy()\n" + " }\n" + "}\n"); assertEquals("UnknownThingy", e.getType()); } @@ -92,13 +92,27 @@ public class CompilerErrorTest extends TestSupport { */ } + + + public void testUnterminatedConstantGString() throws Exception { + assertCompileFailed( "println \"d" ); + } + + public void testUnterminatedGString() throws Exception { + assertCompileFailed( "println \"${1+2\"\nprintln \"c\"" ); + } + + + + + protected GroovyObject assertCompileWorks(String code) throws Exception { Class type = loader.parseClass(new ByteArrayInputStream(code.getBytes()), "ValidClass_" + getMethodName() + ".groovy"); return (GroovyObject) type.newInstance(); } - protected MissingClassException assertCompileFailed(String code) throws Exception { + protected MissingClassException assertCompileFailed_WithMCE(String code) throws Exception { try { assertCompileWorks(code); @@ -116,4 +130,17 @@ public class CompilerErrorTest extends TestSupport { return null; } + protected CompilationFailedException assertCompileFailed(String code) throws Exception { + try { + assertCompileWorks(code); + + fail("Should have thrown an exception"); + } + catch( CompilationFailedException e ) { + return e; + } + + return null; + } + }
Fixed bug that caused the parser to hang on unterminated GStrings. git-svn-id: <URL>
groovy_groovy-core
train
23b5c5f61a9149c39fc1891f688194c4dbbca3d7
diff --git a/lib/Map/TableDataSource.js b/lib/Map/TableDataSource.js index <HASH>..<HASH> 100644 --- a/lib/Map/TableDataSource.js +++ b/lib/Map/TableDataSource.js @@ -697,25 +697,26 @@ function setNewRegionImageryLayer(dataSource, layerIndex) { }); var tableStructure = dataSource.tableStructure; var legendHelper = dataSource._legendHelper; + + var regionColumnValues = regionDetail.column.values; + // Wipe out the region names from the rows that do not apply at this time, if there is a time column. + var timeColumn = tableStructure.columnsByType[VarType.TIME][0]; + if (defined(timeColumn) && dataSource._availabilities) { + regionColumnValues = regionColumnValues.map(function(value, index) { + return (dataSource._availabilities[index].contains(dataSource.clock.currentTime) ? value : undefined); + }); + } // Recolor the regions var colorFunction; // regionIndices will be an array the same length as regionProvider.regions, giving the index of each region into the table. var regionIndices = regionDetail.regionProvider.mapRegionsToIndicesInto( - regionDetail.column.values, - defined(regionDetail.disambigColumn) && regionDetail.disambigColumn.values + regionColumnValues, + defined(regionDetail.disambigColumn) ? regionDetail.disambigColumn.values : undefined ); var regionValues = regionIndices; // Appropriate if no active column: color each region according to its index into the table. if (tableStructure.activeItems.length > 0) { var activeColumn = tableStructure.activeItems[0]; regionValues = regionIndices.map(function(i) { return activeColumn.values[i]; }); - // Filter the region values by time, if present - var timeColumn = tableStructure.columnsByType[VarType.TIME][0]; - if (timeColumn && dataSource._availabilities) { - // this is wrong. should be filtering something else I think. - regionValues = regionValues.map(function(value, index) { - return dataSource._availabilities[index].contains(dataSource.clock.currentTime); - }); - } if (activeColumn.usesIndicesIntoUniqueValues) { // Convert the region's value to an index into the uniqueValues array. regionValues = regionValues.map(function(value) { return activeColumn.uniqueValues.indexOf(value); });
time filtering works, but not connected to clock updates yet
TerriaJS_terriajs
train
2a1bd88afaeb6efd5e130db3a696c78b501f3b2d
diff --git a/unixtimestampfield/fields.py b/unixtimestampfield/fields.py index <HASH>..<HASH> 100644 --- a/unixtimestampfield/fields.py +++ b/unixtimestampfield/fields.py @@ -67,12 +67,17 @@ class TimestampPatchMixin(object): value = timezone.localtime(value, timezone.utc) return value.timestamp() + raise exceptions.ValidationError( + "Unable to convert value: '%s' to timestamp" % value, + code="invalid_timestamp" + ) + def to_utc_datetime(self, value): """ from value to datetime with tzinfo format (datetime.datetime instance) """ if isinstance(value, str) or isinstance(value, int) or isinstance(value, float): - value = timezone.datetime.fromtimestamp(value, timezone.utc) + value = timezone.datetime.fromtimestamp(float(value), timezone.utc) return value if isinstance(value, datetime.datetime): @@ -84,7 +89,7 @@ class TimestampPatchMixin(object): raise exceptions.ValidationError( "Unable to convert value: '%s' to python data type" % value, - code="invalid_unix_timestamp" + code="invalid_datetime" ) def to_default_timezone_datetime(self, value): @@ -138,7 +143,9 @@ class UnixTimeStampField(TimestampPatchMixin, Field): setattr(model_instance, self.attname, value) return value else: - return super(UnixTimeStampField, self).pre_save(model_instance, add) + value = getattr(model_instance, self.attname) + setattr(model_instance, self.attname, self.to_datetime(value)) + return value def to_python(self, value): return self.to_datetime(value)
Set model instance's attribute and fix a minor bug
myyang_django-unixtimestampfield
train
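The decisive fix is the float(value) cast, since datetime.fromtimestamp rejects strings; the conversion logic in isolation.

from datetime import datetime, timezone

def to_utc_datetime(value):
    # str, int, and float timestamps are all coerced to float first, as in the patched field
    if isinstance(value, (str, int, float)):
        return datetime.fromtimestamp(float(value), timezone.utc)
    if isinstance(value, datetime):
        return value
    raise ValueError("unable to convert value: %r" % (value,))

assert to_utc_datetime("0") == datetime(1970, 1, 1, tzinfo=timezone.utc)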
7e49c2d3b0af9d4b85b6edc042cb9d3770a33ac9
diff --git a/il2fb/commons/weather.py b/il2fb/commons/weather.py index <HASH>..<HASH> 100644 --- a/il2fb/commons/weather.py +++ b/il2fb/commons/weather.py @@ -5,18 +5,18 @@ from candv import with_constant_class from ._translations import gettext_lazy as _ -class ConditionType(VerboseValueConstant): +class WeatherCondition(VerboseValueConstant): ... -class Conditions(with_constant_class(ConditionType), Values): - clear = ConditionType(0, _("clear")) - good = ConditionType(1, _("good")) - hazy = ConditionType(2, _("hazy")) - poor = ConditionType(3, _("poor")) - blind = ConditionType(4, _("blind")) - precipitation = ConditionType(5, _("precipitation")) - thunderstorm = ConditionType(6, _("thunderstorm")) +class WeatherConditions(with_constant_class(WeatherCondition), Values): + clear = WeatherCondition(0, _("clear")) + good = WeatherCondition(1, _("good")) + hazy = WeatherCondition(2, _("hazy")) + poor = WeatherCondition(3, _("poor")) + blind = WeatherCondition(4, _("blind")) + precipitation = WeatherCondition(5, _("precipitation")) + thunderstorm = WeatherCondition(6, _("thunderstorm")) class GustType(VerboseValueConstant):
Rename 'Conditions' to 'WeatherConditions'
IL2HorusTeam_il2fb-commons
train
1b0402af0fa068fdc0e625abd8de63a65c433cd2
diff --git a/test/e2e/common/volumes.go b/test/e2e/common/volumes.go index <HASH>..<HASH> 100644 --- a/test/e2e/common/volumes.go +++ b/test/e2e/common/volumes.go @@ -53,7 +53,7 @@ import ( // These tests need privileged containers, which are disabled by default. Run // the test with "go run hack/e2e.go ... --ginkgo.focus=[Feature:Volumes]" -var _ = framework.KubeDescribe("[sig-storage] GCP Volumes", func() { +var _ = Describe("[sig-storage] GCP Volumes", func() { f := framework.NewDefaultFramework("gcp-volume") // If 'false', the test won't clear its volumes upon completion. Useful for debugging, @@ -73,7 +73,7 @@ var _ = framework.KubeDescribe("[sig-storage] GCP Volumes", func() { //////////////////////////////////////////////////////////////////////// // NFS //////////////////////////////////////////////////////////////////////// - framework.KubeDescribe("NFSv4", func() { + Describe("NFSv4", func() { It("should be mountable for NFSv4", func() { config, _, serverIP := framework.NewNFSServer(c, namespace.Name, []string{}) defer func() { @@ -101,7 +101,7 @@ var _ = framework.KubeDescribe("[sig-storage] GCP Volumes", func() { }) }) - framework.KubeDescribe("NFSv3", func() { + Describe("NFSv3", func() { It("should be mountable for NFSv3", func() { config, _, serverIP := framework.NewNFSServer(c, namespace.Name, []string{}) defer func() { @@ -131,7 +131,7 @@ var _ = framework.KubeDescribe("[sig-storage] GCP Volumes", func() { //////////////////////////////////////////////////////////////////////// // Gluster //////////////////////////////////////////////////////////////////////// - framework.KubeDescribe("GlusterFS", func() { + Describe("GlusterFS", func() { It("should be mountable", func() { // create gluster server and endpoints config, _, _ := framework.NewGlusterfsServer(c, namespace.Name) diff --git a/test/e2e/storage/volume_io.go b/test/e2e/storage/volume_io.go index <HASH>..<HASH> 100644 --- a/test/e2e/storage/volume_io.go +++ b/test/e2e/storage/volume_io.go @@ -243,7 +243,7 @@ var _ = SIGDescribe("Volume plugin streaming [Slow]", func() { //////////////////////////////////////////////////////////////////////// // NFS //////////////////////////////////////////////////////////////////////// - SIGDescribe("NFS", func() { + Describe("NFS", func() { testFile := "nfs_io_test" // client pod uses selinux podSec := v1.PodSecurityContext{ @@ -279,7 +279,7 @@ var _ = SIGDescribe("Volume plugin streaming [Slow]", func() { //////////////////////////////////////////////////////////////////////// // Gluster //////////////////////////////////////////////////////////////////////// - SIGDescribe("GlusterFS", func() { + Describe("GlusterFS", func() { var name string testFile := "gluster_io_test" @@ -325,7 +325,7 @@ var _ = SIGDescribe("Volume plugin streaming [Slow]", func() { // iSCSI // The iscsiadm utility and iscsi target kernel modules must be installed on all nodes. //////////////////////////////////////////////////////////////////////// - SIGDescribe("iSCSI [Feature:Volumes]", func() { + Describe("iSCSI [Feature:Volumes]", func() { testFile := "iscsi_io_test" BeforeEach(func() { @@ -362,7 +362,7 @@ var _ = SIGDescribe("Volume plugin streaming [Slow]", func() { //////////////////////////////////////////////////////////////////////// // Ceph RBD //////////////////////////////////////////////////////////////////////// - SIGDescribe("Ceph-RBD [Feature:Volumes]", func() { + Describe("Ceph-RBD [Feature:Volumes]", func() { var ( secret *v1.Secret name string
Remove [k8s.io] tag and redundant [sig-storage] tags from tests
kubernetes_kubernetes
train
e50a821aa80f143b7379503a807af83f9b99d865
diff --git a/test/e2e/kubectl.go b/test/e2e/kubectl.go index <HASH>..<HASH> 100644 --- a/test/e2e/kubectl.go +++ b/test/e2e/kubectl.go @@ -348,6 +348,7 @@ var _ = framework.KubeDescribe("Kubectl client", func() { }) It("should support inline execution and attach", func() { + framework.SkipIfContainerRuntimeIs("rkt") // #23335 framework.SkipUnlessServerVersionGTE(jobsVersion, c) nsFlag := fmt.Sprintf("--namespace=%v", ns) @@ -1044,6 +1045,8 @@ var _ = framework.KubeDescribe("Kubectl client", func() { jobName := "e2e-test-rm-busybox-job" It("should create a job from an image, then delete the job [Conformance]", func() { + // The rkt runtime doesn't support attach, see #23335 + framework.SkipIfContainerRuntimeIs("rkt") framework.SkipUnlessServerVersionGTE(jobsVersion, c) By("executing a command with run --rm and attach with stdin")
e2e: Skip attach tests for rkt runtime
kubernetes_kubernetes
train
dad7a9443f20ef44d9628027497f8fdd7cb1df8d
diff --git a/vent/core/network_tap/ncontrol/rest/create.py b/vent/core/network_tap/ncontrol/rest/create.py index <HASH>..<HASH> 100644 --- a/vent/core/network_tap/ncontrol/rest/create.py +++ b/vent/core/network_tap/ncontrol/rest/create.py @@ -60,7 +60,10 @@ class CreateR: except Exception as e: # pragma: no cover return (False, 'unable to connect to redis because: ' + str(e)) if r: - r.hmset(payload['id'], ast.literal_eval(payload['metadata'])) + try: + r.hmset(payload['id'], payload['metadata']) + except Exception as e: # pragma: no cover + return (False, 'unable to store contents of the payload in redis because: ' + str(e)) # connect to docker c = None
fix string-to-dict issue when storing payload metadata in redis
CyberReboot_vent
train
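After the fix, the metadata dict goes straight into hmset inside its own try/except; a hedged standalone sketch assuming a local redis server and the redis-py client.

import redis

r = redis.StrictRedis(host="localhost", port=6379)
payload = {"id": "ncontrol:tool1", "metadata": {"iface": "eth0", "filter": ""}}
try:
    # the dict is passed directly; no ast.literal_eval round-trip through a string
    r.hmset(payload["id"], payload["metadata"])
except Exception as e:
    print("unable to store contents of the payload in redis because: " + str(e))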
c5c1bdf7497ffb7f49e2f9b074eafd2ce175c884
diff --git a/lib/solargraph/yard_map.rb b/lib/solargraph/yard_map.rb index <HASH>..<HASH> 100755 --- a/lib/solargraph/yard_map.rb +++ b/lib/solargraph/yard_map.rb @@ -178,15 +178,15 @@ module Solargraph ver = spec.version.to_s ver = ">= 0" if ver.empty? yd = YARD::Registry.yardoc_file_for_gem(spec.name, ver) + # YARD detects gems for certain libraries that do not have a yardoc + # but exist in the stdlib. `fileutils` is an example. Treat those + # cases as errors and check the stdlib yardoc. + raise Gem::LoadError if yd.nil? @gem_paths[spec.name] = spec.full_gem_path - if yd.nil? - unresolved_requires.push r - else - unless yardocs.include?(yd) - yardocs.unshift yd - result.concat process_yardoc yd - result.concat add_gem_dependencies(spec) if with_dependencies? - end + unless yardocs.include?(yd) + yardocs.unshift yd + result.concat process_yardoc yd + result.concat add_gem_dependencies(spec) if with_dependencies? end rescue Gem::LoadError => e stdtmp = []
Gems without yardocs always fall back to stdlib.
castwide_solargraph
train
8dfb4bf4f4a5a8cf1131eebff2e37a6990c7798e
diff --git a/indra/tests/test_indranet_assembler.py b/indra/tests/test_indranet_assembler.py index <HASH>..<HASH> 100644 --- a/indra/tests/test_indranet_assembler.py +++ b/indra/tests/test_indranet_assembler.py @@ -1,3 +1,4 @@ +import numpy as np import pandas as pd import networkx as nx from indra.assemblers.indranet import IndraNetAssembler, IndraNet @@ -115,13 +116,18 @@ def test_to_digraph(): net = IndraNet.from_df(df) assert len(net.nodes) == 3 assert len(net.edges) == 8 - digraph = net.to_digraph() + digraph = net.to_digraph(weight_mapping=_weight_mapping) assert len(digraph.nodes) == 3 assert len(digraph.edges) == 2 assert set([ stmt['stmt_type'] for stmt in digraph['a']['b']['statements']]) == { 'Activation', 'Phosphorylation', 'Inhibition', 'IncreaseAmount'} assert all(digraph.edges[e].get('belief', False) for e in digraph.edges) + assert all(isinstance(digraph.edges[e]['belief'], + (float, np.longfloat)) for e in digraph.edges) + assert all(digraph.edges[e].get('weight', False) for e in digraph.edges) + assert all(isinstance(digraph.edges[e]['weight'], + (float, np.longfloat)) for e in digraph.edges) digraph_from_df = IndraNet.digraph_from_df(df) assert nx.is_isomorphic(digraph, digraph_from_df) @@ -131,7 +137,8 @@ def test_to_signed_graph(): df = ia.make_df() net = IndraNet.from_df(df) signed_graph = net.to_signed_graph( - sign_dict=IndraNetAssembler.default_sign_dict) + sign_dict=IndraNetAssembler.default_sign_dict, + weight_mapping=_weight_mapping) assert len(signed_graph.nodes) == 3 assert len(signed_graph.edges) == 4 assert set([stmt['stmt_type'] for stmt in @@ -147,3 +154,15 @@ def test_to_signed_graph(): 'Inhibition', 'DecreaseAmount'} assert all(signed_graph.edges[e].get('belief', False) for e in signed_graph.edges) + assert all(isinstance(signed_graph.edges[e]['belief'], + (float, np.longfloat)) for e in signed_graph.edges) + assert all(signed_graph.edges[e].get('weight', False) for e in + signed_graph.edges) + assert all(isinstance(signed_graph.edges[e]['weight'], + (float, np.longfloat)) for e in signed_graph.edges) + + +def _weight_mapping(G): + for edge in G.edges: + G.edges[edge]['weight'] = 1 - G.edges[edge]['belief'] + return G
Add tests for flattened edge weight. Update for belief.
sorgerlab_indra
train
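The _weight_mapping helper just derives weight = 1 - belief per edge, so it works on any networkx graph.

import networkx as nx

def weight_mapping(G):
    # same transform as the test helper: weight complements belief
    for edge in G.edges:
        G.edges[edge]["weight"] = 1 - G.edges[edge]["belief"]
    return G

G = nx.DiGraph()
G.add_edge("a", "b", belief=0.9)
weight_mapping(G)
assert abs(G.edges["a", "b"]["weight"] - 0.1) < 1e-9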
739c5284dc3d2da9da6a521bf4c80e87753fff60
diff --git a/lib/rvc/modules/host.rb b/lib/rvc/modules/host.rb
index <HASH>..<HASH> 100644
--- a/lib/rvc/modules/host.rb
+++ b/lib/rvc/modules/host.rb
@@ -203,3 +203,33 @@ def rescan_storage hosts
     storageSystem.RescanVmfs
   end
 end
+
+
+opts :select_vmknic_for_service do
+  summary "Selects a vmknic for a particular service"
+  arg :vmknic, "Name of vmknic", :type => :string
+  arg :service, "e.g.: vmotion", :type => :string
+  arg :host, nil, :lookup => VIM::HostSystem, :multi => true
+end
+
+def select_vmknic_for_service vmknic, service, hosts
+  hosts.each do |host|
+    vnicSys = host.configManager.virtualNicManager
+    vnicSys.SelectVnicForNicType(:nicType => service, :device => vmknic)
+  end
+end
+
+
+opts :deselect_vmknic_for_service do
+  summary "Deselects a vmknic for a particular service"
+  arg :vmknic, "Name of vmknic", :type => :string
+  arg :service, "e.g.: vmotion", :type => :string
+  arg :host, nil, :lookup => VIM::HostSystem, :multi => true
+end
+
+def deselect_vmknic_for_service vmknic, service, hosts
+  hosts.each do |host|
+    vnicSys = host.configManager.virtualNicManager
+    vnicSys.DeselectVnicForNicType(:nicType => service, :device => vmknic)
+  end
+end
Add host.(de)select_vmknic_for_service
vmware_rvc
train
ee2a8404ccf64483c78f5cdfac97475c8f9796c5
diff --git a/tests/upgrade/src/test/java/org/sonarsource/sonarqube/upgrade/UpgradeTest.java b/tests/upgrade/src/test/java/org/sonarsource/sonarqube/upgrade/UpgradeTest.java index <HASH>..<HASH> 100644 --- a/tests/upgrade/src/test/java/org/sonarsource/sonarqube/upgrade/UpgradeTest.java +++ b/tests/upgrade/src/test/java/org/sonarsource/sonarqube/upgrade/UpgradeTest.java @@ -56,9 +56,6 @@ public class UpgradeTest { private static final String LATEST_JAVA_RELEASE = "LATEST_RELEASE"; private static final Version VERSION_5_2 = Version.create("5.2"); private static final Version VERSION_5_6_1 = Version.create("5.6.1"); - private static final Version VERSION_5_6 = Version.create("5.6"); - private static final Version VERSION_6_0 = Version.create("6.0"); - private static final Version VERSION_6_1 = Version.create("6.1"); private static final Version VERSION_CURRENT = Version.create("DEV"); private Orchestrator orchestrator; @@ -77,18 +74,8 @@ public class UpgradeTest { } @Test - public void test_upgrade_from_5_2_via_5_6() { - testDatabaseUpgrade(VERSION_5_2, VERSION_5_6); - } - - @Test - public void test_upgrade_from_6_0() { - testDatabaseUpgrade(VERSION_6_0); - } - - @Test - public void test_upgrade_from_6_1() { - testDatabaseUpgrade(VERSION_6_1); + public void test_upgrade_from_5_2_via_5_6_1() { + testDatabaseUpgrade(VERSION_5_2, VERSION_5_6_1); } private void testDatabaseUpgrade(Version fromVersion, Version... intermediaryVersions) { @@ -144,8 +131,6 @@ public class UpgradeTest { checkUrlIsReturningOk("/api/system/status"); checkUrlIsReturningOk("/api/system/db_migration_status"); checkUrlIsReturningOk("/api/webservices/list"); - // TODO Reactivate when latest Sonarqube version will be in repox - // checkUrlIsReturningOkOnlyForDevVersion("/api/l10n/index", sqVersion); // These urls should not be available when system requires a migration checkUrlIsReturningNotFound("/api/issues/search?projectKeys=org.apache.struts%3Astruts-core");
Drop long upgrade tests from versions <I> and <I>
SonarSource_sonarqube
train
0856406a89cfa3c7fd0a1a9050dfe130d0cf6011
diff --git a/src/WMS/Ldap/Configuration.php b/src/WMS/Ldap/Configuration.php index <HASH>..<HASH> 100644 --- a/src/WMS/Ldap/Configuration.php +++ b/src/WMS/Ldap/Configuration.php @@ -659,7 +659,7 @@ class Configuration } /** - * @param mixed $membershipUseAttributeFromUser + * @param bool $membershipUseAttributeFromUser */ public function setMembershipUseAttributeFromUser($membershipUseAttributeFromUser) { @@ -671,7 +671,7 @@ class Configuration } /** - * @return mixed + * @return bool */ public function getMembershipUseAttributeFromUser() { diff --git a/src/WMS/Ldap/Repository/AccountRepository.php b/src/WMS/Ldap/Repository/AccountRepository.php index <HASH>..<HASH> 100644 --- a/src/WMS/Ldap/Repository/AccountRepository.php +++ b/src/WMS/Ldap/Repository/AccountRepository.php @@ -173,7 +173,7 @@ class AccountRepository extends AbstractRepository protected function getSearchAttributes() { - return array( + $attribs = array( 'dn', 'objectClass', $this->getConfiguration()->getAccountUsernameAttribute(), @@ -183,5 +183,11 @@ class AccountRepository extends AbstractRepository $this->getConfiguration()->getAccountLastNameAttribute(), $this->getConfiguration()->getAccountEmailAttribute(), ); + + if ($this->getConfiguration()->getMembershipUseAttributeFromUser()) { + $attribs[] = $this->getConfiguration()->getAccountMembershipAttribute(); + } + + return $attribs; } } \ No newline at end of file diff --git a/src/WMS/Ldap/Repository/GroupRepository.php b/src/WMS/Ldap/Repository/GroupRepository.php index <HASH>..<HASH> 100644 --- a/src/WMS/Ldap/Repository/GroupRepository.php +++ b/src/WMS/Ldap/Repository/GroupRepository.php @@ -52,6 +52,7 @@ class GroupRepository extends AbstractRepository /** * @param $groupName + * * @return Entity\GroupNode|null */ public function findByGroupName($groupName) @@ -65,6 +66,7 @@ class GroupRepository extends AbstractRepository /** * @param Entity\AccountNode $account + * * @return Collection\GroupNodeCollection */ public function findGroupsForAccount(Entity\AccountNode $account) @@ -80,6 +82,7 @@ class GroupRepository extends AbstractRepository /** * @param Entity\AccountNode $account + * * @return Collection\GroupNodeCollection */ protected function findGroupsForAccountUsingAccountAttribute(Entity\AccountNode $account) @@ -112,6 +115,7 @@ class GroupRepository extends AbstractRepository /** * @param Entity\AccountNode $account + * * @return Collection\GroupNodeCollection */ protected function findGroupsForAccountUsingGroupAttribute(Entity\AccountNode $account) @@ -137,11 +141,17 @@ class GroupRepository extends AbstractRepository protected function getSearchAttributes() { - return array( + $attribs = array( 'dn', 'objectClass', $this->getConfiguration()->getGroupNameAttribute(), $this->getConfiguration()->getGroupDescriptionAttribute(), ); + + if ($this->getConfiguration()->getMembershipUseAttributeFromGroup()) { + $attribs[] = $this->getConfiguration()->getGroupMembersAttribute(); + } + + return $attribs; } } \ No newline at end of file
Fixed problems when fetching groups from AD
WolfMicrosystems_ldap
train
7def04863790845d2174cae58b3f7ea7b1a81701
diff --git a/state/backups/backups_test.go b/state/backups/backups_test.go
index <HASH>..<HASH> 100644
--- a/state/backups/backups_test.go
+++ b/state/backups/backups_test.go
@@ -105,8 +105,6 @@ func (s *backupsSuite) TestCreateOkay(c *gc.C) {
 
 	filesToBackUp, _ := backups.ExposeCreateArgs(received)
 	c.Check(filesToBackUp, jc.SameContents, []string{"<some file>"})
-	err = receivedDBInfo.Validate()
-	c.Assert(err, gc.IsNil)
 	c.Check(receivedDBInfo.Address, gc.Equals, "a")
 	c.Check(receivedDBInfo.Username, gc.Equals, "b")
 	c.Check(receivedDBInfo.Password, gc.Equals, "c")
diff --git a/state/backups/db.go b/state/backups/db.go
index <HASH>..<HASH> 100644
--- a/state/backups/db.go
+++ b/state/backups/db.go
@@ -40,23 +40,6 @@ type DBConnInfo struct {
 	Password string
 }
 
-// Validate checks the DB connection info. If it isn't valid for use in
-// juju state backups, it returns an error. Make sure that the ConnInfo
-// values do not change between the time you call this method and when
-// you actually need the values.
-func (ci *DBConnInfo) Validate() error {
-	if ci.Address == "" {
-		return errors.New("missing address")
-	}
-	if ci.Username == "" {
-		return errors.New("missing username")
-	}
-	if ci.Password == "" {
-		return errors.New("missing password")
-	}
-	return nil
-}
-
 // DBInfo wraps all the DB-specific information backups needs to dump
 // and restore the database.
 type DBInfo struct {
@@ -155,11 +138,6 @@ type mongoDumper struct {
 // NewDBDumper returns a new value with a Dump method for dumping the
 // juju state database.
 func NewDBDumper(info DBInfo) (DBDumper, error) {
-	err := info.Validate()
-	if err != nil {
-		return nil, errors.Trace(err)
-	}
-
 	mongodumpPath, err := getMongodumpPath()
 	if err != nil {
 		return nil, errors.Annotate(err, "mongodump not available")
diff --git a/state/backups/db_info_test.go b/state/backups/db_info_test.go
index <HASH>..<HASH> 100644
--- a/state/backups/db_info_test.go
+++ b/state/backups/db_info_test.go
@@ -18,34 +18,6 @@ type connInfoSuite struct {
 	testing.BaseSuite
 }
 
-func (s *connInfoSuite) TestDBConnInfoValidateOkay(c *gc.C) {
-	connInfo := &backups.DBConnInfo{"a", "b", "c"}
-	err := connInfo.Validate()
-
-	c.Check(err, gc.IsNil)
-}
-
-func (s *connInfoSuite) TestDBConnInfoCheckMissingAddress(c *gc.C) {
-	connInfo := &backups.DBConnInfo{"", "b", "c"}
-	err := connInfo.Validate()
-
-	c.Check(err, gc.ErrorMatches, "missing address")
-}
-
-func (s *connInfoSuite) TestDBConnInfoCheckMissingUsername(c *gc.C) {
-	connInfo := &backups.DBConnInfo{"a", "", "c"}
-	err := connInfo.Validate()
-
-	c.Check(err, gc.ErrorMatches, "missing username")
-}
-
-func (s *connInfoSuite) TestDBConnInfoCheckMissingPassword(c *gc.C) {
-	connInfo := &backups.DBConnInfo{"a", "b", ""}
-	err := connInfo.Validate()
-
-	c.Check(err, gc.ErrorMatches, "missing password")
-}
-
 func (s *connInfoSuite) TestNewMongoConnInfoOkay(c *gc.C) {
 	tag, err := names.ParseTag("machine-0")
 	c.Assert(err, gc.IsNil)
@@ -56,10 +28,7 @@ func (s *connInfoSuite) TestNewMongoConnInfoOkay(c *gc.C) {
 		Tag:      tag,
 		Password: "eggs",
 	}
-	connInfo := backups.NewMongoConnInfo(&mgoInfo)
-	err = connInfo.Validate()
+	connInfo := backups.NewMongoConnInfo(&mgoInfo)
 
-	c.Assert(err, gc.IsNil)
 	c.Check(connInfo.Address, gc.Equals, "localhost:8080")
 	c.Check(connInfo.Username, gc.Equals, "machine-0")
@@ -73,9 +42,9 @@ func (s *connInfoSuite) TestNewMongoConnInfoMissingTag(c *gc.C) {
 		},
 		Password: "eggs",
 	}
-	connInfo := backups.NewMongoConnInfo(&mgoInfo)
-	err := connInfo.Validate()
-	c.Check(err, gc.ErrorMatches, "missing username")
+	connInfo := backups.NewMongoConnInfo(&mgoInfo)
+	c.Check(connInfo.Username, gc.Equals, "")
+	c.Check(connInfo.Address, gc.Equals, "localhost:8080")
+	c.Check(connInfo.Password, gc.Equals, "eggs")
 }
Drop DBConnInfo.Validate.
juju_juju
train
1e458ef03d5e0e330ce99ca3df523d15a22d443e
diff --git a/src/Middleware/InitStateMiddleware.php b/src/Middleware/InitStateMiddleware.php index <HASH>..<HASH> 100644 --- a/src/Middleware/InitStateMiddleware.php +++ b/src/Middleware/InitStateMiddleware.php @@ -62,7 +62,7 @@ class InitStateMiddleware implements HTTPMiddleware public function getIsAdmin(HTTPRequest $request) { $adminPaths = static::config()->get('admin_url_paths'); - $adminPaths[] = AdminRootController::config()->get('url_base') . '/'; + $adminPaths[] = AdminRootController::admin_url(); $currentPath = rtrim($request->getURL(), '/') . '/'; foreach ($adminPaths as $adminPath) { if (substr($currentPath, 0, strlen($adminPath)) === $adminPath) {
Change source of admin URL in getIsAdmin() As per #<I> Change direct call to the AdminRootController config setting, using instead the admin_url() method on the class which provides detection via the Director rules, and the fallback to the config setting.
silverstripe_silverstripe-subsites
train
1e20f23e3d8f8dba7580671b43a6c9374aecd1dc
diff --git a/rdm/db/context.py b/rdm/db/context.py index <HASH>..<HASH> 100644 --- a/rdm/db/context.py +++ b/rdm/db/context.py @@ -165,24 +165,48 @@ class CSVConnection(SQLiteDBConnection): ''' def __init__(self, file_list): self.sqlite_database = os.path.join(tempfile.mkdtemp(), 'tempdb.sqlite3') - self.csv2db(file_list) + self.__csv2db(file_list) self.src = SQLiteDataSource(self) if not(sqlite3.sqlite_version_info[0] >= 3 and sqlite3.sqlite_version_info[1] >= 16): raise Exception('Your SQLite does not support pragma functions. Please upgrade to at least 3.16.0') self.check_connection() + def __getstate__(self): + # Copy the object's state from self.__dict__ which contains + # all our instance attributes. Always use the dict.copy() + # method to avoid modifying the original state. + + # Now store a dump of the db file. This is required for unpickled instance to work. + self.dbdump = open(self.sqlite_database, 'rb').read() + state = self.__dict__.copy() + # Remove the unpicklable entries. + del state['src'] + del state['sqlite_database'] + return state + + def __setstate__(self, state): + # Restore instance attributes + self.__dict__.update(state) + sqldb = os.path.join(tempfile.mkdtemp(), 'tempdb.sqlite3') + with open(sqldb, 'wb') as fp: + fp.write(self.dbdump) + src = SQLiteDataSource(self) + self.sqlite_database = sqldb + self.src = src + def __del__(self): - tmpdir, _ = os.path.split(self.sqlite_database) try: - os.remove(self.sqlite_database) - os.rmdir(tmpdir) + tmpdir, _ = os.path.split(self.sqlite_database) + if os.path.exists(self.sqlite_database): + os.remove(self.sqlite_database) + os.rmdir(tmpdir) except Exception as e: print('Warning: cannot remove temporary database "{}"'.format(self.sqlite_database)) def connect(self): return self.Manager(self.sqlite_database, sqlite3.connect) - def csv2db(self, file_list): + def __csv2db(self, file_list): ''' Loads csv files into an SQLite database and checks foreign keys constraints ''' @@ -255,12 +279,12 @@ class CSVConnection(SQLiteDBConnection): constraints.append('PRIMARY KEY ({})'.format(','.join(['"{}"'.format(x) for x in pkeys]))) ddl = 'CREATE TABLE "{}" (\n{}\n)'.format(tablename, ',\n'.join(declarations + constraints)) - insert = 'INSERT INTO "{}" VALUES ({})'.format(tablename, ','.join('?'*len(declarations))) + insert = 'INSERT INTO "{}" VALUES ({})'.format(tablename, ','.join('?' * len(declarations))) return ddl, insert, data def dump_sql(self, sqlfile): ''' - Dumps the in-memory database contructed from csv files into an SQLite SQL file + Dumps the database contructed from csv files into an SQLite SQL file :param sqlfile: name of the output file ''' @@ -269,16 +293,16 @@ class CSVConnection(SQLiteDBConnection): for line in con.iterdump(): fp.write('{}\n'.format(line)) - def dump_db(self, sqlite_database): + def dump_db(self, sqlite_database_file): ''' - Dumps the in-memory database contructed from csv files into an SQLite database file. + Dumps the database constructed from csv files into an SQLite database file. Python 3.7 and SQLite 3.6.11 or newer are required to use this function. ''' if not(sys.version_info.major >= 3 and sys.version_info.minor >= 7): raise EnvironmentError('Python >= 3.7 and SQLite >= 3.6.11 are required for backuping SQLite databases') with sqlite3.connect(self.sqlite_database) as con: - with sqlite3.connect(sqlite_database) as bck: + with sqlite3.connect(sqlite_database_file) as bck: con.backup(bck, pages=0)
Fixed CSV connection to support pickling
xflows_rdm
train
f05eaa902a92cc6bfd2387b61eee2dcb7ff5e1b8
diff --git a/mod/workshop/assessments.php b/mod/workshop/assessments.php index <HASH>..<HASH> 100644 --- a/mod/workshop/assessments.php +++ b/mod/workshop/assessments.php @@ -223,6 +223,7 @@ $assessment->grade = -1; // set impossible grade $assessment->timecreated = $yearfromnow; $assessment->timegraded = 0; + $assessment->timeagreed = 0; if (!$assessment->id = insert_record("workshop_assessments", $assessment)) { error("Could not insert workshop assessment!"); } @@ -231,7 +232,7 @@ print_heading_with_help(get_string("assessthissubmission", "workshop"), "grading", "workshop"); // show assessment and allow changes - workshop_print_assessment($workshop, $assessment, true, $allowcomments); + workshop_print_assessment($workshop, $assessment, true, $allowcomments, $_SERVER["HTTP_REFERER"]); } @@ -875,14 +876,19 @@ } add_to_log($course->id, "workshop", "assess", "view.php?a=$workshop->id", "$workshop->id"); - + + // set up return address + if (!$returnto = $form->returnto) { + $returnto = "view.php?id=$cm->id"; + } + // show grade if grading strategy is not zero if ($workshop->gradingstrategy) { - redirect("view.php?id=$cm->id", get_string("thegradeis", "workshop").": ".number_format($grade, 2)."% (".get_string("maximumgrade"). + redirect($returnto, get_string("thegradeis", "workshop").": ".number_format($grade, 2)."% (".get_string("maximumgrade"). " ".number_format($workshop->grade)."%)"); } else { - redirect("view.php?id=$cm->id"); + redirect($returnto); } } diff --git a/mod/workshop/lib.php b/mod/workshop/lib.php index <HASH>..<HASH> 100644 --- a/mod/workshop/lib.php +++ b/mod/workshop/lib.php @@ -707,7 +707,7 @@ function workshop_list_ungraded_assessments($workshop, $stype) { function workshop_list_user_submissions($workshop, $user) { -function workshop_print_assessment($workshop, $assessment, $allowchanges, $showcommentlinks) +function workshop_print_assessment($workshop, $assessment, $allowchanges, $showcommentlinks, $returnto) function workshop_print_assessments_by_user_for_admin($workshop, $user) { function workshop_print_assessments_for_admin($workshop, $submission) { function workshop_print_assignment_info($cm, $workshop) { @@ -2030,8 +2030,9 @@ function workshop_list_user_submissions($workshop, $user) { function workshop_print_assessment($workshop, $assessment = false, $allowchanges = false, - $showcommentlinks = false) { + $showcommentlinks = false, $returnto = '') { // $allowchanges added 14/7/03 + // $returnto added 28/8/03 global $CFG, $THEME, $USER, $WORKSHOP_SCALES, $WORKSHOP_EWEIGHTS; if (! $course = get_record("course", "id", $workshop->course)) { error("Course is misconfigured"); @@ -2131,6 +2132,7 @@ function workshop_print_assessment($workshop, $assessment = false, $allowchanges <INPUT TYPE="hidden" NAME="id" VALUE="<?PHP echo $cm->id ?>"> <input type="hidden" name="aid" value="<?PHP echo $assessment->id ?>"> <input type="hidden" name="action" value="updateassessment"> + <input type="hidden" name="returnto" value="<?PHP echo $returnto ?>"> <CENTER> <TABLE CELLPADDING=2 BORDER=1> <?PHP
Addition of returnto parameter to workshop_print_assessment function to allow return to original pages when making assessments; clearing (trivial) unassigned variable bug in the same function.
moodle_moodle
train
bc1103828c7c02031fadfff06a8f0589356d78b9
diff --git a/modules/quality-check/src/test/java/net/sf/qualitycheck/CheckTest.java b/modules/quality-check/src/test/java/net/sf/qualitycheck/CheckTest.java index <HASH>..<HASH> 100644 --- a/modules/quality-check/src/test/java/net/sf/qualitycheck/CheckTest.java +++ b/modules/quality-check/src/test/java/net/sf/qualitycheck/CheckTest.java @@ -43,4 +43,11 @@ public class CheckTest { cls.newInstance(); // exception here } + @Test + public void giveMeCoverageForMyPrivateConstructor_NumericRegularExpressionHolder() throws Exception { + // reduces only some noise in coverage report + final Constructor<Check.NumericRegularExpressionHolder> constructor = Check.NumericRegularExpressionHolder.class.getDeclaredConstructor(); + constructor.setAccessible(true); + constructor.newInstance(); + } } diff --git a/modules/quality-check/src/test/java/net/sf/qualitycheck/CheckTest_isNumber.java b/modules/quality-check/src/test/java/net/sf/qualitycheck/CheckTest_isNumber.java index <HASH>..<HASH> 100644 --- a/modules/quality-check/src/test/java/net/sf/qualitycheck/CheckTest_isNumber.java +++ b/modules/quality-check/src/test/java/net/sf/qualitycheck/CheckTest_isNumber.java @@ -1,12 +1,63 @@ +/******************************************************************************* + * Copyright 2012 André Rouél + * Copyright 2012 Dominik Seichter + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ package net.sf.qualitycheck; +import java.math.BigDecimal; +import java.math.BigInteger; + import net.sf.qualitycheck.exception.IllegalNumberArgumentException; +import net.sf.qualitycheck.exception.IllegalNumberRangeException; import org.junit.Assert; import org.junit.Test; public class CheckTest_isNumber { + private final class FakeNumber extends Number { + + private static final long serialVersionUID = -828838716306473102L; + + @Override + public double doubleValue() { + return 0; + } + + @Override + public float floatValue() { + return 0; + } + + @Override + public int intValue() { + return 0; + } + + @Override + public long longValue() { + return 0; + } + + }; + + @Test(expected=IllegalNumberArgumentException.class) + public void testUnknownClass_Fail() { + Check.isNumber("A", FakeNumber.class); + } + @Test(expected = IllegalNumberArgumentException.class) public void isNumber_decimalNumber_fail() { Check.isNumber("1.23"); @@ -57,9 +108,61 @@ public class CheckTest_isNumber { Assert.assertEquals(123, Check.isNumber("0123", "numeric")); } - @Test(expected = IllegalNumberArgumentException.class) + @Test(expected = IllegalNumberRangeException.class) public void isNumeric_longNumericString_fail() { Check.isNumber("1230000000000000000000000000"); } + @Test + public void isNumber_Byte_Ok() { + byte b = Check.isNumber("12", Byte.class).byteValue(); + Assert.assertEquals((byte)12, b); + } + + @Test + public void isNumber_Double_Ok() { + double d = Check.isNumber("12.1", Double.class).doubleValue(); + Assert.assertEquals(12.1d, d, 0.0); + } + + @Test + public void isNumber_Float_Ok() { + float f = Check.isNumber("12.1", Float.class).floatValue(); + Assert.assertEquals(12.1f, f, 0.0); + } + + @Test + public void isNumber_Short_Ok() { + short s = Check.isNumber("121", Short.class).shortValue(); + Assert.assertEquals((short)121, s); + } + + @Test + public void isNumber_Integer_Ok() { + int i = Check.isNumber("42", Integer.class).intValue(); + Assert.assertEquals(42, i); + } + + @Test + public void isNumber_Long_Ok() { + long l = Check.isNumber("-121", Long.class).longValue(); + Assert.assertEquals(-121L, l); + } + + @Test + public void isNumber_BigInteger_Ok() { + final BigInteger bi = Check.isNumber("121000099999999999999999", BigInteger.class); + Assert.assertEquals(new BigInteger("121000099999999999999999"), bi); + } + + @Test + public void isNumber_BigDecimal_Ok() { + final BigDecimal bd = Check.isNumber("121000099999999999999999.90", BigDecimal.class); + Assert.assertEquals(new BigDecimal("121000099999999999999999.90"), bd); + } + + @Test(expected=IllegalNumberArgumentException.class) + public void isNumber_BigDecimal_Fail() { + Check.isNumber("Halllo121000099999999999999999.90", "fail", BigDecimal.class); + } }
Enhanced tests to include constructor of NumericRegularExpressionHolder to get test coverage to <I>% again.
before_quality-check
train
31f98ded58dd3bcddc210d3332c62613e5741d99
diff --git a/eZ/Bundle/EzPublishCoreBundle/DependencyInjection/Configuration.php b/eZ/Bundle/EzPublishCoreBundle/DependencyInjection/Configuration.php index <HASH>..<HASH> 100644 --- a/eZ/Bundle/EzPublishCoreBundle/DependencyInjection/Configuration.php +++ b/eZ/Bundle/EzPublishCoreBundle/DependencyInjection/Configuration.php @@ -40,6 +40,7 @@ class Configuration implements ConfigurationInterface $this->addHttpCacheSection( $rootNode ); $this->addSystemSection( $rootNode ); $this->addPageSection( $rootNode ); + $this->addRouterSection( $rootNode ); return $treeBuilder; } @@ -294,4 +295,31 @@ EOT; ->end(); } + + private function addRouterSection( ArrayNodeDefinition $rootNode ) + { + $nonSAAwareInfo = <<<EOT +Route names that are not supposed to be SiteAccess aware, i.e. Routes pointing to asset generation (like assetic). +Note that you can just specify a prefix to match a selection of routes. +e.g. "_assetic_" will match "_assetic_*" +Defaults to ['_assetic_', '_wdt', '_profiler', '_configurator_'] +EOT; + $rootNode + ->children() + ->arrayNode( 'router' ) + ->children() + ->arrayNode( 'default_router' ) + ->children() + ->arrayNode( 'non_siteaccess_aware_routes' ) + ->prototype( 'scalar' )->end() + ->info( $nonSAAwareInfo ) + ->example( array( 'my_route_name', 'some_prefix_' ) ) + ->end() + ->end() + ->end() + ->end() + ->info( 'Router related settings' ) + ->end() + ->end(); + } } diff --git a/eZ/Bundle/EzPublishCoreBundle/DependencyInjection/EzPublishCoreExtension.php b/eZ/Bundle/EzPublishCoreBundle/DependencyInjection/EzPublishCoreExtension.php index <HASH>..<HASH> 100644 --- a/eZ/Bundle/EzPublishCoreBundle/DependencyInjection/EzPublishCoreExtension.php +++ b/eZ/Bundle/EzPublishCoreBundle/DependencyInjection/EzPublishCoreExtension.php @@ -64,7 +64,7 @@ class EzPublishCoreExtension extends Extension $this->registerPageConfiguration( $config, $container ); // Routing - $this->handleRouting( $container, $loader ); + $this->handleRouting( $config, $container, $loader ); // Public API loading $this->handleApiLoading( $container, $loader ); $this->handleTemplating( $container, $loader ); @@ -174,13 +174,25 @@ class EzPublishCoreExtension extends Extension /** * Handle routing parameters * + * @param array $config * @param \Symfony\Component\DependencyInjection\ContainerBuilder $container * @param \Symfony\Component\DependencyInjection\Loader\FileLoader $loader */ - private function handleRouting( ContainerBuilder $container, FileLoader $loader ) + private function handleRouting( array $config, ContainerBuilder $container, FileLoader $loader ) { $loader->load( 'routing.yml' ); $container->setAlias( 'router', 'ezpublish.chain_router' ); + + if ( isset( $config['router']['default_router']['non_siteaccess_aware_routes'] ) ) + { + $container->setParameter( + 'ezpublish.default_router.non_siteaccess_aware_routes', + array_merge( + $container->getParameter( 'ezpublish.default_router.non_siteaccess_aware_routes' ), + $config['router']['default_router']['non_siteaccess_aware_routes'] + ) + ); + } } /**
EZP-<I>: Added semantic config to add `non_siteaccess_aware_routes`
ezsystems_ezpublish-kernel
train
7070cb0e53850e9b0fe9e7620302536bdf69fa50
diff --git a/tracer/cachemanager/cachemanager.py b/tracer/cachemanager/cachemanager.py index <HASH>..<HASH> 100644 --- a/tracer/cachemanager/cachemanager.py +++ b/tracer/cachemanager/cachemanager.py @@ -11,18 +11,15 @@ class CacheManager(object): def set_tracer(self, tracer): self.tracer = tracer - def cacher(self): + def cacher(self, simstate): raise NotImplementedError("subclasses must implement this method") def cache_lookup(self): raise NotImplementedError("subclasses must implement this method") - def _prepare_cache_data(self): + def _prepare_cache_data(self, simstate): - cache_path = self.tracer.previous.copy() - self.tracer.remove_preconstraints(cache_path, to_composite_solver=False) - - state = cache_path.state + state = self.tracer.previous.state ds = None try: @@ -30,4 +27,7 @@ class CacheManager(object): except RuntimeError as e: # maximum recursion depth can be reached here l.error("unable to cache state, '%s' during pickling", e.message) + # add preconstraints to tracer + self.tracer._preconstrain_state(simstate) + return ds diff --git a/tracer/cachemanager/localcacher.py b/tracer/cachemanager/localcacher.py index <HASH>..<HASH> 100644 --- a/tracer/cachemanager/localcacher.py +++ b/tracer/cachemanager/localcacher.py @@ -30,9 +30,9 @@ class LocalCacheManager(CacheManager): with open(self._cache_file) as f: return pickle.load(f) - def cacher(self): + def cacher(self, simstate): - cdata = self._prepare_cache_data() + cdata = self._prepare_cache_data(simstate) if cdata is not None: l.warning("caching state to %s", self._cache_file) with open(self._cache_file, 'wb') as f: diff --git a/tracer/simprocedures/receive.py b/tracer/simprocedures/receive.py index <HASH>..<HASH> 100644 --- a/tracer/simprocedures/receive.py +++ b/tracer/simprocedures/receive.py @@ -3,7 +3,7 @@ from simuvex.procedures.cgc.receive import receive import logging l = logging.getLogger("tracer.simprocedures.FixedInReceive") -def cache_pass(): +def cache_pass(_): l.warning("cache_hook never set") # called when caching the state @@ -19,7 +19,7 @@ class FixedInReceive(receive): if self.state.se.any_int(self.state.posix.files[0].pos) == 0: if cache_hook is not None: - cache_hook() + cache_hook(self.state) if self.state.se.any_n_int(fd, 2) < 2: if self.state.se.any_int(fd) == 1: diff --git a/tracer/tracer.py b/tracer/tracer.py index <HASH>..<HASH> 100644 --- a/tracer/tracer.py +++ b/tracer/tracer.py @@ -724,6 +724,9 @@ class Tracer(object): preconstrain the entry state to the input ''' + if not self.preconstrain_input: + return + repair_entry_state_opts = False if so.TRACK_ACTION_HISTORY in entry_state.options: repair_entry_state_opts = True @@ -796,11 +799,13 @@ class Tracer(object): cache_tuple = self._cache_lookup() pg = None + # if we're restoring from a cache, we preconstrain if cache_tuple is not None: bb_cnt, self.cgc_flag_data, state = cache_tuple pg = self._cgc_prepare_paths(state) + self._preconstrain_state(state) self.bb_cnt = bb_cnt - else: + else: # if we're not restoring from a cache, the cacher will preconstrain pg = self._cgc_prepare_paths() return pg @@ -889,9 +894,6 @@ class Tracer(object): entry_state = state - if self.preconstrain_input: - self._preconstrain_state(entry_state) - if not self.pov: entry_state.cgc.input_size = len(self.input)
Now add tracer preconstraints only after caching state
angr_angr
train
0e54fb226bd4c6bf5a8055955a90b129d0d18cd6
diff --git a/scraper/util.py b/scraper/util.py index <HASH>..<HASH> 100644 --- a/scraper/util.py +++ b/scraper/util.py @@ -20,6 +20,14 @@ def execute(command, cwd=None): process = Popen(command, cwd=cwd, stdout=PIPE, stderr=STDOUT, shell=False) # nosec out, err = process.communicate() + + if process.returncode: + logging.error( + "Error Executing: command=%s, returncode=%d", + " ".join(command), + process.returncode, + ) + return str(out), str(err) @@ -130,7 +138,7 @@ def git_repo_to_sloc(url): cloc_json = json.loads(json_blob) sloc = cloc_json["SUM"]["code"] except json.decoder.JSONDecodeError: - logger.debug("Error Decoding: url=%s, out=%s", url, out) + logger.error("Error Decoding: url=%s, out=%s", url, out) sloc = 0 logger.debug("SLOC: url=%s, sloc=%d", url, sloc)
Check the return code of executed commands Add a check of the returncode of the command executed in scraper.util.execute() and output an error message if it is not zero. Additionally change the logging level from DEBUG to ERROR for failures to process the JSON output from cloc. These combined will make it more clear when failures in core functionality are occurring.
LLNL_scraper
train
920070e42b33c5bdaf62977ed22aa867a455ffd5
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb b/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb index <HASH>..<HASH> 100644 --- a/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb +++ b/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb @@ -71,7 +71,7 @@ module ActiveRecord when Date, Time then quoted_date(value) when Symbol then value.to_s else - YAML.dump(value) + raise TypeError, "can't cast #{value.class} to #{column.type}" end end diff --git a/activerecord/test/cases/adapters/sqlite3/quoting_test.rb b/activerecord/test/cases/adapters/sqlite3/quoting_test.rb index <HASH>..<HASH> 100644 --- a/activerecord/test/cases/adapters/sqlite3/quoting_test.rb +++ b/activerecord/test/cases/adapters/sqlite3/quoting_test.rb @@ -70,9 +70,9 @@ module ActiveRecord assert_equal bd.to_f, @conn.type_cast(bd, nil) end - def test_type_cast_unknown + def test_type_cast_unknown_should_raise_error obj = Class.new.new - assert_equal YAML.dump(obj), @conn.type_cast(obj, nil) + assert_raise(TypeError) { @conn.type_cast(obj, nil) } end def test_quoted_id diff --git a/activerecord/test/cases/base_test.rb b/activerecord/test/cases/base_test.rb index <HASH>..<HASH> 100644 --- a/activerecord/test/cases/base_test.rb +++ b/activerecord/test/cases/base_test.rb @@ -992,10 +992,9 @@ class BasicsTest < ActiveRecord::TestCase assert_equal "b", duped_topic.title # test if the attribute values have been duped - topic.title = {"a" => "b"} duped_topic = topic.dup - duped_topic.title["a"] = "c" - assert_equal "b", topic.title["a"] + duped_topic.title.replace "c" + assert_equal "a", topic.title # test if attributes set as part of after_initialize are duped correctly assert_equal topic.author_email_address, duped_topic.author_email_address @@ -1006,8 +1005,7 @@ class BasicsTest < ActiveRecord::TestCase assert_not_equal duped_topic.id, topic.id duped_topic.reload - # FIXME: I think this is poor behavior, and will fix it with #5686 - assert_equal({'a' => 'c'}.to_yaml, duped_topic.title) + assert_equal("c", duped_topic.title) end def test_dup_with_aggregate_of_same_name_as_attribute
Don't type-cast unknown types to YAML.
rails_rails
train
53fa3d33dffeff53966b0ef71f350466d0aafbcb
diff --git a/ehforwarderbot/message.py b/ehforwarderbot/message.py index <HASH>..<HASH> 100644 --- a/ehforwarderbot/message.py +++ b/ehforwarderbot/message.py @@ -301,6 +301,9 @@ class EFBMsgCommand: from the ``chat``, this function should be called on the ``author``'s module. + The method specified must return either a ``str`` as result or ``None`` + if this message will be further edited or deleted for interactions. + Attributes: name (str): Human-friendly name of the command. callable_name (str): Callable name of the command.
Allow message commands to return None if further advanced interactions are expected.
blueset_ehForwarderBot
train
450959d3ad2180c22fe7343760dbb61b0bf2bdef
diff --git a/lib/appsignal.rb b/lib/appsignal.rb index <HASH>..<HASH> 100644 --- a/lib/appsignal.rb +++ b/lib/appsignal.rb @@ -752,7 +752,7 @@ module Appsignal end deprecate :is_ignored_action?, :none, 2017, 3 - # Convenience method for skipping instrumentations around a block of code. + # Convenience method for skipping instrumentation around a block of code. # # @example # Appsignal.without_instrumentation do diff --git a/lib/appsignal/transaction.rb b/lib/appsignal/transaction.rb index <HASH>..<HASH> 100644 --- a/lib/appsignal/transaction.rb +++ b/lib/appsignal/transaction.rb @@ -277,10 +277,12 @@ module Appsignal alias_method :add_exception, :set_error def start_event + return if paused? @ext.start_event(self.class.garbage_collection_profiler.total_time) end def finish_event(name, title, body, body_format = Appsignal::EventFormatter::DEFAULT) + return if paused? @ext.finish_event( name, title || BLANK, @@ -291,6 +293,7 @@ module Appsignal end def record_event(name, title, body, duration, body_format = Appsignal::EventFormatter::DEFAULT) + return if paused? @ext.record_event( name, title || BLANK, diff --git a/spec/lib/appsignal/transaction_spec.rb b/spec/lib/appsignal/transaction_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/appsignal/transaction_spec.rb +++ b/spec/lib/appsignal/transaction_spec.rb @@ -214,7 +214,7 @@ describe Appsignal::Transaction do context "pausing" do describe "#pause!" do - it "should change the pause flag to true" do + it "changes the pause flag to true" do expect do transaction.pause! end.to change(transaction, :paused).from(false).to(true) @@ -224,7 +224,7 @@ describe Appsignal::Transaction do describe "#resume!" do before { transaction.pause! } - it "should change the pause flag to false" do + it "changes the pause flag to false" do expect do transaction.resume! end.to change(transaction, :paused).from(true).to(false) @@ -232,14 +232,16 @@ describe Appsignal::Transaction do end describe "#paused?" do - it "should return the pause state" do - expect(transaction.paused?).to be_falsy + context "when not paused" do + it "return false" do + expect(transaction.paused?).to be_falsy + end end context "when paused" do before { transaction.pause! } - it "should return the pause state" do + it "returns true" do expect(transaction.paused?).to be_truthy end end @@ -690,11 +692,20 @@ describe Appsignal::Transaction do end describe "#start_event" do - it "should start the event in the extension" do + it "starts the event in the extension" do expect(transaction.ext).to receive(:start_event).with(0).and_call_original transaction.start_event end + + context "when transaction is paused" do + it "does not start the event" do + transaction.pause! + expect(transaction.ext).to_not receive(:start_event) + + transaction.start_event + end + end end describe "#finish_event" do @@ -737,6 +748,15 @@ describe Appsignal::Transaction do nil ) end + + context "when transaction is paused" do + it "does not finish the event" do + transaction.pause! + expect(transaction.ext).to_not receive(:finish_event) + + transaction.start_event + end + end end describe "#record_event" do @@ -783,6 +803,21 @@ describe Appsignal::Transaction do nil ) end + + context "when transaction is paused" do + it "does not record the event" do + transaction.pause! 
+ expect(transaction.ext).to_not receive(:record_event) + + transaction.record_event( + "name", + nil, + nil, + 1000, + nil + ) + end + end end describe "#instrument" do diff --git a/spec/lib/appsignal_spec.rb b/spec/lib/appsignal_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/appsignal_spec.rb +++ b/spec/lib/appsignal_spec.rb @@ -831,25 +831,25 @@ describe Appsignal do end describe ".without_instrumentation" do - let(:transaction) { double } + let(:transaction) { http_request_transaction } before { allow(Appsignal::Transaction).to receive(:current).and_return(transaction) } - it "should pause and unpause the transaction around the block" do - expect(transaction).to receive(:pause!) - expect(transaction).to receive(:resume!) + it "does not record events on the transaction" do + expect(transaction).to receive(:pause!).and_call_original + expect(transaction).to receive(:resume!).and_call_original + Appsignal.instrument("register.this.event") { :do_nothing } + Appsignal.without_instrumentation do + Appsignal.instrument("dont.register.this.event") { :do_nothing } + end + expect(transaction.to_h["events"].map { |e| e["name"] }) + .to match_array("register.this.event") end context "without transaction" do let(:transaction) { nil } it "should not crash" do - # just execute the after block - end - end - - after do - Appsignal.without_instrumentation do - # nothing + Appsignal.without_instrumentation { :do_nothing } end end end
Fix Transaction.pause! (#<I>) `Appsignal.without_instrumentation` was using `Transaction.pause!`, which no longer worked. Events were still registered on the transaction even though the code was wrapped in `Appsignal.without_instrumentation` and the transaction was paused. This change adds a guard for every Transaction method that does something with events. Add a spec that tests if the events are actually registered on the transaction or not, using `Transaction#to_h`.
appsignal_appsignal-ruby
train
3dcb6179bbc2f519e10d242ec98706660592cf6b
diff --git a/lib/Query.js b/lib/Query.js index <HASH>..<HASH> 100644 --- a/lib/Query.js +++ b/lib/Query.js @@ -51,8 +51,8 @@ Query.prototype = { }, limit: function(limit) { - if(isNaN(limit) || ((limit|0) != limit)) { - throw new Error("Limit should be an integer"); + if(isNaN(limit) || ((limit|0) != limit) || (limit|0) < 1) { + throw new Error("Limit should be an natural number"); } this.Limit = limit; @@ -82,7 +82,6 @@ Query.prototype = { if (data.LastEvaluatedKey) { if (self.Limit != null && self.Limit < response.length) { loop(data.LastEvaluatedKey) - return } else { cb(null, response) } diff --git a/lib/Scan.js b/lib/Scan.js index <HASH>..<HASH> 100644 --- a/lib/Scan.js +++ b/lib/Scan.js @@ -29,6 +29,15 @@ Scan.prototype = { return this }, + limit: function(limit) { + if(isNaN(limit) || ((limit|0) != limit) || (limit|0) < 1) { + throw new Error("Limit should be an natural number"); + } + + this.Limit = limit; + return this; + }, + fetch: function(cb) { var self = this , response = [] @@ -46,9 +55,15 @@ Scan.prototype = { response.push(Attributes.prototype.parse(item)) }) - if (data.LastEvaluatedKey) loop(data.LastEvaluatedKey) - - else cb(null, response) + if (data.LastEvaluatedKey) { + if (self.Limit != null && self.Limit < response.length) { + loop(data.LastEvaluatedKey) + } else { + cb(null, response) + } + } else { + cb(null, response) + } } ) }(this.ExclusiveStartKey)
Add limit(limit) function to Limit object, too
jed_dynamo
train
2bbaefefef031e8efc5190c30734a1d3e1048439
diff --git a/src/notebook/components/notebook.js b/src/notebook/components/notebook.js index <HASH>..<HASH> 100644 --- a/src/notebook/components/notebook.js +++ b/src/notebook/components/notebook.js @@ -12,7 +12,13 @@ import { displayOrder, transforms } from 'transformime-react'; import Cell from './cell/cell'; import DraggableCell from './cell/draggable-cell'; import CellCreator from './cell/cell-creator'; -import { executeCell, focusNextCell, moveCell } from '../actions'; +import { + executeCell, + focusNextCell, + moveCell, + copyCell, + pasteCell, +} from '../actions'; import complete from '../api/messaging/completion'; @@ -69,6 +75,8 @@ class Notebook extends React.Component { this.keyDown = this.keyDown.bind(this); this.moveCell = this.moveCell.bind(this); this.getCompletions = this.getCompletions.bind(this); + this.copyCell = this.copyCell.bind(this); + this.pasteCell = this.pasteCell.bind(this); } componentDidMount() { @@ -128,9 +136,24 @@ class Notebook extends React.Component { this.props.dispatch(moveCell(sourceId, destinationId, above)); } + copyCell() { + console.log(this.props.focusedCell); + this.props.dispatch(copyCell(this.props.focusedCell)); + } + + pasteCell() { + this.props.dispatch(pasteCell()); + } + keyDown(e) { if (e.keyCode !== 13) { + const cmdOrCtrl = e.ctrlKey; + if (cmdOrCtrl && e.keyCode === 67) { + this.copyCell(); + } else if (cmdOrCtrl && e.keyCode === 86) { + this.pasteCell(); + } return; }
Add cell copy/paste to notebook component
nteract_nteract
train
aeb458e4b7e0f6df1d900202f3c1c2a7ad7384f7
diff --git a/xmlnuke-php5/src/Xmlnuke/Util/CreatePhp5Project.php b/xmlnuke-php5/src/Xmlnuke/Util/CreatePhp5Project.php index <HASH>..<HASH> 100644 --- a/xmlnuke-php5/src/Xmlnuke/Util/CreatePhp5Project.php +++ b/xmlnuke-php5/src/Xmlnuke/Util/CreatePhp5Project.php @@ -264,8 +264,6 @@ class CreatePhp5Project $contents = file_get_contents($CONFIG); $contents = str_replace($xmlnukePathConfig, $PHPDIR, $contents); file_put_contents($CONFIG, $contents); - - throw new Exception("Config points to '$xmlnukePathConfig' and the script is running on '$PHPDIR'\n\nYour config is now updated. Please run it again;"); }
Removed error raised when the found config differed
byjg_xmlnuke
train
9c7afe08755d913e553ea5d60039c95fbfebb3cd
diff --git a/djrichtextfield/models.py b/djrichtextfield/models.py index <HASH>..<HASH> 100644 --- a/djrichtextfield/models.py +++ b/djrichtextfield/models.py @@ -6,8 +6,6 @@ from djrichtextfield.widgets import RichTextWidget class RichTextField(models.TextField): - _south_introspects = True - def __init__(self, *args, **kwargs): self.field_settings = None if 'field_settings' in kwargs:
Remove unused _south_introspects member
jaap3_django-richtextfield
train
1e26f5c71ac22a59561ec7ffb7cbf02621db039a
diff --git a/testUSB1.py b/testUSB1.py index <HASH>..<HASH> 100755 --- a/testUSB1.py +++ b/testUSB1.py @@ -7,8 +7,8 @@ import libusb1 from ctypes import pointer if sys.version_info[0] == 3: - buff = b'\x00\xff' - other_buff = b'foo' + buff = bytes([0, 0xff]) + other_buff = bytes((ord(x) for x in 'foo')) else: buff = '\x00\xff' other_buff = 'foo' diff --git a/usb1.py b/usb1.py index <HASH>..<HASH> 100644 --- a/usb1.py +++ b/usb1.py @@ -37,7 +37,7 @@ else: 'describe']) if sys.version_info[0] == 3: - BYTE = b'\x00' + BYTE = bytes([0]) xrange = range long = int else:
Avoid binary literal syntax, for <I> compatibility.
vpelletier_python-libusb1
train
271df797446fb36999afe3dc9e797129f55378d9
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -89,11 +89,12 @@ setup( install_requires=['numpy', 'nose', 'cython'], classifiers=[ 'Environment :: Console', - 'Intended Audience :: Science/Research', 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', - 'Topic :: Science/Engineering :: Molecular Science' + 'Topic :: Scientific/Engineering :: Physics', + 'Topic :: Scientific/Engineering :: Chemistry', + 'Intended Audience :: Science/Research', ], )
Fix classifiers in setup.py
molmod_molmod
train
f11a73489be35ad8a41f1ca956e72336fb276d8a
diff --git a/packages/gluestick/test/lib/server/DoctypeStream.test.js b/packages/gluestick/test/lib/server/DoctypeStream.test.js index <HASH>..<HASH> 100644 --- a/packages/gluestick/test/lib/server/DoctypeStream.test.js +++ b/packages/gluestick/test/lib/server/DoctypeStream.test.js @@ -4,14 +4,10 @@ import path from 'path'; import DoctypeStream from '../../../src/lib/server/DoctypeStream'; class WriteStream extends Writable { - constructor() { - super(); - this.data = []; - } + data = []; - _write(chunk, enc, cb) { - this.data.push(chunk); - cb(); + async _write(chunk) { + await this.data.push(chunk); } }
Fix async function with DoctypeStream when running all tests.
TrueCar_gluestick
train
34b80b8955dc64b4e5d3d4059ccd6ae25c4962c3
diff --git a/build/karma.conf.js b/build/karma.conf.js index <HASH>..<HASH> 100644 --- a/build/karma.conf.js +++ b/build/karma.conf.js @@ -33,11 +33,15 @@ const detectBrowsers = { return debug ? ['Chrome'] : ['ChromeHeadless']; } + if (availableBrowser.includes('Chromium')) { + return debug ? ['Chromium'] : ['ChromiumHeadless']; + } + if (availableBrowser.includes('Firefox')) { return debug ? ['Firefox'] : ['FirefoxHeadless']; } - throw new Error('Please install Firefox or Chrome'); + throw new Error('Please install Chrome, Chromium or Firefox'); } };
Karma: Allow use of Chromium (#<I>) * Karma: Allow use of Chromium * fix lint error
cast-org_figuration
train
560b250ff6868f140264036fc90ccb3e608d2b7d
diff --git a/sources/index.spec.js b/sources/index.spec.js index <HASH>..<HASH> 100644 --- a/sources/index.spec.js +++ b/sources/index.spec.js @@ -4,26 +4,8 @@ const {expect} = require('chai') let element -before(function addExternalStyle() { - const style = document.createElement('style') - style.innerHTML = 'div {text-transform: uppercase}' - document - .querySelector('head') - .appendChild(style) -}) - -beforeEach(function createElement() { - element = document.createElement('div') - document.body.appendChild(element) - element.style.color = 'red' - element.style.fontSize = '12px' - element.style.lineHeight = '1em' - element.style.margin = '2em auto' - element.style.padding = '0 10px' - element.style.height = '50vh' - element.style.width = '50vw' - element.style.boxShadow = '0 0 10px red' -}) +before(addExternalStyle) +beforeEach(createElement) describe('chai-style', () => { describe('module', () => { @@ -193,3 +175,24 @@ describe('chai-style', () => { }) }) }) + +function addExternalStyle() { + const style = document.createElement('style') + style.innerHTML = 'div {text-transform: uppercase}' + document + .querySelector('head') + .appendChild(style) +} + +function createElement() { + element = document.createElement('div') + document.body.appendChild(element) + element.style.color = 'red' + element.style.fontSize = '12px' + element.style.lineHeight = '1em' + element.style.margin = '2em auto' + element.style.padding = '0 10px' + element.style.height = '50vh' + element.style.width = '50vw' + element.style.boxShadow = '0 0 10px red' +}
better organization of befores in spec file
darlanmendonca_chai-style
train
380cb15edba71ab026a46c08351f80d93ce3383f
diff --git a/gcimagebundle/gcimagebundlelib/imagebundle.py b/gcimagebundle/gcimagebundlelib/imagebundle.py index <HASH>..<HASH> 100755 --- a/gcimagebundle/gcimagebundlelib/imagebundle.py +++ b/gcimagebundle/gcimagebundlelib/imagebundle.py @@ -82,6 +82,7 @@ def SetupArgsParser(): def VerifyArgs(parser, options): """Verifies that commandline flags are consistent.""" + return absolute_output_directory = utils.ExpandPath(options.output_directory) if not absolute_output_directory: parser.error('output bundle directory must be specified.') @@ -160,12 +161,11 @@ def PrintVersionInfo(): logging.info('version 1.1.0') -def GetTargetFilesystem(options): +def GetTargetFilesystem(options, guest_platform): if options.file_system: return options.file_system else: - fs_table = utils.GetFilesystemTable(fs_path_filter=options.disk) - return fs_table[0]['type'] + return guest_platform.GetPreferredFilesystemType() def main(): @@ -189,7 +189,7 @@ def main(): temp_file_name = tempfile.mktemp(dir=scratch_dir, suffix='.tar.gz') - file_system = GetTargetFilesystem(options) + file_system = GetTargetFilesystem(options, guest_platform) logging.info('file system = %s', file_system) logging.info('disk size = %s bytes', options.fs_size) bundle = block_disk.RootFsRaw(options.fs_size, file_system) diff --git a/gcimagebundle/gcimagebundlelib/linux.py b/gcimagebundle/gcimagebundlelib/linux.py index <HASH>..<HASH> 100644 --- a/gcimagebundle/gcimagebundlelib/linux.py +++ b/gcimagebundle/gcimagebundlelib/linux.py @@ -129,3 +129,7 @@ class LinuxPlatform(os_platform.Platform): def Overwrite(self, filename, arcname, tmpdir='/tmp'): """Overwrites specified file if needed for the Linux platform.""" pass + + def GetPreferredFilesystemType(self): + """Return the optimal filesystem supported for the platform.""" + return 'ext4' diff --git a/gcimagebundle/gcimagebundlelib/sle.py b/gcimagebundle/gcimagebundlelib/sle.py index <HASH>..<HASH> 100644 --- a/gcimagebundle/gcimagebundlelib/sle.py +++ b/gcimagebundle/gcimagebundlelib/sle.py @@ -29,3 +29,6 @@ class SLE(suse.SUSE): def __init__(self): super(SLE, self).__init__() + + def GetPreferredFilesystemType(self): + return 'ext3'
SLE's filesystem is now ext3. Temporary validation bypass.
GoogleCloudPlatform_compute-image-packages
train
457c60bc96072969096485237bb6cd2ef36be4fc
diff --git a/java/src/com/google/template/soy/jbcsrc/CompiledTemplateRegistry.java b/java/src/com/google/template/soy/jbcsrc/CompiledTemplateRegistry.java index <HASH>..<HASH> 100644 --- a/java/src/com/google/template/soy/jbcsrc/CompiledTemplateRegistry.java +++ b/java/src/com/google/template/soy/jbcsrc/CompiledTemplateRegistry.java @@ -19,27 +19,20 @@ package com.google.template.soy.jbcsrc; import com.google.common.collect.ImmutableBiMap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.template.soy.base.internal.SanitizedContentKind; import com.google.template.soy.soytree.TemplateMetadata; import com.google.template.soy.soytree.TemplateRegistry; import com.google.template.soy.types.TemplateType; -import java.util.HashMap; -import java.util.Map; -import java.util.Optional; -import javax.annotation.Nullable; /** A registry of information about every compiled template. */ final class CompiledTemplateRegistry { - private final ImmutableBiMap<String, CompiledTemplateMetadata> templateNameToMetadata; - private final ImmutableBiMap<String, CompiledTemplateMetadata> classNameToMetadata; - private final ImmutableMap<String, Optional<SanitizedContentKind>> deltemplateNameToContentKind; + private final ImmutableMap<String, CompiledTemplateMetadata> templateNameToMetadata; + private final ImmutableMap<String, CompiledTemplateMetadata> classNameToMetadata; private final ImmutableSet<String> delegateTemplateNames; CompiledTemplateRegistry(TemplateRegistry registry) { - Map<String, Optional<SanitizedContentKind>> deltemplateNameToContentKind = new HashMap<>(); - ImmutableBiMap.Builder<String, CompiledTemplateMetadata> templateToMetadata = + ImmutableMap.Builder<String, CompiledTemplateMetadata> templateToMetadata = ImmutableBiMap.builder(); - ImmutableBiMap.Builder<String, CompiledTemplateMetadata> classToMetadata = + ImmutableMap.Builder<String, CompiledTemplateMetadata> classToMetadata = ImmutableBiMap.builder(); ImmutableSet.Builder<String> delegateTemplateNames = ImmutableSet.builder(); for (TemplateMetadata template : registry.getAllTemplates()) { @@ -49,22 +42,13 @@ final class CompiledTemplateRegistry { classToMetadata.put(metadata.typeInfo().className(), metadata); if (template.getTemplateKind() == TemplateType.TemplateKind.DELTEMPLATE) { delegateTemplateNames.add(template.getTemplateName()); - // all delegates are guaranteed to have the same content kind by the - // checkdelegatesvisitor - deltemplateNameToContentKind.put( - template.getDelTemplateName(), Optional.ofNullable(template.getContentKind())); } } this.templateNameToMetadata = templateToMetadata.build(); this.classNameToMetadata = classToMetadata.build(); - this.deltemplateNameToContentKind = ImmutableMap.copyOf(deltemplateNameToContentKind); this.delegateTemplateNames = delegateTemplateNames.build(); } - ImmutableSet<String> getTemplateNames() { - return templateNameToMetadata.keySet(); - } - /** Returns the names of all delegate template implementations. */ ImmutableSet<String> getDelegateTemplateNames() { return delegateTemplateNames; @@ -76,22 +60,7 @@ final class CompiledTemplateRegistry { } /** Returns information about the generated class for the given fully qualified template name. */ - CompiledTemplateMetadata getTemplateInfoByClassName(String templateName) { - return classNameToMetadata.get(templateName); - } - - /** - * Returns the {@link SanitizedContentKind} (if any) of a deltemplate. 
- * - * @throws IllegalArgumentException if it is unknown because there are no implementations of the - * delegate available at compile time. - */ - @Nullable - SanitizedContentKind getDelTemplateContentKind(String delTemplateName) { - return deltemplateNameToContentKind.get(delTemplateName).orElse(null); - } - - boolean hasDelTemplateDefinition(String delTemplateName) { - return deltemplateNameToContentKind.containsKey(delTemplateName); + CompiledTemplateMetadata getTemplateInfoByClassName(String className) { + return classNameToMetadata.get(className); } } diff --git a/testdata/javascript/soy_usegoog_lib.js b/testdata/javascript/soy_usegoog_lib.js index <HASH>..<HASH> 100644 --- a/testdata/javascript/soy_usegoog_lib.js +++ b/testdata/javascript/soy_usegoog_lib.js @@ -6672,6 +6672,7 @@ goog.provide('goog.object'); * @param {*} v2 The second value to compare. * @return {boolean} Whether two values are not observably distinguishable. * @see http://wiki.ecmascript.org/doku.php?id=harmony:egal + * @deprecated Use Object.is */ goog.object.is = function(v, v2) { if (v === v2) { @@ -7156,15 +7157,11 @@ goog.object.equals = function(a, b) { * @template K,V */ goog.object.clone = function(obj) { - // We cannot use the prototype trick because a lot of methods depend on where - // the actual key is set. - const res = {}; for (const key in obj) { res[key] = obj[key]; } return res; - // We could also use goog.mixin but I wanted this to be independent from that. };
Remove some dead functionality from CompiledTemplateRegistry Also, switch the internal maps from BiMaps to normal Maps. Profiling reveals that a lot of time is spent hashing CompiledTemplateMetadata objects but we never actually need to do a reverse lookup. GITHUB_BREAKING_CHANGES=none ------------- Created by MOE: <URL>
google_closure-templates
train
ebaf8099df207e1226398d93f05d6672f65eac38
diff --git a/ignite/handlers/lr_finder.py b/ignite/handlers/lr_finder.py index <HASH>..<HASH> 100644 --- a/ignite/handlers/lr_finder.py +++ b/ignite/handlers/lr_finder.py @@ -137,6 +137,22 @@ class FastaiLRFinder: def _log_lr_and_loss(self, trainer: Engine, output_transform: Callable, smooth_f: float, diverge_th: float) -> None: output = trainer.state.output loss = output_transform(output) + if not isinstance(loss, float): + if isinstance(loss, torch.Tensor): + if (loss.ndimension() == 0) or (loss.ndimension() == 1 and len(loss) == 1): + loss = loss.item() + else: + raise ValueError( + "if output of the engine is torch.Tensor, then " + "it must be 0d torch.Tensor or 1d torch.Tensor with 1 element, " + f"but got torch.Tensor of shape {loss.shape}" + ) + else: + raise TypeError( + "output of the engine should be of type float or 0d torch.Tensor " + "or 1d torch.Tensor with 1 element, " + f"but got output of type {type(loss).__name__}" + ) loss = idist.all_reduce(loss) lr = self._lr_schedule.get_param() # type: ignore[union-attr] self._history["lr"].append(lr) diff --git a/tests/ignite/contrib/handlers/test_lr_finder.py b/tests/ignite/contrib/handlers/test_lr_finder.py index <HASH>..<HASH> 100644 --- a/tests/ignite/contrib/handlers/test_lr_finder.py +++ b/tests/ignite/contrib/handlers/test_lr_finder.py @@ -308,6 +308,39 @@ def test_detach_terminates(lr_finder, to_save, dummy_engine, dataloader, recwarn assert len(recwarn) == 0 +def test_engine_output_type(lr_finder, dummy_engine, optimizer): + from ignite.handlers.param_scheduler import PiecewiseLinear + + dummy_engine.state.iteration = 1 + dummy_engine.state.output = [10] + with pytest.raises(TypeError, match=r"output of the engine should be of type float or 0d torch.Tensor"): + lr_finder._log_lr_and_loss(dummy_engine, output_transform=lambda x: x, smooth_f=0, diverge_th=1) + + dummy_engine.state.output = (10, 5) + with pytest.raises(TypeError, match=r"output of the engine should be of type float or 0d torch.Tensor"): + lr_finder._log_lr_and_loss(dummy_engine, output_transform=lambda x: x, smooth_f=0, diverge_th=1) + + dummy_engine.state.output = torch.tensor([1, 2], dtype=torch.float32) + with pytest.raises(ValueError, match=r"if output of the engine is torch.Tensor"): + lr_finder._log_lr_and_loss(dummy_engine, output_transform=lambda x: x, smooth_f=0, diverge_th=1) + + lr_finder._lr_schedule = PiecewiseLinear( + optimizer, param_name="lr", milestones_values=[(0, optimizer.param_groups[0]["lr"]), (100, 10)] + ) + + dummy_engine.state.output = torch.tensor(10.0, dtype=torch.float32) + lr_finder._history = {"lr": [], "loss": []} + lr_finder._log_lr_and_loss(dummy_engine, output_transform=lambda x: x, smooth_f=0, diverge_th=1) + loss = lr_finder._history["loss"][-1] + assert type(loss) == float + + dummy_engine.state.output = torch.tensor([10.0], dtype=torch.float32) + lr_finder._history = {"lr": [], "loss": []} + lr_finder._log_lr_and_loss(dummy_engine, output_transform=lambda x: x, smooth_f=0, diverge_th=1) + loss = lr_finder._history["loss"][-1] + assert type(loss) == float + + def test_lr_suggestion_unexpected_curve(lr_finder, to_save, dummy_engine, dataloader): with lr_finder.attach(dummy_engine, to_save) as trainer_with_finder: trainer_with_finder.run(dataloader)
Handled different `engine.state.output` types for LRFinder (#<I>) * handle different output types and add tests * edit TypeError * edit match * add 0d tensor check * add 1d tensor with 1 element check * edit checks
pytorch_ignite
train

CommitBench: A Benchmark for Commit Message Generation

We provide CommitBench as an open-source, reproducible, and privacy- and license-aware benchmark for commit message generation. The dataset is gathered from GitHub repositories whose licenses permit redistribution. It covers six programming languages: Java, Python, Go, JavaScript, PHP, and Ruby. The commit messages in natural language are restricted to English, as it is the working language in many software development projects. The dataset contains 1,664,590 examples, selected using extensive quality-focused filtering techniques (e.g., excluding bot commits). Additionally, we provide a version with longer sequences for benchmarking models that accept extended sequence inputs.
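
For reference, here is a minimal sketch of loading and inspecting the dataset with the Hugging Face `datasets` library. The repository id `Maxscha/commitbench` is an assumption — substitute the actual id if it differs. The column names `hash`, `diff`, `message`, `project`, and `split` follow the records shown above, and the short-message filter at the end is only an illustrative heuristic, not the authors' actual filtering pipeline.

```python
# Minimal sketch: load CommitBench with the Hugging Face `datasets` library.
from datasets import load_dataset

# Assumption: the dataset is published under this repository id.
dataset = load_dataset("Maxscha/commitbench", split="train")

# Each record carries the columns shown in the samples above.
example = dataset[0]
print(example["project"])      # e.g. "juju_juju"
print(example["message"])      # the target commit message
print(example["diff"][:200])   # diffs can be long; print only a prefix

# Illustrative quality-style filter (a heuristic, not the authors' pipeline):
# keep only examples whose message is at most 50 whitespace-separated tokens.
short = dataset.filter(lambda ex: len(ex["message"].split()) <= 50)
print(len(short), "examples with short messages")
```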
