content
stringlengths 128
6.74k
|
---|
<commit_msg>Refactor - remove unused imports
<commit_before>package at.ac.tuwien.inso.controller.student;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
import at.ac.tuwien.inso.entity.Course;
import at.ac.tuwien.inso.service.CourseService;
import at.ac.tuwien.inso.service.StudentService;
@Controller
@RequestMapping("/student/register")
public class StudentRegisterForCourseController {
@Autowired
private CourseService courseService;
@GetMapping
private String registerStudent(@RequestParam Long courseId,
RedirectAttributes redirectAttributes) {
Course course = courseService.findCourseWithId(courseId);
if (courseService.registerStudentForCourse(course)) {
redirectAttributes.addFlashAttribute("registeredForCourse", course.getSubject().getName());
return "redirect:/student/courses";
} else {
redirectAttributes.addFlashAttribute("notRegisteredForCourse", course.getSubject().getName());
return "redirect:/student/courses";
}
}
}
<commit_after>package at.ac.tuwien.inso.controller.student;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
import at.ac.tuwien.inso.entity.Course;
import at.ac.tuwien.inso.service.CourseService;
@Controller
@RequestMapping("/student/register")
public class StudentRegisterForCourseController {
@Autowired
private CourseService courseService;
@GetMapping
private String registerStudent(@RequestParam Long courseId,
RedirectAttributes redirectAttributes) {
Course course = courseService.findCourseWithId(courseId);
if (courseService.registerStudentForCourse(course)) {
redirectAttributes.addFlashAttribute("registeredForCourse", course.getSubject().getName());
return "redirect:/student/courses";
} else {
redirectAttributes.addFlashAttribute("notRegisteredForCourse", course.getSubject().getName());
return "redirect:/student/courses";
}
}
}
|
<commit_msg>Write syntax test on LocalDataSource which supports writes.
<commit_before>/*
* Copyright 2008-2010 Brookhaven National Laboratory
* All rights reserved. Use is subject to license terms.
*/
package org.epics.pvmanager;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.hamcrest.Matchers.*;
import static org.epics.pvmanager.ExpressionLanguage.*;
import static org.epics.pvmanager.data.ExpressionLanguage.*;
/**
*
* @author carcassi
*/
public class WriteSyntaxTest {
@Test
public void simpleWriter() {
String channelName = "test";
ChannelExpression<Object> chExpr = toChannel(channelName);
assertThat(chExpr.getDefaultName(), equalTo(channelName));
assertThat(((WriteCache<Object>) chExpr.getWriteFunction()).getValue(), nullValue());
assertThat(((WriteCache<Object>) chExpr.getWriteFunction()).getPrecedingChannels().isEmpty(), equalTo(true));
WriteExpression<Object> expr = toChannel(channelName).after("a", "b");
assertThat(expr.getDefaultName(), equalTo(channelName));
assertThat(((WriteCache<Object>) expr.getWriteFunction()).getValue(), nullValue());
assertThat(((WriteCache<Object>) expr.getWriteFunction()).getPrecedingChannels(), hasSize(2));
assertThat(((WriteCache<Object>) expr.getWriteFunction()).getPrecedingChannels(), contains("a", "b"));
PVWriter<Object> writer = PVManager.write(toChannel(channelName)).sync();
writer.write(10);
}
}
<commit_after>/*
* Copyright 2008-2010 Brookhaven National Laboratory
* All rights reserved. Use is subject to license terms.
*/
package org.epics.pvmanager;
import org.epics.pvmanager.loc.LocalDataSource;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.hamcrest.Matchers.*;
import static org.epics.pvmanager.ExpressionLanguage.*;
import static org.epics.pvmanager.data.ExpressionLanguage.*;
/**
*
* @author carcassi
*/
public class WriteSyntaxTest {
@Test
public void simpleWriter() {
String channelName = "test";
ChannelExpression<Object> chExpr = toChannel(channelName);
assertThat(chExpr.getDefaultName(), equalTo(channelName));
assertThat(((WriteCache<Object>) chExpr.getWriteFunction()).getValue(), nullValue());
assertThat(((WriteCache<Object>) chExpr.getWriteFunction()).getPrecedingChannels().isEmpty(), equalTo(true));
WriteExpression<Object> expr = toChannel(channelName).after("a", "b");
assertThat(expr.getDefaultName(), equalTo(channelName));
assertThat(((WriteCache<Object>) expr.getWriteFunction()).getValue(), nullValue());
assertThat(((WriteCache<Object>) expr.getWriteFunction()).getPrecedingChannels(), hasSize(2));
assertThat(((WriteCache<Object>) expr.getWriteFunction()).getPrecedingChannels(), contains("a", "b"));
PVWriter<Object> writer = PVManager.write(toChannel(channelName)).from(new LocalDataSource()).sync();
writer.write(10);
}
}
|
<commit_msg>Index the Atlas data by version and provider
<commit_before>from __future__ import unicode_literals, print_function
import json
import urllib2
class Box(object):
"""Downloads and parses metainformation about a Vagrant box"""
def __init__(self, publisher, name):
"""Extract metainformation for a Vagrant box.
publisher -- Atlas owner
name -- Vagrant box name
"""
json_url = ("https://atlas.hashicorp.com/{0}/boxes/{1}/"
.format(publisher, name))
request = urllib2.Request(json_url, None,
{'Accept': 'application/json'})
json_file = urllib2.urlopen(request)
self._data = json.loads(json_file.read())
def versions(self):
"""Return a tuple with all available box versions."""
return tuple(v['version'] for v in self._data['versions']
if v['status'] == 'active')
def providers(self, version):
"""Return a list of providers for a specific box version."""
_ver = ([v for v in self._data['versions']
if v['version'] == version])[0]
return [p['name'] for p in _ver['providers']]
def url(self, version, provider):
"""Return the download URL for a specific box version and provider."""
_ver = ([v for v in self._data['versions']
if v['version'] == version])[0]
return ([p for p in _ver['providers']
if p['name'] == provider])[0]['url']
<commit_after>from __future__ import unicode_literals, print_function
import json
import urllib2
class Box(object):
"""Downloads and parses metainformation about a Vagrant box"""
def __init__(self, publisher, name):
"""Extract metainformation for a Vagrant box.
publisher -- Atlas owner
name -- Vagrant box name
"""
json_url = ("https://atlas.hashicorp.com/{0}/boxes/{1}/"
.format(publisher, name))
request = urllib2.Request(json_url, None,
{'Accept': 'application/json'})
json_file = urllib2.urlopen(request)
self._data = json.loads(json_file.read())
# We need to preserve the order of the versions
self._versions = tuple(v['version'] for v in self._data['versions'])
# Prepare a data structure for quick lookups
self._boxes = {}
for v in self._data['versions']:
_version = v['version']
self._boxes[_version] = {}
for p in v['providers']:
_provider = p['name']
self._boxes[_version][_provider] = {}
self._boxes[_version][_provider]['url'] = p['url']
def versions(self):
"""Return a tuple with all available box versions."""
return self._versions
def providers(self, version):
"""Return a list of providers for a specific box version."""
return self._boxes[version].keys()
def url(self, version, provider):
"""Return the download URL for a specific box version and provider."""
return self._boxes[version][provider]['url']
|
<commit_msg>Add the basic AST nodes.
<commit_before>class Node(object):
def __eq__(self, other):
return (
self.__class__ == other.__class__ and
self.__dict__ == other.__dict__
)
def __ne__(self, other):
return not self == other
<commit_after>class Node(object):
def __eq__(self, other):
return (
self.__class__ == other.__class__ and
self.__dict__ == other.__dict__
)
def __ne__(self, other):
return not self == other
class BinaryOperation(Node):
def __init__(self, operand, left, right):
assert operand in ("+", "-") # for now
self.operand = operand
self.left = left
self.right = right
class Int32(Node):
def __init__(self, value):
assert isinstance(value, int)
assert -2**32 < value <= 2**32-1
self.value = value
|
<commit_msg>Add IPL_ and IST_ constants in preparation for the Amiga ISA-kit
<commit_before>/* $NetBSD: psl.h,v 1.7 1994/10/26 02:06:31 cgd Exp $ */
#ifndef _MACHINE_PSL_H_
#define _MACHINE_PSL_H_
#include <m68k/psl.h>
#endif
<commit_after>/* $NetBSD: psl.h,v 1.7 1994/10/26 02:06:31 cgd Exp $ */
#ifndef _MACHINE_PSL_H_
#define _MACHINE_PSL_H_
/* Interrupt priority `levels'; not mutually exclusive. */
#define IPL_NONE -1
#define IPL_BIO 3 /* block I/O */
#define IPL_NET 3 /* network */
#define IPL_TTY 4 /* terminal */
#define IPL_CLOCK 4 /* clock */
#define IPL_IMP 4 /* memory allocation */
/* Interrupt sharing types. */
#define IST_NONE 0 /* none */
#define IST_PULSE 1 /* pulsed */
#define IST_EDGE 2 /* edge-triggered */
#define IST_LEVEL 3 /* level-triggered */
#include <m68k/psl.h>
#endif
|
<commit_msg>Add comments and docstring to OSMDataModel
<commit_before>import pandas as pd
from monitorframe.monitor import BaseDataModel
from cosmo.filesystem import FileDataFinder
from cosmo import FILES_SOURCE
from cosmo.monitor_helpers import explode_df
class OSMDataModel(BaseDataModel):
def get_data(self):
header_keys = (
'ROOTNAME', 'EXPSTART', 'DETECTOR', 'LIFE_ADJ', 'OPT_ELEM', 'CENWAVE', 'FPPOS', 'PROPOSID', 'OBSET_ID'
)
header_extensions = (0, 1, 0, 0, 0, 0, 0, 0, 0)
data_keys = ('TIME', 'SHIFT_DISP', 'SHIFT_XDISP', 'SEGMENT')
data_extensions = (1, 1, 1, 1)
finder = FileDataFinder(
FILES_SOURCE,
'*lampflash*',
header_keys,
header_extensions,
data_keys=data_keys,
data_extensions=data_extensions
)
df = pd.DataFrame(finder.data_from_files())
return explode_df(df, list(data_keys))
<commit_after>import pandas as pd
from monitorframe.monitor import BaseDataModel
from cosmo.filesystem import FileDataFinder
from cosmo import FILES_SOURCE
from cosmo.monitor_helpers import explode_df
class OSMDataModel(BaseDataModel):
"""Data model for all OSM Shift monitors."""
def get_data(self):
header_keys = (
'ROOTNAME', 'EXPSTART', 'DETECTOR', 'LIFE_ADJ', 'OPT_ELEM', 'CENWAVE', 'FPPOS', 'PROPOSID', 'OBSET_ID'
)
header_extensions = (0, 1, 0, 0, 0, 0, 0, 0, 0)
data_keys = ('TIME', 'SHIFT_DISP', 'SHIFT_XDISP', 'SEGMENT')
data_extensions = (1, 1, 1, 1)
# Find data from lampflash files
finder = FileDataFinder(
FILES_SOURCE,
'*lampflash*',
header_keys,
header_extensions,
data_keys=data_keys,
data_extensions=data_extensions
)
df = pd.DataFrame(finder.data_from_files())
return explode_df(df, list(data_keys))
|
<commit_msg>Fix config parsing. Tweeting works
<commit_before>
import tweepy, time, sys, os
from ConfigParser import SafeConfigParser
parser = SafeConfigParser()
parser.read('secrets.cfg')
#enter the corresponding information from your Twitter application:
CONSUMER_KEY = parser.get('bug_tracker', 'CONSUMER_KEY')
CONSUMER_SECRET = parser.get('bug_tracker', 'CONSUMER_SECRET')
ACCESS_KEY = parser.get('bug_tracker', 'ACCESS_KEY')
ACCESS_SECRET = parser.get('bug_tracker', 'ACCESS_SECRET')
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
api = tweepy.API(auth)
line = "Test tweet!"
api.update_status(line)<commit_after>
import tweepy, time, sys, os
from ConfigParser import SafeConfigParser
parser = SafeConfigParser()
parser.read('secrets.cfg')
#enter the corresponding information from your Twitter application:
CONSUMER_KEY = parser.get('Twitter', 'CONSUMER_KEY')
CONSUMER_SECRET = parser.get('Twitter', 'CONSUMER_SECRET')
ACCESS_KEY = parser.get('Twitter', 'ACCESS_KEY')
ACCESS_SECRET = parser.get('Twitter', 'ACCESS_SECRET')
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
api = tweepy.API(auth)
line = "Test tweet!"
api.update_status(line) |
<commit_msg>Delete the class method shenannigans
<commit_before>from __future__ import print_function, unicode_literals
from __future__ import absolute_import, division
import inspect
from . import registry
class _method_wrapper(object):
"""A placeholder object used to wrap methods until the rules decorator
comes around and adds everything to the shield registry."""
def __init__(self, fn, permissions, owner, target):
self.permissions = permissions
self.owner = owner
self.target = target
self.fn = fn
def __call__(self, *args, **kwargs):
return self.fn(*args, **kwargs)
class rule:
def __init__(self, owner, permissions, target=None):
"""owner: The owner of the permissions.
permissions: The set of permissions that the owner has
target: The model that we are checking if the owner has permissions for
"""
self.owner = owner
self.permissions = permissions
self.target = target
def __call__(self, fn):
return _method_wrapper(fn, self.permissions, self.owner, self.target)
def rules(cls):
"""Add all permission methods on the decorated object to our registry."""
# This is called after the class object is instantiated and all methods are
# wrapped with the decorator. Iterate over all of our personal wrapped
# methods, unwrap them, and add them to the registry.
mems = inspect.getmembers(cls, lambda x: isinstance(x, _method_wrapper))
for name, member in mems:
# Unwrap each method
# Add the member to the registry
for perm in member.permissions:
registry.registry[(member.owner, perm, member.target)] = member.fn
# Now that the method has been added to the registry, unwrap the method
# since we don't need the wrapper anymore.
setattr(cls, name, member.fn)
<commit_after>from __future__ import print_function, unicode_literals
from __future__ import absolute_import, division
from . import registry
class rule:
"""Add the decorated rule to our registry"""
def __init__(self, *perms, **kwargs):
"""owner: The owner of the permissions.
permissions: The set of permissions that the owner has
target: The model that we are checking if the owner has permissions for
"""
# Owner is a mandatory kwarg param
self.owner = kwargs['owner']
# Target is an optional param used to denote what registry the
# decorated function goes into
self.target = kwargs.get('target')
# The permission list!
self.permissions = perms
def __call__(self, fn):
"""Add the passed function to the registry
"""
for perm in self.permissions:
# If we have no target, this goes into the general registry
# otherwise it goes into the targeted registry
if self.target is None:
registry.general[(self.owner, perm)] = fn
else:
registry.targeted[(self.owner, perm, self.target)] = fn
# We really have no reason to keep the wrapper now that we have added
# the function to the registry
return fn
|
<commit_msg>Remove an import which snuck in but does not belong.
Signed-off-by: Robert Deaton <eb00a885478926d5d594195591fb94a03acb1062@udel.edu>
<commit_before>
__version__ = '0.1.1'
__license__ = 'MIT'
__author__ = 'Robert Deaton'
import compat
import memoize
import point
import camera
import sprite
import scene
import _lib
import event
import animator
import animation
import pygame
import image
import color
import rect
import signal
import font
import vector
Scene = scene.Scene
Image = image.Image
Sprite = sprite.Sprite
Group = sprite.Group
AnimationSprite = animation.AnimationSprite
AnimationGroup = animation.AnimationGroup
Rect = rect.Rect
Signal = signal.Signal
Vec2D = vector.Vec2D
keys = event.keys
director = scene.Director()
def init():
event.init()
pygame.init()
pygame.font.init()
def quit():
pygame.quit()
director._stack = []
<commit_after>
__version__ = '0.1.1'
__license__ = 'MIT'
__author__ = 'Robert Deaton'
import compat
import memoize
import point
import camera
import sprite
import scene
import _lib
import event
import animator
import animation
import pygame
import image
import color
import rect
import signal
import vector
Scene = scene.Scene
Image = image.Image
Sprite = sprite.Sprite
Group = sprite.Group
AnimationSprite = animation.AnimationSprite
AnimationGroup = animation.AnimationGroup
Rect = rect.Rect
Signal = signal.Signal
Vec2D = vector.Vec2D
keys = event.keys
director = scene.Director()
def init():
event.init()
pygame.init()
pygame.font.init()
def quit():
pygame.quit()
director._stack = []
|
<commit_msg>Replace hand-written loop with str.matches() call.
This should fix #4.
<commit_before>extern crate libc;
use std::ffi::CStr;
use std::str;
use libc::c_char;
#[no_mangle]
pub extern "C" fn count_substrings(value: *const c_char, substr: *const c_char) -> i32 {
let c_value = unsafe { CStr::from_ptr(value).to_bytes() };
let c_substr = unsafe { CStr::from_ptr(substr).to_bytes() };
match str::from_utf8(c_value) {
Ok(value) => match str::from_utf8(c_substr) {
Ok(substr) => rust_substrings(value, substr),
Err(_) => -1,
},
Err(_) => -1,
}
}
fn rust_substrings(value: &str, substr: &str) -> i32 {
let mut count = 0;
let substr_len = substr.len();
let upper_bound = value.len() - substr_len + 1;
for c in 0..upper_bound {
let possible_match = &value[c..c+substr_len];
if possible_match == substr {
count += 1;
}
}
count
}
<commit_after>extern crate libc;
use std::ffi::CStr;
use std::str;
use libc::c_char;
#[no_mangle]
pub extern "C" fn count_substrings(value: *const c_char, substr: *const c_char) -> i32 {
let c_value = unsafe { CStr::from_ptr(value).to_bytes() };
let c_substr = unsafe { CStr::from_ptr(substr).to_bytes() };
match str::from_utf8(c_value) {
Ok(value) => match str::from_utf8(c_substr) {
Ok(substr) => rust_substrings(value, substr),
Err(_) => -1,
},
Err(_) => -1,
}
}
fn rust_substrings(value: &str, substr: &str) -> i32 {
value.matches(substr).count() as i32
}
|
<commit_msg>Add get_absolute_url method to institutions
<commit_before>from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, RelationshipField, LinksField
class InstitutionSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'id',
'name'
])
name = ser.CharField(read_only=True)
id = ser.CharField(read_only=True, source='_id')
logo_path = ser.CharField(read_only=True)
auth_url = ser.CharField(read_only=True)
links = LinksField({'self': 'get_api_url', })
nodes = RelationshipField(
related_view='institutions:institution-nodes',
related_view_kwargs={'institution_id': '<pk>'},
)
registrations = RelationshipField(
related_view='institutions:institution-registrations',
related_view_kwargs={'institution_id': '<pk>'}
)
users = RelationshipField(
related_view='institutions:institution-users',
related_view_kwargs={'institution_id': '<pk>'}
)
def get_api_url(self, obj):
return obj.absolute_api_v2_url
class Meta:
type_ = 'institutions'
<commit_after>from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, RelationshipField, LinksField
class InstitutionSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'id',
'name'
])
name = ser.CharField(read_only=True)
id = ser.CharField(read_only=True, source='_id')
logo_path = ser.CharField(read_only=True)
auth_url = ser.CharField(read_only=True)
links = LinksField({'self': 'get_api_url', })
nodes = RelationshipField(
related_view='institutions:institution-nodes',
related_view_kwargs={'institution_id': '<pk>'},
)
registrations = RelationshipField(
related_view='institutions:institution-registrations',
related_view_kwargs={'institution_id': '<pk>'}
)
users = RelationshipField(
related_view='institutions:institution-users',
related_view_kwargs={'institution_id': '<pk>'}
)
def get_api_url(self, obj):
return obj.absolute_api_v2_url
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
class Meta:
type_ = 'institutions'
|
<commit_msg>Remove some cruft from the cufflinks test.
<commit_before>import os
import unittest
import shutil
from bcbio.rnaseq import cufflinks
from bcbio.utils import file_exists, safe_makedir
from nose.plugins.attrib import attr
DATA_DIR = os.path.join(os.path.dirname(__file__), "bcbio-nextgen-test-data", "data")
class TestCufflinks(unittest.TestCase):
merged_gtf = os.path.join(DATA_DIR, "cufflinks", "merged.gtf")
ref_gtf = os.path.join(DATA_DIR, "cufflinks", "ref-transcripts.gtf")
out_dir = "cufflinks-test"
def setUp(self):
safe_makedir(self.out_dir)
@attr("unit")
def test_cufflinks_clean(self):
clean_fn = os.path.join(self.out_dir, "clean.gtf")
dirty_fn = os.path.join(self.out_dir, "dirty.gtf")
clean, dirty = cufflinks.clean_assembly(self.merged_gtf, clean_fn,
dirty_fn)
# fixed_fn = os.path.join(self.out_dir, "fixed.gtf")
# fixed = cufflinks.fix_cufflinks_attributes(self.ref_gtf, clean, fixed_fn)
assert(file_exists(clean))
assert(os.path.exists(dirty))
# assert(file_exists(fixed))
def tearDown(self):
shutil.rmtree(self.out_dir)
<commit_after>import os
import unittest
import shutil
from bcbio.rnaseq import cufflinks
from bcbio.utils import file_exists, safe_makedir
from nose.plugins.attrib import attr
DATA_DIR = os.path.join(os.path.dirname(__file__), "bcbio-nextgen-test-data", "data")
class TestCufflinks(unittest.TestCase):
merged_gtf = os.path.join(DATA_DIR, "cufflinks", "merged.gtf")
ref_gtf = os.path.join(DATA_DIR, "cufflinks", "ref-transcripts.gtf")
out_dir = "cufflinks-test"
def setUp(self):
safe_makedir(self.out_dir)
@attr("unit")
def test_cufflinks_clean(self):
clean_fn = os.path.join(self.out_dir, "clean.gtf")
dirty_fn = os.path.join(self.out_dir, "dirty.gtf")
clean, dirty = cufflinks.clean_assembly(self.merged_gtf, clean_fn,
dirty_fn)
assert(file_exists(clean))
assert(os.path.exists(dirty))
def tearDown(self):
shutil.rmtree(self.out_dir)
|
<commit_msg>Fix temporary database file deletion.
<commit_before>// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "impeller/archivist/archivist_fixture.h"
#include "flutter/fml/paths.h"
namespace impeller {
namespace testing {
ArchivistFixture::ArchivistFixture() {
std::stringstream stream;
stream << flutter::testing::GetCurrentTestName() << ".db";
archive_file_name_ = fml::paths::JoinPaths(
{flutter::testing::GetFixturesPath(), stream.str()});
}
ArchivistFixture::~ArchivistFixture() = default;
const std::string ArchivistFixture::GetArchiveFileName() const {
return archive_file_name_;
}
void ArchivistFixture::SetUp() {
DeleteArchiveFile();
}
void ArchivistFixture::TearDown() {
// TODO: Tear this down. For now, I am inspecting the files for readability of
// schema.
// DeleteArchiveFile();
}
void ArchivistFixture::DeleteArchiveFile() const {
fml::UnlinkFile(archive_file_name_.c_str());
}
} // namespace testing
} // namespace impeller
<commit_after>// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "impeller/archivist/archivist_fixture.h"
#include "flutter/fml/paths.h"
namespace impeller {
namespace testing {
ArchivistFixture::ArchivistFixture() {
std::stringstream stream;
stream << "Test" << flutter::testing::GetCurrentTestName() << ".db";
archive_file_name_ = stream.str();
}
ArchivistFixture::~ArchivistFixture() = default;
const std::string ArchivistFixture::GetArchiveFileName() const {
return fml::paths::JoinPaths(
{flutter::testing::GetFixturesPath(), archive_file_name_});
}
void ArchivistFixture::SetUp() {
DeleteArchiveFile();
}
void ArchivistFixture::TearDown() {
DeleteArchiveFile();
}
void ArchivistFixture::DeleteArchiveFile() const {
auto fixtures = flutter::testing::OpenFixturesDirectory();
if (fml::FileExists(fixtures, archive_file_name_.c_str())) {
fml::UnlinkFile(fixtures, archive_file_name_.c_str());
}
}
} // namespace testing
} // namespace impeller
|
<commit_msg>core/cortex-m0: Use compiler_rt version of clz and ctz
Use __clzsi2 and __ctzsi2 from compiler_rt instead of our own version.
Using the compiler_rt versions result in a slightly smaller image.
servo_micro before this change:
RO: 18744 bytes in flash remaining
RW: 23192 bytes in flash remaining
servo_micro after this change:
RO: 18808 bytes in flash remaining
RW: 23256 bytes in flash remaining
BRANCH=none
BUG=b:172020503
TEST=CC=clang make BOARD=servo_micro
TEST=make buildall
Signed-off-by: Tom Hughes <fc5c12f30a5ed2baa0581f44ee14ad1eb7b20d83@chromium.org>
Change-Id: Ibc19a3670127dde211fb20d247c1284d0aec5f61
Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/ec/+/3199739
Reviewed-by: Jack Rosenthal <d3f605bef1867f59845d4ce6e4f83b8dc9e4e0ae@chromium.org>
<commit_before>/* Copyright 2014 The Chromium OS Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#ifndef __CROS_EC_CONFIG_CORE_H
#define __CROS_EC_CONFIG_CORE_H
/* Linker binary architecture and format */
#define BFD_ARCH arm
#define BFD_FORMAT "elf32-littlearm"
/* Emulate the CLZ/CTZ instructions since the CPU core is lacking support */
#define CONFIG_SOFTWARE_CLZ
#define CONFIG_SOFTWARE_CTZ
#define CONFIG_SOFTWARE_PANIC
#define CONFIG_ASSEMBLY_MULA32
#endif /* __CROS_EC_CONFIG_CORE_H */
<commit_after>/* Copyright 2014 The Chromium OS Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#ifndef __CROS_EC_CONFIG_CORE_H
#define __CROS_EC_CONFIG_CORE_H
/* Linker binary architecture and format */
#define BFD_ARCH arm
#define BFD_FORMAT "elf32-littlearm"
/*
* Emulate the CLZ/CTZ instructions since the CPU core is lacking support.
* When building with clang, we rely on compiler_rt to provide this support.
*/
#ifndef __clang__
#define CONFIG_SOFTWARE_CLZ
#define CONFIG_SOFTWARE_CTZ
#endif /* __clang__ */
#define CONFIG_SOFTWARE_PANIC
#define CONFIG_ASSEMBLY_MULA32
#endif /* __CROS_EC_CONFIG_CORE_H */
|
<commit_msg>Fix access restriction to /accounts/profile/
LoginRequiredMixin needs to come first for it to be applied. Otherwise,
/accounts/profile/ is accessible even when the user is not
authenticated.
<commit_before>from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.shortcuts import render
from django.views.generic import TemplateView
from oidc_provider.models import UserConsent, Client
@login_required
def list_consents(request):
# if a revoke request was done, process it
revoke = request.POST.get('revoke', None)
if revoke is not None:
consent = UserConsent.objects.filter(user=request.user, client__client_id=revoke)
if consent:
revoked_client = consent[0].client
consent[0].delete()
messages.success(request, "Successfully revoked consent for client \"{}\"".format(revoked_client.name))
else:
client = Client.objects.filter(client_id=revoke)
if client:
messages.error(request, "You have no consent for client \"{}\".".format(client[0].name))
else:
messages.error(request, "Unknown client.")
# render the result
consents = UserConsent.objects.filter(user=request.user)
return render(request, 'list_consents.html', {
'consents': consents
})
class ProfileView(TemplateView, LoginRequiredMixin):
template_name = 'profile.html'
<commit_after>from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.shortcuts import render
from django.views.generic import TemplateView
from oidc_provider.models import UserConsent, Client
@login_required
def list_consents(request):
# if a revoke request was done, process it
revoke = request.POST.get('revoke', None)
if revoke is not None:
consent = UserConsent.objects.filter(user=request.user, client__client_id=revoke)
if consent:
revoked_client = consent[0].client
consent[0].delete()
messages.success(request, "Successfully revoked consent for client \"{}\"".format(revoked_client.name))
else:
client = Client.objects.filter(client_id=revoke)
if client:
messages.error(request, "You have no consent for client \"{}\".".format(client[0].name))
else:
messages.error(request, "Unknown client.")
# render the result
consents = UserConsent.objects.filter(user=request.user)
return render(request, 'list_consents.html', {
'consents': consents
})
class ProfileView(LoginRequiredMixin, TemplateView):
template_name = 'profile.html'
|
<commit_msg>Support for multiple methods, and leave the config file to hooker
<commit_before>import json
def check_labels(api):
with open('config.json', 'r') as fd:
config = json.load(fd)
if not config['active']:
return
labels = config['labels']
for label, comment in labels.items():
if api.payload['label']['name'].lower() == label:
api.post_comment(comment)
method = check_labels
<commit_after>import json
def check_labels(api, config):
if not config.get('active'):
return
labels = config.get('labels', [])
for label, comment in labels.items():
if api.payload['label']['name'].lower() == label:
api.post_comment(comment)
methods = [check_labels]
|
<commit_msg>Add "throws Exception" to customAction method.
<commit_before>package com.github.hisaichi5518.kise;
public interface UnitActions<ACTION_PARAM> {
void customAction(ACTION_PARAM actionParam);
void defaultAction(ACTION_PARAM actionParam);
}
<commit_after>package com.github.hisaichi5518.kise;
public interface UnitActions<ACTION_PARAM> {
void customAction(ACTION_PARAM actionParam) throws Exception;
void defaultAction(ACTION_PARAM actionParam);
}
|
<commit_msg>Fix test so it works on systems where wchar_t != int.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@75827 91177308-0d34-0410-b5e6-96231b3b80d8
<commit_before>// RUN: %llvmgcc -S %s -o - | llvm-as -f -o /dev/null
typedef int __darwin_wchar_t;
typedef __darwin_wchar_t wchar_t;
typedef signed short SQLSMALLINT;
typedef SQLSMALLINT SQLRETURN;
typedef enum
{
en_sqlstat_total
}
sqlerrmsg_t;
SQLRETURN _iodbcdm_sqlerror( )
{
wchar_t _sqlState[6] = { L"\0" };
}
<commit_after>// RUN: %llvmgcc -S %s -o - | llvm-as -f -o /dev/null
#include <stddef.h>
signed short _iodbcdm_sqlerror( )
{
wchar_t _sqlState[6] = { L"\0" };
}
|
<commit_msg>Reorganize bot start sequence, including custom naming
<commit_before>package main // import "cirello.io/gochatbot"
import (
"log"
"os"
"cirello.io/gochatbot/bot"
"cirello.io/gochatbot/brain"
"cirello.io/gochatbot/providers"
"cirello.io/gochatbot/rules/cron"
"cirello.io/gochatbot/rules/regex"
)
func main() {
provider := providers.Detect(os.Getenv)
memory := brain.Detect(os.Getenv)
robot := bot.New(
"gochatbot",
memory,
bot.MessageProvider(provider),
bot.RegisterRuleset(regex.New()),
bot.RegisterRuleset(cron.New()),
)
if err := provider.Error(); err != nil {
log.SetOutput(os.Stderr)
log.Fatalln("error in message provider:", err)
}
if err := memory.Error(); err != nil {
log.SetOutput(os.Stderr)
log.Fatalln("error in brain memory:", err)
}
robot.Process()
}
<commit_after>package main // import "cirello.io/gochatbot"
import (
"log"
"os"
"cirello.io/gochatbot/bot"
"cirello.io/gochatbot/brain"
"cirello.io/gochatbot/providers"
"cirello.io/gochatbot/rules/cron"
"cirello.io/gochatbot/rules/regex"
)
func main() {
name := os.Getenv("GOCHATBOT_NAME")
if name == "" {
name = "gochatbot"
}
provider := providers.Detect(os.Getenv)
if err := provider.Error(); err != nil {
log.SetOutput(os.Stderr)
log.Fatalln("error in message provider:", err)
}
memory := brain.Detect(os.Getenv)
if err := memory.Error(); err != nil {
log.SetOutput(os.Stderr)
log.Fatalln("error in brain memory:", err)
}
bot.New(
name,
memory,
bot.MessageProvider(provider),
bot.RegisterRuleset(regex.New()),
bot.RegisterRuleset(cron.New()),
).Process()
}
|
<commit_msg>Use keysym to keycode mapping in C
<commit_before>////////////////////////////////////////////////////////////////////////////////
#ifdef __linux__
#include <stdio.h>
#include <X11/Xlib.h>
#include <X11/Intrinsic.h>
#include <X11/extensions/XTest.h>
Display *fakekey_dis;
void fakekey_init(void) {
fakekey_dis = XOpenDisplay(NULL);
}
void fakekey_press(int code) {
XTestFakeKeyEvent(fakekey_dis, code, True, 0);
XFlush(fakekey_dis);
}
void fakekey_release(int code) {
XTestFakeKeyEvent(fakekey_dis, code, False, 0);
XFlush(fakekey_dis);
}
#else
void fakekey_init(void) {
}
void fakekey_press(int code) {
}
void fakekey_release(int code) {
}
#endif
<commit_after>////////////////////////////////////////////////////////////////////////////////
#ifdef __linux__
#include <stdio.h>
#include <X11/Xlib.h>
#include <X11/Intrinsic.h>
#include <X11/extensions/XTest.h>
/* X server connection used to synthesize key events. */
Display *fakekey_dis;
/* Open the display; must run before fakekey_press()/fakekey_release(). */
void fakekey_init(void) {
    fakekey_dis = XOpenDisplay(NULL);
}
/* Synthesize a key press for the given X keysym. The keysym is translated
 * to a keycode via the current keyboard mapping at call time. */
void fakekey_press(int keysym) {
    XTestFakeKeyEvent(fakekey_dis, XKeysymToKeycode(fakekey_dis, keysym), True, 0);
    XFlush(fakekey_dis);
}
/* Synthesize the matching key release for the given keysym. */
void fakekey_release(int keysym) {
    XTestFakeKeyEvent(fakekey_dis, XKeysymToKeycode(fakekey_dis, keysym), False, 0);
    XFlush(fakekey_dis);
}
#else
/* Non-Linux builds: no-op stubs so callers need no platform checks. */
void fakekey_init(void) {
}
void fakekey_press(int keysym) {
}
void fakekey_release(int keysym) {
}
#endif
|
<commit_msg>Make compatible with Django 1.9
<commit_before>from django.conf.urls import patterns, url
from .views import NoticeSettingsView
urlpatterns = patterns(
"",
url(r"^settings/$", NoticeSettingsView.as_view(), name="notification_notice_settings"),
)
<commit_after>from django.conf.urls import url
from .views import NoticeSettingsView
# Plain-list urlpatterns: the patterns() helper was removed in Django 1.9.
urlpatterns = [
    url(r"^settings/$", NoticeSettingsView.as_view(), name="notification_notice_settings"),
]
|
<commit_msg>Fix and re-enable the reset management command test.
Not 100% sure of why this fixes the issue - it appears that changes to django.test.TestCase in Django 2.0 led to the test failing.
<commit_before>from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth.models import User
from django.core.management import call_command
from core.models import Child
class CommandsTestCase(TestCase):
def test_migrate(self):
call_command('migrate', verbosity=0)
self.assertIsInstance(User.objects.get(username='admin'), User)
def test_fake(self):
call_command('migrate', verbosity=0)
call_command('fake', children=1, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 1)
call_command('fake', children=2, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 3)
"""def test_reset(self):
call_command('reset', verbosity=0, interactive=False)
self.assertIsInstance(User.objects.get(username='admin'), User)
self.assertEqual(Child.objects.count(), 1)"""
<commit_after>from __future__ import unicode_literals
from django.test import TransactionTestCase
from django.contrib.auth.models import User
from django.core.management import call_command
from core.models import Child
class CommandsTestCase(TransactionTestCase):
    """Exercise the project's custom management commands.

    Uses TransactionTestCase rather than TestCase: with plain TestCase
    (which wraps each test in a rolled-back transaction) the ``reset``
    command's test failed on Django 2.0.
    """
    def test_migrate(self):
        call_command('migrate', verbosity=0)
        # The admin user is expected to exist after migrations run
        # (presumably created by a data migration -- confirm).
        self.assertIsInstance(User.objects.get(username='admin'), User)
    def test_fake(self):
        call_command('migrate', verbosity=0)
        # 'fake' creates children cumulatively; the counts below are totals.
        call_command('fake', children=1, days=7, verbosity=0)
        self.assertEqual(Child.objects.count(), 1)
        call_command('fake', children=2, days=7, verbosity=0)
        self.assertEqual(Child.objects.count(), 3)
    def test_reset(self):
        # 'reset' restores the demo state: the admin user plus one child.
        call_command('reset', verbosity=0, interactive=False)
        self.assertIsInstance(User.objects.get(username='admin'), User)
        self.assertEqual(Child.objects.count(), 1)
|
<commit_msg>Allow setting the scroll timeout param
<commit_before>from rest_framework import pagination
from amcat.tools import amcates
from rest_framework.response import Response
from django.core.urlresolvers import reverse
from rest_framework.utils.urls import replace_query_param
class ScrollingPaginator(pagination.BasePagination):
def paginate_queryset(self, queryset, request, view=None):
self.request = request
es = amcates.ES()
scroll_id = request.query_params.get("scroll_id")
if scroll_id:
res = es.es.scroll(scroll_id, scroll="1m")
else:
res = es.search(scroll="1m", **queryset)
self.total = res['hits']['total']
self.scroll_id = res['_scroll_id']
self.done = not res['hits']['hits']
for hit in res['hits']['hits']:
item = {'id': hit['_id']}
if '_source' in hit:
item.update({k: v for (k, v) in hit['_source'].items()})
yield item
def get_paginated_response(self, data):
return Response({
'next': self.get_next_link(),
'results': data,
'total': self.total,
})
def get_next_link(self):
if not self.done:
url = self.request.build_absolute_uri()
return replace_query_param(url, "scroll_id", self.scroll_id)
<commit_after>from rest_framework import pagination
from amcat.tools import amcates
from rest_framework.response import Response
from django.core.urlresolvers import reverse
from rest_framework.utils.urls import replace_query_param
class ScrollingPaginator(pagination.BasePagination):
    """DRF pagination backed by Elasticsearch's scroll API.

    The first request opens a scroll context; follow-up requests pass the
    returned ``scroll_id`` back (via the ``next`` link) to fetch the next
    batch until no hits remain.
    """
    def paginate_queryset(self, queryset, request, view=None):
        # Yields one dict per ES hit: always the id, plus any _source fields.
        self.request = request
        es = amcates.ES()
        scroll_id = request.query_params.get("scroll_id")
        # "scroll" controls how long ES keeps the scroll context alive
        # between requests; defaults to one minute.
        scroll = request.query_params.get("scroll", "1m")
        if scroll_id:
            res = es.es.scroll(scroll_id, scroll=scroll)
        else:
            res = es.search(scroll=scroll, **queryset)
        self.total = res['hits']['total']
        self.scroll_id = res['_scroll_id']
        # An empty hit list means the scroll is exhausted: emit no next link.
        self.done = not res['hits']['hits']
        for hit in res['hits']['hits']:
            item = {'id': hit['_id']}
            if '_source' in hit:
                item.update({k: v for (k, v) in hit['_source'].items()})
            yield item
    def get_paginated_response(self, data):
        return Response({
            'next': self.get_next_link(),
            'results': data,
            'total': self.total,
        })
    def get_next_link(self):
        # Implicitly returns None once the scroll is exhausted.
        if not self.done:
            url = self.request.build_absolute_uri()
            return replace_query_param(url, "scroll_id", self.scroll_id)
|
<commit_msg>Add required to sync subQuery
<commit_before>export type Ordering = 'NONE' | 'ASC' | 'DESC';
export interface QueryOrder {
field: string;
ordering: Ordering;
chain?: string[];
}
export interface QueryFilters {
[attribute: string]: any;
}
export interface QueryPagination {
skip?: number;
take?: number;
}
export interface QueryInclude {
[alias: string]: SubQuery | boolean;
}
export interface SubQuery {
include?: QueryInclude;
filters?: QueryFilters;
attributes?: string[];
}
export interface Query {
queryId?: number;
model?: string;
byId?: number;
count?: boolean;
extra?: any;
include?: QueryInclude;
filters?: QueryFilters;
attributes?: string[];
pagination?: QueryPagination;
order?: QueryOrder;
}
<commit_after>export type Ordering = 'NONE' | 'ASC' | 'DESC';
// Ordering clause applied to query results.
export interface QueryOrder {
  // Attribute name to sort by.
  field: string;
  // Sort direction; 'NONE' disables explicit ordering.
  ordering: Ordering;
  // Chain of association names leading to `field` on a related model
  // (assumed -- confirm against the query executor).
  chain?: string[];
}
// Attribute -> value constraints; value semantics depend on the backend.
export interface QueryFilters {
  [attribute: string]: any;
}
// Offset/limit-style pagination.
export interface QueryPagination {
  skip?: number;
  take?: number;
}
// Associations to include, keyed by alias; `true` includes with defaults.
export interface QueryInclude {
  [alias: string]: SubQuery | boolean;
}
// Nested query options applied to one included association.
export interface SubQuery {
  include?: QueryInclude;
  filters?: QueryFilters;
  attributes?: string[];
  // When true, parent rows must have a matching associated row
  // (inner-join semantics -- assumed; confirm in the sync layer).
  required?: boolean;
}
// Top-level description of a query sent for execution.
export interface Query {
  queryId?: number;
  model?: string;
  // Fetch a single record by primary key.
  byId?: number;
  // Return only a count instead of rows.
  count?: boolean;
  extra?: any;
  include?: QueryInclude;
  filters?: QueryFilters;
  attributes?: string[];
  pagination?: QueryPagination;
  order?: QueryOrder;
}
|
<commit_msg>Handle shell args in python scripts
<commit_before>import sys
import json
import struct
import subprocess
# Read a message from stdin and decode it.
def getMessage():
rawLength = sys.stdin.read(4)
if len(rawLength) == 0:
sys.exit(0)
messageLength = struct.unpack('@I', rawLength)[0]
message = sys.stdin.read(messageLength)
return json.loads(message)
# Encode a message for transmission,
# given its content.
def encodeMessage(messageContent):
encodedContent = json.dumps(messageContent)
encodedLength = struct.pack('@I', len(encodedContent))
return {'length': encodedLength, 'content': encodedContent}
# Send an encoded message to stdout
def sendMessage(encodedMessage):
sys.stdout.write(encodedMessage['length'])
sys.stdout.write(encodedMessage['content'])
sys.stdout.flush()
while True:
mpv_args = getMessage()
if (len(mpv_args) > 1):
subprocess.call(["mpv", mpv_args])
<commit_after>
# Browser native-messaging host that launches mpv with the arguments sent
# by the extension. Messages use the native-messaging framing: a 4-byte
# native-endian length prefix followed by a JSON payload.
# NOTE(review): reading the length prefix via sys.stdin.read assumes
# Python 2 byte-string stdin; on Python 3 this would need sys.stdin.buffer.
import sys
import json
import struct
import subprocess
import shlex
# Read a message from stdin and decode it.
def getMessage():
    rawLength = sys.stdin.read(4)
    if len(rawLength) == 0:
        sys.exit(0)
    messageLength = struct.unpack('@I', rawLength)[0]
    message = sys.stdin.read(messageLength)
    return json.loads(message)
# Encode a message for transmission,
# given its content.
def encodeMessage(messageContent):
    encodedContent = json.dumps(messageContent)
    encodedLength = struct.pack('@I', len(encodedContent))
    return {'length': encodedLength, 'content': encodedContent}
# Send an encoded message to stdout
def sendMessage(encodedMessage):
    sys.stdout.write(encodedMessage['length'])
    sys.stdout.write(encodedMessage['content'])
    sys.stdout.flush()
while True:
    mpv_args = getMessage()
    if (len(mpv_args) > 1):
        # shlex.split keeps quoted arguments intact instead of passing the
        # whole string to mpv as a single argv element.
        args = shlex.split("mpv " + mpv_args)
        subprocess.call(args)
# Unreachable: the loop above only terminates via sys.exit in getMessage().
sys.exit(0)
|
<commit_msg>Use standard library instead of django.utils.importlib
> django.utils.importlib is a compatibility library for when Python 2.6 was
> still supported. It has been obsolete since Django 1.7, which dropped support
> for Python 2.6, and is removed in 1.9 per the deprecation cycle.
> Use Python's import_module function instead
> — [1]
References:
[1] http://stackoverflow.com/a/32763639
[2] https://docs.djangoproject.com/en/1.9/internals/deprecation/#deprecation-removed-in-1-9
<commit_before>
import django
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
def autodiscover():
"""Auto-discover INSTALLED_APPS mails.py modules."""
for app in settings.INSTALLED_APPS:
module = '%s.mails' % app # Attempt to import the app's 'mails' module
try:
import_module(module)
except:
# Decide whether to bubble up this error. If the app just
# doesn't have a mails module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
app_module = import_module(app)
if module_has_submodule(app_module, 'mails'):
raise
# If we're using Django >= 1.7, use the new app-loading mecanism which is way
# better.
if django.VERSION < (1, 7):
autodiscover()
<commit_after>
import django
from django.conf import settings
from django.utils.module_loading import module_has_submodule
try:
    from importlib import import_module
except ImportError:
    # Compatibility for python-2.6 (stdlib importlib appeared in 2.7).
    from django.utils.importlib import import_module
def autodiscover():
    """Auto-discover INSTALLED_APPS mails.py modules."""
    for app in settings.INSTALLED_APPS:
        module = '%s.mails' % app # Attempt to import the app's 'mails' module
        try:
            import_module(module)
        except:
            # Decide whether to bubble up this error. If the app just
            # doesn't have a mails module, we can ignore the error
            # attempting to import it, otherwise we want it to bubble up.
            # NOTE(review): bare except also swallows e.g. SyntaxError inside
            # an existing mails module unless module_has_submodule re-raises.
            app_module = import_module(app)
            if module_has_submodule(app_module, 'mails'):
                raise
# If we're using Django >= 1.7, use the new app-loading mechanism which is way
# better.
if django.VERSION < (1, 7):
    autodiscover()
|
<commit_msg>Fix update callback position to be grid-relative not scrollback-relative.
<commit_before>
void term_update(term_t_i *term)
{
if( term->dirty.exists && term->update != NULL ) {
term->update(TO_H(term), term->dirty.x, term->dirty.y, term->dirty.width, term->dirty.height);
term->dirty.exists = false;
}
}
void term_cursor_update(term_t_i *term)
{
if( term->dirty_cursor.exists && term->cursor_update != NULL ) {
term->cursor_update(TO_H(term), term->dirty_cursor.old_ccol, term->dirty_cursor.old_crow - (term->grid.history - term->grid.height), term->ccol, term->crow - (term->grid.history - term->grid.height));
term->dirty_cursor.exists = false;
term->dirty_cursor.old_ccol = term->ccol;
term->dirty_cursor.old_crow = term->crow;
}
}
<commit_after>
/* Flush any pending dirty-rectangle notification to the client callback.
 * The dirty rectangle's y is tracked in scrollback coordinates; subtracting
 * (history - height) converts it to a grid-relative row before invoking
 * the callback (see commit: fix update position to be grid-relative). */
void term_update(term_t_i *term)
{
    if( term->dirty.exists && term->update != NULL ) {
        term->update(TO_H(term), term->dirty.x, term->dirty.y - (term->grid.history - term->grid.height), term->dirty.width, term->dirty.height);
        term->dirty.exists = false;
    }
}
/* Notify the client of a cursor move, converting both the previous and the
 * current cursor row from scrollback to grid-relative coordinates, then
 * record the current position as the last-reported one. */
void term_cursor_update(term_t_i *term)
{
    if( term->dirty_cursor.exists && term->cursor_update != NULL ) {
        term->cursor_update(TO_H(term), term->dirty_cursor.old_ccol, term->dirty_cursor.old_crow - (term->grid.history - term->grid.height), term->ccol, term->crow - (term->grid.history - term->grid.height));
        term->dirty_cursor.exists = false;
        term->dirty_cursor.old_ccol = term->ccol;
        term->dirty_cursor.old_crow = term->crow;
    }
}
|
<commit_msg>Return the json from the parser method
<commit_before>from bs4 import BeautifulSoup
from requests.auth import HTTPBasicAuth
import requests
import json
def parse_content():
rs = requests.get('http://soundspectra.com/admin/', auth=HTTPBasicAuth('admin', 'h@ckm3'))
html_data = rs.text
soup = BeautifulSoup(html_data)
details = {'stream_details' : []}
details_list = []
alt = 1
key = None
for td in soup.find_all('td'):
if alt:
if td.get('width') and td.get('width') == '130':
key = td.text
alt = not alt
elif not alt:
if td.get('class') and td.get('class')[0] == 'streamdata':
alt = not alt
value = td.text
d = {key.encode("utf-8") : value.encode("utf-8")}
details_list.append(d)
details['stream_details'] = details_list
print details
if __name__ == '__main__':
parse_content()<commit_after>from bs4 import BeautifulSoup
from requests.auth import HTTPBasicAuth
import requests
import json
def parse_content():
    """Scrape the stream-details table from the soundspectra admin page.

    Returns a dict of the form ``{'stream_details': [{label: value}, ...]}``
    pairing each label cell (width="130") with the following 'streamdata'
    value cell.
    """
    # NOTE(review): credentials are hard-coded; move them to configuration
    # or environment variables before this goes anywhere public.
    rs = requests.get('http://soundspectra.com/admin/', auth=HTTPBasicAuth('admin', 'h@ckm3'))
    html_data = rs.text
    soup = BeautifulSoup(html_data)
    details = {'stream_details' : []}
    details_list = []
    # `alt` toggles between "expecting a label cell" (truthy) and
    # "expecting its value cell" (falsy) while walking the <td> elements.
    alt = 1
    key = None
    for td in soup.find_all('td'):
        if alt:
            if td.get('width') and td.get('width') == '130':
                key = td.text
                alt = not alt
        elif not alt:
            if td.get('class') and td.get('class')[0] == 'streamdata':
                alt = not alt
                value = td.text
                d = {key.encode("utf-8") : value.encode("utf-8")}
                details_list.append(d)
    details['stream_details'] = details_list
    return details
if __name__ == '__main__':
    details = parse_content()
    print details
|
<commit_msg>Remove stay mention to BASE_URL
<commit_before>from django.conf.urls.defaults import *
import settings
import views
urlpatterns = patterns('',
(r'^papers/(?P<paper>.+)$', views.papers),
(r'^publications/?$', views.publications, { 'collection' : 'luispedro' }),
(r'^publications/(?P<collection>.+)$', views.publications),
(r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings._BASE_DIR + '/../media/publications/files'}),
)
<commit_after>from django.conf.urls.defaults import *
import settings
import views
# Routes for papers and publication pages; publication files are served
# straight from MEDIA_ROOT by django.views.static.serve (development-style
# static serving -- presumably fronted by a real web server in production).
urlpatterns = patterns('',
    (r'^papers/(?P<paper>.+)$', views.papers),
    (r'^publications/?$', views.publications, { 'collection' : 'luispedro' }),
    (r'^publications/(?P<collection>.+)$', views.publications),
    (r'^publications/files/(?P<file>.+)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT + '/publications/files'}),
)
|
<commit_msg>Increase the time delta from 120 to 240 milliseconds used to
decide the failure.
Change-Id: Ic51da36d79d4cd4ccac342d7242e56a23e21c07f
<commit_before>from fabfile.config import *
@task
@roles('all')
def get_all_time():
date = run("DATE=$( sudo date ); DATEMILLISEC=$( sudo date +%s ); echo $DATE; echo $DATEMILLISEC")
return tuple(date.split('\r\n'))
@task
@roles('build')
def verify_time_all():
result = execute('get_all_time')
print result
all_time = [int(date_in_millisec) for date, date_in_millisec in result.values()]
all_time.sort()
if (all_time[-1] - all_time[0]) > 120:
raise RuntimeError("Time not synced in the nodes, Please sync and proceed:\n %s" % result)
else:
print "Time synced in the nodes, Proceeding to install/provision."
<commit_after>from fabfile.config import *
@task
@roles('all')
def get_all_time():
    # Capture both the human-readable date and the epoch timestamp in one
    # remote invocation so the two values are sampled together.
    date = run("DATE=$( sudo date ); DATEMILLISEC=$( sudo date +%s ); echo $DATE; echo $DATEMILLISEC")
    return tuple(date.split('\r\n'))
@task
@parallel
@roles('build')
def verify_time_all():
    """Check clock skew across all nodes before install/provision.

    Raises RuntimeError when the spread between the slowest and fastest
    clock exceeds the threshold below.
    """
    result = execute('get_all_time')
    all_time = []
    for dates in result.values():
        try:
            (date, date_in_millisec) = dates
            all_time.append(int(date_in_millisec))
        except ValueError:
            # A host returned unexpected output; report it but keep going.
            print "ERROR: %s" % dates
    all_time.sort()
    # NOTE(review): despite the variable naming, `date +%s` yields whole
    # seconds, so 240 is a threshold in seconds, not milliseconds.
    if (all_time[-1] - all_time[0]) > 240:
        raise RuntimeError("Time not synced in the nodes,"
                           " Please sync and proceed:\n %s %s %s" %
                           (result, all_time[-1], all_time[0]))
    else:
        print "Time synced in the nodes, Proceeding to install/provision."
|
<commit_msg>Use multi-error compiletest (thanks to laumann)
<commit_before>fn main() {
//((1 + 1) & (1 + 1) == (1 + 1) & (1 + 1)); // does not work with compiletest yet due to multiple errors
(1 + 2) * 3 - 5 < 3 * (2 + 1) - 5; //~ERROR
}
<commit_after>fn main() {
((1 + 1) & (1 + 1) == (1 + 1) & (1 + 1));
//~^ ERROR
//~^^ ERROR
//~^^^ ERROR
(1 + 2) * 3 - 5 < 3 * (2 + 1) - 5; //~ERROR
}
|
<commit_msg>content: Remove unnecessary constructor from NotificationObserver.
BUG=98716
R=jam@chromium.org
Review URL: https://chromiumcodereview.appspot.com/10449076
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@139931 0039d316-1c4b-4281-b951-d872f2087c98
<commit_before>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_PUBLIC_BROWSER_NOTIFICATION_OBSERVER_H_
#define CONTENT_PUBLIC_BROWSER_NOTIFICATION_OBSERVER_H_
#pragma once
#include "content/common/content_export.h"
namespace content {
class NotificationDetails;
class NotificationSource;
// This is the base class for notification observers. When a matching
// notification is posted to the notification service, Observe is called.
class CONTENT_EXPORT NotificationObserver {
public:
virtual void Observe(int type,
const NotificationSource& source,
const NotificationDetails& details) = 0;
protected:
NotificationObserver() {}
virtual ~NotificationObserver() {}
};
} // namespace content
#endif // CONTENT_PUBLIC_BROWSER_NOTIFICATION_OBSERVER_H_
<commit_after>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_PUBLIC_BROWSER_NOTIFICATION_OBSERVER_H_
#define CONTENT_PUBLIC_BROWSER_NOTIFICATION_OBSERVER_H_
#pragma once
#include "content/common/content_export.h"
namespace content {
class NotificationDetails;
class NotificationSource;
// This is the base class for notification observers. When a matching
// notification is posted to the notification service, Observe is called.
class CONTENT_EXPORT NotificationObserver {
 public:
  // Invoked when a matching notification is posted. |type| identifies the
  // notification; |source| and |details| carry its origin and payload.
  virtual void Observe(int type,
                       const NotificationSource& source,
                       const NotificationDetails& details) = 0;
 protected:
  // Protected (non-public) destructor: observers are not owned or deleted
  // through this interface.
  virtual ~NotificationObserver() {}
};
} // namespace content
#endif // CONTENT_PUBLIC_BROWSER_NOTIFICATION_OBSERVER_H_
|
<commit_msg>Use meeting date if case has no date
<commit_before>package models;
import java.util.*;
public class PersonHistory {
public class Record {
public Date most_recent_charge;
public Rule rule;
public int count;
}
public ArrayList<Record> rule_records;
public PersonHistory(Person p) {
HashMap<Rule, Record> records = new HashMap<Rule, Record>();
for (Charge c : p.charges) {
Record r = records.get(c.rule);
if (r == null) {
r = new Record();
r.most_recent_charge = c.the_case.date;
r.count = 1;
r.rule = c.rule;
records.put(c.rule, r);
} else {
r.count++;
}
}
rule_records = new ArrayList<Record>(records.values());
}
}
<commit_after>package models;
import java.util.*;
/**
 * Per-rule summary of a person's disciplinary history: one {@link Record}
 * for each distinct rule the person has been charged under.
 */
public class PersonHistory {
    /** Aggregated information about one rule the person was charged under. */
    public class Record {
        public Date most_recent_charge;
        public Rule rule;
        public int count;
    }
    public ArrayList<Record> rule_records;
    /**
     * Builds one Record per distinct rule found in the person's charges,
     * counting occurrences and remembering the date of the first-seen
     * charge for that rule (falling back to the meeting's date when the
     * case itself carries no date).
     */
    public PersonHistory(Person p) {
        HashMap<Rule, Record> byRule = new HashMap<Rule, Record>();
        for (Charge charge : p.charges) {
            Record existing = byRule.get(charge.rule);
            if (existing != null) {
                existing.count++;
                continue;
            }
            Record fresh = new Record();
            Date when = charge.the_case.date;
            if (when == null) {
                // Undated case: use the date of the meeting it belongs to.
                when = charge.the_case.meeting.date;
            }
            fresh.most_recent_charge = when;
            fresh.rule = charge.rule;
            fresh.count = 1;
            byRule.put(charge.rule, fresh);
        }
        rule_records = new ArrayList<Record>(byRule.values());
    }
}
|
<commit_msg>fuzz: Fix fuzzer to compile in C++ mode
<commit_before>/* How to fuzz:
clang main.c -O2 -g -fsanitize=address,fuzzer -o fuzz
cp -r data temp
./fuzz temp/ -dict=gltf.dict -jobs=12 -workers=12
*/
#define CGLTF_IMPLEMENTATION
#include "../cgltf.h"
int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
{
cgltf_options options = {0};
cgltf_data* data = NULL;
cgltf_result res = cgltf_parse(&options, Data, Size, &data);
if (res == cgltf_result_success)
{
cgltf_validate(data);
cgltf_free(data);
}
return 0;
}
<commit_after>/* How to fuzz:
clang main.c -O2 -g -fsanitize=address,fuzzer -o fuzz
cp -r data temp
./fuzz temp/ -dict=gltf.dict -jobs=12 -workers=12
*/
#define CGLTF_IMPLEMENTATION
#include "../cgltf.h"
/* libFuzzer entry point: try to parse the input buffer as glTF; on success
 * run the validator and free the parsed data. Always returns 0 as required
 * by the libFuzzer contract. */
int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
{
    /* First member spelled out (instead of {0}) so the aggregate
     * initializer also compiles cleanly in C++ mode. */
    cgltf_options options = {cgltf_file_type_invalid};
    cgltf_data* data = NULL;
    cgltf_result res = cgltf_parse(&options, Data, Size, &data);
    if (res == cgltf_result_success)
    {
        cgltf_validate(data);
        cgltf_free(data);
    }
    return 0;
}
|
<commit_msg>Make NameTag available to modules
<commit_before>/*
* Copyright 2014 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.logic.nameTags;
import org.terasology.entitySystem.Component;
import org.terasology.rendering.nui.Color;
/**
* Will make the entity have a name tag overhead in the 3D view.
*
* The text on name tag is based on the {@link org.terasology.logic.common.DisplayNameComponent} this entity.
*
* The color of the name tag is based on the {@link org.terasology.network.ColorComponent} of this entity
*/
public class NameTagComponent implements Component {
public float yOffset = 0.3f;
public String text;
public Color textColor = Color.WHITE;
}
<commit_after>/*
* Copyright 2014 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.logic.nameTags;
import org.terasology.entitySystem.Component;
import org.terasology.module.sandbox.API;
import org.terasology.rendering.nui.Color;
/**
* Will make the entity have a name tag overhead in the 3D view.
*
* The text on name tag is based on the {@link org.terasology.logic.common.DisplayNameComponent} this entity.
*
* The color of the name tag is based on the {@link org.terasology.network.ColorComponent} of this entity
*/
@API
public class NameTagComponent implements Component {
    // Vertical offset of the tag above the entity's position (world units).
    public float yOffset = 0.3f;
    // Text rendered on the tag.
    public String text;
    // Color used to render the text.
    public Color textColor = Color.WHITE;
}
<commit_msg>Use NS_ENUM instead of a plain enum to make the intent clearer
<commit_before>
typedef enum {
UILabelCountingMethodEaseInOut,
UILabelCountingMethodEaseIn,
UILabelCountingMethodEaseOut,
UILabelCountingMethodLinear
} UILabelCountingMethod;
typedef NSString* (^UICountingLabelFormatBlock)(float value);
typedef NSAttributedString* (^UICountingLabelAttributedFormatBlock)(float value);
@interface UICountingLabel : UILabel
@property (nonatomic, strong) NSString *format;
@property (nonatomic, assign) UILabelCountingMethod method;
@property (nonatomic, assign) NSTimeInterval animationDuration;
@property (nonatomic, copy) UICountingLabelFormatBlock formatBlock;
@property (nonatomic, copy) UICountingLabelAttributedFormatBlock attributedFormatBlock;
@property (nonatomic, copy) void (^completionBlock)();
-(void)countFrom:(float)startValue to:(float)endValue;
-(void)countFrom:(float)startValue to:(float)endValue withDuration:(NSTimeInterval)duration;
-(void)countFromCurrentValueTo:(float)endValue;
-(void)countFromCurrentValueTo:(float)endValue withDuration:(NSTimeInterval)duration;
-(void)countFromZeroTo:(float)endValue;
-(void)countFromZeroTo:(float)endValue withDuration:(NSTimeInterval)duration;
- (CGFloat)currentValue;
@end
<commit_after>
/** Easing curves available for the counting animation. */
typedef NS_ENUM(NSInteger, UILabelCountingMethod) {
    UILabelCountingMethodEaseInOut,
    UILabelCountingMethodEaseIn,
    UILabelCountingMethodEaseOut,
    UILabelCountingMethodLinear
};
/** Formats the current animated value into the label's plain text. */
typedef NSString* (^UICountingLabelFormatBlock)(float value);
/** Formats the current animated value into attributed text. */
typedef NSAttributedString* (^UICountingLabelAttributedFormatBlock)(float value);
/** A UILabel that animates its displayed number from one value to another. */
@interface UICountingLabel : UILabel
/** printf-style format applied to the value, e.g. @"%d". */
@property (nonatomic, strong) NSString *format;
/** Easing method used while counting. */
@property (nonatomic, assign) UILabelCountingMethod method;
/** Duration used when no explicit duration is passed to the count methods. */
@property (nonatomic, assign) NSTimeInterval animationDuration;
/** Optional block-based formatter; presumably overrides `format` when set
    (confirm in the implementation). */
@property (nonatomic, copy) UICountingLabelFormatBlock formatBlock;
@property (nonatomic, copy) UICountingLabelAttributedFormatBlock attributedFormatBlock;
/** Invoked when the counting animation completes. */
@property (nonatomic, copy) void (^completionBlock)();
-(void)countFrom:(float)startValue to:(float)endValue;
-(void)countFrom:(float)startValue to:(float)endValue withDuration:(NSTimeInterval)duration;
-(void)countFromCurrentValueTo:(float)endValue;
-(void)countFromCurrentValueTo:(float)endValue withDuration:(NSTimeInterval)duration;
-(void)countFromZeroTo:(float)endValue;
-(void)countFromZeroTo:(float)endValue withDuration:(NSTimeInterval)duration;
/** The value the label is currently showing. */
- (CGFloat)currentValue;
@end
|
<commit_msg>Make a call to the beacon endpoint
<commit_before>'''
Example beacon to use with salt-proxy
.. code-block:: yaml
beacons:
proxy_example:
foo: bar
'''
# Import Python libs
from __future__ import absolute_import
# Important: If used with salt-proxy
# this is required for the beacon to load!!!
__proxyenabled__ = ['*']
__virtualname__ = 'proxy_example'
import logging
log = logging.getLogger(__name__)
def __virtual__():
'''
Trivially let the beacon load for the test example.
For a production beacon we should probably have some expression here.
'''
return True
def validate(config):
'''
Validate the beacon configuration
'''
if not isinstance(config, dict):
log.info('Configuration for rest_example beacon must be a dictionary.')
return False
return True
def beacon(config):
'''
Called several times each second
https://docs.saltstack.com/en/latest/topics/beacons/#the-beacon-function
.. code-block:: yaml
beacons:
proxy_example:
foo: bar
'''
# TBD
# Call rest.py and return the result
ret = [{'foo': config['foo']}]
return ret
<commit_after>'''
Example beacon to use with salt-proxy
.. code-block:: yaml
beacons:
proxy_example:
endpoint: beacon
'''
# Import Python libs
from __future__ import absolute_import
import logging
# Import salt libs
import salt.utils.http
# Important: If used with salt-proxy
# this is required for the beacon to load!!!
__proxyenabled__ = ['*']
__virtualname__ = 'proxy_example'
log = logging.getLogger(__name__)
def __virtual__():
    '''
    Trivially let the beacon load for the test example.
    For a production beacon we should probably have some expression here.
    '''
    return True
def validate(config):
    '''
    Validate the beacon configuration: it must be a dictionary.
    '''
    if not isinstance(config, dict):
        log.info('Configuration for rest_example beacon must be a dictionary.')
        return False
    return True
def beacon(config):
    '''
    Called several times each second
    https://docs.saltstack.com/en/latest/topics/beacons/#the-beacon-function
    .. code-block:: yaml
        beacons:
          proxy_example:
            endpoint: beacon
    '''
    # Important!!!
    # Although this toy example makes an HTTP call
    # to get beacon information
    # please be advised that doing CPU or IO intensive
    # operations in this method will cause the beacon loop
    # to block.
    # Endpoint URL = the proxied device's base URL + configured endpoint.
    beacon_url = '{}{}'.format(__opts__['proxy']['url'],
                               config['endpoint'])
    r = salt.utils.http.query(beacon_url,
                              decode_type='json',
                              decode=True)
    # Salt expects a list of event dicts from a beacon function.
    return [r['dict']]
|
<commit_msg>Add / route to index.
Remove old requires_auth decorator.
<commit_before>from app import app
from app.slot import controller as con
import config
from auth import requires_auth
from flask import render_template
from flask.ext.login import login_required
@app.route('/dashboard')
# @requires_auth
@login_required
def index():
return con.index()
@app.route('/new', methods=['GET', 'POST'])
@requires_auth
def render_new_procedure_form():
return con.render_new_procedure_form()
@app.route('/sms', methods=['POST'])
@requires_auth
def receive_sms():
return con.receive_sms()<commit_after>from app import app
from app.slot import controller as con
import config
from auth import requires_auth
from flask import render_template
from flask.ext.login import login_required
@app.route('/')
@app.route('/dashboard')
@login_required
def index():
    """Render the dashboard (also mounted at the site root)."""
    return con.index()
@app.route('/new', methods=['GET', 'POST'])
@requires_auth
def render_new_procedure_form():
    """Show (GET) or submit (POST) the new-procedure form."""
    return con.render_new_procedure_form()
@app.route('/sms', methods=['POST'])
@requires_auth
def receive_sms():
    """Inbound SMS webhook (presumably from the SMS provider); delegates
    to the controller."""
    return con.receive_sms()
<commit_msg>Remove debug flag from app
<commit_before>from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run(debug=True)
<commit_after>from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
    """Root endpoint; returns a static greeting."""
    return "Hello World!"
if __name__ == "__main__":
    # Debug mode deliberately not enabled (unsafe outside development).
    app.run()
|
<commit_msg>Patch by Russel Owen: if we have command line arguments zap pyc files
in the directories given.
<commit_before>import os
import sys
doit = 1
def main():
if os.name == 'mac':
import macfs
fss, ok = macfs.GetDirectory('Directory to zap pyc files in')
if not ok:
sys.exit(0)
dir = fss.as_pathname()
zappyc(dir)
else:
if not sys.argv[1:]:
print 'Usage: zappyc dir ...'
sys.exit(1)
for dir in sys.argv[1:]:
zappyc(dir)
def zappyc(dir):
os.path.walk(dir, walker, None)
def walker(dummy, top, names):
for name in names:
if name[-4:] == '.pyc':
path = os.path.join(top, name)
print 'Zapping', path
if doit:
os.unlink(path)
if __name__ == '__main__':
main()
<commit_after>"""Recursively zap all .pyc files"""
import os
import sys
# set doit true to actually delete files
# set doit false to just print what would be deleted
doit = 1
def main():
    # With no arguments: on classic Mac OS prompt for a folder, otherwise
    # print usage and exit. With arguments: zap each directory given.
    if not sys.argv[1:]:
        if os.name == 'mac':
            import macfs
            fss, ok = macfs.GetDirectory('Directory to zap pyc files in')
            if not ok:
                sys.exit(0)
            dir = fss.as_pathname()
            zappyc(dir)
        else:
            print 'Usage: zappyc dir ...'
            sys.exit(1)
    for dir in sys.argv[1:]:
        zappyc(dir)
def zappyc(dir):
    # Recursively visit dir, removing (or just listing) .pyc files.
    os.path.walk(dir, walker, None)
def walker(dummy, top, names):
    # os.path.walk callback: `top` is the current directory and `names` its
    # entries; delete every *.pyc found (or only print it when doit is 0).
    for name in names:
        if name[-4:] == '.pyc':
            path = os.path.join(top, name)
            print 'Zapping', path
            if doit:
                os.unlink(path)
if __name__ == '__main__':
    main()
|
<commit_msg>Remove print statement for DD currents
<commit_before>
namespace mocc { namespace sn {
void SnSweeper_DD::sweep( int group ) {
// Store the transport cross section somewhere useful
for( auto &xsr: *xs_mesh_ ) {
real_t xstr = xsr.xsmactr()[group];
for( auto &ireg: xsr.reg() ) {
xstr_[ireg] = xstr;
}
}
flux_1g_ = flux_[ std::slice(n_reg_*group, n_reg_, 1) ];
// Perform inner iterations
for( size_t inner=0; inner<n_inner_; inner++ ) {
// Set the source (add upscatter and divide by 4PI)
source_->self_scatter( group, flux_1g_, q_ );
if( inner == n_inner_-1 && coarse_data_ ) {
// Wipe out the existing currents
coarse_data_->current.col( group ) = 0.0;
this->sweep_1g<sn::Current, CellWorker_DD>( group,
cell_worker_ );
std::cout << coarse_data_->current.col( group ) << std::endl << std::endl;
} else {
this->sweep_1g<sn::NoCurrent, CellWorker_DD>( group,
cell_worker_ );
}
}
flux_[ std::slice(n_reg_*group, n_reg_, 1) ] = flux_1g_;
return;
}
} }
<commit_after>
namespace mocc { namespace sn {
void SnSweeper_DD::sweep( int group ) {
// Store the transport cross section somewhere useful
for( auto &xsr: *xs_mesh_ ) {
real_t xstr = xsr.xsmactr()[group];
for( auto &ireg: xsr.reg() ) {
xstr_[ireg] = xstr;
}
}
flux_1g_ = flux_[ std::slice(n_reg_*group, n_reg_, 1) ];
// Perform inner iterations
for( size_t inner=0; inner<n_inner_; inner++ ) {
// Set the source (add upscatter and divide by 4PI)
source_->self_scatter( group, flux_1g_, q_ );
if( inner == n_inner_-1 && coarse_data_ ) {
// Wipe out the existing currents
coarse_data_->current.col( group ) = 0.0;
this->sweep_1g<sn::Current, CellWorker_DD>( group,
cell_worker_ );
} else {
this->sweep_1g<sn::NoCurrent, CellWorker_DD>( group,
cell_worker_ );
}
}
flux_[ std::slice(n_reg_*group, n_reg_, 1) ] = flux_1g_;
return;
}
} }
|
<commit_msg>Fix broken import in native scheduler
<commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
from paasta_tools import native_mesos_scheduler
from paasta_tools.mesos_tools import status_mesos_tasks_verbose
from paasta_tools.utils import calculate_tail_lines
from paasta_tools.utils import compose_job_id
from paasta_tools.utils import paasta_print
def perform_command(command, service, instance, cluster, verbose, soa_dir):
if verbose > 0:
tail_lines = calculate_tail_lines(verbose_level=verbose)
else:
tail_lines = 0
# We have to add a spacer at the end to make sure we only return
# things for service.main and not service.main_foo
task_id_prefix = "%s%s" % (compose_job_id(service, instance), native_mesos_scheduler.MESOS_TASK_SPACER)
if command == 'status':
paasta_print(status_mesos_tasks_verbose(
job_id=task_id_prefix,
get_short_task_id=lambda x: x,
tail_lines=tail_lines,
))
<commit_after>from __future__ import absolute_import
from __future__ import unicode_literals
from paasta_tools.frameworks.native_scheduler import MESOS_TASK_SPACER
from paasta_tools.mesos_tools import status_mesos_tasks_verbose
from paasta_tools.utils import calculate_tail_lines
from paasta_tools.utils import compose_job_id
from paasta_tools.utils import paasta_print
def perform_command(command, service, instance, cluster, verbose, soa_dir):
if verbose > 0:
tail_lines = calculate_tail_lines(verbose_level=verbose)
else:
tail_lines = 0
# We have to add a spacer at the end to make sure we only return
# things for service.main and not service.main_foo
task_id_prefix = "%s%s" % (compose_job_id(service, instance), MESOS_TASK_SPACER)
if command == 'status':
paasta_print(status_mesos_tasks_verbose(
job_id=task_id_prefix,
get_short_task_id=lambda x: x,
tail_lines=tail_lines,
))
|
<commit_msg>Fix attach failing when no workspace is open
<commit_before>package me.coley.recaf.ui.controls;
import javafx.scene.control.*;
import javafx.scene.control.cell.ComboBoxListCell;
import javafx.scene.layout.HBox;
import me.coley.recaf.control.gui.GuiController;
import me.coley.recaf.util.UiUtil;
import me.coley.recaf.workspace.*;
/**
* Cell/renderer for displaying {@link JavaResource}s.
*/
public class ResourceSelectionCell extends ComboBoxListCell<JavaResource> {
private final GuiController controller;
/**
* @param controller
* Controller to use.
*/
public ResourceSelectionCell(GuiController controller) {
this.controller = controller;
}
@Override
public void updateItem(JavaResource item, boolean empty) {
super.updateItem(item, empty);
if(!empty) {
HBox g = new HBox();
if(item != null) {
String t = item.toString();
// Add icon for resource types
g.getChildren().add(new IconView(UiUtil.getResourceIcon(item)));
// Indicate which resource is the primary resource
if(item == controller.getWorkspace().getPrimary()) {
Label lbl = new Label(" [Primary]");
lbl.getStyleClass().add("bold");
g.getChildren().add(lbl);
}
setText(t);
}
setGraphic(g);
} else {
setGraphic(null);
setText(null);
}
}
}<commit_after>package me.coley.recaf.ui.controls;
import javafx.scene.control.*;
import javafx.scene.control.cell.ComboBoxListCell;
import javafx.scene.layout.HBox;
import me.coley.recaf.control.gui.GuiController;
import me.coley.recaf.util.UiUtil;
import me.coley.recaf.workspace.*;
/**
* Cell/renderer for displaying {@link JavaResource}s.
*/
public class ResourceSelectionCell extends ComboBoxListCell<JavaResource> {
private final GuiController controller;
/**
* @param controller
* Controller to use.
*/
public ResourceSelectionCell(GuiController controller) {
this.controller = controller;
}
@Override
public void updateItem(JavaResource item, boolean empty) {
super.updateItem(item, empty);
if(!empty) {
HBox g = new HBox();
if(item != null) {
String t = item.toString();
// Add icon for resource types
g.getChildren().add(new IconView(UiUtil.getResourceIcon(item)));
// Indicate which resource is the primary resource
if(controller.getWorkspace() != null && item == controller.getWorkspace().getPrimary()) {
Label lbl = new Label(" [Primary]");
lbl.getStyleClass().add("bold");
g.getChildren().add(lbl);
}
setText(t);
}
setGraphic(g);
} else {
setGraphic(null);
setText(null);
}
}
} |
<commit_msg>Fix random algo to evenly distribute.
<commit_before>
static uint64_t sqrt64(uint64_t n) {
uint64_t g = UINT64_C(1) << 31;
for (uint64_t c = g; c; g |= c) {
if (g * g > n) {
g ^= c;
}
c >>= 1;
}
return g;
}
static uint64_t get_split(uint64_t len) {
uint64_t rnd;
rand_fill(&rnd, sizeof(rnd));
rnd %= (len * len);
return sqrt64(rnd) + 1;
}
int main(int __attribute__ ((unused)) argc, char __attribute__ ((unused)) *argv[]) {
rand_init();
for (uint64_t len = 1397; len;) {
uint64_t consume = get_split(len);
fprintf(stderr, "consume %ju bytes\n", (uintmax_t) consume);
len -= consume;
}
rand_cleanup();
}
<commit_after>
static uint64_t get_split(uint64_t total_len, uint64_t remaining_len) {
uint64_t rnd;
rand_fill(&rnd, sizeof(rnd));
rnd %= total_len;
return rnd > remaining_len ? remaining_len : rnd;
}
int main(int __attribute__ ((unused)) argc, char __attribute__ ((unused)) *argv[]) {
rand_init();
uint64_t total_len = 1397;
for (uint64_t remaining = total_len, consume = 0; remaining; remaining -= consume) {
consume = get_split(total_len, remaining);
fprintf(stderr, "consume %ju bytes\n", (uintmax_t) consume);
}
rand_cleanup();
}
|
<commit_msg>Support for socket life-time events.
<commit_before>import { takeLatest } from 'redux-saga/effects';
import { eventChannel } from 'redux-saga';
import { actionsIDs } from '../constants'
import { fetchPatients } from './fetchPatients';
import { createStartHandler } from 'redux-saga-subscriptions';
import socket from '../../../socketIo/socket';
const msgChannel = 'patients-modified';
const getFetchEmit = (emit) => () => Promise.resolve(fetchPatients()).then(emit);
const createChannel = (payload?) => eventChannel((emit) => {
const fetch = getFetchEmit(emit);
fetch();
socket.on(msgChannel, fetch);
return () => {
socket.off(msgChannel);
};
});
export const watchPatientsSubscription = function *() {
const startHandler = createStartHandler([actionsIDs.STOP_SUBSCRIBE]);
yield takeLatest(actionsIDs.START_SUBSCRIBE, startHandler(createChannel));
};
<commit_after>import { takeLatest, put } from 'redux-saga/effects';
import { eventChannel } from 'redux-saga';
import { actionsIDs } from '../constants'
import { fetchPatients } from './fetchPatients';
import { createStartHandler } from 'redux-saga-subscriptions';
import socket from '../../../socketIo/socket';
const msgChannel = 'patients-modified';
const getFetchEmit = (emit) => () => Promise.resolve(fetchPatients()).then(emit);
const createChannel = (payload?) => eventChannel((emit) => {
const fetch = getFetchEmit(emit);
fetch();
socket.on(msgChannel, fetch);
socket.on('disconnect', () => emit(put({type: 'failing-socket'})));
socket.on('connect', () => emit(put({type: 'socket-alive'})));
return () => {
socket.off(msgChannel);
};
});
export const watchPatientsSubscription = function *() {
const startHandler = createStartHandler([actionsIDs.STOP_SUBSCRIBE]);
yield takeLatest(actionsIDs.START_SUBSCRIBE, startHandler(createChannel));
};
|
<commit_msg>Add priority option to send_queued
<commit_before>__version__ = '2.0.0'
def get_account(using=None):
from django.conf import settings
accounts = settings.SMSGATEWAY_ACCOUNTS
if using is not None:
return accounts[using]
else:
return accounts[accounts['__default__']]
def send(to, msg, signature, using=None, reliable=False):
"""
Send an SMS message immediately.
* 'to' is a semicolon separated list of phone numbers with an international
prefix (+32... etc).
* 'msg' is the message itself as a unicode object (max 160 characters).
* 'signature' is where the message comes from. Depends on the backend in use.
* 'using' is an optional parameter where you can specify a specific account
to send messages from.
"""
from smsgateway.backends import get_backend
from smsgateway.sms import SMSRequest
account_dict = get_account(using)
backend = get_backend(account_dict['backend'])
sms_request = SMSRequest(to, msg, signature, reliable=reliable)
return backend.send(sms_request, account_dict)
def send_queued(to, msg, signature, using=None, reliable=False):
"""
Place SMS message in queue to be sent.
"""
from smsgateway.models import QueuedSMS
QueuedSMS.objects.create(
to=to,
content=msg,
signature=signature,
using=using if using is not None else '__none__',
reliable=reliable
)
<commit_after>__version__ = '2.0.0'
def get_account(using=None):
from django.conf import settings
accounts = settings.SMSGATEWAY_ACCOUNTS
if using is not None:
return accounts[using]
else:
return accounts[accounts['__default__']]
def send(to, msg, signature, using=None, reliable=False):
"""
Send an SMS message immediately.
* 'to' is a semicolon separated list of phone numbers with an international
prefix (+32... etc).
* 'msg' is the message itself as a unicode object (max 160 characters).
* 'signature' is where the message comes from. Depends on the backend in use.
* 'using' is an optional parameter where you can specify a specific account
to send messages from.
"""
from smsgateway.backends import get_backend
from smsgateway.sms import SMSRequest
account_dict = get_account(using)
backend = get_backend(account_dict['backend'])
sms_request = SMSRequest(to, msg, signature, reliable=reliable)
return backend.send(sms_request, account_dict)
def send_queued(to, msg, signature, using=None, reliable=False, priority=None):
"""
Place SMS message in queue to be sent.
"""
from smsgateway.models import QueuedSMS
queued_sms = QueuedSMS(
to=to,
content=msg,
signature=signature,
using=using if using is not None else '__none__',
reliable=reliable
)
if priority is not None:
queued_sms.priority = priority
queued_sms.save()
|
<commit_msg>Make sure ObjectId always has the hash version and the raw version
<commit_before>pub struct ObjectId {
pub hash: &'static str,
bytes: Vec<u8>
}
impl ObjectId {
pub fn new(hash: &'static str) -> ObjectId {
ObjectId {
hash: hash,
bytes: Vec::new()
}
}
}
<commit_after>use conversion;
pub static RAW_SIZE: uint = 20;
pub static HEX_SIZE: uint = 40;
#[deriving(PartialEq, Show)]
pub struct ObjectId {
pub hash: String,
bytes: Vec<u8>
}
impl ObjectId {
pub fn from_string(hash: &str) -> ObjectId {
ObjectId {
hash: hash.to_string(),
bytes: conversion::hex_string_to_bytes(hash)
}
}
pub fn from_bytes(bytes: &[u8]) -> ObjectId {
ObjectId {
hash: conversion::bytes_to_hex_string(bytes),
bytes: bytes.into_vec()
}
}
}
|
<commit_msg>Use install_requires arg so dependencies are installed
<commit_before>from distutils.core import setup
import nagios
setup(name='nagios-api',
version=nagios.version,
description='Control nagios using an API',
author='Mark Smith',
author_email='mark@qq.is',
license='BSD New (3-clause) License',
long_description=open('README.md').read(),
url='https://github.com/xb95/nagios-api',
packages=['nagios'],
scripts=['nagios-cli', 'nagios-api'],
requires=[
'diesel(>=3.0)',
'greenlet(==0.3.4)',
'requests'
]
)
<commit_after>from distutils.core import setup
import nagios
setup(name='nagios-api',
version=nagios.version,
description='Control nagios using an API',
author='Mark Smith',
author_email='mark@qq.is',
license='BSD New (3-clause) License',
long_description=open('README.md').read(),
url='https://github.com/xb95/nagios-api',
packages=['nagios'],
scripts=['nagios-cli', 'nagios-api'],
install_requires=[
'diesel>=3.0',
'greenlet==0.3.4',
'requests'
]
)
|
<commit_msg>Add logging when netty channels are closed due to inactivity.
<commit_before>/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.worker.netty;
import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.timeout.IdleState;
import io.netty.handler.timeout.IdleStateEvent;
/**
* Closes the channel if it has been idle for too long.
*/
public class IdleReadHandler extends ChannelDuplexHandler {
/**
* Creates a new idle read handler.
*/
public IdleReadHandler() {}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
if (evt instanceof IdleStateEvent) {
if (((IdleStateEvent) evt).state() == IdleState.READER_IDLE) {
ctx.close();
}
}
}
}
<commit_after>/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.worker.netty;
import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.timeout.IdleState;
import io.netty.handler.timeout.IdleStateEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Closes the channel if it has been idle for too long.
*/
public class IdleReadHandler extends ChannelDuplexHandler {
private static final Logger LOG = LoggerFactory.getLogger(IdleReadHandler.class);
/**
* Creates a new idle read handler.
*/
public IdleReadHandler() {}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
if (evt instanceof IdleStateEvent) {
if (((IdleStateEvent) evt).state() == IdleState.READER_IDLE) {
LOG.warn("Closing netty channel to {} due to inactivity.", ctx.channel().remoteAddress());
ctx.close();
}
}
}
}
|
<commit_msg>Fix URL bug in `UserListService`
I had tried to clean up the URL handling in `UserListService`, but didn't
test it properly and we rolled it out in a broken state.
We probably want to add some tests for the service to catch problems
like this in the future.
Fixes #24
<commit_before>import { Injectable } from '@angular/core';
import { Http } from '@angular/http';
import { User } from './user';
import { Observable } from "rxjs";
@Injectable()
export class UserListService {
private userUrl: string = API_URL + "users/";
constructor(private http:Http) { }
getUsers(): Observable<User[]> {
return this.http.request(this.userUrl + 'users').map(res => res.json());
}
getUserById(id: string): Observable<User> {
return this.http.request(this.userUrl + id).map(res => res.json());
}
}<commit_after>import { Injectable } from '@angular/core';
import { Http } from '@angular/http';
import { User } from './user';
import { Observable } from "rxjs";
@Injectable()
export class UserListService {
private userUrl: string = API_URL + "users";
constructor(private http:Http) { }
getUsers(): Observable<User[]> {
return this.http.request(this.userUrl).map(res => res.json());
}
getUserById(id: string): Observable<User> {
return this.http.request(this.userUrl + "/" + id).map(res => res.json());
}
} |
<commit_msg>ENH: Print warnings on one line & no code
<commit_before>from textwrap import dedent
import warnings
# Statsmodels is slow to fix upstream future warnings
# This module is imported before the stats module so
# so any FutureWarnings with the imports are suppressed
warnings.filterwarnings(
'ignore',
category=FutureWarning,
module='statsmodels')
warnings.filterwarnings(
'ignore',
category=FutureWarning,
module='pandas')
# These are rare
warnings.filterwarnings(
'ignore',
category=FutureWarning,
module='scipy')
class PlotnineError(Exception):
"""
Exception for ggplot errors
"""
def __init__(self, *args):
args = [dedent(arg) for arg in args]
self.message = " ".join(args)
def __str__(self):
return repr(self.message)
class PlotnineWarning(UserWarning):
"""
Warnings for ggplot inconsistencies
"""
pass
<commit_after>from textwrap import dedent
import warnings
# Statsmodels is slow to fix upstream future warnings
# This module is imported before the stats module so
# so any FutureWarnings with the imports are suppressed
warnings.filterwarnings(
'ignore',
category=FutureWarning,
module='statsmodels')
warnings.filterwarnings(
'ignore',
category=FutureWarning,
module='pandas')
# These are rare
warnings.filterwarnings(
'ignore',
category=FutureWarning,
module='scipy')
# Show the warnings on one line, leaving out any code makes the
# message clear
def warning_format(message, category, filename, lineno, file=None, line=None):
fmt = '{}:{}: {}: {}\n'.format
return fmt(filename, lineno, category.__name__, message)
warnings.formatwarning = warning_format
class PlotnineError(Exception):
"""
Exception for ggplot errors
"""
def __init__(self, *args):
args = [dedent(arg) for arg in args]
self.message = " ".join(args)
def __str__(self):
return repr(self.message)
class PlotnineWarning(UserWarning):
"""
Warnings for ggplot inconsistencies
"""
pass
|
<commit_msg>Save preferences to application context
<commit_before>package fi.jyu.ln.luontonurkka.tools;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
/**
* Created by jarno on 10/22/16.
*/
public class SettingsManager {
private SharedPreferences sp;
public SettingsManager(Activity activity) {
sp = activity.getPreferences(Context.MODE_PRIVATE);
}
public void setBool(String preference, boolean bool) {
SharedPreferences.Editor editor = sp.edit();
editor.putBoolean(preference, bool);
editor.commit();
}
public boolean getBool(String preference) {
return sp.getBoolean(preference, false);
}
}
<commit_after>package fi.jyu.ln.luontonurkka.tools;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
/**
* Created by Jarno 22.10.16
*/
public class SettingsManager {
private SharedPreferences sp;
public SettingsManager(Activity activity) {
PreferenceManager.getDefaultSharedPreferences(activity.getApplicationContext());
sp = activity.getPreferences(Context.MODE_PRIVATE);
}
public void setBool(String preference, boolean bool) {
SharedPreferences.Editor editor = sp.edit();
editor.putBoolean(preference, bool);
editor.commit();
}
public boolean getBool(String preference) {
return sp.getBoolean(preference, false);
}
}
|
<commit_msg>Add inter-hive communication lotsamessages test
<commit_before>from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
### def test_ihc_lotsamessages():
<commit_after>from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
def test_lotsamessages_ihc():
"""
Test the lotsamessages demo with inter-hive communication
"""
assert lotsamessages.main(
num_experiments=20, num_steps=20, subprocesses=4) is True
### def test_ihc_lotsamessages():
|
<commit_msg>Fix redirectValue for circular type
<commit_before>package funk
import "reflect"
func equal(expected, actual interface{}) bool {
if expected == nil || actual == nil {
return expected == actual
}
return reflect.DeepEqual(expected, actual)
}
func sliceElem(rtype reflect.Type) reflect.Type {
if rtype.Kind() == reflect.Slice || rtype.Kind() == reflect.Array {
return sliceElem(rtype.Elem())
}
return rtype
}
func redirectValue(value reflect.Value) reflect.Value {
for {
if !value.IsValid() || value.Kind() != reflect.Ptr {
return value
}
value = value.Elem()
}
}
func makeSlice(value reflect.Value, values ...int) reflect.Value {
sliceType := sliceElem(value.Type())
size := value.Len()
cap := size
if len(values) > 0 {
size = values[0]
}
if len(values) > 1 {
cap = values[1]
}
return reflect.MakeSlice(reflect.SliceOf(sliceType), size, cap)
}
<commit_after>package funk
import "reflect"
func equal(expected, actual interface{}) bool {
if expected == nil || actual == nil {
return expected == actual
}
return reflect.DeepEqual(expected, actual)
}
func sliceElem(rtype reflect.Type) reflect.Type {
for {
if rtype.Kind() != reflect.Slice && rtype.Kind() != reflect.Array {
return rtype
}
rtype = rtype.Elem()
}
}
func redirectValue(value reflect.Value) reflect.Value {
for {
if !value.IsValid() || value.Kind() != reflect.Ptr {
return value
}
res := reflect.Indirect(value)
// Test for a circular type.
if res.Kind() == reflect.Ptr && value.Pointer() == res.Pointer() {
return value
}
value = res
}
}
func makeSlice(value reflect.Value, values ...int) reflect.Value {
sliceType := sliceElem(value.Type())
size := value.Len()
cap := size
if len(values) > 0 {
size = values[0]
}
if len(values) > 1 {
cap = values[1]
}
return reflect.MakeSlice(reflect.SliceOf(sliceType), size, cap)
}
|
<commit_msg>Fix trivial error: return void in main function.
<commit_before>
int main (int argc, char * argv [])
{
__u64 rdtsc = 0;
__u64 i = 0;
int fileStat = open (argv [1], O_RDONLY, S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP);
while (read (fileStat, &rdtsc, sizeof (rdtsc) ) > 0) {
printf ("%llu:\t%llu\n", ++i, rdtsc);
}
close (fileStat);
}
<commit_after>
int main (int argc, char * argv [])
{
__u64 rdtsc = 0;
__u64 i = 0;
int fileStat = open (argv [1], O_RDONLY, S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP);
while (read (fileStat, &rdtsc, sizeof (rdtsc) ) > 0) {
printf ("%llu:\t%llu\n", ++i, rdtsc);
}
close (fileStat);
return EXIT_SUCCESS;
}
|
<commit_msg>Replace async function with Promise
<commit_before>export async function handleResponse(res : Response) : Promise<any> {
const data = await res.json();
if (res.status < 300) {
return data;
}
if (data.error) {
throw new Error(data.error);
}
throw new Error(res.status.toString());
}
<commit_after>export function handleResponse(res : Response) : Promise<any> {
return res.json().then(data => {
if (res.status < 300) {
return data;
}
if (data.error) {
throw new Error(data.error);
}
throw new Error(res.status.toString());
});
}
|
<commit_msg>Add compact allocation on BioU.
<commit_before>
int cpumap(int i, int nprocs)
{
return (i / 36) * 36 + (i % 2) * 18 + (i % 36 / 2);
}
#elif GUADALUPE_MIC_COMPACT
int cpumap(int i, int nprocs)
{
return (i + 1) % 228;
}
#else
int cpumap(int id, int nprocs)
{
return id % nprocs;
}
#endif
<commit_after>
int cpumap(int i, int nprocs)
{
return (i / 36) * 36 + (i % 2) * 18 + (i % 36 / 2);
}
#elif GUADALUPE_MIC_COMPACT
int cpumap(int i, int nprocs)
{
return (i + 1) % 228;
}
#elif BIOU_COMPACT
int cpumap(int i, int nprocs)
{
return (i % 4) * 32 + i / 4;
}
#else
int cpumap(int id, int nprocs)
{
return id % nprocs;
}
#endif
|
<commit_msg>Remove the init rect when init canvas
<commit_before>/**
* Created by gpl on 15/11/30.
*/
module App.Controllers {
import Canvas = App.Services.Canvas;
import Logger = App.Services.Logger;
export class CanvasCtrl {
static $inject = ['Logger', 'Canvas'];
private canvas:Canvas;
private logger:Logger;
constructor(logger:Logger, canvas:Canvas) {
this.canvas = canvas;
this.logger = logger;
this.logger.info('canvas ctrl init');
// create a rectangle object
var rect = new fabric.Rect({
left: 100,
top: 100,
fill: 'white',
width: 20,
height: 20,
angle: 45
});
this.canvas.add(rect);
}
public startDraw():void {
this.canvas.startDrawing()
}
public stopDraw():void {
this.canvas.stopDrawing();
}
public startPolygon():void {
this.logger.info('start drawing polygon, click first point');
this.canvas.startPolygon();
}
public stopPolygon():void {
this.logger.info('stop drawing polygon');
this.canvas.stopPolygon();
}
}
}<commit_after>/**
* Created by gpl on 15/11/30.
*/
module App.Controllers {
import Canvas = App.Services.Canvas;
import Logger = App.Services.Logger;
export class CanvasCtrl {
static $inject = ['Logger', 'Canvas'];
private canvas:Canvas;
private logger:Logger;
constructor(logger:Logger, canvas:Canvas) {
this.canvas = canvas;
this.logger = logger;
this.logger.info('canvas ctrl init');
}
public startDraw():void {
this.canvas.startDrawing()
}
public stopDraw():void {
this.canvas.stopDrawing();
}
public startPolygon():void {
this.logger.info('start drawing polygon, click first point');
this.canvas.startPolygon();
}
public stopPolygon():void {
this.logger.info('stop drawing polygon');
this.canvas.stopPolygon();
}
}
} |
<commit_msg>Set dj-stripe as stripe app info
https://stripe.com/docs/building-plugins#setappinfo
<commit_before>from __future__ import absolute_import, division, print_function, unicode_literals
import pkg_resources
from . import checks # noqa: Register the checks
__version__ = pkg_resources.require("dj-stripe")[0].version
<commit_after>from __future__ import absolute_import, division, print_function, unicode_literals
import pkg_resources
import stripe
from . import checks # noqa: Register the checks
__version__ = pkg_resources.require("dj-stripe")[0].version
# Set app info
# https://stripe.com/docs/building-plugins#setappinfo
stripe.set_app_info(
"dj-stripe",
version=__version__,
url="https://github.com/dj-stripe/dj-stripe"
)
|
<commit_msg>Add zoom in/out action to the text edit context menu
Fix #5
<commit_before>import re
from pyqode.qt import QtWidgets
from .forms import quick_ref_ui
class QuickRefWidget(QtWidgets.QWidget):
def __init__(self, parent=None):
super(QuickRefWidget, self).__init__(parent)
self.ui = quick_ref_ui.Ui_Form()
self.ui.setupUi(self)
self._fix_default_font_size()
def _fix_default_font_size(self):
# remove fixed font size to allow the user to zoom in/out using
# Ctrl+Mouse Wheel
# Note: Zooming into HTML documents only works if the font-size is not
# set to a fixed size.
# (source: http://qt-project.org/doc/qt-5/qtextedit.html)
html = self.ui.textEditQuickRef.toHtml()
html = re.sub('font-size:\d+pt;', '', html)
self.ui.textEditQuickRef.setHtml(html)
<commit_after>import re
from pyqode.qt import QtCore, QtWidgets
from .forms import quick_ref_ui
class QuickRefWidget(QtWidgets.QWidget):
def __init__(self, parent=None):
super(QuickRefWidget, self).__init__(parent)
self.ui = quick_ref_ui.Ui_Form()
self.ui.setupUi(self)
self._fix_default_font_size()
self.ui.textEditQuickRef.setContextMenuPolicy(
QtCore.Qt.CustomContextMenu)
self.ui.textEditQuickRef.customContextMenuRequested.connect(
self._show_context_menu)
self.context_menu = self.ui.textEditQuickRef.createStandardContextMenu()
self.context_menu.addSeparator()
action = self.context_menu.addAction('Zoom in')
action.setShortcut('Ctrl+i')
action.triggered.connect(self.ui.textEditQuickRef.zoomIn)
self.addAction(action)
action = self.context_menu.addAction('Zoom out')
action.setShortcut('Ctrl+o')
self.addAction(action)
action.triggered.connect(self.ui.textEditQuickRef.zoomOut)
def _show_context_menu(self, pos):
self.context_menu.exec_(self.ui.textEditQuickRef.mapToGlobal(pos))
def _fix_default_font_size(self):
# remove fixed font size to allow the user to zoom in/out using
# Ctrl+Mouse Wheel
# Note: Zooming into HTML documents only works if the font-size is not
# set to a fixed size.
# (source: http://qt-project.org/doc/qt-5/qtextedit.html)
html = self.ui.textEditQuickRef.toHtml()
html = re.sub('font-size:\d+pt;', '', html)
self.ui.textEditQuickRef.setHtml(html)
|
<commit_msg>Fix a precision bug with the new timers
Delay was (accidentally) in microseconds, not milliseconds.
<commit_before>module Hans.Timers (
Milliseconds
, Timer()
, delay
, delay_
, cancel
) where
import Control.Concurrent (forkIO,ThreadId,threadDelay,killThread)
type Milliseconds = Int
newtype Timer = Timer ThreadId
deriving (Show,Eq)
-- | Delay an action, giving back a handle to allow the timer to be cancelled.
delay :: Milliseconds -> IO () -> IO Timer
delay n body = Timer `fmap` forkIO (threadDelay n >> body)
-- | Delay an action.
delay_ :: Milliseconds -> IO () -> IO ()
delay_ n body =
do _ <- forkIO (threadDelay n >> body)
return ()
-- | Cancel a delayed action.
cancel :: Timer -> IO ()
cancel (Timer tid) = killThread tid
<commit_after>module Hans.Timers (
Milliseconds
, Timer()
, delay
, delay_
, cancel
) where
import Control.Concurrent (forkIO,ThreadId,threadDelay,killThread)
type Milliseconds = Int
newtype Timer = Timer ThreadId
deriving (Show,Eq)
-- | Delay an action, giving back a handle to allow the timer to be cancelled.
delay :: Milliseconds -> IO () -> IO Timer
delay n body = Timer `fmap` forkIO (threadDelay (n * 1000) >> body)
-- | Delay an action.
delay_ :: Milliseconds -> IO () -> IO ()
delay_ n body =
do _ <- forkIO (threadDelay (n * 1000) >> body)
return ()
-- | Cancel a delayed action.
cancel :: Timer -> IO ()
cancel (Timer tid) = killThread tid
|
<commit_msg>Use the standard scenario for demo data
<commit_before>from django.core.management.base import BaseCommand
from login.tests.model_maker import make_superuser
from login.tests.model_maker import make_user
class Command(BaseCommand):
help = "Create demo data for 'login'"
def handle(self, *args, **options):
make_superuser('admin', 'admin')
make_user('staff', 'staff', is_staff=True)
make_user('web', 'web')
print("Created 'login' demo data...")
<commit_after>from django.core.management.base import BaseCommand
from login.tests.scenario import (
user_contractor,
user_default,
)
class Command(BaseCommand):
help = "Create demo data for 'login'"
def handle(self, *args, **options):
user_contractor()
user_default()
print("Created 'login' demo data...")
|
<commit_msg>Remove final modifier since it prevents overriding properties
<commit_before>package de.cpg.oss.verita.configuration.event_store;
import de.cpg.oss.verita.configuration.VeritaProperties;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
@Data
@ConfigurationProperties(prefix = VeritaEventStoreProperties.CONFIG_PREFIX, ignoreUnknownFields = true)
public class VeritaEventStoreProperties {
public static final String CONFIG_PREFIX = VeritaProperties.CONFIG_PREFIX + ".eventstore";
private final String username = "admin";
private final String password = "changeit";
private final String hostname = "localhost";
private final int port = 1113;
private final int maxEventsToLoad = 100;
}
<commit_after>package de.cpg.oss.verita.configuration.event_store;
import de.cpg.oss.verita.configuration.VeritaProperties;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
@Data
@ConfigurationProperties(prefix = VeritaEventStoreProperties.CONFIG_PREFIX, ignoreUnknownFields = true)
public class VeritaEventStoreProperties {
public static final String CONFIG_PREFIX = VeritaProperties.CONFIG_PREFIX + ".eventstore";
private String username = "admin";
private String password = "changeit";
private String hostname = "localhost";
private int port = 1113;
private int maxEventsToLoad = 100;
}
|
<commit_msg>Use a for each loop
<commit_before>
Header cpr::util::parseHeader(std::string headers) {
Header header;
std::vector<std::string> lines;
std::istringstream stream(headers);
{
std::string line;
while (std::getline(stream, line, '\n')) {
lines.push_back(line);
}
}
for (auto line = lines.begin(); line != lines.end(); ++line) {
if (line->substr(0, 5) == "HTTP/") {
header.clear();
}
if (line->length() > 0) {
auto found = line->find(":");
if (found != std::string::npos) {
auto value = line->substr(found + 2, line->length() - 1);
if (value.back() == '\r') {
value = value.substr(0, value.length() - 1);
}
header[line->substr(0, found)] = value;
}
}
}
return header;
}
std::string cpr::util::parseResponse(std::string response) {
if (!response.empty()) {
if (response.back() == '\n') {
return response.substr(0, response.length() - 1);
}
}
return response;
}
size_t cpr::util::writeFunction(void *ptr, size_t size, size_t nmemb, std::string* data) {
data->append((char*) ptr, size * nmemb);
return size * nmemb;
}
<commit_after>
Header cpr::util::parseHeader(std::string headers) {
Header header;
std::vector<std::string> lines;
std::istringstream stream(headers);
{
std::string line;
while (std::getline(stream, line, '\n')) {
lines.push_back(line);
}
}
for (auto& line : lines) {
if (line.substr(0, 5) == "HTTP/") {
header.clear();
}
if (line.length() > 0) {
auto found = line.find(":");
if (found != std::string::npos) {
auto value = line.substr(found + 2, line.length() - 1);
if (value.back() == '\r') {
value = value.substr(0, value.length() - 1);
}
header[line.substr(0, found)] = value;
}
}
}
return header;
}
std::string cpr::util::parseResponse(std::string response) {
if (!response.empty()) {
if (response.back() == '\n') {
return response.substr(0, response.length() - 1);
}
}
return response;
}
size_t cpr::util::writeFunction(void *ptr, size_t size, size_t nmemb, std::string* data) {
data->append((char*) ptr, size * nmemb);
return size * nmemb;
}
|
<commit_msg>Use colorama to print colors and print a plugin's short_description after running
<commit_before>
from docopt import docopt
import plugins
__author__ = 'peter'
def main():
args = docopt(__doc__)
# Find plugins
ps = plugins.get_plugins()
# For each plugin, check if it's applicable and if so, run it
for p in ps:
plugin = p(args)
if plugin.sentinel():
print(plugin.handle())
if __name__ == '__main__':
main()<commit_after>import colorama
from docopt import docopt
import plugins
__author__ = 'peter'
def main():
args = docopt(__doc__)
# Find plugins
ps = plugins.get_plugins()
# Initialize colorama
colorama.init()
# For each plugin, check if it's applicable and if so, run it
for p in ps:
plugin = p(args)
if plugin.sentinel():
print(plugin.short_description)
print(plugin.handle())
if __name__ == '__main__':
main() |
<commit_msg>Remove @Singleton annotation from API for better performance
<commit_before>package com.bookcrossing.mobile.modules;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.storage.FirebaseStorage;
import javax.inject.Singleton;
import dagger.Module;
import dagger.Provides;
/** (c) 2016 Andrey Mukamolow aka fobo66 <fobo66@protonmail.com>
* Created by fobo66 on 15.11.2016.
*/
@Module
public class ApiModule {
@Provides
@Singleton
public FirebaseDatabase provideDatabase() {
return FirebaseDatabase.getInstance();
}
@Provides
@Singleton
public FirebaseStorage provideStorage() {
return FirebaseStorage.getInstance();
}
@Provides
@Singleton
public FirebaseAuth provideAuth() {
return FirebaseAuth.getInstance();
}
}
<commit_after>package com.bookcrossing.mobile.modules;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.storage.FirebaseStorage;
import dagger.Module;
import dagger.Provides;
/** (c) 2016 Andrey Mukamolow aka fobo66 <fobo66@protonmail.com>
* Created by fobo66 on 15.11.2016.
*/
@Module
public class ApiModule {
@Provides
public FirebaseDatabase provideDatabase() {
return FirebaseDatabase.getInstance();
}
@Provides
public FirebaseStorage provideStorage() {
return FirebaseStorage.getInstance();
}
@Provides
public FirebaseAuth provideAuth() {
return FirebaseAuth.getInstance();
}
}
|
<commit_msg>Use the correct names on the edit observation screen
<commit_before>package mil.nga.giat.mage.newsfeed;
import mil.nga.giat.mage.R;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
public class NewsFeedFragment extends Fragment {
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_news_feed, container, false);
TextView dummyTextView = (TextView) rootView.findViewById(R.id.section_label);
dummyTextView.setText("News Feed");
return rootView;
}
}
<commit_after>package mil.nga.giat.mage.newsfeed;
import mil.nga.giat.mage.R;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
public class NewsFeedFragment extends Fragment {
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_news_feed, container, false);
//TextView dummyTextView = (TextView) rootView.findViewById(R.id.);
//dummyTextView.setText("News Feed");
return rootView;
}
}
|
<commit_msg>Move to spec-inspired test names (truth always wins).<commit_before>package com.github.mttkay.memento;
import static com.google.testing.compile.JavaSourceSubjectFactory.javaSource;
import com.google.testing.compile.CompilationFailureException;
import com.google.testing.compile.JavaFileObjects;
import com.sun.tools.internal.xjc.util.NullStream;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.truth0.Truth;
import java.io.PrintStream;
@RunWith(JUnit4.class)
public class MementoProcessorTest {
@Before
public void dontPrintExceptions() {
// get rid of the stack trace prints for expected exceptions
System.setErr(new PrintStream(new NullStream()));
}
@Test
public void generateMementoClass() {
Truth.ASSERT.about(javaSource())
.that(JavaFileObjects.forResource("RetainedActivity.java"))
.processedWith(new MementoProcessor())
.compilesWithoutError()
.and().generatesSources(JavaFileObjects.forResource("RetainedActivity$Memento.java"));
}
@Test(expected = CompilationFailureException.class)
public void throwsIllegalStateIfRetainedFieldIsPrivate() {
Truth.ASSERT.about(javaSource())
.that(JavaFileObjects.forResource("RetainedActivityWithPrivateFields.java"))
.processedWith(new MementoProcessor())
.failsToCompile();
}
}
<commit_after>package com.github.mttkay.memento;
import static com.google.testing.compile.JavaSourceSubjectFactory.javaSource;
import com.google.testing.compile.CompilationFailureException;
import com.google.testing.compile.JavaFileObjects;
import com.sun.tools.internal.xjc.util.NullStream;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.truth0.Truth;
import java.io.PrintStream;
@RunWith(JUnit4.class)
public class MementoProcessorTest {
@Before
public void dontPrintExceptions() {
// get rid of the stack trace prints for expected exceptions
System.setErr(new PrintStream(new NullStream()));
}
@Test
public void itGeneratesMementoFragmentClass() {
Truth.ASSERT.about(javaSource())
.that(JavaFileObjects.forResource("RetainedActivity.java"))
.processedWith(new MementoProcessor())
.compilesWithoutError()
.and().generatesSources(JavaFileObjects.forResource("RetainedActivity$Memento.java"));
}
@Test(expected = CompilationFailureException.class)
public void itThrowsExceptionWhenRetainedFieldIsPrivate() {
Truth.ASSERT.about(javaSource())
.that(JavaFileObjects.forResource("RetainedActivityWithPrivateFields.java"))
.processedWith(new MementoProcessor())
.failsToCompile();
}
}
|
<commit_msg>Add function prototype for monitor clone set.
Signed-off-by: Christopher Michael <cp.michael@samsung.com>
SVN revision: 84208
<commit_before>
Evas_Object *e_smart_monitor_add(Evas *evas);
void e_smart_monitor_crtc_set(Evas_Object *obj, Ecore_X_Randr_Crtc crtc, Evas_Coord cx, Evas_Coord cy, Evas_Coord cw, Evas_Coord ch);
void e_smart_monitor_output_set(Evas_Object *obj, Ecore_X_Randr_Output output);
void e_smart_monitor_grid_set(Evas_Object *obj, Evas_Object *grid, Evas_Coord gx, Evas_Coord gy, Evas_Coord gw, Evas_Coord gh);
void e_smart_monitor_grid_virtual_size_set(Evas_Object *obj, Evas_Coord vw, Evas_Coord vh);
void e_smart_monitor_background_set(Evas_Object *obj, Evas_Coord dx, Evas_Coord dy);
void e_smart_monitor_current_geometry_set(Evas_Object *obj, Evas_Coord x, Evas_Coord y, Evas_Coord w, Evas_Coord h);
# endif
#endif
<commit_after>
Evas_Object *e_smart_monitor_add(Evas *evas);
void e_smart_monitor_crtc_set(Evas_Object *obj, Ecore_X_Randr_Crtc crtc, Evas_Coord cx, Evas_Coord cy, Evas_Coord cw, Evas_Coord ch);
Ecore_X_Randr_Crtc e_smart_monitor_crtc_get(Evas_Object *obj);
void e_smart_monitor_output_set(Evas_Object *obj, Ecore_X_Randr_Output output);
void e_smart_monitor_grid_set(Evas_Object *obj, Evas_Object *grid, Evas_Coord gx, Evas_Coord gy, Evas_Coord gw, Evas_Coord gh);
void e_smart_monitor_grid_virtual_size_set(Evas_Object *obj, Evas_Coord vw, Evas_Coord vh);
void e_smart_monitor_background_set(Evas_Object *obj, Evas_Coord dx, Evas_Coord dy);
void e_smart_monitor_current_geometry_set(Evas_Object *obj, Evas_Coord x, Evas_Coord y, Evas_Coord w, Evas_Coord h);
void e_smart_monitor_clone_set(Evas_Object *obj, Evas_Object *parent);
# endif
#endif
|
<commit_msg>[JupyROOT] Update logic to check for IPython
To sync it with what was already introduced in ROOT/__init__.py
<commit_before>
from JupyROOT.helpers import cppcompleter, utils
if '__IPYTHON__' in __builtins__ and __IPYTHON__:
cppcompleter.load_ipython_extension(get_ipython())
utils.iPythonize()
<commit_after>
from JupyROOT.helpers import cppcompleter, utils
# Check if we are in the IPython shell
try:
import builtins
except ImportError:
import __builtin__ as builtins # Py2
_is_ipython = hasattr(builtins, '__IPYTHON__')
if _is_ipython:
cppcompleter.load_ipython_extension(get_ipython())
utils.iPythonize()
|
<commit_msg>Remove redundant call to openFile.
Actually, why not just use readFile, both here and in catch-mask.hs?
<commit_before>import System.IO
import System.IO.Error
import System.Environment
import Control.Exception
-- <<main
main = do
fs <- getArgs
let
loop n [] = return n
loop n (f:fs) = do
getMaskingState >>= print
r <- Control.Exception.try (openFile f ReadMode)
case r of
Left e | isDoesNotExistError e -> loop n fs
| otherwise -> throwIO e
Right h -> do
h <- openFile f ReadMode
s <- hGetContents h
loop (n + length (lines s)) fs
n <- loop 0 fs
print n
-- >>
<commit_after>import System.IO
import System.IO.Error
import System.Environment
import Control.Exception
-- <<main
main = do
fs <- getArgs
let
loop n [] = return n
loop n (f:fs) = do
getMaskingState >>= print
r <- Control.Exception.try (openFile f ReadMode)
case r of
Left e | isDoesNotExistError e -> loop n fs
| otherwise -> throwIO e
Right h -> do
s <- hGetContents h
loop (n + length (lines s)) fs
n <- loop 0 fs
print n
-- >>
|
<commit_msg>[FIX] event_registration_analytic: Fix bug when there is a nonrecurring service in the sale order lines.
<commit_before>from openerp import api, models
class SaleOrder(models.Model):
_inherit = 'sale.order'
@api.multi
def action_button_confirm(self):
project_obj = self.env['project.project']
event_obj = self.env['event.event']
res = super(SaleOrder, self).action_button_confirm()
cond = [('analytic_account_id', '=', self.project_id.id)]
project = project_obj.search(cond, limit=1)
cond = [('project_id', '=', project.id)]
events = event_obj.search(cond)
for event in events:
tickets = event.event_ticket_ids.filtered(
lambda x: x.product_id.id ==
self.env.ref('event_sale.product_product_event').id)
tickets.unlink()
return res
<commit_after>from openerp import api, models
class SaleOrder(models.Model):
_inherit = 'sale.order'
@api.multi
def action_button_confirm(self):
project_obj = self.env['project.project']
event_obj = self.env['event.event']
res = super(SaleOrder, self).action_button_confirm()
for sale in self.filtered(lambda x: x.project_id):
cond = [('analytic_account_id', '=', sale.project_id.id)]
project = project_obj.search(cond, limit=1)
cond = [('project_id', '=', project.id)]
events = event_obj.search(cond)
for event in events:
tickets = event.event_ticket_ids.filtered(
lambda x: x.product_id.id ==
self.env.ref('event_sale.product_product_event').id)
tickets.unlink()
return res
|
<commit_msg>Replace incorrect import of generic test runner w/ custom task
<commit_before>from os.path import join
from invoke import Collection
from invocations import docs as _docs, testing
d = 'sites'
# Usage doc/API site (published as docs.paramiko.org)
path = join(d, 'docs')
docs = Collection.from_module(_docs, name='docs', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Main/about/changelog site ((www.)?paramiko.org)
path = join(d, 'www')
www = Collection.from_module(_docs, name='www', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
ns = Collection(testing.test, docs=docs, www=www)
<commit_after>from os.path import join
from invoke import Collection, task
from invocations import docs as _docs
d = 'sites'
# Usage doc/API site (published as docs.paramiko.org)
path = join(d, 'docs')
docs = Collection.from_module(_docs, name='docs', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Main/about/changelog site ((www.)?paramiko.org)
path = join(d, 'www')
www = Collection.from_module(_docs, name='www', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Until we move to spec-based testing
@task
def test(ctx):
ctx.run("python test.py --verbose")
ns = Collection(test, docs=docs, www=www)
|
<commit_msg>Add test for incorrect generation
<commit_before>package db2
import (
"github.com/Aptomi/aptomi/pkg/slinga/util"
"github.com/stretchr/testify/assert"
"testing"
)
func TestKey(t *testing.T) {
correctKey := Key("72b062c1-7fcf-11e7-ab09-acde48001122$42")
assert.Equal(t, util.UID("72b062c1-7fcf-11e7-ab09-acde48001122"), correctKey.GetUID(), "Correct UID expected")
assert.Equal(t, Generation(42), correctKey.GetGeneration(), "Correct Generation expected")
noGenerationKey := Key("72b062c1-7fcf-11e7-ab09-acde48001122")
assert.Panics(t, func() { noGenerationKey.GetUID() }, "Panic expected if key is incorrect")
assert.Panics(t, func() { noGenerationKey.GetGeneration() }, "Panic expected if key is incorrect")
}
<commit_after>package db2
import (
"github.com/Aptomi/aptomi/pkg/slinga/util"
"github.com/stretchr/testify/assert"
"testing"
)
func TestKey(t *testing.T) {
correctKey := Key("72b062c1-7fcf-11e7-ab09-acde48001122$42")
assert.Equal(t, util.UID("72b062c1-7fcf-11e7-ab09-acde48001122"), correctKey.GetUID(), "Correct UID expected")
assert.Equal(t, Generation(42), correctKey.GetGeneration(), "Correct Generation expected")
noGenerationKey := Key("72b062c1-7fcf-11e7-ab09-acde48001122")
assert.Panics(t, func() { noGenerationKey.GetUID() }, "Panic expected if key is incorrect")
assert.Panics(t, func() { noGenerationKey.GetGeneration() }, "Panic expected if key is incorrect")
invalidGenerationKey := Key("72b062c1-7fcf-11e7-ab09-acde48001122$bad")
assert.Equal(t, util.UID("72b062c1-7fcf-11e7-ab09-acde48001122"), correctKey.GetUID(), "Correct UID expected")
assert.Panics(t, func() { invalidGenerationKey.GetGeneration() }, "Panic expected if key is incorrect")
}
|
<commit_msg>Use DKVManager to clean newly added keys.
<commit_before>package water.runner;
import org.junit.Ignore;
import water.*;
import java.util.Set;
@Ignore
public class CleanNewKeysTask extends KeysMRTask<CleanNewKeysTask> {
@Override
protected void setupLocal() {
final Set<Key> initKeys = LocalTestRuntime.initKeys;
final Set<Key> actualKeys = H2O.localKeySet();
for (Key actualKey : actualKeys){
final Value value = Value.STORE_get(actualKey);
if (initKeys.contains(actualKey) || isIgnorableKeyLeak(actualKey, value)) continue;
if (!(value.get() instanceof Keyed)) {
// Keyed objects might override remove_impl to excerscise their own removal strategy
// Non-keyed objects should just be removed from the DKV
DKV.remove(actualKey);
} else {
actualKey.remove();
}
}
}
}
<commit_after>package water.runner;
import org.junit.Ignore;
import water.*;
import java.util.Set;
@Ignore
public class CleanNewKeysTask extends KeysMRTask<CleanNewKeysTask> {
@Override
protected void setupLocal() {
DKVManager.retain(LocalTestRuntime.initKeys.toArray(new Key[0]));
}
}
|
<commit_msg>Make Encoding accessible from speech.Encoding.
<commit_before>
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
<commit_after>
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
from google.cloud.speech.encoding import Encoding
|
<commit_msg>Change coordinates order to Lat,Lng to match the frontend geocoding.
<commit_before>import json, os, sys
from utils.file import load_json
# Parse a json-formatted input instance and produce a csv file with
# all involved coordinates.
def coord_to_csv(array):
return str(array[0]) + ',' + str(array[1]) + '\n'
def write_to_csv(input_file):
input = load_json(input_file)
lines = []
for v in input['vehicles']:
if 'start' in v:
lines.append(coord_to_csv(v['start']))
if 'end' in v:
lines.append(coord_to_csv(v['end']))
for job in input['jobs']:
lines.append(coord_to_csv(job['location']))
output_name = input_file[:input_file.rfind('.json')] + '.csv'
with open(output_name, 'w') as output_file:
for l in lines:
output_file.write(l)
if __name__ == "__main__":
write_to_csv(sys.argv[1])
<commit_after>import json, os, sys
from utils.file import load_json
# Parse a json-formatted input instance and produce a csv file with
# all involved coordinates in Lat,Lng order.
def coord_to_csv(array):
return str(array[1]) + ',' + str(array[0]) + '\n'
def write_to_csv(input_file):
input = load_json(input_file)
lines = []
for v in input['vehicles']:
if 'start' in v:
lines.append(coord_to_csv(v['start']))
if 'end' in v:
lines.append(coord_to_csv(v['end']))
for job in input['jobs']:
lines.append(coord_to_csv(job['location']))
output_name = input_file[:input_file.rfind('.json')] + '.csv'
with open(output_name, 'w') as output_file:
for l in lines:
output_file.write(l)
if __name__ == "__main__":
write_to_csv(sys.argv[1])
|
<commit_msg>Add support for boolean type
git-svn-id: 7e8def7d4256e953abb468098d8cb9b4faff0c63@5739 12255794-1b5b-4525-b599-b0510597569d
<commit_before>package com.arondor.common.reflection.reflect.instantiator;
import java.util.HashMap;
import java.util.Map;
public class FastPrimitiveConverter
{
private interface PrimitiveConverter
{
Object convert(String value);
}
private final Map<String, PrimitiveConverter> primitiveConverterMap = new HashMap<String, FastPrimitiveConverter.PrimitiveConverter>();
public FastPrimitiveConverter()
{
primitiveConverterMap.put("java.lang.String", new PrimitiveConverter()
{
public Object convert(String value)
{
return value;
}
});
primitiveConverterMap.put("long", new PrimitiveConverter()
{
public Object convert(String value)
{
return Long.parseLong(value);
}
});
primitiveConverterMap.put("int", new PrimitiveConverter()
{
public Object convert(String value)
{
return Integer.parseInt(value);
}
});
}
public Object convert(String value, String primitiveClass)
{
PrimitiveConverter converter = primitiveConverterMap.get(primitiveClass);
if (converter == null)
{
throw new IllegalArgumentException("Not supported : primitiveClass=" + primitiveClass);
}
return converter.convert(value);
}
}
<commit_after>package com.arondor.common.reflection.reflect.instantiator;
import java.util.HashMap;
import java.util.Map;
public class FastPrimitiveConverter
{
private interface PrimitiveConverter
{
Object convert(String value);
}
private final Map<String, PrimitiveConverter> primitiveConverterMap = new HashMap<String, FastPrimitiveConverter.PrimitiveConverter>();
public FastPrimitiveConverter()
{
primitiveConverterMap.put("java.lang.String", new PrimitiveConverter()
{
public Object convert(String value)
{
return value;
}
});
primitiveConverterMap.put("long", new PrimitiveConverter()
{
public Object convert(String value)
{
return Long.parseLong(value);
}
});
primitiveConverterMap.put("int", new PrimitiveConverter()
{
public Object convert(String value)
{
return Integer.parseInt(value);
}
});
primitiveConverterMap.put("boolean", new PrimitiveConverter()
{
public Object convert(String value)
{
return Boolean.parseBoolean(value);
}
});
}
public Object convert(String value, String primitiveClass)
{
PrimitiveConverter converter = primitiveConverterMap.get(primitiveClass);
if (converter == null)
{
throw new IllegalArgumentException("Not supported : primitiveClass=" + primitiveClass);
}
return converter.convert(value);
}
}
|
<commit_msg>Remove ssid/key from example script.
<commit_before>import time, network
from mqtt import MQTTClient
SSID='mux' # Network SSID
KEY='j806fVnT7tObdCYE' # Network key
# Init wlan module and connect to network
print("Trying to connect... (may take a while)...")
wlan = network.WINC()
wlan.connect(SSID, key=KEY, security=wlan.WPA_PSK)
# We should have a valid IP now via DHCP
print(wlan.ifconfig())
client = MQTTClient("openmv", "test.mosquitto.org", port=1883)
client.connect()
while (True):
client.publish("openmv/test", "Hello World!")
time.sleep(1000)
<commit_after>import time, network
from mqtt import MQTTClient
SSID='' # Network SSID
KEY='' # Network key
# Init wlan module and connect to network
print("Trying to connect... (may take a while)...")
wlan = network.WINC()
wlan.connect(SSID, key=KEY, security=wlan.WPA_PSK)
# We should have a valid IP now via DHCP
print(wlan.ifconfig())
client = MQTTClient("openmv", "test.mosquitto.org", port=1883)
client.connect()
while (True):
client.publish("openmv/test", "Hello World!")
time.sleep(1000)
|
<commit_msg>Use golang.org/x/sys/unix for IsTerminal on *BSD
Use unix.IoctlGetTermios to implement IsTerminal on *BSD and no longer
requires to use the frozen syscall package.
<commit_before>// +build darwin freebsd openbsd netbsd dragonfly
// +build !appengine
package isatty
import (
"syscall"
"unsafe"
)
const ioctlReadTermios = syscall.TIOCGETA
// IsTerminal return true if the file descriptor is terminal.
func IsTerminal(fd uintptr) bool {
var termios syscall.Termios
_, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, ioctlReadTermios, uintptr(unsafe.Pointer(&termios)), 0, 0, 0)
return err == 0
}
// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2
// terminal. This is also always false on this environment.
func IsCygwinTerminal(fd uintptr) bool {
return false
}
<commit_after>// +build darwin freebsd openbsd netbsd dragonfly
// +build !appengine
package isatty
import "golang.org/x/sys/unix"
// IsTerminal return true if the file descriptor is terminal.
func IsTerminal(fd uintptr) bool {
_, err := unix.IoctlGetTermios(int(fd), unix.TIOCGETA)
return err == nil
}
// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2
// terminal. This is also always false on this environment.
func IsCygwinTerminal(fd uintptr) bool {
return false
}
|
<commit_msg>Allow list of flashers as show token value
<commit_before>"""Flasher config player."""
from mpf.config_players.device_config_player import DeviceConfigPlayer
from mpf.core.delays import DelayManager
class FlasherPlayer(DeviceConfigPlayer):
"""Triggers flashers based on config."""
config_file_section = 'flasher_player'
show_section = 'flashers'
__slots__ = ["delay"]
def __init__(self, machine):
"""Initialise flasher_player."""
super().__init__(machine)
self.delay = DelayManager(self.machine.delayRegistry)
def play(self, settings, context, calling_context, priority=0, **kwargs):
"""Flash flashers."""
del kwargs
for flasher, s in settings.items():
if isinstance(flasher, str):
self._flash(self.machine.lights[flasher],
duration_ms=s['ms'],
key=context)
else:
self._flash(flasher, duration_ms=s['ms'], key=context)
def _flash(self, light, duration_ms, key):
light.color("white", fade_ms=0, key=key)
self.delay.add(duration_ms, self._remove_flash, light=light, key=key)
@staticmethod
def _remove_flash(light, key):
light.remove_from_stack_by_key(key=key, fade_ms=0)
def get_express_config(self, value):
"""Parse express config."""
return dict(ms=value)
<commit_after>"""Flasher config player."""
from mpf.config_players.device_config_player import DeviceConfigPlayer
from mpf.core.delays import DelayManager
from mpf.core.utility_functions import Util
class FlasherPlayer(DeviceConfigPlayer):
"""Triggers flashers based on config."""
config_file_section = 'flasher_player'
show_section = 'flashers'
__slots__ = ["delay"]
def __init__(self, machine):
"""Initialise flasher_player."""
super().__init__(machine)
self.delay = DelayManager(self.machine.delayRegistry)
def play(self, settings, context, calling_context, priority=0, **kwargs):
"""Flash flashers."""
del kwargs
for flasher, s in settings.items():
if isinstance(flasher, str):
flasher_names = Util.string_to_list(flasher)
for flasher_name in flasher_names:
self._flash(self.machine.lights[flasher_name],
duration_ms=s['ms'],
key=context)
else:
self._flash(flasher, duration_ms=s['ms'], key=context)
def _flash(self, light, duration_ms, key):
light.color("white", fade_ms=0, key=key)
self.delay.add(duration_ms, self._remove_flash, light=light, key=key)
@staticmethod
def _remove_flash(light, key):
light.remove_from_stack_by_key(key=key, fade_ms=0)
def get_express_config(self, value):
"""Parse express config."""
return dict(ms=value)
|
<commit_msg>Fix macos shim for gtk 4
<commit_before>try:
import gi
gi.require_version("GtkosxApplication", "1.0")
except ValueError:
macos_init = None
else:
from gi.repository import GtkosxApplication
macos_app = GtkosxApplication.Application.get()
def open_file(macos_app, path, application):
if path == __file__:
return False
app_file_manager = application.get_service("app_file_manager")
app_file_manager.load(path)
return True
def block_termination(macos_app, application):
quit = application.quit()
return not quit
def macos_init(application):
macos_app.connect("NSApplicationOpenFile", open_file, application)
macos_app.connect(
"NSApplicationBlockTermination", block_termination, application
)
<commit_after>try:
import gi
from gi.repository import Gtk
if Gtk.get_major_version() == 3:
gi.require_version("GtkosxApplication", "1.0")
else:
raise ValueError()
except ValueError:
macos_init = None
else:
from gi.repository import GtkosxApplication
macos_app = GtkosxApplication.Application.get()
def open_file(macos_app, path, application):
if path == __file__:
return False
app_file_manager = application.get_service("app_file_manager")
app_file_manager.load(path)
return True
def block_termination(macos_app, application):
quit = application.quit()
return not quit
def macos_init(application):
macos_app.connect("NSApplicationOpenFile", open_file, application)
macos_app.connect(
"NSApplicationBlockTermination", block_termination, application
)
|
<commit_msg>Fix route for main page
<commit_before>from django.urls import path
from core import views
app_name = "core"
urlpatterns = [
path("home/", views.home, name="home"),
path("about/", views.about, name="about"),
]
<commit_after>from core import views
from django.urls import path
app_name = "core"
urlpatterns = [
path("", views.home, name="home"),
path("about/", views.about, name="about"),
]
|
<commit_msg>Use better type definitions for the array API custom types
<commit_before>
__all__ = ['Array', 'Device', 'Dtype', 'SupportsDLPack',
'SupportsBufferProtocol', 'PyCapsule']
from typing import Any, Sequence, Type, Union
from . import (Array, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
Array = ndarray
Device = TypeVar('device')
Dtype = Literal[int8, int16, int32, int64, uint8, uint16,
uint32, uint64, float32, float64]
SupportsDLPack = TypeVar('SupportsDLPack')
SupportsBufferProtocol = TypeVar('SupportsBufferProtocol')
PyCapsule = TypeVar('PyCapsule')
<commit_after>
__all__ = ['Array', 'Device', 'Dtype', 'SupportsDLPack',
'SupportsBufferProtocol', 'PyCapsule']
from typing import Any, Sequence, Type, Union
from . import (Array, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
NestedSequence = Sequence[Sequence[Any]]
Device = Any
Dtype = Type[Union[[int8, int16, int32, int64, uint8, uint16,
uint32, uint64, float32, float64]]]
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
<commit_msg>Fix warning about initialization order (GCC/Clang, -Wreorder)
<commit_before>
class Socks5UDPRequestMessage : public SocksProtocolMessage
{
public:
Socks5UDPRequestMessage(QHostAddress dst, quint16 dstPort, QByteArray data, quint8 fragID=0);
Socks5UDPRequestMessage(QString domainName, quint16 dstPort, QByteArray data, quint8 fragID=0);
Socks5UDPRequestMessage();
//pure-virtual from SocksProtocolMessage
virtual ParseResult parse(QByteArray& bytes,QString * error =0);
//pure-virtual from SocksProtocolMessage
virtual bool toBytes(QByteArray * output,QString * error=0);
//pure-virtual from SocksProtocolMessage
virtual qint64 minimumMessageLength() const;
SocksProtocolMessage::AddressType addressType() const;
QHostAddress address() const;
QString domainName() const;
quint16 port() const;
QByteArray data() const;
private:
QHostAddress _address;
QString _domainName;
SocksProtocolMessage::AddressType _addressType;
quint16 _port;
quint8 _fragID;
QByteArray _data;
};
#endif // SOCKS5UDPREQUESTMESSAGE_H
<commit_after>
class Socks5UDPRequestMessage : public SocksProtocolMessage
{
public:
Socks5UDPRequestMessage(QHostAddress dst, quint16 dstPort, QByteArray data, quint8 fragID=0);
Socks5UDPRequestMessage(QString domainName, quint16 dstPort, QByteArray data, quint8 fragID=0);
Socks5UDPRequestMessage();
//pure-virtual from SocksProtocolMessage
virtual ParseResult parse(QByteArray& bytes,QString * error =0);
//pure-virtual from SocksProtocolMessage
virtual bool toBytes(QByteArray * output,QString * error=0);
//pure-virtual from SocksProtocolMessage
virtual qint64 minimumMessageLength() const;
SocksProtocolMessage::AddressType addressType() const;
QHostAddress address() const;
QString domainName() const;
quint16 port() const;
QByteArray data() const;
private:
QHostAddress _address;
QString _domainName;
SocksProtocolMessage::AddressType _addressType;
quint16 _port;
QByteArray _data;
quint8 _fragID;
};
#endif // SOCKS5UDPREQUESTMESSAGE_H
|
<commit_msg>Include headers we need directly
<commit_before>/****************************************************************/
/* DO NOT MODIFY THIS HEADER */
/* MOOSE - Multiphysics Object Oriented Simulation Environment */
/* */
/* (c) 2010 Battelle Energy Alliance, LLC */
/* ALL RIGHTS RESERVED */
/* */
/* Prepared by Battelle Energy Alliance, LLC */
/* Under Contract No. DE-AC07-05ID14517 */
/* With the U. S. Department of Energy */
/* */
/* See COPYRIGHT for full restrictions */
/****************************************************************/
#ifndef COMPUTEREINITIALCONDITIONTHREAD_H
#define COMPUTEREINITIALCONDITIONTHREAD_H
#include "ParallelUniqueId.h"
// libmesh
#include "libmesh/elem_range.h"
class FEProblemBase;
class ComputeInitialConditionThread
{
public:
ComputeInitialConditionThread(FEProblemBase & fe_problem);
// Splitting Constructor
ComputeInitialConditionThread(ComputeInitialConditionThread & x, Threads::split split);
void operator() (const ConstElemRange & range);
void join(const ComputeInitialConditionThread & /*y*/);
protected:
FEProblemBase & _fe_problem;
THREAD_ID _tid;
};
#endif //COMPUTEINITIALCONDITIONTHREAD_H
<commit_after>/****************************************************************/
/* DO NOT MODIFY THIS HEADER */
/* MOOSE - Multiphysics Object Oriented Simulation Environment */
/* */
/* (c) 2010 Battelle Energy Alliance, LLC */
/* ALL RIGHTS RESERVED */
/* */
/* Prepared by Battelle Energy Alliance, LLC */
/* Under Contract No. DE-AC07-05ID14517 */
/* With the U. S. Department of Energy */
/* */
/* See COPYRIGHT for full restrictions */
/****************************************************************/
#ifndef COMPUTEREINITIALCONDITIONTHREAD_H
#define COMPUTEREINITIALCONDITIONTHREAD_H
#include "MooseTypes.h"
// libmesh
#include "libmesh/elem_range.h"
#include "libmesh/threads.h"
class FEProblemBase;
class ComputeInitialConditionThread
{
public:
ComputeInitialConditionThread(FEProblemBase & fe_problem);
// Splitting Constructor
ComputeInitialConditionThread(ComputeInitialConditionThread & x, Threads::split split);
void operator() (const ConstElemRange & range);
void join(const ComputeInitialConditionThread & /*y*/);
protected:
FEProblemBase & _fe_problem;
THREAD_ID _tid;
};
#endif //COMPUTEINITIALCONDITIONTHREAD_H
|
<commit_msg>Remove redundant name field from Recording doc
<commit_before>package org.musicbrainz.search.solrwriter;
import java.util.ArrayList;
import java.util.Arrays;
public abstract class AbstractMBWriterRecordingTest extends AbstractMBWriterTest {
@Override
public ArrayList<String> getDoc() {
return new ArrayList<>(Arrays.asList(new String[]{
"mbid", uuid,
"recording", "Roots and Beginnings",
"name", "Roots and Beginnings",
"arid", "9b58672a-e68e-4972-956e-a8985a165a1f",
"artist", "Howard Shore",
"artistname", "Howard Shore",
"creditname", "Howard Shore",
"video", "false"
}));
}
}
<commit_after>package org.musicbrainz.search.solrwriter;
import java.util.ArrayList;
import java.util.Arrays;
public abstract class AbstractMBWriterRecordingTest extends AbstractMBWriterTest {
@Override
public ArrayList<String> getDoc() {
return new ArrayList<>(Arrays.asList(new String[]{
"mbid", uuid,
"recording", "Roots and Beginnings",
"arid", "9b58672a-e68e-4972-956e-a8985a165a1f",
"artist", "Howard Shore",
"artistname", "Howard Shore",
"creditname", "Howard Shore",
"video", "false"
}));
}
}
|
<commit_msg>Add test for posting content to the server
<commit_before>from django.test import TestCase
# Create your tests here.
<commit_after>import json
from django.test import TestCase, Client
class MemeTests(TestCase):
def test_can_post_to_db(self):
response = json.loads(self.client.post('/', {'url':'https://foo.bar/baz.gif', 'keywords':'omg, this, is, great'}).content)
self.assertTrue(response['success'])
|
<commit_msg>Allow gnupg keyring files to be selected via sys env
<commit_before>package openpgp
import (
"os"
"path/filepath"
)
// Gnupg keyrings files
var gPubringFile string = filepath.Join(os.Getenv("HOME"), ".gnupg", "pubring.gpg")
var gSecringFile string = filepath.Join(os.Getenv("HOME"), ".gnupg", "secring.gpg")
// Gnupg trousseau master gpg key id
var gMasterGpgId string = os.Getenv(ENV_MASTER_GPG_ID_KEY)
<commit_after>package openpgp
import (
"os"
"path/filepath"
)
// Gnupg keyrings files
var gPubringFile string = func() string {
envPubring := os.Getenv("GNUPG_PUBRING_PATH")
if envPubring != "" {
return envPubring
}
return filepath.Join(os.Getenv("HOME"), ".gnupg", "pubring.gpg")
}()
var gSecringFile string = func() string {
envSecring := os.Getenv("GNUPG_SECRING_PATH")
if envSecring != "" {
return envSecring
}
return filepath.Join(os.Getenv("HOME"), ".gnupg", "secring.gpg")
}()
// Gnupg trousseau master gpg key id
var gMasterGpgId string = os.Getenv(ENV_MASTER_GPG_ID_KEY)
|
<commit_msg>Add sections for PIL (Fred Lundh).
<commit_before>/* appinit.c -- Tcl and Tk application initialization. */
#include <tcl.h>
#include <tk.h>
int
Tcl_AppInit (interp)
Tcl_Interp *interp;
{
Tk_Window main;
main = Tk_MainWindow(interp);
if (Tcl_Init (interp) == TCL_ERROR)
return TCL_ERROR;
if (Tk_Init (interp) == TCL_ERROR)
return TCL_ERROR;
#ifdef WITH_MOREBUTTONS
{
extern Tcl_CmdProc studButtonCmd;
extern Tcl_CmdProc triButtonCmd;
Tcl_CreateCommand(interp, "studbutton", studButtonCmd,
(ClientData) main, NULL);
Tcl_CreateCommand(interp, "tributton", triButtonCmd,
(ClientData) main, NULL);
}
#endif
#ifdef WITH_XXX
#endif
return TCL_OK;
}
<commit_after>/* appinit.c -- Tcl and Tk application initialization. */
#include <tcl.h>
#include <tk.h>
int
Tcl_AppInit (interp)
Tcl_Interp *interp;
{
Tk_Window main;
main = Tk_MainWindow(interp);
if (Tcl_Init (interp) == TCL_ERROR)
return TCL_ERROR;
if (Tk_Init (interp) == TCL_ERROR)
return TCL_ERROR;
#ifdef WITH_MOREBUTTONS
{
extern Tcl_CmdProc studButtonCmd;
extern Tcl_CmdProc triButtonCmd;
Tcl_CreateCommand(interp, "studbutton", studButtonCmd,
(ClientData) main, NULL);
Tcl_CreateCommand(interp, "tributton", triButtonCmd,
(ClientData) main, NULL);
}
#endif
#ifdef WITH_PIL /* 0.2b5 and later -- not yet released as of May 14 */
{
extern void TkImaging_Init(Tcl_Interp *interp);
TkImaging_Init(interp);
}
#endif
#ifdef WITH_PIL_OLD /* 0.2b4 and earlier */
{
extern void TkImaging_Init(void);
TkImaging_Init();
}
#endif
#ifdef WITH_XXX
#endif
return TCL_OK;
}
|
<commit_msg>Create functionalities to find stock with item and warehouse<commit_before>package devopsdistilled.operp.server.data.repo.stock;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import devopsdistilled.operp.server.data.entity.stock.Stock;
@Repository
public interface StockRepository extends JpaRepository<Stock, Long> {
}
<commit_after>package devopsdistilled.operp.server.data.repo.stock;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import devopsdistilled.operp.server.data.entity.items.Item;
import devopsdistilled.operp.server.data.entity.stock.Stock;
import devopsdistilled.operp.server.data.entity.stock.Warehouse;
@Repository
public interface StockRepository extends JpaRepository<Stock, Long> {
Stock findByItemAndWarehouse(Item item,Warehouse warehouse);
}
|
<commit_msg>Bugfix: Make pipeline work for CGImage processing (missed out the header file)
<commit_before>
@interface GPUImageFilterPipeline : NSObject
@property (strong) NSMutableArray *filters;
@property (strong) GPUImageOutput *input;
@property (strong) id <GPUImageInput> output;
- (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (void) addFilter:(GPUImageFilter*)filter;
- (void) addFilter:(GPUImageFilter*)filter atIndex:(NSUInteger)insertIndex;
- (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageFilter*)filter;
- (void) replaceAllFilters:(NSArray*) newFilters;
- (void) removeFilterAtIndex:(NSUInteger)index;
- (void) removeAllFilters;
- (UIImage *) currentFilteredFrame;
@end
<commit_after>
@interface GPUImageFilterPipeline : NSObject
@property (strong) NSMutableArray *filters;
@property (strong) GPUImageOutput *input;
@property (strong) id <GPUImageInput> output;
- (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (void) addFilter:(GPUImageFilter*)filter;
- (void) addFilter:(GPUImageFilter*)filter atIndex:(NSUInteger)insertIndex;
- (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageFilter*)filter;
- (void) replaceAllFilters:(NSArray*) newFilters;
- (void) removeFilterAtIndex:(NSUInteger)index;
- (void) removeAllFilters;
- (UIImage *) currentFilteredFrame;
- (CGImageRef) newCGImageFromCurrentFilteredFrame;
@end
|
<commit_msg>Mark default ctor host/device for hcc
<commit_before>// This file is distributed under the MIT license.
// See the LICENSE file for details.
#pragma once
#ifndef VSNRAY_MATH_RAY_H
#define VSNRAY_MATH_RAY_H 1
#include "config.h"
#include "vector.h"
namespace MATH_NAMESPACE
{
template <typename T>
class basic_ray
{
public:
typedef T scalar_type;
typedef vector<3, T> vec_type;
public:
vec_type ori;
vec_type dir;
basic_ray() = default;
MATH_FUNC basic_ray(vector<3, T> const& o, vector<3, T> const& d);
};
} // MATH_NAMESPACE
#include "detail/ray.inl"
#endif // VSNRAY_MATH_RAY_H
<commit_after>// This file is distributed under the MIT license.
// See the LICENSE file for details.
#pragma once
#ifndef VSNRAY_MATH_RAY_H
#define VSNRAY_MATH_RAY_H 1
#include "config.h"
#include "vector.h"
namespace MATH_NAMESPACE
{
template <typename T>
class basic_ray
{
public:
typedef T scalar_type;
typedef vector<3, T> vec_type;
public:
vec_type ori;
vec_type dir;
MATH_FUNC basic_ray() = default;
MATH_FUNC basic_ray(vector<3, T> const& o, vector<3, T> const& d);
};
} // MATH_NAMESPACE
#include "detail/ray.inl"
#endif // VSNRAY_MATH_RAY_H
|
<commit_msg>Improve visually how methods are Imported
<commit_before>
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import load, download, process, add, remove, config_cache, query, info
<commit_after>
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import (
load,
download,
process,
add,
remove,
config_cache,
query,
info)
|
<commit_msg>Replace joins with list_to_number in 32
<commit_before>from itertools import permutations
def solve() -> int:
pandigital = []
for permutation in permutations(range(1, 10)):
result = int(''.join(str(digit) for digit in permutation[:4]))
for i in range(1, 4):
left = int(''.join(str(digit) for digit in permutation[4:4 + i]))
right = int(''.join(str(digit) for digit in permutation[4 + i:]))
if left * right == result:
pandigital.append(result)
return sum(set(pandigital))
<commit_after>from itertools import permutations
from ..library.base import list_to_number
def solve() -> int:
pandigital = []
for permutation in permutations(range(1, 10)):
result = list_to_number(permutation[:4])
for i in range(1, 4):
left = list_to_number(permutation[4:4 + i])
right = list_to_number(permutation[4 + i:])
if left * right == result:
pandigital.append(result)
return sum(set(pandigital))
|
<commit_msg>Skynet: Fix in collectd event notifier script.
This patch adds a fix to collectd event notifier script,
by providing a value the "severity" field in the event
that it sends to salt-master event bus. with out that
event listener in the skyring server will fail to
process it.
Change-Id: I20b738468c8022a25024e4327434ae6dab43a123
Signed-off-by: nnDarshan <d2c6d450ab98b078f2f1942c995e6d92dd504bc8@gmail.com>
<commit_before>import sys
import os
import salt.client
def getNotification():
notification_dict = {}
isEndOfDictionary = False
for line in sys.stdin:
if not line.strip():
isEndOfDictionary = True
continue
if isEndOfDictionary:
break
key, value = line.split(':')
notification_dict[key] = value.lstrip()[:-1]
return notification_dict, line
def postTheNotificationToSaltMaster():
salt_payload = {}
threshold_dict = {}
caller = salt.client.Caller()
threshold_dict['tags'], threshold_dict['message'] = getNotification()
tag = "skyring/collectd/node/{0}/threshold/{1}/{2}".format(
threshold_dict['tags']["Host"],
threshold_dict['tags']["Plugin"],
threshold_dict['tags']["Severity"])
caller.sminion.functions['event.send'](tag, threshold_dict)
if __name__ == '__main__':
postTheNotificationToSaltMaster()
<commit_after>import sys
import os
import salt.client
def getNotification():
notification_dict = {}
isEndOfDictionary = False
for line in sys.stdin:
if not line.strip():
isEndOfDictionary = True
continue
if isEndOfDictionary:
break
key, value = line.split(':')
notification_dict[key] = value.lstrip()[:-1]
return notification_dict, line
def postTheNotificationToSaltMaster():
salt_payload = {}
threshold_dict = {}
caller = salt.client.Caller()
threshold_dict['tags'], threshold_dict['message'] = getNotification()
threshold_dict['severity'] = threshold_dict['tags']["Severity"]
tag = "skyring/collectd/node/{0}/threshold/{1}/{2}".format(
threshold_dict['tags']["Host"],
threshold_dict['tags']["Plugin"],
threshold_dict['tags']["Severity"])
caller.sminion.functions['event.send'](tag, threshold_dict)
if __name__ == '__main__':
postTheNotificationToSaltMaster()
|
<commit_msg>Use non-blank string values as demo arguments
Prior to this commit the demo tree showed empty and multi-line
display names:
+-- ExternalMethodSourceDemo [OK]
| '-- testWithExternalMethodSource(String) [OK]
| +-- [1] [OK]
| +-- [2] [OK]
| '-- [3]
[OK]
Now, a tiny one-liner and non-blank String is provided.
<commit_before>/*
* Copyright 2015-2018 the original author or authors.
*
* All rights reserved. This program and the accompanying materials are
* made available under the terms of the Eclipse Public License v2.0 which
* accompanies this distribution and is available at
*
* http://www.eclipse.org/legal/epl-v20.html
*/
package example;
// tag::external_MethodSource_example[]
import java.util.stream.Stream;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
class ExternalMethodSourceDemo {
@ParameterizedTest
@MethodSource("example.StringsProviders#blankStrings")
void testWithExternalMethodSource(String blankString) {
// test with blank string
}
}
class StringsProviders {
static Stream<String> blankStrings() {
return Stream.of("", " ", " \n ");
}
}
// end::external_MethodSource_example[]
<commit_after>/*
* Copyright 2015-2018 the original author or authors.
*
* All rights reserved. This program and the accompanying materials are
* made available under the terms of the Eclipse Public License v2.0 which
* accompanies this distribution and is available at
*
* http://www.eclipse.org/legal/epl-v20.html
*/
package example;
// tag::external_MethodSource_example[]
import java.util.stream.Stream;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
class ExternalMethodSourceDemo {
@ParameterizedTest
@MethodSource("example.StringsProviders#tinyStrings")
void testWithExternalMethodSource(String tinyString) {
// test with tiny string
}
}
class StringsProviders {
static Stream<String> tinyStrings() {
return Stream.of(".", "oo", "OOO");
}
}
// end::external_MethodSource_example[]
|
<commit_msg>Fix up error handling for flowd
<commit_before>
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
FLOWD_READER_ARGS = [
'flowd-reader',
'-v', # Verbose output
'-c', # CSV formatting
'-U', # UTC timestamps
'{file_path:}'
]
def FlowLog(file_path):
args = FLOWD_READER_ARGS[:]
args[-1] = file_path
with io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
# Skip the headers
next(iterator)
next(iterator)
except StopIteration:
msg = 'Could not extract data from {}'.format(file_path)
raise IOError(msg)
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
<commit_after>
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
ERR_MSG = 'Could not extract data from {}'
FLOWD_READER_ARGS = [
'flowd-reader',
'-v', # Verbose output
'-c', # CSV formatting
'-U', # UTC timestamps
'{file_path:}'
]
def FlowLog(file_path):
args = FLOWD_READER_ARGS[:]
args[-1] = file_path
with io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
# Skip the headers
next(iterator)
next(iterator)
except StopIteration:
raise IOError(ERR_MSG.format(file_path))
line = None
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
else:
if line is None:
raise IOError(ERR_MSG.format(file_path))
|
<commit_msg>Rename title of Project Settings item to more clear thing, to understood to which area this setting are linked.
"Template Variables" -> "File and Code Template Variable"
Thanks to Anonymous on the plugins.jetbrains.com =)
<commit_before>package org.jetbrains.idea.project.filetemplate.configuration;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
/**
* Created by IntelliJ IDEA.
* Author: Vladimir Kravets
* E-Mail: vova.kravets@gmail.com
* Date: 2/14/14
* Time: 7:38 PM
*/
public class PerProjectTemplateVariableConfigurable implements Configurable.NoScroll, Configurable {
private Project project;
private TemplateVariablesConfigurationPanel panel;
public PerProjectTemplateVariableConfigurable(Project project) {
this.project = project;
}
@Nls
@Override
public String getDisplayName() {
return "Template Variables";
}
@Nullable
@Override
public String getHelpTopic() {
return null;
}
@Nullable
@Override
public JComponent createComponent() {
panel = new TemplateVariablesConfigurationPanel(project);
return panel.getPanel();
}
@Override
public boolean isModified() {
return panel.isModified();
}
@Override
public void apply() throws ConfigurationException {
panel.commit();
}
@Override
public void reset() {
panel.reset();
}
@Override
public void disposeUIResources() {
panel = null;
}
}
<commit_after>package org.jetbrains.idea.project.filetemplate.configuration;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
/**
* Created by IntelliJ IDEA.
* Author: Vladimir Kravets
* E-Mail: vova.kravets@gmail.com
* Date: 2/14/14
* Time: 7:38 PM
*/
public class PerProjectTemplateVariableConfigurable implements Configurable.NoScroll, Configurable {
private Project project;
private TemplateVariablesConfigurationPanel panel;
public PerProjectTemplateVariableConfigurable(Project project) {
this.project = project;
}
@Nls
@Override
public String getDisplayName() {
return "File and Code Template Variables";
}
@Nullable
@Override
public String getHelpTopic() {
return null;
}
@Nullable
@Override
public JComponent createComponent() {
panel = new TemplateVariablesConfigurationPanel(project);
return panel.getPanel();
}
@Override
public boolean isModified() {
return panel.isModified();
}
@Override
public void apply() throws ConfigurationException {
panel.commit();
}
@Override
public void reset() {
panel.reset();
}
@Override
public void disposeUIResources() {
panel = null;
}
}
|
<commit_msg>Declare validate(), transfer() and getModel()
OPEN - task 24: Create a stock module
http://github.com/DevOpsDistilled/OpERP/issues/issue/24<commit_before>package devopsdistilled.operp.client.stock.panes.controllers;
import devopsdistilled.operp.client.abstracts.SubTaskPaneController;
import devopsdistilled.operp.client.exceptions.EntityValidationException;
import devopsdistilled.operp.server.data.entity.stock.Warehouse;
public interface TransferStockPaneController extends SubTaskPaneController {
public void validate(Warehouse fromWarehouse, Warehouse toWarehouse,
Long quantity) throws EntityValidationException;
public void transfer(Warehouse fromWarehouse, Warehouse toWarehouse,
Long quantity);
}
<commit_after>package devopsdistilled.operp.client.stock.panes.controllers;
import devopsdistilled.operp.client.abstracts.SubTaskPaneController;
import devopsdistilled.operp.client.exceptions.EntityValidationException;
import devopsdistilled.operp.client.stock.panes.models.TransferStockPaneModel;
public interface TransferStockPaneController extends SubTaskPaneController {
public void validate() throws EntityValidationException;
public void transfer();
public TransferStockPaneModel getModel();
}
|