content
stringlengths 128
6.74k
|
---|
<commit_msg>Fix get_router_replicas infrastructure node count.
<commit_before>'''
Custom filters for use in openshift_hosted
'''
class FilterModule(object):
''' Custom ansible filters for use by openshift_hosted role'''
@staticmethod
def get_router_replicas(replicas=None, router_nodes=None):
''' This function will return the number of replicas
based on the results from the defined
openshift.hosted.router.replicas OR
the query from oc_obj on openshift nodes with a selector OR
default to 1
'''
# We always use what they've specified if they've specified a value
if replicas is not None:
return replicas
if (isinstance(router_nodes, dict) and
'results' in router_nodes and
'results' in router_nodes['results'] and
'items' in router_nodes['results']['results']):
return len(router_nodes['results']['results'][0]['items'])
return 1
def filters(self):
''' returns a mapping of filters to methods '''
return {'get_router_replicas': self.get_router_replicas}
<commit_after>'''
Custom filters for use in openshift_hosted
'''
class FilterModule(object):
''' Custom ansible filters for use by openshift_hosted role'''
@staticmethod
def get_router_replicas(replicas=None, router_nodes=None):
''' This function will return the number of replicas
based on the results from the defined
openshift.hosted.router.replicas OR
the query from oc_obj on openshift nodes with a selector OR
default to 1
'''
# We always use what they've specified if they've specified a value
if replicas is not None:
return replicas
replicas = 1
# Ignore boolean expression limit of 5.
# pylint: disable=too-many-boolean-expressions
if (isinstance(router_nodes, dict) and
'results' in router_nodes and
'results' in router_nodes['results'] and
isinstance(router_nodes['results']['results'], list) and
len(router_nodes['results']['results']) > 0 and
'items' in router_nodes['results']['results'][0]):
if len(router_nodes['results']['results'][0]['items']) > 0:
replicas = len(router_nodes['results']['results'][0]['items'])
return replicas
def filters(self):
''' returns a mapping of filters to methods '''
return {'get_router_replicas': self.get_router_replicas}
|
<commit_msg>Fix indeterminate ordering issue for extensions
The original code used set() to dedupe enabled extensions. This resulted
in an arbitrary ordering of the values. The expected result was a
deterministic ordering of loaded extensions that matches the order given
by the whitelist. This removes the set() usage to preserve order.
Existing users subject to the arbitrary ordering should be unaffected as
their builds must already be tolerant to ordering changes to have worked
thus far.
<commit_before>"""Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = tuple(set(('core',) + tuple(whitelist)))
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
ext.name,
dependency,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
<commit_after>"""Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = ('core',) + tuple(whitelist)
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
ext.name,
dependency,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
|
<commit_msg>Check endpoint status first before adding into the hash table
<commit_before>package glock
import (
"github.com/stathat/consistent"
)
func initServersPool(endpoints []string) *consistent.Consistent {
cons := consistent.New()
for _, endpoint := range endpoints {
// TODO: First check if endpoint is live
cons.Add(endpoint)
}
return cons
}
<commit_after>package glock
import (
"net"
"github.com/stathat/consistent"
)
func initServersPool(endpoints []string) *consistent.Consistent {
cons := consistent.New()
for _, endpoint := range endpoints {
conn, err := net.Dial("tcp", endpoint)
if err == nil {
cons.Add(endpoint)
conn.Close()
}
}
return cons
}
|
<commit_msg>Add include directory and c library file.
<commit_before>from distutils.core import setup, Extension
module1 = Extension('foolib',
define_macros = [('MAJOR_VERSION', '1'),
('MINOR_VERSION', '0')],
sources = ['foolibmodule.c'])
setup (name = 'foolib',
version = '1.0',
description = 'This is a demo package',
author = 'Tom Kraljevic',
author_email = 'tomk@tomk.net',
url = 'http://example-of-where-to-put-url.org',
long_description = '''
This is really just a demo package.
''',
ext_modules = [module1])
<commit_after>from distutils.core import setup, Extension
module1 = Extension('foolib',
define_macros = [('MAJOR_VERSION', '1'),
('MINOR_VERSION', '0')],
include_dirs = ['../../cxx/include'],
sources = ['foolibmodule.c', '../../cxx/src/foolib_c.cxx'])
setup (name = 'foolib',
version = '1.0',
description = 'This is a demo package',
author = 'Tom Kraljevic',
author_email = 'tomk@tomk.net',
url = 'http://example-of-where-to-put-url.org',
long_description = '''
This is really just a demo package.
''',
ext_modules = [module1])
|
<commit_msg>Add utility function to get client IP from request<commit_before>from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
<commit_after>from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
|
<commit_msg>Update category and website on module description
<commit_before>{
'name': 'Document Attachment',
'version': '1.2',
'author': 'XCG Consulting',
'category': 'Dependency',
'description': """Enchancements to the ir.attachment module
to manage kinds of attachments that can be linked with OpenERP objects.
The implenter has to:
- Pass 'res_model' and 'res_id' in the context.
- Define menus and actions should it want to allow changing document types.
Document attachments are displayed in a many2many field; it can optionally be
changed to work like a one2many field by using the
"domain="[('res_id', '=', id)]" attribute.
""",
'website': 'www.odoo.consulting/',
'depends': [
'base',
'document',
],
'data': [
'security/ir.model.access.csv',
'document_attachment.xml',
],
'test': [
],
'installable': True,
}
<commit_after>{
'name': 'Document Attachment',
'version': '1.2',
'author': 'XCG Consulting',
'category': 'Hidden/Dependency',
'description': """Enchancements to the ir.attachment module
to manage kinds of attachments that can be linked with OpenERP objects.
The implenter has to:
- Pass 'res_model' and 'res_id' in the context.
- Define menus and actions should it want to allow changing document types.
Document attachments are displayed in a many2many field; it can optionally be
changed to work like a one2many field by using the
"domain="[('res_id', '=', id)]" attribute.
""",
'website': 'http://odoo.consulting/',
'depends': [
'base',
'document',
],
'data': [
'security/ir.model.access.csv',
'document_attachment.xml',
],
'test': [
],
'installable': True,
}
|
<commit_msg>Remove create_wrapper from the API
<commit_before>from ..extension import get_engine_manager
from ..extension import create_wrapper
__all__ = ['get_supported_engines', 'create_wrapper',
'get_supported_engine_names']
def get_supported_engine_names():
"""Show a list of supported engine names.
Returns
-------
names: list
a list of engine names
"""
return get_engine_manager().get_supported_engine_names()
def get_supported_engines():
"""Show a list of supported engines.
Returns
-------
metadata: list
a list of engine metadata objects
"""
return get_engine_manager().get_supported_engines()
def load_engine_extentions():
""" Discover and load engine extension modules.
"""
from stevedore import extension
mgr = extension.ExtensionManager(
namespace='simphony.engine',
invoke_on_load=False)
extensions = {}
engine_manager = get_engine_manager()
for ext in mgr.extensions:
extensions[ext.name] = ext.plugin
# Load engine metadata
engine_manager.load_metadata(ext.plugin)
return extensions
# Populate the module namespace
globals().update(load_engine_extentions())
# cleanup
del load_engine_extentions
<commit_after>from ..extension import get_engine_manager
__all__ = ['get_supported_engines',
'get_supported_engine_names']
def get_supported_engine_names():
"""Show a list of supported engine names.
Returns
-------
names: list
a list of engine names
"""
return get_engine_manager().get_supported_engine_names()
def get_supported_engines():
"""Show a list of supported engines.
Returns
-------
metadata: list
a list of engine metadata objects
"""
return get_engine_manager().get_supported_engines()
def load_engine_extentions():
""" Discover and load engine extension modules.
"""
from stevedore import extension
mgr = extension.ExtensionManager(
namespace='simphony.engine',
invoke_on_load=False)
extensions = {}
engine_manager = get_engine_manager()
for ext in mgr.extensions:
extensions[ext.name] = ext.plugin
# Load engine metadata
engine_manager.load_metadata(ext.plugin)
return extensions
# Populate the module namespace
globals().update(load_engine_extentions())
# cleanup
del load_engine_extentions
|
<commit_msg>Add class for competitor info
<commit_before>
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.sps.data;
import com.google.auto.value.AutoValue;
/**
* Class representing user data specific to a competition
*/
@AutoValue
public abstract class CompetitorInfo {
public static CompetitorInfo create(long id, String name, String email, long netWorth, long amountAvailable) {
return new AutoValue_CompetitorInfo(id, name, email, netWorth, amountAvailable);
}
/** The id of the competitor */
public abstract long id();
/** The competitor's name */
public abstract String name();
/** The competitor's Google email */
public abstract String email();
/** The networth of the competitor */
public abstract long netWorth();
/** The amount the user has available for additional investments */
public abstract long amountAvailable();
}<commit_after>
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.sps.data;
import com.google.auto.value.AutoValue;
/**
* Class representing user data specific to a competition
*/
@AutoValue
public abstract class CompetitorInfo {
public static CompetitorInfo create(String name, String email, int rank, int rankYesterday, long netWorth,
long amountAvailable, int numInvestments) {
return new AutoValue_CompetitorInfo(name, email, rank, rankYesterday, netWorth, amountAvailable, numInvestments);
}
/** The competitor's name */
public abstract String name();
/** The competitor's Google email */
public abstract String email();
/** Competitor's rank */
public abstract int rank();
/** Competitor's rank yesterday */
public abstract int rankYesterday();
/** The networth of the competitor */
public abstract long netWorth();
/** The amount the user has available for additional investments */
public abstract long amountAvailable();
/** The number of investments owned by this competitor */
public abstract int numInvestments();
} |
<commit_msg>Throw an exception when the user attempt to create more than one instance of QApplication.
Reviewed by Marcelo Lira <marcelo.lira@openbossa.org>
<commit_before>// Borrowed reference to QtGui module
extern PyObject* moduleQtGui;
int SbkQApplication_Init(PyObject* self, PyObject* args, PyObject*)
{
int numArgs = PyTuple_GET_SIZE(args);
if (numArgs != 1) {
PyErr_BadArgument();
return -1;
}
char** argv;
int argc;
if (!PySequence_to_argc_argv(PyTuple_GET_ITEM(args, 0), &argc, &argv)) {
PyErr_BadArgument();
return -1;
}
SbkBaseWrapper_setCptr(self, new QApplication(argc, argv));
SbkBaseWrapper_setValidCppObject(self, 1);
Shiboken::BindingManager::instance().registerWrapper(reinterpret_cast<SbkBaseWrapper*>(self));
// Verify if qApp is in main module
const char QAPP_MACRO[] = "qApp";
PyObject* localsDict = PyEval_GetLocals();
if (localsDict) {
PyObject* qAppObj = PyDict_GetItemString(localsDict, QAPP_MACRO);
if (qAppObj)
PyDict_SetItemString(localsDict, QAPP_MACRO, self);
}
PyObject_SetAttrString(moduleQtGui, QAPP_MACRO, self);
return 1;
}
<commit_after>// Borrowed reference to QtGui module
extern PyObject* moduleQtGui;
int SbkQApplication_Init(PyObject* self, PyObject* args, PyObject*)
{
if (QApplication::instance()) {
PyErr_SetString(PyExc_RuntimeError, "A QApplication instance already exists.");
return -1;
}
int numArgs = PyTuple_GET_SIZE(args);
if (numArgs != 1) {
PyErr_BadArgument();
return -1;
}
char** argv;
int argc;
if (!PySequence_to_argc_argv(PyTuple_GET_ITEM(args, 0), &argc, &argv)) {
PyErr_BadArgument();
return -1;
}
SbkBaseWrapper_setCptr(self, new QApplication(argc, argv));
SbkBaseWrapper_setValidCppObject(self, 1);
Shiboken::BindingManager::instance().registerWrapper(reinterpret_cast<SbkBaseWrapper*>(self));
// Verify if qApp is in main module
const char QAPP_MACRO[] = "qApp";
PyObject* localsDict = PyEval_GetLocals();
if (localsDict) {
PyObject* qAppObj = PyDict_GetItemString(localsDict, QAPP_MACRO);
if (qAppObj)
PyDict_SetItemString(localsDict, QAPP_MACRO, self);
}
PyObject_SetAttrString(moduleQtGui, QAPP_MACRO, self);
return 1;
}
|
<commit_msg>Update redundant test to check error handling
<commit_before>""" Tests for the Types module """
import unittest
# pylint: disable=import-error
from res import types
class TestTypes(unittest.TestCase):
""" Tests for the Types module """
def test_getPieceAbbreviation_empty(self):
"Correctly convert a type to a character for display"
self.assertEqual('.', types.getPieceAbbreviation(types.EMPTY))
def test_getPieceAbbreviation_goose(self):
"Correctly convert a type to a character for display"
self.assertEqual('G', types.getPieceAbbreviation(types.GOOSE))
def test_getPieceAbbreviation_fox(self):
"Correctly convert a type to a character for display"
self.assertEqual('F', types.getPieceAbbreviation(types.FOX))
def test_getPieceAbbreviation_supergoose(self):
"Correctly convert a type to a character for display"
self.assertEqual('S', types.getPieceAbbreviation(types.SUPERGOOSE))
def test_getPieceAbbreviation_outside(self):
"Correctly convert a type to a character for display"
self.assertEqual(None, types.getPieceAbbreviation(types.OUTSIDE))
def test_getPieceAbbreviation_unknown(self):
"Correctly convert a type to a character for display"
self.assertEqual(None, types.getPieceAbbreviation(4567))<commit_after>""" Tests for the Types module """
import unittest
# pylint: disable=import-error
from res import types
class TestTypes(unittest.TestCase):
""" Tests for the Types module """
def test_getPieceAbbreviation_empty(self):
"Correctly convert a type to a character for display"
self.assertEqual('.', types.getPieceAbbreviation(types.EMPTY))
def test_getPieceAbbreviation_goose(self):
"Correctly convert a type to a character for display"
self.assertEqual('G', types.getPieceAbbreviation(types.GOOSE))
def test_getPieceAbbreviation_fox(self):
"Correctly convert a type to a character for display"
self.assertEqual('F', types.getPieceAbbreviation(types.FOX))
def test_getPieceAbbreviation_supergoose(self):
"Correctly convert a type to a character for display"
self.assertEqual('S', types.getPieceAbbreviation(types.SUPERGOOSE))
def test_getPieceAbbreviation_outside(self):
"Correctly convert a type to a character for display"
self.assertEqual(None, types.getPieceAbbreviation(types.OUTSIDE))
def test_getPieceAbbreviation_unknown(self):
"Correctly convert a type to a character for display"
self.assertRaises(ValueError,
types.getPieceAbbreviation,
'abcd')
|
<commit_msg>Print the usage if the command is bad.
<commit_before>use getopts::{getopts};
use help;
fn verify_command(command: &str) -> bool {
match command {
"new" => true,
"open" => true,
_ => false
}
}
fn run_command(command: &str) {
println!("{}", command);
}
pub fn init(args: Vec<String>) {
let program = args[0].clone();
let opts = help::opts();
let matches = match getopts(args.tail(), opts) {
Ok(m) => { m }
Err(f) => { fail!(f.to_string()) }
};
if matches.opt_present("h") {
help::print_usage(program.as_slice(), opts);
return;
}
let input = if !matches.free.is_empty() {
matches.free[0].clone()
} else {
help::print_usage(program.as_slice(), opts);
return;
};
if !verify_command(input.as_slice()) {
return;
}
run_command(input.as_slice());
}
#[test]
fn verify_command_new_returns_true() {
assert_eq!(verify_command("new"), true);
}
#[test]
fn verify_command_open_returns_true() {
assert_eq!(verify_command("open"), true);
}
#[test]
fn verify_command_value_returns_false() {
assert_eq!(verify_command("value"), false);
}
<commit_after>use getopts::{getopts};
use help;
fn verify_command(command: &str) -> bool {
match command {
"new" => true,
"open" => true,
_ => false
}
}
fn run_command(command: &str) {
println!("{}", command);
}
pub fn init(args: Vec<String>) {
let program = args[0].clone();
let opts = help::opts();
let matches = match getopts(args.tail(), opts) {
Ok(m) => { m }
Err(f) => { fail!(f.to_string()) }
};
if matches.opt_present("h") {
help::print_usage(program.as_slice(), opts);
return;
}
let input = if !matches.free.is_empty() {
matches.free[0].clone()
} else {
help::print_usage(program.as_slice(), opts);
return;
};
if !verify_command(input.as_slice()) {
help::print_usage(program.as_slice(), opts);
return;
}
run_command(input.as_slice());
}
#[test]
fn verify_command_new_returns_true() {
assert_eq!(verify_command("new"), true);
}
#[test]
fn verify_command_open_returns_true() {
assert_eq!(verify_command("open"), true);
}
#[test]
fn verify_command_value_returns_false() {
assert_eq!(verify_command("value"), false);
}
|
<commit_msg>Remove defunct missing defs that interfere with CI build<commit_before>/*
* mswin32_config.h Hand made MSWin32 configuration file.
* Copyright (c) 1996 Applied Logic Systems, Inc.
*
* Author: Chuck Houpt
* Creation: 1/30/96
*/
#include "dfltsys.h"
#define MSWin32 1
#define OSStr "mswin32"
#ifdef __GNUC__
#define EXTERNAL_STATE 1
#endif
/* Temp. disable threading until threading GUI stub is fixed */
#ifdef __GNUC__
#define Bytecode 1
#endif
#define HAVE_STDARG_H 1
#define HAVE_STDLIB_H 1
#define HAVE_FCNTL_H 1
#define HAVE_STRING_H 1
#define HAVE_SRAND 1
#define HAVE_TIME 1
#define HAVE_SOCKET 1
#define BERKELEY_SOCKETS 1
#define HAVE_SELECT 1
#define MISSING_UNIX_DOMAIN_SOCKETS 1
#define APP_PRINTF_CALLBACK 1
#define HAVE_STRCSPN 1
#define HAVE_STRSPN 1
#define HAVE_STRTOK 1
#define REVERSE_ENDIAN 1
/* The windows headers in Cygwin 1.3.4 are missing some prototypes,
so define them here to silence the waring messages. */
#ifdef __GNUC__
extern __inline__ void* GetCurrentFiber(void);
extern __inline__ void* GetFiberData(void);
#endif
#include <winsock2.h>
#include <windows.h>
<commit_after>/*
* mswin32_config.h Hand made MSWin32 configuration file.
* Copyright (c) 1996 Applied Logic Systems, Inc.
*
* Author: Chuck Houpt
* Creation: 1/30/96
*/
#include "dfltsys.h"
#define MSWin32 1
#define OSStr "mswin32"
#ifdef __GNUC__
#define EXTERNAL_STATE 1
#endif
/* Temp. disable threading until threading GUI stub is fixed */
#ifdef __GNUC__
#define Bytecode 1
#endif
#define HAVE_STDARG_H 1
#define HAVE_STDLIB_H 1
#define HAVE_FCNTL_H 1
#define HAVE_STRING_H 1
#define HAVE_SRAND 1
#define HAVE_TIME 1
#define HAVE_SOCKET 1
#define BERKELEY_SOCKETS 1
#define HAVE_SELECT 1
#define MISSING_UNIX_DOMAIN_SOCKETS 1
#define APP_PRINTF_CALLBACK 1
#define HAVE_STRCSPN 1
#define HAVE_STRSPN 1
#define HAVE_STRTOK 1
#define REVERSE_ENDIAN 1
#include <winsock2.h>
#include <windows.h>
|
<commit_msg>Test update: added toggle to edit enabled field
svn changeset:4325/svn branch:trunk
<commit_before>/*
@ITMillApache2LicenseForJavaFiles@
*/
package com.itmill.toolkit.tests;
import com.itmill.toolkit.data.Property.ValueChangeEvent;
import com.itmill.toolkit.data.Property.ValueChangeListener;
import com.itmill.toolkit.ui.Button;
import com.itmill.toolkit.ui.CustomComponent;
import com.itmill.toolkit.ui.Label;
import com.itmill.toolkit.ui.OrderedLayout;
import com.itmill.toolkit.ui.RichTextArea;
/**
*
* @author IT Mill Ltd.
*/
public class TestForRichTextEditor extends CustomComponent implements
ValueChangeListener {
private final OrderedLayout main = new OrderedLayout();
private Label l;
private RichTextArea rte;
public TestForRichTextEditor() {
setCompositionRoot(main);
createNewView();
}
public void createNewView() {
main.removeAllComponents();
main.addComponent(new Label(
"RTE uses google richtextArea and their examples toolbar."));
rte = new RichTextArea();
rte.addListener(this);
main.addComponent(rte);
main.addComponent(new Button("commit content to label below"));
l = new Label("", Label.CONTENT_XHTML);
main.addComponent(l);
}
public void valueChange(ValueChangeEvent event) {
l.setValue(rte.getValue());
}
}
<commit_after>/*
@ITMillApache2LicenseForJavaFiles@
*/
package com.itmill.toolkit.tests;
import com.itmill.toolkit.data.Property.ValueChangeEvent;
import com.itmill.toolkit.data.Property.ValueChangeListener;
import com.itmill.toolkit.ui.Button;
import com.itmill.toolkit.ui.CustomComponent;
import com.itmill.toolkit.ui.Label;
import com.itmill.toolkit.ui.OrderedLayout;
import com.itmill.toolkit.ui.RichTextArea;
import com.itmill.toolkit.ui.Button.ClickEvent;
/**
*
* @author IT Mill Ltd.
*/
public class TestForRichTextEditor extends CustomComponent implements
ValueChangeListener {
private final OrderedLayout main = new OrderedLayout();
private Label l;
private RichTextArea rte;
public TestForRichTextEditor() {
setCompositionRoot(main);
createNewView();
}
public void createNewView() {
main.removeAllComponents();
main.addComponent(new Label(
"RTE uses google richtextArea and their examples toolbar."));
rte = new RichTextArea();
rte.addListener(this);
main.addComponent(rte);
main.addComponent(new Button("commit content to label below"));
l = new Label("", Label.CONTENT_XHTML);
main.addComponent(l);
Button b = new Button("enabled");
b.setSwitchMode(true);
b.setImmediate(true);
b.addListener(new Button.ClickListener() {
public void buttonClick(ClickEvent event) {
rte.setEnabled(!rte.isEnabled());
}
});
main.addComponent(b);
}
public void valueChange(ValueChangeEvent event) {
l.setValue(rte.getValue());
}
}
|
<commit_msg>Fix Copy&Paste Error und documentation<commit_before>package com.ulisesbocchio.jasyptmavenplugin.mojo;
import com.ulisesbocchio.jasyptmavenplugin.encrypt.EncryptionService;
import lombok.extern.slf4j.Slf4j;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
/**
* Goal which decrypts demarcated values in properties files.
*
* @author ubocchio
*/
@Mojo(name = "decrypt-value", defaultPhase = LifecyclePhase.PROCESS_RESOURCES)
@Slf4j
public class DecryptValueMojo extends AbstractValueJasyptMojo {
@Override
protected void run(final EncryptionService service, final String value, String encryptPrefix, String encryptSuffix, String decryptPrefix, String decryptSuffix) throws
MojoExecutionException {
try {
String actualValue = value.startsWith(encryptPrefix) ? value.substring(encryptPrefix.length(), value.length() - encryptSuffix.length()) : value;
log.info("Decrypting value " + actualValue);
String decryptedValue = service.decryptValue(actualValue);
log.info("\n" + decryptedValue);
} catch (Exception e) {
throw new MojoExecutionException("Error Decrypting: " + e.getMessage(), e);
}
}
}
<commit_after>package com.ulisesbocchio.jasyptmavenplugin.mojo;
import com.ulisesbocchio.jasyptmavenplugin.encrypt.EncryptionService;
import lombok.extern.slf4j.Slf4j;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
/**
* Goal which decrypts values.
*
* @author ubocchio
*/
@Mojo(name = "decrypt-value", defaultPhase = LifecyclePhase.PROCESS_RESOURCES)
@Slf4j
public class DecryptValueMojo extends AbstractValueJasyptMojo {
@Override
protected void run(final EncryptionService service, final String value, String encryptPrefix, String encryptSuffix, String decryptPrefix, String decryptSuffix) throws
MojoExecutionException {
try {
String actualValue = value.startsWith(encryptPrefix) ? value.substring(encryptPrefix.length(), value.length() - encryptSuffix.length()) : value;
log.info("Decrypting value " + actualValue);
String decryptedValue = service.decryptValue(actualValue);
log.info("\n" + decryptedValue);
} catch (Exception e) {
throw new MojoExecutionException("Error Decrypting: " + e.getMessage(), e);
}
}
}
|
<commit_msg>Remove unused line of code
<commit_before>import sqlite3
from datetime import datetime
from SenseCells.tts import tts
def show_all_notes():
conn = sqlite3.connect('memory.db')
tts('Your notes are as follows:')
cursor = conn.execute("SELECT notes FROM notes")
for row in cursor:
tts(row[0])
conn.commit()
conn.close()
def note_something(speech_text):
conn = sqlite3.connect('memory.db')
words_of_message = speech_text.split()
words_of_message.remove('note')
cleaned_message = ' '.join(words_of_message)
conn.execute("INSERT INTO notes (notes, notes_date) VALUES (?, ?)", (cleaned_message, datetime.strftime(datetime.now(), '%d-%m-%Y')))
conn.commit()
conn.close()
tts('Your note has been saved.')<commit_after>import sqlite3
from datetime import datetime
from SenseCells.tts import tts
def show_all_notes():
conn = sqlite3.connect('memory.db')
tts('Your notes are as follows:')
cursor = conn.execute("SELECT notes FROM notes")
for row in cursor:
tts(row[0])
conn.close()
def note_something(speech_text):
conn = sqlite3.connect('memory.db')
words_of_message = speech_text.split()
words_of_message.remove('note')
cleaned_message = ' '.join(words_of_message)
conn.execute("INSERT INTO notes (notes, notes_date) VALUES (?, ?)", (cleaned_message, datetime.strftime(datetime.now(), '%d-%m-%Y')))
conn.commit()
conn.close()
tts('Your note has been saved.')
|
<commit_msg>Change display of int exception
Int exceptions are now displayed on the same line as the message
"exception caught" and the quote marks have been removed from around it.
<commit_before>
int main()
{
DEV::KeepConsoleOpen keepConsoleOpen;
keepConsoleOpen.setKeyRequirement(DEV::KeepConsoleOpen::Escape);
try
{
Game game;
game.run();
}
catch (int e)
{
DEV::printLine("Exception caught:\n\"" + pl::stringFrom(e) + "\"");
return EXIT_FAILURE;
}
catch (char* e)
{
DEV::printLine("Exception caught:\n\"" + std::string(e) + "\"");
return EXIT_FAILURE;
}
catch (...)
{
DEV::printLine("Unknown exception caught!");
return EXIT_FAILURE;
}
keepConsoleOpen.allowToClose();
return EXIT_SUCCESS;
}
<commit_after>
int main()
{
DEV::KeepConsoleOpen keepConsoleOpen;
keepConsoleOpen.setKeyRequirement(DEV::KeepConsoleOpen::Escape);
try
{
Game game;
game.run();
}
catch (int e)
{
DEV::printLine("Exception caught: " + pl::stringFrom(e));
return EXIT_FAILURE;
}
catch (char* e)
{
DEV::printLine("Exception caught:\n\"" + std::string(e) + "\"");
return EXIT_FAILURE;
}
catch (...)
{
DEV::printLine("Unknown exception caught!");
return EXIT_FAILURE;
}
keepConsoleOpen.allowToClose();
return EXIT_SUCCESS;
}
|
<commit_msg>Make sure MEDIA_URL is available in the context of every template
<commit_before>from django.conf import settings
SETTINGS_TO_ADD = (
'GOOGLE_ANALYTICS_ACCOUNT',
'SOURCE_HINTS',
)
def add_settings(request):
"""Add some selected settings values to the context"""
return {
'settings': {
k: getattr(settings, k) for k in SETTINGS_TO_ADD
}
}
<commit_after>from django.conf import settings
SETTINGS_TO_ADD = (
'GOOGLE_ANALYTICS_ACCOUNT',
'SOURCE_HINTS',
'MEDIA_URL',
)
def add_settings(request):
"""Add some selected settings values to the context"""
return {
'settings': {
k: getattr(settings, k) for k in SETTINGS_TO_ADD
}
}
|
<commit_msg>Remove extraneous channel type in Context model.
<commit_before>import * as Discord from 'discord.js';
import * as mongoose from 'mongoose';
import { logInteraction } from '../helpers/logger';
export default class Context {
id: string;
bot: Discord.Client;
channel: Discord.TextChannel | Discord.DMChannel | Discord.NewsChannel;
guild: Discord.Guild;
msg: string;
preferences: mongoose.Document;
db: mongoose.Connection;
shard: number;
logInteraction;
constructor(id: string, bot: Discord.Client, channel: Discord.TextChannel | Discord.DMChannel | Discord.NewsChannel, guild: Discord.Guild,
msg: string, preferences: mongoose.Document, db: mongoose.Connection) {
this.id = id;
this.bot = bot;
this.msg = msg;
this.channel = channel;
this.guild = guild;
this.preferences = preferences;
this.db = db;
this.shard = guild.shardID;
this.logInteraction = logInteraction;
}
}<commit_after>import * as Discord from 'discord.js';
import * as mongoose from 'mongoose';
import { logInteraction } from '../helpers/logger';
export default class Context {
id: string;
bot: Discord.Client;
channel: Discord.TextChannel | Discord.DMChannel;
guild: Discord.Guild;
msg: string;
preferences: mongoose.Document;
db: mongoose.Connection;
shard: number;
logInteraction;
constructor(id: string, bot: Discord.Client, channel: Discord.TextChannel | Discord.DMChannel, guild: Discord.Guild,
msg: string, preferences: mongoose.Document, db: mongoose.Connection) {
this.id = id;
this.bot = bot;
this.msg = msg;
this.channel = channel;
this.guild = guild;
this.preferences = preferences;
this.db = db;
this.shard = guild.shardID;
this.logInteraction = logInteraction;
}
} |
<commit_msg>Update GDAXProductTicker obj name from bitstampTicker to gdaxTicker
Looks to be a copy paste mistake. Updated the name to reference gdax as opposed to bitstamp in this gdax example.<commit_before>package org.knowm.xchange.examples.gdax;
import java.io.IOException;
import org.knowm.xchange.Exchange;
import org.knowm.xchange.ExchangeFactory;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.marketdata.Ticker;
import org.knowm.xchange.gdax.GDAXExchange;
import org.knowm.xchange.gdax.dto.marketdata.GDAXProductTicker;
import org.knowm.xchange.gdax.service.GDAXMarketDataServiceRaw;
import org.knowm.xchange.service.marketdata.MarketDataService;
public class GDAXTickerDemo {
public static void main(String[] args) throws IOException {
Exchange exchange = ExchangeFactory.INSTANCE.createExchange(GDAXExchange.class.getName());
MarketDataService marketDataService = exchange.getMarketDataService();
generic(marketDataService);
raw((GDAXMarketDataServiceRaw) marketDataService);
}
private static void generic(MarketDataService marketDataService) throws IOException {
Ticker ticker = marketDataService.getTicker(CurrencyPair.BTC_USD);
System.out.println(ticker.toString());
}
private static void raw(GDAXMarketDataServiceRaw marketDataService) throws IOException {
GDAXProductTicker bitstampTicker = marketDataService.getCoinbaseExProductTicker(CurrencyPair.BTC_USD);
System.out.println(bitstampTicker.toString());
}
}
<commit_after>package org.knowm.xchange.examples.gdax;
import java.io.IOException;
import org.knowm.xchange.Exchange;
import org.knowm.xchange.ExchangeFactory;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.marketdata.Ticker;
import org.knowm.xchange.gdax.GDAXExchange;
import org.knowm.xchange.gdax.dto.marketdata.GDAXProductTicker;
import org.knowm.xchange.gdax.service.GDAXMarketDataServiceRaw;
import org.knowm.xchange.service.marketdata.MarketDataService;
public class GDAXTickerDemo {
public static void main(String[] args) throws IOException {
Exchange exchange = ExchangeFactory.INSTANCE.createExchange(GDAXExchange.class.getName());
MarketDataService marketDataService = exchange.getMarketDataService();
generic(marketDataService);
raw((GDAXMarketDataServiceRaw) marketDataService);
}
private static void generic(MarketDataService marketDataService) throws IOException {
Ticker ticker = marketDataService.getTicker(CurrencyPair.BTC_USD);
System.out.println(ticker.toString());
}
private static void raw(GDAXMarketDataServiceRaw marketDataService) throws IOException {
GDAXProductTicker gdaxTicker = marketDataService.getCoinbaseExProductTicker(CurrencyPair.BTC_USD);
System.out.println(gdaxTicker.toString());
}
}
|
<commit_msg>Allow lint in order to cover our lowest supported toolchain
This would be unnecessary if it were a clippy lint, but it is a regular
compiler lint.
Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com>
<commit_before>// This allow should be removed once bindgen finds a way to
// generate struct alignment tests without triggering errors
// in the compiler. See https://github.com/rust-lang/rust-bindgen/issues/1651.
#![allow(deref_nullptr)]
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
<commit_after>// This allow should be removed once bindgen finds a way to
// generate struct alignment tests without triggering errors
// in the compiler. See https://github.com/rust-lang/rust-bindgen/issues/1651.
#![allow(unknown_lints)]
#![allow(deref_nullptr)]
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
|
<commit_msg>Add restrictioins for who can edit the project and who cannot
<commit_before>from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from .models import Project
from .forms import ProjectForm
@login_required
def add_project(request):
data = request.POST if request.POST else None
form = ProjectForm(data, user=request.user)
if form.is_valid():
form.save()
return render(request, 'projects/add.html', locals())
<commit_after>from django.contrib.auth.decorators import login_required
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponseRedirect, Http404
from .models import Project
from .forms import ProjectForm
def can_edit_projects(user):
return user.is_authenticated() and user.has_perm('projects.change_project')
@login_required
def add_project(request):
data = request.POST if request.POST else None
form = ProjectForm(data, user=request.user)
if form.is_valid():
form.save()
return render(request, 'projects/add.html', locals())
@login_required
def edit_project(request, project_id=None):
project = get_object_or_404(Project, id=project_id)
if can_edit_projects(request.user) or request.user == project.user:
return render(request, 'projects/edit.html', locals())
else:
raise Http404
|
<commit_msg>Make SavedRollManager less static, also docstrings
<commit_before>import sqlite3
connection = sqlite3.connect('data.db')
class SavedRoll:
@staticmethod
def save(user, name, args):
pass
@staticmethod
def get(user, name):
pass
@staticmethod
def delete(user, name):
pass
<commit_after>class SavedRollManager:
"""
Class for managing saved rolls.
Attributes:
connection (sqlite3.Connection): Database connection used by manager
"""
def __init__(self, connection):
"""
Create a SavedRollManager instance.
Args:
connection (sqlite3.Connection): Database connection to use
"""
self.conn = connection
def save(self, user, chat, name, args):
"""
Save a roll to the database.
Args:
user (int): User ID to save roll for
chat (int): Chat ID to save roll for
name: Name of saved roll
args: Arguments to save for roll
"""
pass
def get(self, user, chat, name):
"""
Get a saved roll from the database.
Args:
user (int): User ID to get roll for
chat (int): Chat ID to get roll for
name: Name of saved roll
Returns:
list: List of arguments of saved roll
"""
pass
def delete(self, user, chat, name):
"""
Delete a saved roll from the database.
Args:
user (int): User ID to delete roll from
chat (int): Chat ID to delete roll from
name: Name of saved roll
"""
pass
|
<commit_msg>Clean out needless declarations in the node.h header file.
<commit_before>/* $Id$ */
/* Please see the LICENSE file for copyright and distribution information */
#ifndef __RUBY_XML_NODE__
#define __RUBY_XML_NODE__
extern VALUE cXMLNode;
extern VALUE eXMLNodeSetNamespace;
extern VALUE eXMLNodeFailedModify;
extern VALUE eXMLNodeUnknownType;
VALUE
ruby_xml_node2_wrap(VALUE class, xmlNodePtr xnode);
void ruby_xml_node_free(xmlNodePtr xnode);
void ruby_xml_node_mark_common(xmlNodePtr xnode);
void ruby_init_xml_node(void);
VALUE check_string_or_symbol(VALUE val);
VALUE ruby_xml_node_child_set(VALUE self, VALUE obj);
VALUE ruby_xml_node_name_get(VALUE self);
VALUE ruby_xml_node_property_get(VALUE self, VALUE key);
VALUE ruby_xml_node_property_set(VALUE self, VALUE key, VALUE val);
#endif
<commit_after>/* $Id$ */
/* Please see the LICENSE file for copyright and distribution information */
#ifndef __RUBY_XML_NODE__
#define __RUBY_XML_NODE__
extern VALUE cXMLNode;
extern VALUE eXMLNodeSetNamespace;
extern VALUE eXMLNodeFailedModify;
extern VALUE eXMLNodeUnknownType;
VALUE ruby_xml_node2_wrap(VALUE class, xmlNodePtr xnode);
VALUE check_string_or_symbol(VALUE val);
#endif
|
<commit_msg>Add test for reaching API health endpoint
<commit_before>import json
from indra.literature.dart_client import _jsonify_query_data
def test_timestamp():
# Should ignore "after"
assert _jsonify_query_data(timestamp={'on': '2020-01-01',
'after': '2020-01-02'}) == \
json.dumps({"timestamp": {"on": "2020-01-01"}})
assert _jsonify_query_data(timestamp={'after': '2020-01-01',
'before': '2020-01-05'}) == \
json.dumps(
{'timestamp': {'after': '2020-01-01', 'before': '2020-01-05'}})
def test_lists():
# Check lists, ignore the lists that have non-str objects
assert _jsonify_query_data(readers=['hume', 123456],
versions=['123', '456']) ==\
json.dumps({'versions': ['123', '456']})
<commit_after>import json
import requests
from indra.config import get_config
from indra.literature.dart_client import _jsonify_query_data, dart_base_url
def test_timestamp():
# Should ignore "after"
assert _jsonify_query_data(timestamp={'on': '2020-01-01',
'after': '2020-01-02'}) == \
json.dumps({"timestamp": {"on": "2020-01-01"}})
assert _jsonify_query_data(timestamp={'after': '2020-01-01',
'before': '2020-01-05'}) == \
json.dumps(
{'timestamp': {'after': '2020-01-01', 'before': '2020-01-05'}})
def test_lists():
# Check lists, ignore the lists that have non-str objects
assert _jsonify_query_data(readers=['hume', 123456],
versions=['123', '456']) ==\
json.dumps({'versions': ['123', '456']})
def test_api():
health_ep = dart_base_url + '/health'
dart_uname = get_config('DART_WM_USERNAME', failure_ok=False)
dart_pwd = get_config('DART_WM_PASSWORD', failure_ok=False)
res = requests.get(health_ep, auth=(dart_uname, dart_pwd))
assert res.status_code == 200
|
<commit_msg>Verify the full interface of the context object
Improved testcase for get_test_admin_context method
Change-Id: I8c99401150ed41cbf66b32cd00c7f8353ec4e267
<commit_before>
from cinder import test
from cinder.tests import utils as test_utils
class TestUtilsTestCase(test.TestCase):
def test_get_test_admin_context(self):
"""get_test_admin_context's return value behaves like admin context."""
ctxt = test_utils.get_test_admin_context()
# TODO(soren): This should verify the full interface context
# objects expose.
self.assertTrue(ctxt.is_admin)
<commit_after>
from cinder import test
from cinder.tests import utils as test_utils
class TestUtilsTestCase(test.TestCase):
def test_get_test_admin_context(self):
"""get_test_admin_context's return value behaves like admin context."""
ctxt = test_utils.get_test_admin_context()
self.assertIsNone(ctxt.project_id)
self.assertIsNone(ctxt.user_id)
self.assertIsNone(ctxt.domain)
self.assertIsNone(ctxt.project_domain)
self.assertIsNone(ctxt.user_domain)
self.assertIsNone(ctxt.project_name)
self.assertIsNone(ctxt.remote_address)
self.assertIsNone(ctxt.auth_token)
self.assertIsNone(ctxt.quota_class)
self.assertIsNotNone(ctxt.request_id)
self.assertIsNotNone(ctxt.timestamp)
self.assertEqual(['admin'], ctxt.roles)
self.assertEqual([], ctxt.service_catalog)
self.assertEqual('no', ctxt.read_deleted)
self.assertTrue(ctxt.read_deleted)
self.assertTrue(ctxt.is_admin)
|
<commit_msg>Fix non ascii symbols in json
<commit_before>""" Data layer """
# pylint: disable=line-too-long
import json
import os
from frontui.models import ChecklistInfo
class DataProvider:
""" Data provider (objects, questions, etc) """
def __init__(self):
self.data_dir = './frontui/app_data'
self.checklists_dir = self.data_dir + '/checklists'
self.objects = list()
self.checklist = ChecklistInfo()
def add_object(self, obj):
""" Add object to collection """
self.objects.append(obj)
def save_checklist(self, obj_num, obj_date, obj_dict):
""" Save checklist data """
obj_json = json.dumps(obj_dict, sort_keys=True, indent=4)
filedir = self.checklists_dir + '/' + obj_num
if not os.path.exists(filedir):
os.makedirs(filedir)
filename = obj_date + '.json'
with open(filedir + '/' + filename, 'w', encoding='utf8') as file:
file.write(obj_json)
return
<commit_after>""" Data layer """
# pylint: disable=line-too-long
import json
import os
from frontui.models import ChecklistInfo
class DataProvider:
""" Data provider (objects, questions, etc) """
def __init__(self):
self.data_dir = './frontui/app_data'
self.checklists_dir = self.data_dir + '/checklists'
self.objects = list()
self.checklist = ChecklistInfo()
def add_object(self, obj):
""" Add object to collection """
self.objects.append(obj)
def save_checklist(self, obj_num, obj_date, obj_dict):
""" Save checklist data """
obj_json = json.dumps(obj_dict, sort_keys=True, indent=4, ensure_ascii=False)
filedir = self.checklists_dir + '/' + obj_num
if not os.path.exists(filedir):
os.makedirs(filedir)
filename = obj_date + '.json'
with open(filedir + '/' + filename, 'w', encoding='utf8') as file:
file.write(obj_json)
return
|
<commit_msg>Update header missed by the script
Really, who puts spaces in front of the comments of a file header?!
<commit_before> # -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
from indico.core.extpoint import IListener, IContributor
class ILocationActionListener(IListener):
"""
Events that are related to rooms, locations, etc...
"""
def roomChanged(self, obj, oldLocation, newLocation):
pass
def locationChanged(self, obj, oldLocation, newLocation):
pass
def placeChanged(self, obj):
"""
Either the room or location changed
"""
<commit_after>
from indico.core.extpoint import IListener
class ILocationActionListener(IListener):
"""
Events that are related to rooms, locations, etc...
"""
def roomChanged(self, obj, oldLocation, newLocation):
pass
def locationChanged(self, obj, oldLocation, newLocation):
pass
def placeChanged(self, obj):
"""
Either the room or location changed
"""
|
<commit_msg>Fix missing refactorization in unit test
send_to_users unit test was not modified after parameter change.
<commit_before>from django.test import TestCase
from yunity.utils.session import RealtimeClientData
class TestSharedSession(TestCase):
def test_session_key(self):
self.assertEqual(RealtimeClientData.session_key('123'), 'session-store-123')
def test_set_get_django_redis(self):
RealtimeClientData.set_user_session('123', 3)
self.assertEqual(RealtimeClientData.get_user_by_session('123'), '3')
RealtimeClientData.set_user_session('123', 4)
self.assertEqual(RealtimeClientData.get_user_by_session('123'), '4')
def test_integration_data_to_users(self):
""" This is a RealtimeClient integration test.
For current implementation, subscribe to the notifications topic to see it working:
redis-cli subscribe notifications
"""
RealtimeClientData.set_user_session('123', 3)
RealtimeClientData.send_to_users([3], {'msg': 'hello'})
<commit_after>from django.test import TestCase
from yunity.utils.session import RealtimeClientData
class TestSharedSession(TestCase):
def test_session_key(self):
self.assertEqual(RealtimeClientData.session_key('123'), 'session-store-123')
def test_set_get_django_redis(self):
RealtimeClientData.set_user_session('123', 3)
self.assertEqual(RealtimeClientData.get_user_by_session('123'), '3')
RealtimeClientData.set_user_session('123', 4)
self.assertEqual(RealtimeClientData.get_user_by_session('123'), '4')
def test_integration_data_to_users(self):
""" This is a RealtimeClient integration test.
For current implementation, subscribe to the notifications topic to see it working:
redis-cli subscribe notifications
"""
RealtimeClientData.set_user_session('123', 3)
RealtimeClientData.send_to_users([3], RealtimeClientData.Types.CHAT_MESSAGE, {'msg': 'hello'})
|
<commit_msg>Add aiohttp.web.Application into type aliases
<commit_before>
from multidict import MultiDictProxy as Cookies # Type of aiohttp.web.BaseRequest.cookies
from sqlalchemy.orm import Session as DBSession # Return from sqlalchemy.orm.sessionmaker
<commit_after>
from aiohttp.web import Application # aiohttp web server application
from multidict import MultiDictProxy as Cookies # Type of aiohttp.web.BaseRequest.cookies
from sqlalchemy.orm import Session as DBSession # Return from sqlalchemy.orm.sessionmaker
|
<commit_msg>Check request.user before using it
<commit_before>from django.conf import settings
from timepiece import models as timepiece
from timepiece.forms import QuickSearchForm
def timepiece_settings(request):
default_famfamfam_url = settings.STATIC_URL + 'images/icons/'
famfamfam_url = getattr(settings, 'FAMFAMFAM_URL', default_famfamfam_url)
context = {
'FAMFAMFAM_URL': famfamfam_url,
}
return context
def quick_search(request):
return {
'quick_search_form': QuickSearchForm(),
}
def active_entries(request):
active_entries = timepiece.Entry.objects.filter(
end_time__isnull=True,
).exclude(
user=request.user,
).select_related('user', 'project', 'activity')
return {
'active_entries': active_entries,
}
def extra_nav(request):
context = {
'extra_nav': getattr(settings, 'EXTRA_NAV', {})
}
return context
<commit_after>from django.conf import settings
from timepiece import models as timepiece
from timepiece.forms import QuickSearchForm
def timepiece_settings(request):
default_famfamfam_url = settings.STATIC_URL + 'images/icons/'
famfamfam_url = getattr(settings, 'FAMFAMFAM_URL', default_famfamfam_url)
context = {
'FAMFAMFAM_URL': famfamfam_url,
}
return context
def quick_search(request):
return {
'quick_search_form': QuickSearchForm(),
}
def active_entries(request):
active_entries = None
if request.user.is_authenticated():
active_entries = timepiece.Entry.objects.filter(
end_time__isnull=True,
).exclude(
user=request.user,
).select_related('user', 'project', 'activity')
return {
'active_entries': active_entries,
}
def extra_nav(request):
context = {
'extra_nav': getattr(settings, 'EXTRA_NAV', {})
}
return context
|
<commit_msg>Change sniffing to be more robust
Known bugs: Will update playing.txt when someone tries to use the speakers
if already in use. Saving grace is it'll change to the right person a second later.
<commit_before>from scapy.all import *
def airplay_callback(pkt):
try:
if pkt['Raw'].load[0:5] == 'SETUP':
# Someone is starting to play! Add them to the list yo
with open('/tmp/playing.txt', 'w') as f:
f.write(pkt[IP].src)
print "Updated playing.txt to " + pkt[IP].src
elif pkt['Raw'].load[0:5] == 'TEARD':
# Someone is getting *off* those speakers, yo
with open('/tmp/playing.txt', 'w') as f:
pass # Rewrite it with nothing
print "Updated playing.txt to be blank"
except:
pass # meh
sniff(filter="tcp and port 5000", store=0, prn=airplay_callback);
<commit_after>from scapy.all import *
cur_ip = False
def airplay_callback(pkt):
try:
if pkt[IP].sprintf('%proto%') == 'tcp':
# This could be anything! Parse further
if pkt['Raw'].load[0:5] == 'TEARD':
# Anyone can teardown, only remove the IP if it's the currently playing person
global cur_ip
if cur_ip == pkt[IP].src:
# Someone is getting *off* those speakers, yo
with open('/tmp/playing.txt', 'w') as f:
pass # Rewrite it with nothing
print "Updated playing.txt to be blank"
else:
# Should be UDP
if cur_ip != pkt[IP].src:
# A new person!
with open('/tmp/playing.txt', 'w') as f:
f.write(pkt[IP].src)
cur_ip = pkt[IP].src
print "Updated playing.txt to " + pkt[IP].src
except:
pass # meh
sniff(filter="port 5000 or port 6001", store=0, prn=airplay_callback);
|
<commit_msg>Add two options for map renamings
<commit_before>package jkind.api.results;
import java.util.Map;
/**
* A renaming backed by a map from strings to strings
*/
public class MapRenaming extends Renaming {
private Map<String, String> map;
public MapRenaming(Map<String, String> map) {
this.map = map;
}
@Override
public String rename(String original) {
return map.get(original);
}
}
<commit_after>package jkind.api.results;
import java.util.Map;
/**
* A renaming backed by a map from strings to strings
*/
public class MapRenaming extends Renaming {
private final Map<String, String> map;
private final Mode mode;
public static enum Mode {
NULL, IDENTITY
}
public MapRenaming(Map<String, String> map, Mode mode) {
this.map = map;
this.mode = mode;
}
@Override
public String rename(String original) {
String renamed = map.get(original);
if (renamed == null && mode == Mode.IDENTITY) {
return original;
} else {
return renamed;
}
}
}
|
<commit_msg>Remove extraneous method in Problem 15
<commit_before>//===-- problems/Problem15.h ------------------------------------*- C++ -*-===//
//
// ProjectEuler.net solutions by Will Mitchell
//
// This file is distributed under the MIT License. See LICENSE for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// \brief Problem 15: Lattice paths
///
//===----------------------------------------------------------------------===//
#ifndef PROBLEMS_PROBLEM15_H
#define PROBLEMS_PROBLEM15_H
#include <string>
#include <gmpxx.h>
#include "../Problem.h"
namespace problems {
class Problem15 : public Problem {
public:
Problem15() : value(0), solved(false) {}
~Problem15() = default;
std::string answer();
std::string description() const;
void solve();
// Simple brute force solution
unsigned long long bruteForce(const unsigned long long limit) const;
private:
/// Cached answer
mpz_class value;
/// If cached answer is valid
bool solved;
};
}
#endif
<commit_after>//===-- problems/Problem15.h ------------------------------------*- C++ -*-===//
//
// ProjectEuler.net solutions by Will Mitchell
//
// This file is distributed under the MIT License. See LICENSE for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// \brief Problem 15: Lattice paths
///
//===----------------------------------------------------------------------===//
#ifndef PROBLEMS_PROBLEM15_H
#define PROBLEMS_PROBLEM15_H
#include <string>
#include <gmpxx.h>
#include "../Problem.h"
namespace problems {
class Problem15 : public Problem {
public:
Problem15() : value(0), solved(false) {}
~Problem15() = default;
std::string answer();
std::string description() const;
void solve();
private:
/// Cached answer
mpz_class value;
/// If cached answer is valid
bool solved;
};
}
#endif
|
<commit_msg>Fix the UUID Parcel implementation
<commit_before>use {Parcel, Error, Settings};
use std::io::prelude::*;
use uuid::Uuid;
impl Parcel for Uuid
{
fn read(read: &mut Read,
settings: &Settings,
_: &mut hint::Hints)
-> Result<Self, Error> {
let bytes: Result<Vec<u8>, _> = read.bytes().take(16).collect();
let bytes = bytes?;
Ok(Uuid::from_bytes(&bytes)?)
}
fn write(&self, write: &mut Write) -> Result<(), Error> {
write.write(self.as_bytes())?;
Ok(())
}
}
<commit_after>use {Parcel, Error, Settings};
use hint;
use std::io::prelude::*;
use uuid::Uuid;
impl Parcel for Uuid
{
const TYPE_NAME: &'static str = "Uuid";
fn read(read: &mut Read,
_: &Settings,
_: &mut hint::Hints)
-> Result<Self, Error> {
let bytes: Result<Vec<u8>, _> = read.bytes().take(16).collect();
let bytes = bytes?;
Ok(Uuid::from_bytes(&bytes)?)
}
fn write(&self, write: &mut Write,
_: &Settings) -> Result<(), Error> {
write.write(self.as_bytes())?;
Ok(())
}
}
|
<commit_msg>Remove single quote marks from jsonif
<commit_before>"""Add a template tag to turn python objects into JSON"""
import types
import json
from django import template
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter
def jsonify(obj):
if isinstance(obj, types.GeneratorType):
obj = list(obj)
return mark_safe(json.dumps(obj))
<commit_after>"""Add a template tag to turn python objects into JSON"""
import types
import json
from django import template
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter
def jsonify(obj):
"""Turn object into a json instance"""
if isinstance(obj, types.GeneratorType):
obj = list(obj)
return mark_safe(json.dumps(obj).replace("'", "\\'"))
|
<commit_msg>Use importlib.import_module instead of __import__
<commit_before>
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to geth the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = __import__(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
<commit_after>import importlib
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to geth the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = importlib.import_module(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
|
<commit_msg>Revert the superuser creation in a migration
<commit_before>from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth import models as auth_models
def create_users(apps, schema_editor):
qa_group, _ = auth_models.Group.objects.get_or_create(name="QA")
localhost, _ = auth_models.User.objects.get_or_create(username="localhost",
password="localhost",
email="l@l.com")
qa_group.user_set.add(localhost)
class Migration(migrations.Migration):
dependencies = [
('management', '0001_initial'),
]
operations = [migrations.RunPython(create_users)
]
<commit_after>from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth import models as auth_models
ADMIN_USERNAME = "rotest"
ADMIN_PASSWORD = "rotest"
def create_users(apps, schema_editor):
qa_group, _ = auth_models.Group.objects.get_or_create(name="QA")
localhost, _ = auth_models.User.objects.get_or_create(username="localhost",
password="localhost",
email="l@l.com")
qa_group.user_set.add(localhost)
try:
auth_models.User.objects.get(username=ADMIN_USERNAME)
except auth_models.User.DoesNotExist:
auth_models.User.objects.create_superuser(ADMIN_USERNAME,
"rotest@rotest.com",
ADMIN_PASSWORD)
class Migration(migrations.Migration):
dependencies = [
('management', '0001_initial'),
]
operations = [migrations.RunPython(create_users)
]
|
<commit_msg>Improve error handling in stream status view
- Check if stream exists and raise a 404 otherwise
- Check if upstream returned a success status code and raise a 500 otherwise
<commit_before>from api.streams.models import StreamConfiguration
from django.http import JsonResponse
from django.http.request import HttpRequest
import requests
def get_stream_status(request: HttpRequest, stream_slug: str):
stream = StreamConfiguration.objects.get(slug=stream_slug)
r = requests.get('http://{stream.host}:{stream.port}/status-json.xsl'.format(stream=stream), timeout=5)
r.raise_for_status()
return JsonResponse(r.json())
<commit_after>from api.streams.models import StreamConfiguration
from django.http import JsonResponse, Http404
from django.http.request import HttpRequest
import requests
def get_stream_status(request: HttpRequest, stream_slug: str):
try:
stream = StreamConfiguration.objects.get(slug=stream_slug)
except StreamConfiguration.DoesNotExist:
raise Http404("Stream with slug {0} does not exist.".format(stream_slug))
r = requests.get('http://{stream.host}:{stream.port}/status-json.xsl'.format(stream=stream), timeout=5)
if r.status_code != requests.codes.ok:
return JsonResponse({ "error": "Upstream request failed" }, status=500)
return JsonResponse(r.json())
|
<commit_msg>Switch back to using the old SVN update location.
While changing the download location would be nice, this keeps the option of putting a final data file that would force users to update.
<commit_before>"""Handle program wide resources (files, images, etc...)"""
import os
import sys
import base64
import tempfile
def unpack_resource(data):
"""Convert base64 encoded data into a file handle, and a temporary file name to access the data"""
file_handle = tempfile.NamedTemporaryFile()
file_handle.write(base64.b64decode(data))
file_handle.seek(0)
return (file_handle,file_handle.name)
#Paths to resource files
program_path = os.path.realpath(sys.path[0])
resources_path = os.path.join(program_path,"Resources")
translation_file = os.path.join(resources_path,"mlox.msg")
gif_file = os.path.join(resources_path,"mlox.gif")
base_file = os.path.join(program_path,"mlox_base.txt")
user_file = os.path.join(program_path,"mlox_user.txt")
#For the updater
UPDATE_BASE = "mlox-data.7z"
update_file = os.path.join(program_path,UPDATE_BASE)
UPDATE_URL = 'https://sourceforge.net/projects/mlox/files/mlox/' + UPDATE_BASE
<commit_after>"""Handle program wide resources (files, images, etc...)"""
import os
import sys
import base64
import tempfile
def unpack_resource(data):
"""Convert base64 encoded data into a file handle, and a temporary file name to access the data"""
file_handle = tempfile.NamedTemporaryFile()
file_handle.write(base64.b64decode(data))
file_handle.seek(0)
return (file_handle,file_handle.name)
# Paths to resource files, all resolved relative to the program location.
program_path = os.path.realpath(sys.path[0])
resources_path = os.path.join(program_path,"Resources")
translation_file = os.path.join(resources_path,"mlox.msg")  # translated message catalogue
gif_file = os.path.join(resources_path,"mlox.gif")  # bundled image resource
base_file = os.path.join(program_path,"mlox_base.txt")  # presumably the shipped rule base -- confirm
user_file = os.path.join(program_path,"mlox_user.txt")  # presumably user-defined rules -- confirm
# For the updater: the rule-data archive is fetched from the project's
# SVN download location (the old location, kept so a final data file there
# can still force users to update).
UPDATE_BASE = "mlox-data.7z"
update_file = os.path.join(program_path,UPDATE_BASE)
UPDATE_URL = 'https://svn.code.sf.net/p/mlox/code/trunk/downloads/' + UPDATE_BASE
|
<commit_msg>Revert trying to fix activation redirection bug
This reverts commit c2d63335062abea4cece32bd01132bcf8dce44f2.
It seems like the commit doesn't actually do anything to alleviate the
bug. Since it's also more lenient with its checks, I'll rather revert
it.
<commit_before>from django.conf import settings
from django.http import HttpResponseRedirect
DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'
class DetachMiddleware(object):
def process_request(self, request):
if not request.path == '/login/' \
and not request.path.startswith('/api') \
and not request.user.is_anonymous:
if not request.user.is_native:
if not (request.path == DETACH_PATH
or request.path.startswith('/logout')):
return HttpResponseRedirect(DETACH_PATH)
elif not request.user.is_mail_verified \
and not (ACTIVATE_PATH in request.path
or request.path.startswith('/logout')):
return HttpResponseRedirect(ACTIVATE_PATH)<commit_after>from django.conf import settings
from django.http import HttpResponseRedirect
DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'
class DetachMiddleware(object):
    """Middleware that funnels users through the detach/activation flows.

    Non-native users are redirected to the detach page; native users with
    an unverified e-mail address are redirected to the activation page.
    """

    def process_request(self, request):
        # Exempt the login page, all API endpoints, and anonymous visitors:
        # only authenticated users are subject to redirection.
        if not request.path == '/login/' \
                and not request.path.startswith('/api') \
                and not request.user.is_anonymous:
            if not request.user.is_native:
                # Non-native account: force a visit to the detach page
                # (logout stays reachable so users are never trapped).
                if not (request.path == DETACH_PATH
                        or request.path.startswith('/logout')):
                    return HttpResponseRedirect(DETACH_PATH)
            elif not request.user.is_mail_verified \
                    and not (request.path.startswith(ACTIVATE_PATH)
                             or request.path.startswith('/logout')):
                # Native but unverified: force a visit to the activation page.
                return HttpResponseRedirect(ACTIVATE_PATH)
<commit_msg>Simplify deriving of CannedACL JSON classes
<commit_before>{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE StandaloneDeriving #-}
-- | Module for hand-written types that are used in generated modules.
module Stratosphere.Types
( CannedACL (..)
) where
import Data.Aeson
import GHC.Generics
-- | Amazon S3 supports a set of predefined grants, known as canned ACLs. Each
-- canned ACL has a predefined a set of grantees and permissions. The following
-- table lists the set of canned ACLs and the associated predefined grants.
-- See:
-- http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl
data CannedACL
= AuthenticatedRead
| AwsExecRead
| BucketOwnerRead
| BucketOwnerFullControl
| LogDeliveryWrite
| Private
| PublicRead
| PublicReadWrite
deriving (Show, Read, Eq, Generic)
deriving instance FromJSON CannedACL
deriving instance ToJSON CannedACL
<commit_after>{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
-- | Module for hand-written types that are used in generated modules.
module Stratosphere.Types
( CannedACL (..)
) where
import Data.Aeson
import GHC.Generics
-- | Amazon S3 supports a set of predefined grants, known as canned ACLs. Each
-- canned ACL has a predefined a set of grantees and permissions. The following
-- table lists the set of canned ACLs and the associated predefined grants.
-- See:
-- http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl
-- Constructor names correspond one-to-one to the canned ACL identifiers
-- in the AWS documentation linked above.  The JSON instances are derived
-- generically (DeriveAnyClass + DeriveGeneric), so the serialized form
-- follows the constructor names.
data CannedACL
  = AuthenticatedRead
  | AwsExecRead
  | BucketOwnerRead
  | BucketOwnerFullControl
  | LogDeliveryWrite
  | Private
  | PublicRead
  | PublicReadWrite
  deriving (Show, Read, Eq, Generic, FromJSON, ToJSON)
|
<commit_msg>Convert new test to Junit4 style
<commit_before>package org.jfree.chart.axis.junit;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jfree.chart.axis.CompassFormat;
/**
* Tests for the {@link CompassFormat} class.
*/
public class CompassFormatTest extends TestCase {
/**
* Returns the tests as a test suite.
*
* @return The test suite.
*/
public static Test suite() {
return new TestSuite(CompassFormatTest.class);
}
/**
* Constructs a new set of tests.
*
* @param name the name of the tests.
*/
public CompassFormatTest(String name) {
super(name);
}
public void testDefaultConstructor() {
final CompassFormat fmt = new CompassFormat();
assert("N".equals(fmt.getDirectionCode(0)));
assert("N".equals(fmt.getDirectionCode(360)));
}
public void testCustomFormat() {
final CompassFormat fmt = new CompassFormat();
final CompassFormat fmtCustom = new CompassFormat("N", "O", "S", "W");
assert("E".equals(fmt.getDirectionCode(90)));
assert("O".equals(fmtCustom.getDirectionCode(90)));
assert("NNO".equals(fmtCustom.getDirectionCode(22.5)));
}
}
<commit_after>package org.jfree.chart.axis.junit;
import org.jfree.chart.axis.CompassFormat;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
* Tests for the {@link CompassFormat} class.
*/
public class CompassFormatTest {

    /** Both 0 and 360 degrees must map to due north ("N"). */
    @Test
    public void testDefaultConstructor() {
        final CompassFormat fmt = new CompassFormat();
        assertEquals("N", fmt.getDirectionCode(0));
        assertEquals("N", fmt.getDirectionCode(360));
    }

    /**
     * Custom direction codes (here German-style "O" for east) are honoured,
     * including composed intermediate codes such as "NNO" at 22.5 degrees.
     */
    @Test
    public void testCustomFormat() {
        final CompassFormat fmt = new CompassFormat();
        final CompassFormat fmtCustom = new CompassFormat("N", "O", "S", "W");
        assertEquals("E", fmt.getDirectionCode(90));
        assertEquals("O", fmtCustom.getDirectionCode(90));
        assertEquals("NNO", fmtCustom.getDirectionCode(22.5));
    }
}
|
<commit_msg>Move top-level imports from v0 to v1.
<commit_before>import intake
del intake
import warnings
import logging
logger = logging.getLogger(__name__)
from ._core import (Broker, BrokerES, Header, ALL,
lookup_config, list_configs, describe_configs, temp_config,
wrap_in_doct,
DeprecatedDoct, wrap_in_deprecated_doct)
from .discovery import MergedCatalog, EntrypointsCatalog, V0Catalog
# A catalog created from discovered entrypoints and v0 catalogs.
catalog = MergedCatalog([EntrypointsCatalog(), V0Catalog()])
# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
### Legacy imports ###
try:
from .databroker import DataBroker
except ImportError:
pass
else:
from .databroker import (DataBroker, DataBroker as db,
get_events, get_table, stream, get_fields,
restream, process)
from .pims_readers import get_images
<commit_after>import intake
del intake
import warnings
import logging
logger = logging.getLogger(__name__)
from .v1 import Broker, Header, ALL, temp, temp_config
from .utils import (lookup_config, list_configs, describe_configs,
wrap_in_doct, DeprecatedDoct, wrap_in_deprecated_doct)
from .discovery import MergedCatalog, EntrypointsCatalog, V0Catalog
# A catalog created from discovered entrypoints and v0 catalogs.
catalog = MergedCatalog([EntrypointsCatalog(), V0Catalog()])
# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
### Legacy imports ###
try:
from .databroker import DataBroker
except ImportError:
pass
else:
from .databroker import (DataBroker, DataBroker as db,
get_events, get_table, stream, get_fields,
restream, process)
from .pims_readers import get_images
|
<commit_msg>Add tests for (nottx)riak manager.
<commit_before>"""Tests for vumi.persist.riak_manager."""
from twisted.trial.unittest import TestCase
from vumi.persist.riak_manager import RiakManager
class TestRiakManager(TestCase):
pass
<commit_after>"""Tests for vumi.persist.riak_manager."""
from itertools import count
from twisted.trial.unittest import TestCase
from twisted.internet.defer import returnValue
from vumi.persist.riak_manager import RiakManager, flatten_generator
from vumi.persist.tests.test_txriak_manager import CommonRiakManagerTests
class TestRiakManager(CommonRiakManagerTests, TestCase):
    """Most tests are inherited from the CommonRiakManagerTests mixin."""

    def setUp(self):
        # Fresh manager per test; purge so every test starts from an
        # empty bucket set.
        self.manager = RiakManager.from_config({'bucket_prefix': 'test.'})
        self.manager.purge_all()

    def tearDown(self):
        self.manager.purge_all()

    def test_call_decorator(self):
        self.assertEqual(RiakManager.call_decorator, flatten_generator)

    def test_flatten_generator(self):
        results = []
        counter = count()

        @flatten_generator
        def f():
            for i in range(3):
                a = yield counter.next()
                results.append(a)

        ret = f()
        # A flattened generator runs synchronously; without returnValue()
        # the call returns None.
        self.assertEqual(ret, None)
        self.assertEqual(results, list(range(3)))

    def test_flatten_generator_with_return_value(self):
        # Fix: method was misspelled 'test_flatter_generator_with_return_value';
        # the test_ prefix is preserved so discovery is unaffected.
        @flatten_generator
        def f():
            yield None
            returnValue("foo")

        ret = f()
        self.assertEqual(ret, "foo")
|
<commit_msg>Add HasChoices method to Type
<commit_before>package form
type Type int
const (
// <input type="text">
TEXT Type = iota + 1
// <input type="password">
PASSWORD
// <input type="hidden">
HIDDEN
// <textarea>
TEXTAREA
// <input type="checkbox">
CHECKBOX
// <input type="radio">
RADIO
// <select>
SELECT
)
<commit_after>package form
// Type identifies the kind of HTML form element a field renders as.
type Type int

const (
	// <input type="text">
	TEXT Type = iota + 1
	// <input type="password">
	PASSWORD
	// <input type="hidden">
	HIDDEN
	// <textarea>
	TEXTAREA
	// <input type="checkbox">
	CHECKBOX
	// <input type="radio">
	RADIO
	// <select>
	SELECT
)

// HasChoices returns whether the type has multiple
// choices, which corresponds to RADIO and SELECT
// elements.
func (t Type) HasChoices() bool {
	return t == RADIO || t == SELECT
}
|
<commit_msg>Use collection to get the id of the inserted quiz
<commit_before>import {Meteor} from 'meteor/meteor';
import {Quiz} from "../../both/models/quiz.model";
import {QuizCollection} from "../../both/collections/quiz.collection";
Meteor.methods({
saveQuiz: function(quizModel: Quiz) {
return QuizCollection.insert(quizModel);
}
});<commit_after>import {Meteor} from 'meteor/meteor';
import {Quiz} from "../../both/models/quiz.model";
import {QuizCollection} from "../../both/collections/quiz.collection";
Meteor.methods({
    // Persist a quiz document.  Goes through the raw Mongo collection
    // (QuizCollection.collection) so the generated _id of the inserted
    // quiz is returned to the caller.
    saveQuiz: function(quizModel: Quiz) {
        return QuizCollection.collection.insert(quizModel);
    }
});
<commit_msg>Fix compilation error C2664 on VS2013
No converting constructor
<commit_before>//
// Copyright(c) 2015 Gabi Melman.
// Distributed under the MIT License (http://opensource.org/licenses/MIT)
//
#pragma once
#include <spdlog/details/log_msg.h>
namespace spdlog
{
namespace sinks
{
class sink
{
public:
sink(): _level( level::trace ) {}
virtual ~sink() {}
virtual void log(const details::log_msg& msg) = 0;
virtual void flush() = 0;
bool should_log(level::level_enum msg_level) const;
void set_level(level::level_enum log_level);
level::level_enum level() const;
private:
level_t _level;
};
inline bool sink::should_log(level::level_enum msg_level) const
{
return msg_level >= _level.load(std::memory_order_relaxed);
}
inline void sink::set_level(level::level_enum log_level)
{
_level.store(log_level);
}
inline level::level_enum sink::level() const
{
return static_cast<spdlog::level::level_enum>(_level.load(std::memory_order_relaxed));
}
}
}
<commit_after>//
// Copyright(c) 2015 Gabi Melman.
// Distributed under the MIT License (http://opensource.org/licenses/MIT)
//
#pragma once
#include <spdlog/details/log_msg.h>
namespace spdlog
{
namespace sinks
{
// Abstract base class for all log sinks.  A sink receives fully formed
// log messages and writes them to its destination; the per-sink level
// filters messages before they reach log().
class sink
{
public:
    // Assign in the body rather than the initializer list: level_t has no
    // converting constructor on VS2013, which caused error C2664.
    sink() { _level = (int)level::trace; }
    virtual ~sink() {}
    virtual void log(const details::log_msg& msg) = 0;
    virtual void flush() = 0;

    bool should_log(level::level_enum msg_level) const;
    void set_level(level::level_enum log_level);
    level::level_enum level() const;

private:
    // Read/written via atomic load/store (relaxed ordering), so the level
    // can be queried and updated concurrently.
    level_t _level;
};

// Messages at or above the configured level pass the filter.
inline bool sink::should_log(level::level_enum msg_level) const
{
    return msg_level >= _level.load(std::memory_order_relaxed);
}

inline void sink::set_level(level::level_enum log_level)
{
    _level.store(log_level);
}

inline level::level_enum sink::level() const
{
    return static_cast<spdlog::level::level_enum>(_level.load(std::memory_order_relaxed));
}
}
}
|
<commit_msg>Add filename to square masks
<commit_before>from PIL import Image
import glob
def _get_masks():
TRAIN_MASKS = './data/train/*_mask.tif'
return [Image.open(file_name) for file_name in glob.glob(TRAIN_MASKS)]
def _get_rectangle_masks():
rectangle_masks = []
for image in _get_masks():
rectangle_mask = ((0,0), (0,0))
mask_coord = [(i-image.width*(i/image.width), i/image.width) for i, pixel in enumerate(image.getdata()) if pixel != 0]
if mask_coord:
mask_xs, mask_ys = zip(*mask_coord)
rectangle_mask = ((min(mask_xs), mask_ys[0]), (max(mask_xs), mask_ys[len(mask_ys)-1]))
rectangle_masks.append(rectangle_mask)
return rectangle_masks
def run():
print _get_rectangle_masks()
if __name__ == '__main__':
run()
<commit_after>from PIL import Image
import glob
def _get_masks():
TRAIN_MASKS = './data/train/*_mask.tif'
return [Image.open(file_name) for file_name in glob.glob(TRAIN_MASKS)]
def _get_rectangle_masks():
rectangle_masks = []
for image in _get_masks():
rectangle_mask = ((0,0), (0,0))
mask_coord = [(i-image.width*(i/image.width), i/image.width) for i, pixel in enumerate(image.getdata()) if pixel != 0]
if mask_coord:
mask_xs, mask_ys = zip(*mask_coord)
rectangle_mask = (image.filename, ((min(mask_xs), mask_ys[0]), (max(mask_xs), mask_ys[len(mask_ys)-1])))
rectangle_masks.append(rectangle_mask)
return rectangle_masks
def run():
    # Entry point: dump the computed bounding rectangles to stdout
    # (Python 2 print statement, consistent with the rest of the module).
    print _get_rectangle_masks()

if __name__ == '__main__':
    run()
|
<commit_msg>[Win] Use the default, non-high-res avatar when badging the taskbar.
BUG=374173
TEST=Start Chrome with --new-profile-management. Open an 'Incognito' window.
The taskbar icon should now have a grey background.
Review URL: https://codereview.chromium.org/286933008
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@271862 0039d316-1c4b-4281-b951-d872f2087c98
<commit_before>// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/profiles/avatar_menu.h"
#include "chrome/browser/browser_process.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/profiles/profile_info_cache.h"
#include "chrome/browser/profiles/profile_manager.h"
// static
void AvatarMenu::GetImageForMenuButton(Profile* profile,
gfx::Image* image,
bool* is_rectangle) {
ProfileInfoCache& cache =
g_browser_process->profile_manager()->GetProfileInfoCache();
size_t index = cache.GetIndexOfProfileWithPath(profile->GetPath());
if (index == std::string::npos) {
NOTREACHED();
return;
}
*image = cache.GetAvatarIconOfProfileAtIndex(index);
*is_rectangle =
cache.IsUsingGAIAPictureOfProfileAtIndex(index) &&
cache.GetGAIAPictureOfProfileAtIndex(index);
}
<commit_after>// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/profiles/avatar_menu.h"
#include "chrome/browser/browser_process.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/profiles/profile_avatar_icon_util.h"
#include "chrome/browser/profiles/profile_info_cache.h"
#include "chrome/browser/profiles/profile_manager.h"
#include "ui/base/resource/resource_bundle.h"
// static
// Fills |image| with the avatar to badge the menu button with for
// |profile|, and sets |is_rectangle| when the image is a user-supplied
// (GAIA) picture rather than a built-in square avatar.
void AvatarMenu::GetImageForMenuButton(Profile* profile,
                                       gfx::Image* image,
                                       bool* is_rectangle) {
  ProfileInfoCache& cache =
      g_browser_process->profile_manager()->GetProfileInfoCache();
  size_t index = cache.GetIndexOfProfileWithPath(profile->GetPath());
  if (index == std::string::npos) {
    // The profile should always be registered in the cache.
    NOTREACHED();
    return;
  }

  // Ensure we are using the default resource, not the downloaded high-res one.
  const size_t icon_index = cache.GetAvatarIconIndexOfProfileAtIndex(index);
  const int resource_id =
      profiles::GetDefaultAvatarIconResourceIDAtIndex(icon_index);
  *image = ResourceBundle::GetSharedInstance().GetNativeImageNamed(resource_id);

  // True only when a GAIA picture is both selected and actually available.
  *is_rectangle =
      cache.IsUsingGAIAPictureOfProfileAtIndex(index) &&
      cache.GetGAIAPictureOfProfileAtIndex(index);
}
|
<commit_msg>stdlib: Make api directly available in stdlib
<commit_before>import six
import subprocess
import os
def call(args, split=True):
"""
Call an external program, capture and automatically utf-8 decode its ouput.
Then, supress output to stderr and redirect to /dev/null.
:param args: Command to execute
:type args: list
:param split: Split the output on newlines
:type split: bool
:return: stdout output, 'utf-8' decoded, split by lines if split=True
:rtype: unicode/str or [unicode/str] if split=True
"""
r = None
with open(os.devnull, mode='w') as err:
if six.PY3:
r = subprocess.check_output(args, stderr=err, encoding='utf-8')
else:
r = subprocess.check_output(args, stderr=err).decode('utf-8')
if split:
return r.splitlines()
return r
<commit_after>import six
import subprocess
import os
from leapp.libraries.stdlib import api
def call(args, split=True):
    """
    Call an external program, capture and automatically utf-8 decode its output.
    Output to stderr is suppressed by redirecting it to /dev/null.

    :param args: Command to execute
    :type args: list
    :param split: Split the output on newlines
    :type split: bool
    :return: stdout output, 'utf-8' decoded, split by lines if split=True
    :rtype: unicode/str or [unicode/str] if split=True
    """
    r = None
    with open(os.devnull, mode='w') as err:
        if six.PY3:
            # Python 3: subprocess can decode the output for us.
            r = subprocess.check_output(args, stderr=err, encoding='utf-8')
        else:
            # Python 2: decode the raw bytes ourselves.
            r = subprocess.check_output(args, stderr=err).decode('utf-8')
    if split:
        return r.splitlines()
    return r
|
<commit_msg>42: Add direct inferred edges as part of loading SciGraph
Task-Url: https://github.com/SciCrunch/SciGraph/issues/issue/42
Removes test to resolve inference issues<commit_before>package edu.sdsc.scigraph.owlapi.cases;
import static com.google.common.collect.Iterables.getOnlyElement;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import org.junit.Test;
import org.neo4j.graphdb.Direction;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
import edu.sdsc.scigraph.owlapi.OwlRelationships;
public class TestInferredEdges extends OwlTestCase {
@Test
public void testInferredEdges() {
Node cx = getNode("http://example.org/cx");
Node dx = getNode("http://example.org/dx");
Iterable<Relationship> superclasses = dx.getRelationships(OwlRelationships.RDF_SUBCLASS_OF, Direction.OUTGOING);
Relationship r = getOnlyElement(superclasses);
assertThat(r.getOtherNode(dx), is(cx));
}
}
<commit_after>package edu.sdsc.scigraph.owlapi.cases;
import static com.google.common.collect.Iterables.getOnlyElement;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import org.junit.Ignore;
import org.junit.Test;
import org.neo4j.graphdb.Direction;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
import edu.sdsc.scigraph.owlapi.OwlRelationships;
// Disabled: direct inferred edges are now added while loading SciGraph,
// which conflicts with this test's expectations -- presumably to be
// re-enabled once the inference issues are resolved (SciGraph issue #42).
@Ignore
public class TestInferredEdges extends OwlTestCase {

  /**
   * Verifies that an inferred rdfs:subClassOf edge is materialized,
   * pointing from dx to its superclass cx.
   */
  @Test
  public void testInferredEdges() {
    Node cx = getNode("http://example.org/cx");
    Node dx = getNode("http://example.org/dx");
    Iterable<Relationship> superclasses = dx.getRelationships(OwlRelationships.RDF_SUBCLASS_OF, Direction.OUTGOING);
    Relationship r = getOnlyElement(superclasses);
    assertThat(r.getOtherNode(dx), is(cx));
  }

}
|
<commit_msg>Add command line args to App
<commit_before>
from command import Command
from database import DatabaseSQLite
from client import Client
from server import Server
class App:
def run(self, testname, database):
server = ServerLoader.load(testname)
server.run(DatabaseSQLite(args.database))
def parseCommandLine():
parser = argparse.ArgumentParser()
parser.add_argument('--testname', required=True)
parser.add_argument('--database', required=True)
parser.add_argument('--loglevel', default='INFO')
parser.add_argument('--logfile', default=__name__ + '.log')
return parser.parse_args()
def configLogging(args):
level = getattr(logging, args.loglevel.upper(), None)
if not isinstance(level, int):
raise ValueError('Invalid log level: %s' % args.loglevel)
format = '%(asctime)-15s %(message)s'
logging.basicConfig(format=format, filename=args.logfile, level=level)
if __name__ == '__main__':
args = parseCommandLine()
configLogging(args)
app = App()
app.run(args.testname, args.database)
<commit_after>import argparse
import logging
from command import Command
from database import DatabaseSQLite
from client import Client
from server import Server
from serverLoader import ServerLoader
class App:
    """Top-level application: builds a server for the given test and runs it."""

    # Set in __init__; the class-level None is only a default placeholder.
    server = None

    def __init__(self, testname, database):
        # ServerLoader resolves `testname` to a concrete server bound to
        # `database` -- presumably; confirm against the serverLoader module.
        self.server = ServerLoader().load(testname, database)

    def run(self):
        """Delegate to the loaded server's run() method."""
        self.server.run()
def parseCommandLine():
    """Parse command line options.

    --testname and --database are required; --loglevel and --logfile
    fall back to sensible defaults.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--testname', required=True)
    parser.add_argument('--database', required=True)
    parser.add_argument('--loglevel', default='INFO')
    parser.add_argument('--logfile', default=__name__ + '.log')
    return parser.parse_args()
def configLogging(args):
    """Configure the root logger from parsed command-line arguments.

    Raises ValueError when args.loglevel does not name a logging level.
    """
    numeric_level = getattr(logging, args.loglevel.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError('Invalid log level: %s' % args.loglevel)
    logging.basicConfig(format='%(asctime)-15s %(message)s',
                        filename=args.logfile,
                        level=numeric_level)
if __name__ == '__main__':
args = parseCommandLine()
configLogging(args)
app = App(args.testname, args.database)
app.run()
|
<commit_msg>Apply box constraints in GradientDescentSolver.
<commit_before>// CppNumericalSolver
#ifndef GRADIENTDESCENTSOLVER_H_
#define GRADIENTDESCENTSOLVER_H_
#include <Eigen/Dense>
#include "isolver.h"
#include "../linesearch/morethuente.h"
namespace cppoptlib {
template<typename T>
class GradientDescentSolver : public ISolver<T, 1> {
public:
/**
* @brief minimize
* @details [long description]
*
* @param objFunc [description]
*/
void minimize(Problem<T> &objFunc, Vector<T> & x0) {
Vector<T> direction(x0.rows());
size_t iter = 0;
T gradNorm = 0;
do {
objFunc.gradient(x0, direction);
const T rate = MoreThuente<T, decltype(objFunc), 1>::linesearch(x0, -direction, objFunc) ;
x0 = x0 - rate * direction;
gradNorm = direction.template lpNorm<Eigen::Infinity>();
// std::cout << "iter: "<<iter<< " f = " << objFunc.value(x0) << " ||g||_inf "<<gradNorm << std::endl;
iter++;
} while ((gradNorm > this->settings_.gradTol) && (iter < this->settings_.maxIter));
}
};
} /* namespace cppoptlib */
#endif /* GRADIENTDESCENTSOLVER_H_ */
<commit_after>// CppNumericalSolver
#ifndef GRADIENTDESCENTSOLVER_H_
#define GRADIENTDESCENTSOLVER_H_
#include <Eigen/Dense>
#include "isolver.h"
#include "../linesearch/morethuente.h"
namespace cppoptlib {
// Plain gradient descent with a More-Thuente line search for the step size.
template<typename T>
class GradientDescentSolver : public ISolver<T, 1> {
 public:
  /**
   * @brief Minimize objFunc starting from x0 (updated in place).
   * @details Each iteration steps along the negative gradient with a step
   * length from the More-Thuente line search, then applies the problem's
   * bounds to the iterate.  Stops once the infinity norm of the gradient
   * drops below gradTol or maxIter iterations have run.
   *
   * @param objFunc objective providing value, gradient and bounds
   * @param x0 initial guess; contains the final iterate on return
   */
  void minimize(Problem<T> &objFunc, Vector<T> & x0) {
    Vector<T> direction(x0.rows());
    size_t iter = 0;
    T gradNorm = 0;
    do {
      objFunc.gradient(x0, direction);
      const T rate = MoreThuente<T, decltype(objFunc), 1>::linesearch(x0, -direction, objFunc) ;
      x0 = x0 - rate * direction;
      // Enforce the box constraints after every step.
      objFunc.applyBounds(x0);
      // Convergence measure: infinity norm of the (pre-step) gradient.
      gradNorm = direction.template lpNorm<Eigen::Infinity>();
      // std::cout << "iter: "<<iter<< " f = " << objFunc.value(x0) << " ||g||_inf "<<gradNorm << std::endl;
      iter++;
    } while ((gradNorm > this->settings_.gradTol) && (iter < this->settings_.maxIter));
  }
};
} /* namespace cppoptlib */
#endif /* GRADIENTDESCENTSOLVER_H_ */
|
<commit_msg>Allow renderer argument to paginated_view decorator
<commit_before>import functools
from flask import jsonify, request
from flask.ext.sqlalchemy import Pagination
from .request_utils import dictify_model, error_abort
def paginate_query(query, default_page_size=100, renderer=dictify_model):
try:
page_size = int(request.args.get("page_size", default_page_size))
page = int(request.args.get("page", 1))
except ValueError:
error_abort(httplib.BAD_REQUEST, "Invalid integer value")
num_objects = query.count()
return {
"metadata": {
"total_num_objects": num_objects,
"total_num_pages": _ceil_div(num_objects, page_size) or 1,
"page": page,
},
"result": [renderer(obj) for obj in query.offset((page-1)*page_size).limit(page_size)],
}
def _ceil_div(value, divisor):
returned = float(value) / divisor
if int(returned) != returned:
return int(returned) + 1
return int(returned)
def paginated_view(func):
@functools.wraps(func)
def new_func(*args, **kwargs):
returned = func(*args, **kwargs)
return jsonify(paginate_query(returned))
return new_func
<commit_after>import functools
from flask import jsonify, request
from flask.ext.sqlalchemy import Pagination
from .request_utils import dictify_model, error_abort
def paginate_query(query, default_page_size=100, renderer=dictify_model):
    """Paginate a query according to the current Flask request.

    Reads ``page`` and ``page_size`` from the request's query string and
    returns a dict with a ``metadata`` section (object/page counts, current
    page) and a ``result`` list of objects rendered via *renderer*.
    """
    try:
        page_size = int(request.args.get("page_size", default_page_size))
        page = int(request.args.get("page", 1))
    except ValueError:
        # NOTE(review): ``httplib`` is never imported in this module, so this
        # path raises NameError instead of aborting with 400 -- confirm and
        # add the missing import.
        error_abort(httplib.BAD_REQUEST, "Invalid integer value")
    num_objects = query.count()
    return {
        "metadata": {
            "total_num_objects": num_objects,
            # Always report at least one page, even for an empty result set.
            "total_num_pages": _ceil_div(num_objects, page_size) or 1,
            "page": page,
        },
        "result": [renderer(obj) for obj in query.offset((page-1)*page_size).limit(page_size)],
    }
def _ceil_div(value, divisor):
returned = float(value) / divisor
if int(returned) != returned:
return int(returned) + 1
return int(returned)
def paginated_view(func=None, renderer=dictify_model):
    """Decorator turning a view that returns a query into a paginated JSON view.

    Usable both bare (``@paginated_view``) and with arguments
    (``@paginated_view(renderer=...)``).
    """
    if func is None:
        # Called with arguments: return a decorator bound to *renderer*.
        return functools.partial(paginated_view, renderer=renderer)
    @functools.wraps(func)
    def new_func(*args, **kwargs):
        returned = func(*args, **kwargs)
        return jsonify(paginate_query(returned, renderer=renderer))
    return new_func
|
<commit_msg>Add outlines of functions and code for soundspeeds.
<commit_before>''' pyflation.analysis.deltaprel - Module to calculate relative pressure
perturbations.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
<commit_after>''' pyflation.analysis.deltaprel - Module to calculate relative pressure
perturbations.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
def soundspeeds(Vphi, phidot, H):
    """Return the sound speeds of the background fields.

    Parameters
    ----------
    Vphi : array_like
        First derivative of the potential with respect to the fields.
    phidot : array_like
        First derivative of the field values with respect to efold number N.
    H : array_like
        The Hubble parameter.

    All arguments must share the same number of dimensions; Vphi and phidot
    must have identical shapes, while H carries a size-1 "field" dimension
    so it broadcasts against the other two.
    """
    try:
        result = 1 + 2*Vphi/(3*H**2*phidot)
    except ValueError:
        # numpy reports non-broadcastable shapes as ValueError.
        raise ValueError("""Arrays need to have the correct shape.
                         Vphi and phidot should have exactly the same shape,
                         and H should have a dimension of size 1 corresponding
                         to the "field" dimension of the others.""")
    else:
        return result
def rhodots():
    """Derivative in e-fold time of the energy densities of the individual fields.

    TODO: placeholder from the initial outline -- not yet implemented.
    """
    pass

def fullrhodot():
    """Combined derivative in e-fold time of the energy density of the field.

    TODO: placeholder -- not yet implemented.
    """
    pass

def deltarhosmatrix():
    """Matrix of the first order perturbed energy densities of the field components.

    TODO: placeholder -- not yet implemented.
    """
    pass

def deltaprel():
    """Perturbed relative pressure of the fields given as quantum mode functions.

    TODO: placeholder -- not yet implemented.
    """
    pass

def deltaprelspectrum():
    """Power spectrum of the full perturbed relative pressure.

    TODO: placeholder -- not yet implemented.
    """
    pass
<commit_msg>Allow queries to be parsed as types
<commit_before>
module ParseQuery(parseQuery) where
import Data.List
import Type
parseQuery :: String -> Query
parseQuery x = Query (map f cat) names Nothing
where
(cat,names) = partition (':' `elem`) $ words x
f ('+':xs) = f xs
f ('-':xs) = let QTag _ a b = f xs in QTag False a b
f xs = let (a,_:b) = break (== ':') xs in QTag True a b
<commit_after>
module ParseQuery(parseQuery) where
import Data.List
import Data.Tuple.Extra
import Language.Haskell.Exts
import Type
-- | Parse a textual search query.  Words containing a ':' (but not
--   starting with one) are category tags; the remaining words are name
--   terms, unless they begin with "::", in which case the rest of the
--   input is parsed as a Haskell type signature.
parseQuery :: String -> Query
parseQuery x | "::":xs <- names = Query cat [] (Just $ fromParseResult $ parseType $ unwords xs)
             | otherwise = Query cat names Nothing
    where
        (cat,names) = first (map f) $ partition (\x -> not (":" `isPrefixOf` x) && ':' `elem` x) $ words x
        -- '+' prefix is the default (include the tag); '-' negates it.
        f ('+':xs) = f xs
        f ('-':xs) = let QTag _ a b = f xs in QTag False a b
        f xs = let (a,_:b) = break (== ':') xs in QTag True a b
|
<commit_msg>Fix typos discovered by the Debian Lintian tool
<commit_before>/*
Package sentences is a golang package will convert a blob of text into a list of sentences.
This package attempts to support a multitude of languages:
czech, danish, dutch, english, estonian, finnish,
french, german, greek, italian, norwegian, polish,
portuguese, slovene, spanish, swedish, and turkish.
An unsupervised multilingual sentence boundary detection library for golang.
The goal of this library is to be able to break up any text into a list of
sentences in multiple languages. The way the punkt system accomplishes this goal is
through training the tokenizer with text in that given language.
Once the likelyhoods of abbreviations, collocations, and sentence starters are
determined, finding sentence boundaries becomes easier.
There are many problems that arise when tokenizing text into sentences,
the primary issue being abbreviations. The punkt system attempts to determine
whether a word is an abbrevation, an end to a sentence, or even both through
training the system with text in the given language. The punkt system
incorporates both token- and type-based analysis on the text through two
different phases of annotation.
Original research article: http://citeseerx.ist.psu.edu/viewdoc/download;jsessionid=BAE5C34E5C3B9DC60DFC4D93B85D8BB1?doi=10.1.1.85.5017&rep=rep1&type=pdf
*/
package sentences
<commit_after>/*
Package sentences is a golang package that will convert a blob of text into a list of sentences.
This package attempts to support a multitude of languages:
Czech, Danish, Dutch, English, Estonian, Finnish,
French, German, Greek, Italian, Norwegian, Polish,
Portuguese, Slovene, Spanish, Swedish, and Turkish.
An unsupervised multilingual sentence boundary detection library for golang.
The goal of this library is to be able to break up any text into a list of
sentences in multiple languages. The way the punkt system accomplishes this goal is
through training the tokenizer with text in that given language.
Once the likelihoods of abbreviations, collocations, and sentence starters are
determined, finding sentence boundaries becomes easier.
There are many problems that arise when tokenizing text into sentences,
the primary issue being abbreviations. The punkt system attempts to determine
whether a word is an abbreviation, an end to a sentence, or even both through
training the system with text in the given language. The punkt system
incorporates both token- and type-based analysis on the text through two
different phases of annotation.
Original research article: http://citeseerx.ist.psu.edu/viewdoc/download;jsessionid=BAE5C34E5C3B9DC60DFC4D93B85D8BB1?doi=10.1.1.85.5017&rep=rep1&type=pdf
*/
package sentences
|
<commit_msg>Fix hidden broken floating-point test
<commit_before>
TEST(DataObjectTests, ScalarDataObjectHasCorrectProperties) {
float scalarValue = 5.0;
DataObject five(scalarValue);
EXPECT_EQ(five.Dim(), 0);
std::vector<int64_t> expectedShape;
EXPECT_EQ(five.Shape(), expectedShape);
EXPECT_EQ(five.GetKind(), DataKind::SCALAR);
}
TEST(DataObjectTests, MatrixDataObjectHasCorrectProperties) {
Eigen::MatrixXf m(3, 2);
DataObject matrix(m);
EXPECT_EQ(matrix.Dim(), 2);
std::vector<int64_t> expectedShape({3, 2});
EXPECT_EQ(matrix.Shape(), expectedShape);
EXPECT_EQ(matrix.GetKind(), DataKind::MATRIX);
}
TEST(DataObjectTests, ScalarDataObjectGivesCorrectScalarConversion) {
DataObject five = Scalar(5.0);
EXPECT_FLOAT_EQ(five.ToScalar(), 5.0);
bool comparatorWorks = five == 5.0;
EXPECT_EQ(comparatorWorks, true);
}
TEST(DataObjectTests, MatrixDataObjectGivesCorrectMatrixConversion) {
Eigen::MatrixXf m(3, 2);
DataObject matrix = Mat(m);
EXPECT_EQ(matrix.ToMatrix(), m);
bool comparatorWorks = matrix == m;
EXPECT_EQ(comparatorWorks, true);
}
<commit_after>
TEST(DataObjectTests, ScalarDataObjectHasCorrectProperties) {
float scalarValue = 5.0;
DataObject five(scalarValue);
EXPECT_EQ(five.Dim(), 0);
std::vector<int64_t> expectedShape;
EXPECT_EQ(five.Shape(), expectedShape);
EXPECT_EQ(five.GetKind(), DataKind::SCALAR);
}
TEST(DataObjectTests, MatrixDataObjectHasCorrectProperties) {
Eigen::MatrixXf m(3, 2);
DataObject matrix(m);
EXPECT_EQ(matrix.Dim(), 2);
std::vector<int64_t> expectedShape({3, 2});
EXPECT_EQ(matrix.Shape(), expectedShape);
EXPECT_EQ(matrix.GetKind(), DataKind::MATRIX);
}
TEST(DataObjectTests, ScalarDataObjectGivesCorrectScalarConversion) {
DataObject five = Scalar(5.0);
EXPECT_FLOAT_EQ(five.ToScalar(), 5.0);
bool comparatorWorks = five == 5.0;
EXPECT_EQ(comparatorWorks, true);
}
// Regression test for a previously hidden floating-point flake: the
// matrix is explicitly zero-initialized because a default-constructed
// Eigen::MatrixXf(3, 2) holds indeterminate values (potentially NaN,
// which compares unequal to itself), making the equality checks below
// nondeterministic.
TEST(DataObjectTests, MatrixDataObjectGivesCorrectMatrixConversion) {
Eigen::MatrixXf m = Eigen::MatrixXf::Zero(3, 2);
DataObject matrix = Mat(m);
EXPECT_EQ(matrix.ToMatrix(), m);
bool comparatorWorks = matrix == m;
EXPECT_EQ(comparatorWorks, true);
}
|
<commit_msg>Set the download link to the current version of django-request, not latest git.
<commit_before>from distutils.core import setup
import request
setup(
name='django-request',
version='%s' % request.__version__,
description='django-request is a statistics module for django. It stores requests in a database for admins to see, it can also be used to get statistics on who is online etc.',
author='Kyle Fuller',
author_email='inbox@kylefuller.co.uk',
url='http://kylefuller.co.uk/projects/django-request/',
download_url='http://github.com/kylef/django-request/zipball/master',
packages=['request', 'request.templatetags'],
package_data={'request': ['templates/admin/request/*.html', 'templates/admin/request/request/*.html']},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
<commit_after>from distutils.core import setup
import request
setup(
name='django-request',
version='%s' % request.__version__,
description='django-request is a statistics module for django. It stores requests in a database for admins to see, it can also be used to get statistics on who is online etc.',
author='Kyle Fuller',
author_email='inbox@kylefuller.co.uk',
url='http://kylefuller.co.uk/projects/django-request/',
download_url='http://github.com/kylef/django-request/zipball/%s' % request.__version__,
packages=['request', 'request.templatetags'],
package_data={'request': ['templates/admin/request/*.html', 'templates/admin/request/request/*.html']},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
<commit_msg>Fix tests. Add SSL test.
<commit_before>package irc
import (
// irc "github.com/thoj/Go-IRC-Client-Library"
"fmt"
"testing"
)
func TestConnection(t *testing.T) {
irccon := IRC("invisible", "invisible")
fmt.Printf("Testing connection\n")
err := irccon.Connect("irc.freenode.net:6667")
fmt.Printf("Connecting...")
if err != nil {
t.Fatal("Can't connect to freenode.")
}
irccon.AddCallback("001", func(e *Event) { irccon.Join("#invisible") })
irccon.AddCallback("PRIVMSG" , func(e *Event) {
irccon.Privmsg("#invisible", "WHAT IS THIS\n")
fmt.Printf("Got private message, likely should respond!\n")
irccon.Privmsg(e.Nick , "WHAT")
})
irccon.Loop()
}
<commit_after>package irc
import (
// "github.com/thoj/go-ircevent"
"testing"
)
func TestConnection(t *testing.T) {
irccon := IRC("go-eventirc", "go-eventirc")
irccon.VerboseCallbackHandler = true
err := irccon.Connect("irc.freenode.net:6667")
if err != nil {
t.Fatal("Can't connect to freenode.")
}
irccon.AddCallback("001", func(e *Event) { irccon.Join("#go-eventirc") })
irccon.AddCallback("366" , func(e *Event) {
irccon.Privmsg("#go-eventirc", "Test Message\n")
irccon.Quit();
})
irccon.Loop()
}
func TestConnectionSSL(t *testing.T) {
irccon := IRC("go-eventirc", "go-eventirc")
irccon.VerboseCallbackHandler = true
irccon.UseSSL = true
err := irccon.Connect("irc.freenode.net:7000")
if err != nil {
t.Fatal("Can't connect to freenode.")
}
irccon.AddCallback("001", func(e *Event) { irccon.Join("#go-eventirc") })
irccon.AddCallback("366" , func(e *Event) {
irccon.Privmsg("#go-eventirc", "Test Message\n")
irccon.Quit();
})
irccon.Loop()
}
|
<commit_msg>Install c3d-metadata script as part of the package.
<commit_before>import os
import setuptools
setuptools.setup(
name='c3d',
version='0.2.0',
py_modules=['c3d'],
author='Leif Johnson',
author_email='leif@leifjohnson.net',
description='A library for manipulating C3D binary files',
long_description=open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'README.rst')).read(),
license='MIT',
url='http://github.com/EmbodiedCognition/py-c3d',
keywords=('c3d motion-capture'),
install_requires=['numpy'],
scripts=['scripts/c3d-viewer', 'scripts/c3d2csv'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Scientific/Engineering',
],
)
<commit_after>import os
import setuptools
setuptools.setup(
name='c3d',
version='0.2.0',
py_modules=['c3d'],
author='Leif Johnson',
author_email='leif@leifjohnson.net',
description='A library for manipulating C3D binary files',
long_description=open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'README.rst')).read(),
license='MIT',
url='http://github.com/EmbodiedCognition/py-c3d',
keywords=('c3d motion-capture'),
install_requires=['numpy'],
scripts=['scripts/c3d-metadata', 'scripts/c3d-viewer', 'scripts/c3d2csv'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Scientific/Engineering',
],
)
|
<commit_msg>Fix unit tests for the blacklist
<commit_before>import unittest
import config
from .. import ntokloapi
class BlacklistTest(unittest.TestCase):
def setUp(self):
self.blacklist = ntokloapi.Blacklist(config.TEST_KEY, config.TEST_SECRET)
def test_blacklist_add_singleitem(self):
response = self.blacklist.add(productid=['10201', ])
assert response == "204"
def test_blacklist_add_multipleitems(self):
response = self.blacklist.add(productid=['10202', '10203'])
assert response == "204"
def test_blacklist_add_empty_elements(self):
response = self.blacklist.add(productid=['10204', '10205', '', ''])
assert response == "204"
def test_blacklist_remove_singleitem(self):
response = self.blacklist.remove(productid=['10201', ])
assert response == "204"
def test_blacklist_remove_multipleitems(self):
response = self.blacklist.remove(productid=['10202', '10203'])
assert response == "204"
def test_blacklist_remove_empty_elements(self):
response = self.blacklist.remove(productid=['10204', '10205', '', ''])
assert response == "204"
def test_blacklist_show_items(self):
response = self.blacklist.list()
assert not response
<commit_after>import unittest
import config
import ntokloapi
class BlacklistTest(unittest.TestCase):
def setUp(self):
self.blacklist = ntokloapi.Blacklist(config.TEST_KEY, config.TEST_SECRET)
def test_blacklist_add_singleitem(self):
response = self.blacklist.add(productid=['10201', ])
assert response == 204
def test_blacklist_add_multipleitems(self):
response = self.blacklist.add(productid=['10202', '10203'])
assert response == 204
def test_blacklist_add_empty_elements(self):
response = self.blacklist.add(productid=['10204', '10205', '', ''])
assert response == 204
def test_blacklist_remove_singleitem(self):
response = self.blacklist.remove(productid=['10201', ])
assert response == 204
def test_blacklist_remove_multipleitems(self):
response = self.blacklist.remove(productid=['10202', '10203'])
assert response == 204
def test_blacklist_remove_empty_elements(self):
response = self.blacklist.remove(productid=['10204', '10205', '', ''])
assert response == 204
def test_blacklist_show_items(self):
response = self.blacklist.list()
assert not response
|
<commit_msg>Fix newlines in copying of errors<commit_before>
from ...external.qt import QtGui
__all__ = ['QMessageBoxPatched']
class QMessageBoxPatched(QtGui.QMessageBox):
def __init__(self, *args, **kwargs):
super(QMessageBoxPatched, self).__init__(*args, **kwargs)
copy_action = QtGui.QAction('&Copy', self)
copy_action.setShortcut(QtGui.QKeySequence.Copy)
copy_action.triggered.connect(self.copy_detailed)
select_all = QtGui.QAction('Select &All', self)
select_all.setShortcut(QtGui.QKeySequence.SelectAll)
select_all.triggered.connect(self.select_all)
menubar = QtGui.QMenuBar()
editMenu = menubar.addMenu('&Edit')
editMenu.addAction(copy_action)
editMenu.addAction(select_all)
self.layout().setMenuBar(menubar)
@property
def detailed_text_widget(self):
return self.findChild(QtGui.QTextEdit)
def select_all(self):
self.detailed_text_widget.selectAll()
def copy_detailed(self):
clipboard = QtGui.QApplication.clipboard()
selected_text = self.detailed_text_widget.textCursor().selectedText()
clipboard.setText(selected_text)
<commit_after>
import os
from ...external.qt import QtGui
__all__ = ['QMessageBoxPatched']
class QMessageBoxPatched(QtGui.QMessageBox):
    """QMessageBox with an Edit menu (Copy / Select All) acting on the
    detailed-text area, so error reports can be copied out of the dialog."""

    def __init__(self, *args, **kwargs):
        super(QMessageBoxPatched, self).__init__(*args, **kwargs)

        copy_action = QtGui.QAction('&Copy', self)
        copy_action.setShortcut(QtGui.QKeySequence.Copy)
        copy_action.triggered.connect(self.copy_detailed)

        select_all = QtGui.QAction('Select &All', self)
        select_all.setShortcut(QtGui.QKeySequence.SelectAll)
        select_all.triggered.connect(self.select_all)

        menubar = QtGui.QMenuBar()
        editMenu = menubar.addMenu('&Edit')
        editMenu.addAction(copy_action)
        editMenu.addAction(select_all)
        self.layout().setMenuBar(menubar)

    @property
    def detailed_text_widget(self):
        # The QTextEdit that holds the message box's "detailed text".
        return self.findChild(QtGui.QTextEdit)

    def select_all(self):
        """Select all text in the detailed-text widget."""
        self.detailed_text_widget.selectAll()

    def copy_detailed(self):
        """Copy the currently selected detailed text to the clipboard."""
        clipboard = QtGui.QApplication.clipboard()
        selected_text = self.detailed_text_widget.textCursor().selectedText()
        # Qt's selectedText() uses Unicode paragraph separators rather
        # than '\n'; rejoin with the platform line separator so the
        # pasted text has real newlines.
        selected_text = os.linesep.join(selected_text.splitlines())
        clipboard.setText(selected_text)
|
<commit_msg>Use strict file writing for saving
This prevents us from trying to write to the save file while we are
reading it.
<commit_before>module Save ( save
, retrieve
) where
import Core
import Binary
import Data.Binary
import qualified Data.ByteString.Lazy as L
import Control.Applicative ((<$>))
save :: FilePath -> IRCState -> IO ()
save fp = L.writeFile fp . encode
retrieve :: FilePath -> IO IRCState
retrieve fp = decode <$> L.readFile fp
<commit_after>module Save ( save
, retrieve
) where
import Core
import Binary
import Data.Binary
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString as B
import Control.Applicative ((<$>))
-- | Serialise the IRC state and write it with a *strict* ByteString
-- write, so the whole encoded value is produced before the file is
-- touched (avoids lazily writing the file we may also be reading from).
save :: FilePath -> IRCState -> IO ()
save fp = B.writeFile fp . L.toStrict . encode

-- | Read the save file strictly (closing the handle immediately),
-- then decode the IRC state from it.
retrieve :: FilePath -> IO IRCState
retrieve fp = decode . L.fromStrict <$> B.readFile fp
|
<commit_msg>Put the generator instantiation in the setUp
<commit_before>try:
from unittest2 import TestCase
except ImportError:
from unittest import TestCase # flake8: noqa
import six
from mock import patch
from daybed.backends.id_generators import KoremutakeGenerator
class KoremutakeGeneratorTest(TestCase):
def test_it_defaults_the_max_bytes_to_4(self):
generator = KoremutakeGenerator()
self.assertEquals(generator.max_bytes, 4)
@patch('koremutake.encode')
def test_it_doesnt_reuse_a_name_twice(self, encode):
encode.side_effect = ['existing-value', 'new-value']
created = ['existing-value']
def _exists(key):
return key in created
generator = KoremutakeGenerator()
self.assertEquals(generator(key_exist=_exists), 'new-value')
def test_it_returns_a_string_with_a_max_size(self):
generator = KoremutakeGenerator()
uid = generator()
self.assertTrue(len(uid) <= 24)
self.assertIsInstance(uid, six.text_type)
<commit_after>try:
from unittest2 import TestCase
except ImportError:
from unittest import TestCase # flake8: noqa
import six
from mock import patch
from daybed.backends.id_generators import KoremutakeGenerator
class KoremutakeGeneratorTest(TestCase):
    """Tests for KoremutakeGenerator, using a shared instance from setUp."""

    def setUp(self):
        self.generator = KoremutakeGenerator()

    def test_it_defaults_the_max_bytes_to_4(self):
        # Bug fix: the original referenced the bare name `generator`,
        # which is undefined here -- the instance lives on `self`.
        self.assertEqual(self.generator.max_bytes, 4)

    @patch('koremutake.encode')
    def test_it_doesnt_reuse_a_name_twice(self, encode):
        encode.side_effect = ['existing-value', 'new-value']
        created = ['existing-value']

        def _exists(key):
            # Simulates the backend: only 'existing-value' is taken.
            return key in created

        self.assertEqual(self.generator(key_exist=_exists), 'new-value')

    def test_it_returns_a_string_with_a_max_size(self):
        uid = self.generator()
        self.assertTrue(len(uid) <= 24)
        self.assertIsInstance(uid, six.text_type)
|
<commit_msg>Use '0.0.0.0' to ensure your API is reachable outside localhost.
<commit_before>
import os
from eve import Eve
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in os.environ:
port = int(os.environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
app = Eve()
app.run(host=host, port=port)
<commit_after>
import os
from eve import Eve
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in os.environ:
port = int(os.environ.get('PORT'))
# use '0.0.0.0' to ensure your REST API is reachable from all your
# network (and not only your computer).
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
app = Eve()
app.run(host=host, port=port)
|
<commit_msg>mrp: Add tvOS 13.4 build number
<commit_before>"""Lookup methods for device data."""
import re
from pyatv.const import DeviceModel
_MODEL_LIST = {
"AppleTV2,1": DeviceModel.Gen2,
"AppleTV3,1": DeviceModel.Gen3,
"AppleTV3,2": DeviceModel.Gen3,
"AppleTV5,3": DeviceModel.Gen4,
"AppleTV6,2": DeviceModel.Gen4K,
}
# Incomplete list here!
_VERSION_LIST = {
"17J586": "13.0",
"17K82": "13.2",
"17K449": "13.3",
"17K795": "13.3.1",
}
def lookup_model(identifier):
"""Lookup device model from identifier."""
return _MODEL_LIST.get(identifier, DeviceModel.Unknown)
def lookup_version(build):
"""Lookup OS version from build."""
if not build:
return None
version = _VERSION_LIST.get(build)
if version:
return version
match = re.match(r"^(\d+)[A-Z]", build)
if match:
base = int(match.groups()[0])
# 17A123 corresponds to tvOS 13.x, 16A123 to tvOS 12.x and so on
return str(base - 4) + ".x"
return None
<commit_after>"""Lookup methods for device data."""
import re
from pyatv.const import DeviceModel
_MODEL_LIST = {
"AppleTV2,1": DeviceModel.Gen2,
"AppleTV3,1": DeviceModel.Gen3,
"AppleTV3,2": DeviceModel.Gen3,
"AppleTV5,3": DeviceModel.Gen4,
"AppleTV6,2": DeviceModel.Gen4K,
}
# Incomplete list here!
_VERSION_LIST = {
"17J586": "13.0",
"17K82": "13.2",
"17K449": "13.3",
"17K795": "13.3.1",
"17L256": "13.4",
}
def lookup_model(identifier):
    """Return the DeviceModel for *identifier* (Unknown if not mapped)."""
    try:
        return _MODEL_LIST[identifier]
    except KeyError:
        return DeviceModel.Unknown
def lookup_version(build):
    """Return the tvOS version string for *build*, or None if unknown."""
    if not build:
        return None

    # Exact build numbers we know about take precedence.
    known = _VERSION_LIST.get(build)
    if known is not None:
        return known

    # Otherwise derive the major version from the leading number:
    # 17A123 corresponds to tvOS 13.x, 16A123 to tvOS 12.x, and so on.
    match = re.match(r"^(\d+)[A-Z]", build)
    if not match:
        return None
    major = int(match.group(1)) - 4
    return "{0}.x".format(major)
|
<commit_msg>Make a watchdog timer blink a LED
<commit_before>
int main (void) {
//Set pin 3 as output to source current?
PORTB = 1<<PORTB3;
DDRB = 1<<DDB3;
}<commit_after>
int main ( void ) {
//Disable interrupts and reset WDT timer
cli();
wdt_reset();
//Reset MCU status register
MCUSR &= ~(1<<WDRF);
//Disable watchdog
WDTCR |= (1<<WDCE) | (1<<WDE);
WDTCR = 0;
//Enable LED
DDRB = 1<<DDB3;
PORTB = 1<<PORTB3;
//Start watchdog
WDTCR |= (1<<WDCE) | (1<<WDE);
WDTCR = (1<<WDTIE) | (1<<WDP2) | (1<<WDP1);
//Enable interrupts
sei();
while(1);
}
//Catching WatchDog interrupts
ISR ( WDT_vect ) {
PORTB ^= 1<<PORTB3;
} |
<commit_msg>Load choob-plugin:s from local directories before classpath
<commit_before>package uk.co.uwcs.choob.modules;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLStreamHandler;
class ChoobURLStreamHandler extends URLStreamHandler {
@Override
protected URLConnection openConnection(URL u) throws IOException {
final String path = u.getPath();
{
final URL resource = ChoobURLStreamHandler.class.getResource(path);
if (null != resource)
return resource.openConnection();
}
for (File sourcePath : new File[] {
new File("main/plugins-alpha"),
new File("main/plugins"),
new File("src/main/plugins-alpha"),
new File("src/main/plugins"),
}) {
final File f = new File(sourcePath, u.getPath());
if (f.isFile())
return new URL("file:///" + f.getAbsolutePath()).openConnection();
}
throw new FileNotFoundException("Couldn't resolve '" + path + "' from the classpath or source directories");
}
}
<commit_after>package uk.co.uwcs.choob.modules;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLStreamHandler;
/**
 * URL stream handler that resolves choob-plugin URLs, preferring local
 * plugin source directories over the classpath so that locally edited
 * plugins are picked up first.
 */
class ChoobURLStreamHandler extends URLStreamHandler {
	@Override
	protected URLConnection openConnection(URL u) throws IOException {
		final String path = u.getPath();
		// Probe the known source trees in priority order; the first file
		// hit wins over any classpath resource of the same name.
		for (File sourcePath : new File[] {
				new File("main/plugins-alpha"),
				new File("main/plugins"),
				new File("src/main/plugins-alpha"),
				new File("src/main/plugins"),
		}) {
			final File f = new File(sourcePath, u.getPath());
			if (f.isFile())
				return new URL("file:///" + f.getAbsolutePath()).openConnection();
		}
		// Fall back to the classpath (e.g. plugins bundled in the jar).
		{
			final URL resource = ChoobURLStreamHandler.class.getResource(path);
			if (null != resource)
				return resource.openConnection();
		}
		throw new FileNotFoundException("Couldn't resolve '" + path + "' from the classpath or source directories");
	}
}
|
<commit_msg>Update list of extraordinary gentlemen
<commit_before>ADMINFAG = ["RealDolos"]
PARROTFAG = "Parrot"
BLACKFAGS = [i.casefold() for i in (
"kalyx", "merc", "loliq", "annoying", "bot", "RootBeats", "JEW2FORU", "quag", "mire", "perici")]
OBAMAS = [i.casefold() for i in (
"counselor", "briseis", "apha", "bread", "ark3", "jizzbomb", "acid", "elkoalemos", "tarta")]
BLACKROOMS = "e7u-CG", "jAzmc3", "f66jeG", "24_zFd"
WHITEROOMS = "9pdLvy"
<commit_after>ADMINFAG = ["RealDolos"]
PARROTFAG = "Parrot"
BLACKFAGS = [i.casefold() for i in (
"kalyx", "merc", "loliq", "annoying", "RootBeats", "JEW2FORU", "quag", "mire", "perici", "Voldemort", "briseis", "brisis", "GNUsuks", "rhooes", "n1sm4n", "honeyhole", "Printer", "yume1")]
OBAMAS = [i.casefold() for i in (
"counselor", "briseis", "apha", "bread", "ark3", "jizzbomb", "acid", "elkoalemos", "tarta", "counselor", "myon")]
BLACKROOMS = "e7u-CG", "jAzmc3", "f66jeG", "24_zFd", "BHfjGvT", "BHI0pxg",
WHITEROOMS = "BEEPi",
|
<commit_msg>Tweak Sentry config to work in development
This makes a couple of changes:
- Most importantly, it wraps the setup code in a conditional so that
developers don't need to have a DSN set to start the app locally.
- Secondly, it removes the redundant call to "set_level". Originally
I thought the integration was sending info/warning events, but this
isn't the case [1] and even if it was, "set_level" affects the level
of custom events [2], not the level they are dispatched at.
[1]: https://github.com/getsentry/sentry-python/blob/4c09f3203d6d19789c6fa729a2e46557ad4ea913/sentry_sdk/integrations/logging.py#L56
[2]: https://docs.sentry.io/platforms/python/guides/logging/usage/set-level/
<commit_before>import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations import logging
from app import create_app
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration(), RedisIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
sentry_sdk.set_level('error') # only record error logs or exceptions
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
<commit_after>import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations import logging
from app import create_app
if 'SENTRY_DSN' in os.environ:
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration(), RedisIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
|
<commit_msg>Fix getting product in build_dashboard task
<commit_before>from __future__ import annotations
from typing import TYPE_CHECKING
from celery.utils.log import get_task_logger
from keeper.celery import celery_app
from keeper.models import Product
from keeper.services.dashboard import build_dashboard as build_dashboard_svc
if TYPE_CHECKING:
import celery.task
__all__ = ["build_dashboard"]
logger = get_task_logger(__name__)
@celery_app.task(bind=True)
def build_dashboard(self: celery.task.Task, product_id: str) -> None:
"""Build a product's dashboard as a Celery task.
Parameters
----------
product_url : `str`
URL of the product resource.
"""
logger.info(
"Starting dashboard build product_id=%s retry=%d",
product_id,
self.request.retries,
)
product = Product.query(id=product_id).one()
build_dashboard_svc(product, logger)
logger.info("Finished triggering dashboard build")
<commit_after>from __future__ import annotations
from typing import TYPE_CHECKING
from celery.utils.log import get_task_logger
from keeper.celery import celery_app
from keeper.models import Product
from keeper.services.dashboard import build_dashboard as build_dashboard_svc
if TYPE_CHECKING:
import celery.task
__all__ = ["build_dashboard"]
logger = get_task_logger(__name__)
@celery_app.task(bind=True)
def build_dashboard(self: celery.task.Task, product_id: str) -> None:
    """Build a product's dashboard as a Celery task.

    Parameters
    ----------
    product_id : `str`
        Primary key of the product whose dashboard is rebuilt.
    """
    logger.info(
        "Starting dashboard build product_id=%s retry=%d",
        product_id,
        self.request.retries,
    )
    # Primary-key lookup. NOTE(review): Product.query.get returns None
    # for an unknown id -- confirm build_dashboard_svc tolerates that.
    product = Product.query.get(product_id)
    build_dashboard_svc(product, logger)
    logger.info("Finished triggering dashboard build")
|
<commit_msg>Address execve protection fault in Octave autoconfig
`octave-cli` is what we really want
<commit_before>from .base_executor import ScriptExecutor
class Executor(ScriptExecutor):
ext = '.m'
name = 'OCTAVE'
command = 'octave'
address_grace = 131072
test_program = "disp(input('', 's'))"
fs = ['.*\.m', '/lib/', '/etc/nsswitch\.conf$', '/etc/passwd$', '/usr/share/', '/etc/fltk/']
def get_cmdline(self):
return [self.get_command(), '--no-gui', '--no-history', '--no-init-file', '--no-site-file',
'--no-window-system', '--norc', '--quiet', self._code]
<commit_after>from .base_executor import ScriptExecutor
class Executor(ScriptExecutor):
    """Executor for GNU Octave submissions.

    Resolves the ``octave`` command to the ``octave-cli`` binary (the
    GUI-capable launcher faults under the sandbox) and runs with all
    startup files and windowing disabled.
    """
    ext = '.m'
    name = 'OCTAVE'
    command = 'octave'
    address_grace = 131072
    test_program = "disp(input('', 's'))"
    # Raw strings for the regex entries: '\.' is an invalid escape in a
    # plain string literal (DeprecationWarning on modern Python). The
    # values are unchanged.
    fs = [r'.*\.m', '/lib/', r'/etc/nsswitch\.conf$', '/etc/passwd$', '/usr/share/', '/etc/fltk/']

    def get_cmdline(self):
        """Build the octave command line with all rc/startup files and
        the window system disabled, ending with the submission path."""
        return [self.get_command(), '--no-gui', '--no-history', '--no-init-file', '--no-site-file',
                '--no-window-system', '--norc', '--quiet', self._code]

    @classmethod
    def get_find_first_mapping(cls):
        # Map the logical 'octave' command to candidate binaries, in
        # order of preference; 'octave-cli' skips the GUI wrapper.
        return {
            'octave': ['octave-cli'],
        }
<commit_msg>Move the structure definition to a better place
<commit_before>/** Author : Paul TREHIOU & Victor SENE
* Date : November 2014
**/
/**
* Declaration Point structure
* x - real wich is the abscisse of the point
* y - real wich is the ordinate of the point
*/
typedef struct
{
float x;
float y;
}Point;
/**
* Function wich create a point with a specified abscisse and ordinate
* abscisse - real
* ordinate - real
* return a new point
*/
Point createPoint(float abscisse, float ordinate);
/**
* Declaration of the Element structure
* value - value of the point of the current element of the polygon
* next - pointer on the next element
* previous - pointer on the previous element
*/
typedef struct pointelem{
Point value;
pointelem* next;
pointelem* previous;
}PointElement;
/**
* Declaration of the Polygon
*/
typedef PointElement* Polygon;
<commit_after>/** Author : Paul TREHIOU & Victor SENE
* Date : November 2014
**/
/**
* Declaration Point structure
 * x - real which is the abscissa of the point
 * y - real which is the ordinate of the point
*/
typedef struct
{
float x;
float y;
}Point;
/**
* Declaration of the Element structure
* value - value of the point of the current element of the polygon
* next - pointer on the next element
* previous - pointer on the previous element
*/
/**
 * Doubly-linked list element holding one vertex of a polygon.
 * value    - the Point stored in this element
 * next     - pointer to the next element
 * previous - pointer to the previous element
 */
typedef struct pointelem {
    Point value;
    /* In C the typedef name is not in scope inside its own definition,
     * so the self-references must use the struct tag. */
    struct pointelem *next;
    struct pointelem *previous;
} PointElement;
/**
* Declaration of the Polygon
*/
typedef PointElement* Polygon;
/**
 * Function which creates a point with a specified abscissa and ordinate
* abscisse - real
* ordinate - real
* return a new point
*/
Point createPoint(float abscisse, float ordinate);
|
<commit_msg>Clone the propMetadata key as well
<commit_before>import { OpaqueToken, Inject } from '@angular/core';
export const CONFIG_INITIALIZER = new OpaqueToken('Config initializer');
export function ConfigInitializer<T>(config: T) {
type ConfigInitializerType = (config: T) => void;
const reflect: any = (window as any)['Reflect'];
const getOwnMetadata: Function = reflect.getOwnMetadata;
const defineMetadata: Function = reflect.defineMetadata;
return function ConfigInitializerDecorator(targetConstructor: any) {
const metaInformation = getOwnMetadata('annotations', targetConstructor);
const designParamtypesInformation = getOwnMetadata('design:paramtypes', targetConstructor);
const parametersInformation = getOwnMetadata('parameters', targetConstructor);
function newConstructor(
configInitializer: ConfigInitializerType,
...args: any[],
) {
configInitializer(config);
// tslint:disable-next-line:no-invalid-this
return targetConstructor.apply(this, args);
}
defineMetadata('annotations', metaInformation, newConstructor);
defineMetadata('parameters', parametersInformation, newConstructor);
defineMetadata(
'design:paramtypes',
[
new Inject(CONFIG_INITIALIZER),
...designParamtypesInformation,
],
newConstructor,
);
newConstructor.prototype = targetConstructor.prototype;
return newConstructor as typeof targetConstructor;
};
}
<commit_after>import { OpaqueToken, Inject } from '@angular/core';
export const CONFIG_INITIALIZER = new OpaqueToken('Config initializer');
/**
 * Class decorator factory: wraps the decorated constructor so that an
 * injected initializer function is called with `config` before the
 * original constructor runs.
 */
export function ConfigInitializer<T>(config: T) {
  type ConfigInitializerType = (config: T) => void;

  // Reflect-metadata API, read dynamically off window so this module
  // has no static dependency on the polyfill.
  const reflect: any = (window as any)['Reflect'];
  const getOwnMetadata: Function = reflect.getOwnMetadata;
  const defineMetadata: Function = reflect.defineMetadata;

  return function ConfigInitializerDecorator(targetConstructor: any) {
    // Capture every metadata key Angular relies on so it can be
    // re-attached to the replacement constructor below.
    const metaInformation = getOwnMetadata('annotations', targetConstructor);
    const propMetaInformation = getOwnMetadata('propMetadata', targetConstructor);
    const designParamtypesInformation = getOwnMetadata('design:paramtypes', targetConstructor);
    const parametersInformation = getOwnMetadata('parameters', targetConstructor);

    // Replacement constructor: run the injected initializer with the
    // captured config, then delegate to the original constructor.
    function newConstructor(
      configInitializer: ConfigInitializerType,
      ...args: any[],
    ) {
      configInitializer(config);
      // tslint:disable-next-line:no-invalid-this
      return targetConstructor.apply(this, args);
    }

    defineMetadata('annotations', metaInformation, newConstructor);
    defineMetadata('propMetadata', propMetaInformation, newConstructor);
    defineMetadata('parameters', parametersInformation, newConstructor);
    // Prepend the CONFIG_INITIALIZER token so DI supplies the
    // initializer as the first constructor argument.
    defineMetadata(
      'design:paramtypes',
      [
        new Inject(CONFIG_INITIALIZER),
        ...designParamtypesInformation,
      ],
      newConstructor,
    );

    newConstructor.prototype = targetConstructor.prototype;
    return newConstructor as typeof targetConstructor;
  };
}
|
<commit_msg>Remove unnecessary variable from route
Jinja will set any variable it can't find to None, so the title variable
is unnecessary.
<commit_before>from flask import render_template, request, flash
from flask_login import login_required
from .. import main
from ... import data_api_client
from ..auth import role_required
@main.route('/buyers', methods=['GET'])
@login_required
@role_required('admin')
def find_buyer_by_brief_id():
brief_id = request.args.get('brief_id')
try:
brief = data_api_client.get_brief(brief_id).get('briefs')
except:
flash('no_brief', 'error')
return render_template(
"view_buyers.html",
users=list(),
title=None,
brief_id=brief_id
), 404
users = brief.get('users')
title = brief.get('title')
return render_template(
"view_buyers.html",
users=users,
title=title,
brief_id=brief_id
)
<commit_after>from flask import render_template, request, flash
from flask_login import login_required
from .. import main
from ... import data_api_client
from ..auth import role_required
@main.route('/buyers', methods=['GET'])
@login_required
@role_required('admin')
def find_buyer_by_brief_id():
    """Admin view: list the buyer users attached to a brief.

    Reads ``brief_id`` from the query string; renders the brief's users
    and title, or an empty 404 page when the brief cannot be fetched.
    """
    brief_id = request.args.get('brief_id')
    try:
        brief = data_api_client.get_brief(brief_id).get('briefs')
    # NOTE(review): bare except swallows every error (including bugs in
    # this handler); catch the API client's specific error type instead.
    except:
        flash('no_brief', 'error')
        return render_template(
            "view_buyers.html",
            users=list(),
            brief_id=brief_id
        ), 404

    users = brief.get('users')
    title = brief.get('title')

    return render_template(
        "view_buyers.html",
        users=users,
        title=title,
        brief_id=brief_id
    )
|
<commit_msg>Define groups for the invoice events.
- wal-202
<commit_before>from nodeconductor.logging.loggers import EventLogger, event_logger
class InvoiceLogger(EventLogger):
month = int
year = int
customer = 'structure.Customer'
class Meta:
event_types = ('invoice_created', 'invoice_paid', 'invoice_canceled')
event_logger.register('invoice', InvoiceLogger)
<commit_after>from nodeconductor.logging.loggers import EventLogger, event_logger
class InvoiceLogger(EventLogger):
month = int
year = int
customer = 'structure.Customer'
class Meta:
event_types = ('invoice_created', 'invoice_paid', 'invoice_canceled')
event_groups = {
'customers': event_types,
'invoices': event_types,
}
event_logger.register('invoice', InvoiceLogger)
|
<commit_msg>Fix render layer adder in obf environments
<commit_before>package tehnut.lib.util;
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.entity.RenderManager;
import net.minecraft.client.renderer.entity.RenderPlayer;
import net.minecraft.client.renderer.entity.layers.LayerRenderer;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import tehnut.lib.LendingLibrary;
import java.lang.reflect.Field;
@SideOnly(Side.CLIENT)
public class RenderHelper {
public static boolean addPlayerLayerRenderer(Class<? extends LayerRenderer<?>> layerRendererClass) {
RenderManager renderManager = Minecraft.getMinecraft().getRenderManager();
try {
Field renderPlayerField = RenderManager.class.getDeclaredField("playerRenderer");
renderPlayerField.setAccessible(true);
Object renderPlayerObj = renderPlayerField.get(renderManager);
if (renderPlayerObj instanceof RenderPlayer) {
RenderPlayer renderPlayer = (RenderPlayer) renderPlayerObj;
LayerRenderer<?> layerRenderer = layerRendererClass.getConstructor(RenderPlayer.class).newInstance(renderPlayer);
renderPlayer.addLayer(layerRenderer);
return true;
}
} catch (Exception e) {
LendingLibrary.getLogger().error(e.getLocalizedMessage());
}
return false;
}
}
<commit_after>package tehnut.lib.util;
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.entity.RenderManager;
import net.minecraft.client.renderer.entity.RenderPlayer;
import net.minecraft.client.renderer.entity.layers.LayerRenderer;
import net.minecraftforge.fml.common.ObfuscationReflectionHelper;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import tehnut.lib.LendingLibrary;
@SideOnly(Side.CLIENT)
public class RenderHelper {
public static boolean addPlayerLayerRenderer(Class<? extends LayerRenderer<?>> layerRendererClass) {
RenderManager renderManager = Minecraft.getMinecraft().getRenderManager();
try {
RenderPlayer renderPlayer = ObfuscationReflectionHelper.getPrivateValue(RenderManager.class, renderManager, "playerRenderer", "field_178637_m");
LayerRenderer<?> layerRenderer = layerRendererClass.getConstructor(RenderPlayer.class).newInstance(renderPlayer);
renderPlayer.addLayer(layerRenderer);
return true;
} catch (Exception e) {
LendingLibrary.getLogger().error(e.getLocalizedMessage());
}
return false;
}
}
|
<commit_msg>Add test for window attribute
<commit_before>const config: NeteaseCaptcha.Config = {
captchaId: 'FAKE ID',
element: '#captcha',
mode: 'popup',
protocol: 'https',
width: '200px',
lang: 'en',
onVerify: (error: any, data?: NeteaseCaptcha.Data) => {
console.log(error, data);
}
};
const onLoad: NeteaseCaptcha.onLoad = (instance: NeteaseCaptcha.Instance) => {
instance.refresh();
instance.destroy();
};
function init(initNECaptcha: NeteaseCaptcha.InitFunction): void {
initNECaptcha(config, onLoad);
}
<commit_after>const config: NeteaseCaptcha.Config = {
captchaId: 'FAKE ID',
element: '#captcha',
mode: 'popup',
protocol: 'https',
width: '200px',
lang: 'en',
onVerify: (error: any, data?: NeteaseCaptcha.Data) => {
console.log(error, data);
}
};
const onLoad: NeteaseCaptcha.onLoad = (instance: NeteaseCaptcha.Instance) => {
instance.refresh();
instance.destroy();
if (instance.popUp) {
instance.popUp();
}
};
if (window.initNECaptcha) {
window.initNECaptcha(config, onLoad);
}
|
<commit_msg>Use absolute path for config file so it works with apps like Hazel
<commit_before>from os import path
from ConfigParser import ConfigParser
import requests
import sys
def reverse_lookup(lat, lon):
if(lat is None or lon is None):
return None
if not path.exists('./config.ini'):
return None
config = ConfigParser()
config.read('./config.ini')
if('MapQuest' not in config.sections()):
return None
key = config.get('MapQuest', 'key')
try:
r = requests.get('https://open.mapquestapi.com/nominatim/v1/reverse.php?key=%s&lat=%s&lon=%s&format=json' % (key, lat, lon))
return r.json()
except requests.exceptions.RequestException as e:
print e
return None
except ValueError as e:
print r.text
print e
return None
def place_name(lat, lon):
geolocation_info = reverse_lookup(lat, lon)
if(geolocation_info is not None):
if('address' in geolocation_info):
address = geolocation_info['address']
if('city' in address):
return address['city']
elif('state' in address):
return address['state']
elif('country' in address):
return address['country']
return None
<commit_after>from os import path
from ConfigParser import ConfigParser
import requests
import sys
def reverse_lookup(lat, lon):
if(lat is None or lon is None):
return None
config_file = '%s/config.ini' % path.dirname(path.dirname(path.abspath(__file__)))
if not path.exists(config_file):
return None
config = ConfigParser()
config.read(config_file)
if('MapQuest' not in config.sections()):
return None
key = config.get('MapQuest', 'key')
try:
r = requests.get('https://open.mapquestapi.com/nominatim/v1/reverse.php?key=%s&lat=%s&lon=%s&format=json' % (key, lat, lon))
return r.json()
except requests.exceptions.RequestException as e:
print e
return None
except ValueError as e:
print r.text
print e
return None
def place_name(lat, lon):
geolocation_info = reverse_lookup(lat, lon)
if(geolocation_info is not None):
if('address' in geolocation_info):
address = geolocation_info['address']
if('city' in address):
return address['city']
elif('state' in address):
return address['state']
elif('country' in address):
return address['country']
return None
|
<commit_msg>Add new error and warning number
<commit_before>// Explorer loader problem list
// /problem.h
#ifndef PROBLEM_H_
#define PROBLEM_H_
/**错误列表*/
#define ERR_NO_MEM_FOR_ID 1 // 没有可用于中断描述符表的内存
#define ERR_NO_MEM_FOR_SD 2 // 没有可用于储存器描述符表的内存
#define ERR_NO_MEM_FOR_SCTBUF 3 // 没有可用于扇区缓存的内存
#define ERR_NO_MEM_FOR_CONFIG 4 // 没有可以分配给引导配置文件的内存
#define ERR_NO_MEM_FOR_BUFFER 5 // 没有可以分配给缓冲系统的内存
#define ERR_NO_MEM_FOR_FS 6 // 没有可以分配给文件系统的内存
#define ERR_NO_MEM_FOR_MMU 7 // 没有可以分配给MMU的内存
/**警告列表*/
#define WARN_NO_MEM 0x80000001 // 无充足内存
#define WARN_STORAGE_NOT_SUPPORT 0x80000002 // 暂时不支持这个储存器
#endif
<commit_after>// Explorer loader problem list
// /problem.h
#ifndef PROBLEM_H_
#define PROBLEM_H_
/**错误列表*/
#define ERR_NO_MEM_FOR_ID 1 // 没有可用于中断描述符表的内存
#define ERR_NO_MEM_FOR_SD 2 // 没有可用于储存器描述符表的内存
#define ERR_NO_MEM_FOR_SCTBUF 3 // 没有可用于扇区缓存的内存
#define ERR_NO_MEM_FOR_CONFIG 4 // 没有可以分配给引导配置文件的内存
#define ERR_NO_MEM_FOR_BUFFER 5 // 没有可以分配给缓冲系统的内存
#define ERR_NO_MEM_FOR_FS 6 // 没有可以分配给文件系统的内存
#define ERR_NO_MEM_FOR_MMU 7 // 没有可以分配给MMU的内存
#define ERR_NO_FILE 8 // 没有文件
#define ERR_CONFIG_OVERSIZE 9 // CONFIG.LDR oversized
/**警告列表*/
#define WARN_NO_MEM 0x80000001 // 无充足内存
#define WARN_STORAGE_NOT_SUPPORT 0x80000002 // 暂时不支持这个储存器
#define WARN_SCRIPT_SIZE_BAD 0x80000003 // length of cript incorrect
#endif
|
<commit_msg>Add the score to Engine.chat return values
<commit_before>
class Engine:
def __init__(self,
response_pairs,
knowledge={}):
self.response_pairs = response_pairs
self.knowledge = knowledge
def chat(self, user_utterance, context):
best_score = 0
best_response_pair = None
best_captured = {}
for response_pair in self.response_pairs:
captured = response_pair.match(user_utterance, self.knowledge)
if captured is None:
continue
score = response_pair.score(captured, context, self.knowledge)
if best_score < score:
best_score, best_response_pair, best_captured = score, response_pair, captured
return best_response_pair.generate(best_captured, context, self.knowledge)
<commit_after>
class Engine:
def __init__(self,
response_pairs,
knowledge={}):
self.response_pairs = response_pairs
self.knowledge = knowledge
def chat(self, user_utterance, context):
best_score = 0
best_response_pair = None
best_captured = {}
for response_pair in self.response_pairs:
captured = response_pair.match(user_utterance, self.knowledge)
if captured is None:
continue
score = response_pair.score(captured, context, self.knowledge)
if best_score < score:
best_score, best_response_pair, best_captured = score, response_pair, captured
response, new_context = best_response_pair.generate(best_captured, context, self.knowledge)
return response, new_context, best_score
|
<commit_msg>Include time of update start/end
<commit_before>
from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetClass
class Command(BaseCommand):
def handle(self, *args, **options):
for vn_class in VerbNetClass.objects.all():
print(vn_class.name)
vn_class.update_members_and_translations()
<commit_after>
from time import gmtime, strftime
from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetClass
class Command(BaseCommand):
def handle(self, *args, **options):
when = strftime("%d/%m/%Y %H:%M:%S", gmtime())
verb_logger.info("{}: Start full update of verbs (members and translations)".format(when))
for vn_class in VerbNetClass.objects.all():
print(vn_class.name)
vn_class.update_members_and_translations()
when = strftime("%d/%m/%Y %H:%M:%S", gmtime())
verb_logger.info("{}: Ended full update of verbs (members and translations)".format(when))
|
<commit_msg>Add custom CSS style to FIWARE doc
Change-Id: I74293d488e0cd762ad023b94879ee618a4016110
<commit_before>import os
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
project = 'FIWARE-Stream-Oriented-GE'
<commit_after>import os
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
project = 'FIWARE-Stream-Oriented-GE'
html_theme_options = {
'cssfiles': ['https://fiware.org/style/fiware_readthedocs.css']
}
|
<commit_msg>Resolve import error introduced in last commit.
<commit_before>
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = ''
with open('koordinates/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='koordinates',
packages=['koordinates',],
version=version
description='koordinates is a Python client library for a number of Koordinates web APIs',
long_description=readme + '\n\n' + history
author='Richard Shea',
author_email='rshea@thecubagroup.com',
url='https://github.com/koordinates/python-client',
download_url = 'https://github.com/koordinates/python-client/tarball/0.1',
keywords='koordinates api',
license = 'BSD',
classifiers=[],
test_suite='tests',
)
<commit_after>
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import re
import os
from codecs import open
version = ''
with open('koordinates/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='koordinates',
packages=['koordinates',],
version=version,
description='koordinates is a Python client library for a number of Koordinates web APIs',
long_description=readme + '\n\n' + history,
author='Richard Shea',
author_email='rshea@thecubagroup.com',
url='https://github.com/koordinates/python-client',
download_url = 'https://github.com/koordinates/python-client/tarball/0.1',
keywords='koordinates api',
license = 'BSD',
classifiers=[],
test_suite='tests',
)
|
<commit_msg>Add Chainmap support for python2
<commit_before>"""Determine the generator format"""
from collections import ChainMap
DEFAULT_FORMAT = {
'domain': 'example.com',
'app': '{repo}{project}',
'dns_elb': '{repo}.{project}.{env}.{domain}',
'dns_instance': '{repo}{project}-xx.{env}.{domain}',
'iam_base': '{project}_{repo}',
'iam_user': '{project}_{repo}',
'iam_group': '{project}',
'iam_role': '{project}_{repo}_role',
'iam_policy': '{project}_{repo}_policy',
'iam_profile': '{project}_{repo}_profile',
's3_bucket': 'archaius-{env}',
's3_bucket_path': '{project}/{repo}{project}',
's3_archaius_name': 'archaius-{env}/{project}/{repo}{project}/',
'jenkins_job_name': '{project}_{repo}',
'git_repo': '{raw_project}/{raw_repo}',
'git_repo_qe': '{raw_project}/{raw_repo}-qa',
'git_repo_configs': '{raw_project}/{raw_repo}-config',
}
class Formats(object):
def __init__(self, config={}):
self.config = ChainMap(config, DEFAULT_FORMAT)
def get_formats(self):
return self.config
def __getitem__(self, key):
return self.config[key]
<commit_after>"""Determine the generator format"""
try:
from collections import ChainMap
except ImportError:
from ConfigParser import _Chainmap as ChainMap
DEFAULT_FORMAT = {
'domain': 'example.com',
'app': '{repo}{project}',
'dns_elb': '{repo}.{project}.{env}.{domain}',
'dns_instance': '{repo}{project}-xx.{env}.{domain}',
'iam_base': '{project}_{repo}',
'iam_user': '{project}_{repo}',
'iam_group': '{project}',
'iam_role': '{project}_{repo}_role',
'iam_policy': '{project}_{repo}_policy',
'iam_profile': '{project}_{repo}_profile',
's3_bucket': 'archaius-{env}',
's3_bucket_path': '{project}/{repo}{project}',
's3_archaius_name': 'archaius-{env}/{project}/{repo}{project}/',
'jenkins_job_name': '{project}_{repo}',
'git_repo': '{raw_project}/{raw_repo}',
'git_repo_qe': '{raw_project}/{raw_repo}-qa',
'git_repo_configs': '{raw_project}/{raw_repo}-config',
}
class Formats(object):
def __init__(self, config={}):
self.config = ChainMap(config, DEFAULT_FORMAT)
def get_formats(self):
return self.config
def __getitem__(self, key):
return self.config[key]
|
<commit_msg>Add correct packages and requirements for python package
<commit_before>from setuptools import setup
setup(
name='lace',
version='0.1.1',
description='Neural Learning to Rank using Chainer',
url='https://github.com/rjagerman/lace',
download_url = 'https://github.com/rjagerman/lace/archive/v0.1.1.tar.gz',
author='Rolf Jagerman',
author_email='rjagerman@gmail.com',
license='MIT',
packages=['lace']
)
<commit_after>from setuptools import setup
setup(
name='lace',
version='0.1.1',
description='Neural Learning to Rank using Chainer',
url='https://github.com/rjagerman/lace',
download_url = 'https://github.com/rjagerman/lace/archive/v0.1.1.tar.gz',
author='Rolf Jagerman',
author_email='rjagerman@gmail.com',
license='MIT',
packages=['lace',
'lace.functions',
'lace.loss'],
install_requires=['numpy>=1.12.0', 'chainer>=2.0.0'],
tests_require=['nose']
)
|
<commit_msg>Allow for extra slashes in project paths, such as mq patch queues.
<commit_before>try:
from hggit import *
hggit_reposetup = reposetup
except ImportError:
# Allow this module to be imported without
# hg-git installed, eg for setup.py
pass
__version__ = "0.1.0"
def reposetup(ui, repo, **kwargs):
"""
Automatically adds Bitbucket->GitHub mirror paths to the repo.
Also creates a `master` bookmark for the `default` branch.
"""
hggit_reposetup(ui, repo, **kwargs)
bb = "ssh://hg@bitbucket.org/"
for pathname, path in ui.configitems("paths"):
if path.startswith(bb):
user, project = path.replace(bb, "").rstrip("/").split("/")
for k, v in ui.configitems("github"):
if k == "username":
user = v
gh_path = "git+ssh://git@github.com/%s/%s.git" % (user, project)
if pathname == "default":
if "master" not in repo._bookmarks:
from mercurial.commands import bookmark
bookmark(ui, repo, mark="master", rev="default")
gh_pathname = "github"
else:
gh_pathname = "github-" + pathname
ui.setconfig("paths", gh_pathname, gh_path)
<commit_after>try:
from hggit import *
hggit_reposetup = reposetup
except ImportError:
# Allow this module to be imported without
# hg-git installed, eg for setup.py
pass
__version__ = "0.1.0"
def reposetup(ui, repo, **kwargs):
"""
Automatically adds Bitbucket->GitHub mirror paths to the repo.
Also creates a `master` bookmark for the `default` branch.
"""
hggit_reposetup(ui, repo, **kwargs)
bb = "ssh://hg@bitbucket.org/"
for pathname, path in ui.configitems("paths"):
if path.startswith(bb):
user, project = path.replace(bb, "").split("/", 1)
# Strip slash and everything after it,
# such as mq patch queue path.
project = project.split("/")[0]
for k, v in ui.configitems("github"):
if k == "username":
user = v
gh_path = "git+ssh://git@github.com/%s/%s.git" % (user, project)
if pathname == "default":
if "master" not in repo._bookmarks:
from mercurial.commands import bookmark
bookmark(ui, repo, mark="master", rev="default")
gh_pathname = "github"
else:
gh_pathname = "github-" + pathname
ui.setconfig("paths", gh_pathname, gh_path)
|
<commit_msg>Remove warning and only include needed stuff.
git-svn-id: b99a075ee42e317ef7d0e499fd315684e5f6d838@35456 7cbeb6ba-43b4-40fd-8cce-4c39aea84d33
<commit_before>/* EINA - EFL data type library
* Copyright (C) 2008 Cedric Bail
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library;
* if not, see <http://www.gnu.org/licenses/>.
*/
#include "Eina.h"
EAPI int
eina_init(void)
{
int r;
r = eina_error_init();
r += eina_hash_init();
r += eina_stringshare_init();
r += eina_list_init();
r += eina_array_init();
return r;
}
EAPI int
eina_shutdown(void)
{
int r;
eina_array_shutdown();
eina_list_shutdown();
r = eina_stringshare_shutdown();
r += eina_hash_shutdown();
r += eina_error_shutdown();
return r;
}
<commit_after>/* EINA - EFL data type library
* Copyright (C) 2008 Cedric Bail
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library;
* if not, see <http://www.gnu.org/licenses/>.
*/
#include "eina_error.h"
#include "eina_hash.h"
#include "eina_stringshare.h"
#include "eina_list.h"
#include "eina_array.h"
EAPI int
eina_init(void)
{
int r;
r = eina_error_init();
r += eina_hash_init();
r += eina_stringshare_init();
r += eina_list_init();
r += eina_array_init();
return r;
}
EAPI int
eina_shutdown(void)
{
int r;
eina_array_shutdown();
eina_list_shutdown();
r = eina_stringshare_shutdown();
r += eina_hash_shutdown();
r += eina_error_shutdown();
return r;
}
|
<commit_msg>Allow POST requests for auth method so OpenID forms could use it that way.
<commit_before>from pyramid.view import view_config
from social.utils import module_member
from social.actions import do_auth, do_complete, do_disconnect
from social.apps.pyramid_app.utils import psa, login_required
@view_config(route_name='social.auth', request_method='GET')
@psa('social.complete')
def auth(request):
return do_auth(request.backend, redirect_name='next')
@view_config(route_name='social.complete', request_method=('GET', 'POST'))
@psa('social.complete')
def complete(request, *args, **kwargs):
do_login = module_member(request.backend.setting('LOGIN_FUNCTION'))
return do_complete(request.backend, do_login, request.user,
redirect_name='next', *args, **kwargs)
@view_config(route_name='social.disconnect', request_method=('POST',))
@view_config(route_name='social.disconnect_association',
request_method=('POST',))
@psa()
@login_required
def disconnect(request):
return do_disconnect(request.backend, request.user,
request.matchdict.get('association_id'),
redirect_name='next')
<commit_after>from pyramid.view import view_config
from social.utils import module_member
from social.actions import do_auth, do_complete, do_disconnect
from social.apps.pyramid_app.utils import psa, login_required
@view_config(route_name='social.auth', request_method=('GET', 'POST'))
@psa('social.complete')
def auth(request):
return do_auth(request.backend, redirect_name='next')
@view_config(route_name='social.complete', request_method=('GET', 'POST'))
@psa('social.complete')
def complete(request, *args, **kwargs):
do_login = module_member(request.backend.setting('LOGIN_FUNCTION'))
return do_complete(request.backend, do_login, request.user,
redirect_name='next', *args, **kwargs)
@view_config(route_name='social.disconnect', request_method=('POST',))
@view_config(route_name='social.disconnect_association',
request_method=('POST',))
@psa()
@login_required
def disconnect(request):
return do_disconnect(request.backend, request.user,
request.matchdict.get('association_id'),
redirect_name='next')
|
<commit_msg>Return unformatted list from get_language_list.
<commit_before>from django.conf import settings
from django.utils import translation
from django.utils.translation import ugettext as _
from six import text_type
from typing import Any
import os
import ujson
def with_language(string, language):
# type: (text_type, text_type) -> text_type
old_language = translation.get_language()
translation.activate(language)
result = _(string)
translation.activate(old_language)
return result
def get_language_list():
# type: () -> List[Dict[str, Any]]
path = os.path.join(settings.STATIC_ROOT, 'locale', 'language_options.json')
with open(path, 'r') as reader:
languages = ujson.load(reader)
lang_list = []
for lang_info in languages['languages']:
name = lang_info['name']
lang_info['name'] = with_language(name, lang_info['code'])
if 'percent_translated' not in lang_info:
lang_info['percent_translated'] = 'N/A'
lang_list.append(lang_info)
return sorted(lang_list, key=lambda i: i['name'])
def get_available_language_codes():
# type: () -> List[text_type]
language_list = get_language_list()
codes = [language['code'] for language in language_list]
return codes
<commit_after>from __future__ import absolute_import
from django.conf import settings
from django.utils import translation
from django.utils.translation import ugettext as _
from six import text_type
from typing import Any
import os
import ujson
def with_language(string, language):
# type: (text_type, text_type) -> text_type
old_language = translation.get_language()
translation.activate(language)
result = _(string)
translation.activate(old_language)
return result
def get_language_list():
# type: () -> List[Dict[str, Any]]
path = os.path.join(settings.STATIC_ROOT, 'locale', 'language_options.json')
with open(path, 'r') as reader:
languages = ujson.load(reader)
lang_list = []
for lang_info in languages['languages']:
name = lang_info['name']
lang_info['name'] = with_language(name, lang_info['code'])
lang_list.append(lang_info)
return sorted(lang_list, key=lambda i: i['name'])
def get_available_language_codes():
# type: () -> List[text_type]
language_list = get_language_list()
codes = [language['code'] for language in language_list]
return codes
|
<commit_msg>Support nosetests, handle magic names (and_, or_, etc)
<commit_before>
import json
from .util import make_dsl_object, unroll_definitions, unroll_struct
class MetaFilterQuery(type):
def __init__(cls, name, bases, d):
super(MetaFilterQuery, cls).__init__(name, bases, d)
unroll_definitions(cls._definitions)
def __getattr__(cls, key):
if key not in cls._definitions:
raise cls._exception(key)
return lambda *args, **kwargs: make_dsl_object(
cls, key, cls._definitions[key],
*args, **kwargs
)
class BaseFilterQuery(object):
_type = None
_struct = None
_dsl_type = None
def __init__(self, dsl_type, struct):
self._struct = struct
self._dsl_type = dsl_type
def dict(self):
return {
self._dsl_type: unroll_struct(self._struct)
}
def __str__(self):
return json.dumps(self.dict(), indent=4)
<commit_after>
import json
from .util import make_dsl_object, unroll_definitions, unroll_struct
class MetaFilterQuery(type):
def __init__(cls, name, bases, d):
super(MetaFilterQuery, cls).__init__(name, bases, d)
unroll_definitions(cls._definitions)
def __getattr__(cls, key):
if key == '__test__':
return None
if key not in cls._definitions:
raise cls._exception(key)
return lambda *args, **kwargs: make_dsl_object(
cls, key, cls._definitions[key],
*args, **kwargs
)
class BaseFilterQuery(object):
_type = None
_struct = None
_dsl_type = None
def __init__(self, dsl_type, struct):
self._struct = struct
self._dsl_type = dsl_type
def dict(self):
dsl_type = self._dsl_type[:1] if self._dsl_type.endswith('_') else self._dsl_type
return {
dsl_type: unroll_struct(self._struct)
}
def __str__(self):
return json.dumps(self.dict(), indent=4)
|
<commit_msg>Add Error propagation now that we use Throwable.
<commit_before>/**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.util;
public class Exceptions {
private Exceptions() {
}
public static RuntimeException propagate(Throwable t) {
if (t instanceof RuntimeException) {
throw (RuntimeException) t;
} else {
throw new RuntimeException(t);
}
}
}<commit_after>/**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.util;
public class Exceptions {
private Exceptions() {
}
public static RuntimeException propagate(Throwable t) {
/**
* The return type of RuntimeException is a trick for code to be like this:
*
* throw Exceptions.propagate(e);
*
* Even though nothing will return and throw via that 'throw', it allows the code to look like it
* so it's easy to read and understand that it will always result in a throw.
*/
if (t instanceof RuntimeException) {
throw (RuntimeException) t;
} else if (t instanceof Error) {
throw (Error) t;
} else {
throw new RuntimeException(t);
}
}
} |
<commit_msg>Add working transition metal highlight
<commit_before>import { Component, Input, Output, EventEmitter, OnChanges } from '@angular/core';
import { Element } from './element';
@Component({
moduleId: module.id,
selector: 'pt-element',
templateUrl: 'element.component.html',
styleUrls: ['element.component.css']
})
export class ElementComponent implements OnChanges {
@Input() elementData: Element;
// ^ exposes elementData property to parent component, listens for parent component to send data to child
@Output() elementHovered: EventEmitter<Object> = new EventEmitter<Object>();
// ^ exposes which element is being hovered on to the parent component, sends proper data to hoverReceived fxn
elementStyle: Object = {};
ngOnChanges(): void {
console.log('this ran');
if (this.elementData.highlight) {
this.elementStyle = {'background-color': 'pink'};
}
}
onHover(): void {
console.log(this.elementData);
this.elementHovered.emit(this.elementData);
}
// ^ sends data into emit channel to be picked up by parent component
}
<commit_after>import { Component, Input, Output, EventEmitter, DoCheck } from '@angular/core';
import { Element } from './element';
@Component({
moduleId: module.id,
selector: 'pt-element',
templateUrl: 'element.component.html',
styleUrls: ['element.component.css']
})
export class ElementComponent implements DoCheck {
@Input() elementData: Element;
// ^ exposes elementData property to parent component, listens for parent component to send data to child
@Output() elementHovered: EventEmitter<Object> = new EventEmitter<Object>();
// ^ exposes which element is being hovered on to the parent component, sends proper data to hoverReceived fxn
elementStyle: Object = {};
ngDoCheck() {
if (this.elementData.highlight) {
this.elementStyle = {'background-color': 'pink'};
} else {
this.elementStyle = {'background-color': 'transparent'};
}
}
onHover(): void {
this.elementHovered.emit(this.elementData);
}
// ^ sends data into emit channel to be picked up by parent component
}
|
<commit_msg>Fix after @rafaelcn did blame me crying about incorrect usage of cerr
<commit_before>/**
* C++ algorithm to get the sum of all prime numbers between a range of 2 and the number desired.
*/
#include <iostream>
#include <cstdlib>
bool is_prime(unsigned int number);
int main(int argc, char* argv[])
{
const char *num = "2000000";
if(argv[1])
num = argv[1];
else if (argv[1] == "--help")
std::cerr << "Usage: primes <number_of_primes>" << std::endl;
std::cerr << "You've choosen the sum of the first " << num
<< " prime numbers." << std::endl;
// iteration variables.
int i = 1;
int j = 0;
// the number to reach.
int number_to = atoi(num);
int sum = 0;
while(j < number_to)
{
if(is_prime(i))
{
std::cerr << i << std::endl;
sum += i;
j++;
}
i++;
}
std::cerr << "The sum of the numbers between 2 and " << number_to
<< " are: " << sum << std::endl;
std::cout << sum << std::endl;
}
bool is_prime(unsigned int number)
{
if (number <= 1)
return false;
unsigned int i;
for (i=2; i*i<=number; i++)
{
if (number % i == 0)
{
return false;
}
}
return true;
}<commit_after>/**
* C++ algorithm to get the sum of all prime numbers between a range of 2 and the number desired.
*/
#include <iostream>
#include <cstdlib>
#include <string.h>
bool is_prime(unsigned int number);
int main(int argc, char* argv[])
{
const char *num = "2000000";
if (strcmp(argv[1],"--help") == 0){
std::cerr << "Usage: primes <number_of_primes>" << std::endl;
exit(0);
}
else if(argv[1]) {
num = argv[1];
}
// iteration variables.
int i = 1;
int j = 0;
// the number to reach.
int number_to = atoi(num);
int sum = 0;
while(j < number_to)
{
if(is_prime(i))
{
sum += i;
j++;
}
i++;
}
std::cout << sum << std::endl;
}
bool is_prime(unsigned int number)
{
if (number <= 1)
return false;
unsigned int i;
for (i=2; i*i<=number; i++)
{
if (number % i == 0)
{
return false;
}
}
return true;
} |
<commit_msg>Fix: Use `enum` instead of `oneOf` in schema
<commit_before>import { Category } from 'hint/dist/src/lib/enums/category';
import { HintScope } from 'hint/dist/src/lib/enums/hintscope';
import { HintMetadata } from 'hint/dist/src/lib/types';
import * as Connections from './connections';
const meta: HintMetadata = {
docs: {
category: Category.performance,
description: `Performance budget checks if your site will load fast enough based on the size of your resources and a given connection speed`,
name: 'Performance budget'
},
id: 'performance-budget',
schema: [{
additionalProperties: false,
properties: {
connectionType: {
oneOf: [{ enum: Connections.ids }],
type: 'string'
},
loadTime: {
minimum: 1,
type: 'number'
}
},
type: 'object'
}],
scope: HintScope.site
};
export default meta;
<commit_after>import { Category } from 'hint/dist/src/lib/enums/category';
import { HintScope } from 'hint/dist/src/lib/enums/hintscope';
import { HintMetadata } from 'hint/dist/src/lib/types';
import * as Connections from './connections';
const meta: HintMetadata = {
docs: {
category: Category.performance,
description: `Performance budget checks if your site will load fast enough based on the size of your resources and a given connection speed`,
name: 'Performance budget'
},
id: 'performance-budget',
schema: [{
additionalProperties: false,
properties: {
connectionType: {
enum: Connections.ids,
type: 'string'
},
loadTime: {
minimum: 1,
type: 'number'
}
},
type: 'object'
}],
scope: HintScope.site
};
export default meta;
|
<commit_msg>shared/api: Add Type field to InstanceConsolePost
Signed-off-by: Free Ekanayaka <04111f73b2d444cf053b50d877d79556bf34f55a@canonical.com>
<commit_before>package api
// InstanceConsoleControl represents a message on the instance console "control" socket.
//
// API extension: instances
type InstanceConsoleControl struct {
Command string `json:"command" yaml:"command"`
Args map[string]string `json:"args" yaml:"args"`
}
// InstanceConsolePost represents a LXD instance console request.
//
// API extension: instances
type InstanceConsolePost struct {
Width int `json:"width" yaml:"width"`
Height int `json:"height" yaml:"height"`
}
<commit_after>package api
// InstanceConsoleControl represents a message on the instance console "control" socket.
//
// API extension: instances
type InstanceConsoleControl struct {
Command string `json:"command" yaml:"command"`
Args map[string]string `json:"args" yaml:"args"`
}
// InstanceConsolePost represents a LXD instance console request.
//
// API extension: instances
type InstanceConsolePost struct {
Width int `json:"width" yaml:"width"`
Height int `json:"height" yaml:"height"`
// API extension: console_vga_type
Type string `json:"type" yaml:"type"`
}
|
<commit_msg>Allow pulling in of cluster plugin using powershift-cli[cluster] as install target.
<commit_before>import sys
import os
from setuptools import setup
long_description = open('README.rst').read()
classifiers = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
]
setup_kwargs = dict(
name='powershift-cli',
version='1.1.0',
description='Pluggable command line client for OpenShift.',
long_description=long_description,
url='https://github.com/getwarped/powershift-cli',
author='Graham Dumpleton',
author_email='Graham.Dumpleton@gmail.com',
license='BSD',
classifiers=classifiers,
keywords='openshift kubernetes',
packages=['powershift', 'powershift.cli'],
package_dir={'powershift': 'src/powershift'},
package_data={'powershift.cli': ['completion-bash.sh']},
entry_points = {'console_scripts':['powershift = powershift.cli:main']},
install_requires=['click'],
)
setup(**setup_kwargs)
<commit_after>import sys
import os
from setuptools import setup
long_description = open('README.rst').read()
classifiers = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
]
setup_kwargs = dict(
name='powershift-cli',
version='1.1.0',
description='Pluggable command line client for OpenShift.',
long_description=long_description,
url='https://github.com/getwarped/powershift-cli',
author='Graham Dumpleton',
author_email='Graham.Dumpleton@gmail.com',
license='BSD',
classifiers=classifiers,
keywords='openshift kubernetes',
packages=['powershift', 'powershift.cli'],
package_dir={'powershift': 'src/powershift'},
package_data={'powershift.cli': ['completion-bash.sh']},
entry_points = {'console_scripts':['powershift = powershift.cli:main']},
install_requires=['click'],
extra_requires={'cluster': ['powershift-cluster>=1.1.0']},
)
setup(**setup_kwargs)
|
<commit_msg>Change module doctest creation to be more dynamic.
<commit_before>import unittest
import doctest
import sys
from optparse import OptionParser
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
from firmant import du
suite.addTest(doctest.DocTestSuite(du))
from firmant import entries
suite.addTest(doctest.DocTestSuite(entries))
from firmant import feeds
suite.addTest(doctest.DocTestSuite(feeds))
from firmant import i18n
suite.addTest(doctest.DocTestSuite(i18n))
from firmant import parser
suite.addTest(doctest.DocTestSuite(parser))
from firmant import tags
suite.addTest(doctest.DocTestSuite(tags))
from firmant import utils
suite.addTest(doctest.DocTestSuite(utils))
from firmant import writers
suite.addTest(doctest.DocTestSuite(writers))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
<commit_after>import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.entries',
'firmant.feeds',
'firmant.i18n',
'firmant.parser',
'firmant.tags',
'firmant.utils',
'firmant.writers']
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
|
<commit_msg>sh: Fix up cpu to node mapping in sysfs.
Currently cpu_to_node() is always 0 in the UP case, though
we do want to have the CPU association linked in under sysfs
even in the cases where we're only on a single CPU.
Fix this up, so we have the cpu0 link on all of the available
nodes that don't already have a CPU link of their own.
Signed-off-by: Paul Mundt <38b52dbb5f0b63d149982b6c5de788ec93a89032@linux-sh.org>
<commit_before>
static DEFINE_PER_CPU(struct cpu, cpu_devices);
static int __init topology_init(void)
{
int i, ret;
#ifdef CONFIG_NEED_MULTIPLE_NODES
for_each_online_node(i)
register_one_node(i);
#endif
for_each_present_cpu(i) {
ret = register_cpu(&per_cpu(cpu_devices, i), i);
if (unlikely(ret))
printk(KERN_WARNING "%s: register_cpu %d failed (%d)\n",
__FUNCTION__, i, ret);
}
return 0;
}
subsys_initcall(topology_init);
<commit_after>/*
* arch/sh/kernel/topology.c
*
* Copyright (C) 2007 Paul Mundt
*
* This file is subject to the terms and conditions of the GNU General Public
* License. See the file "COPYING" in the main directory of this archive
* for more details.
*/
#include <linux/cpu.h>
#include <linux/cpumask.h>
#include <linux/init.h>
#include <linux/percpu.h>
#include <linux/node.h>
#include <linux/nodemask.h>
static DEFINE_PER_CPU(struct cpu, cpu_devices);
static int __init topology_init(void)
{
int i, ret;
#ifdef CONFIG_NEED_MULTIPLE_NODES
for_each_online_node(i)
register_one_node(i);
#endif
for_each_present_cpu(i) {
ret = register_cpu(&per_cpu(cpu_devices, i), i);
if (unlikely(ret))
printk(KERN_WARNING "%s: register_cpu %d failed (%d)\n",
__FUNCTION__, i, ret);
}
#if defined(CONFIG_NUMA) && !defined(CONFIG_SMP)
/*
* In the UP case, make sure the CPU association is still
* registered under each node. Without this, sysfs fails
* to make the connection between nodes other than node0
* and cpu0.
*/
for_each_online_node(i)
if (i != numa_node_id())
register_cpu_under_node(raw_smp_processor_id(), i);
#endif
return 0;
}
subsys_initcall(topology_init);
|