content
stringlengths 128
6.74k
|
---|
<commit_msg>Change testSuites to be a list with default value
Signed-off-by: Orit Stone <530a3c863b274210fbf305df5a70e325b4dbcb5a@hp.com>
<commit_before>package org.openscore.lang.tools.build.tester.parse;
import java.io.Serializable;
import java.util.Map;
/**
* Created by stoneo on 3/15/2015.
**/
public class SlangTestCase {
private final String description;
private final String testSuits;
private final String systemPropertiesFile;
private final Map<String, Serializable> inputs;
private final Boolean throwsException;
private final String result;
public SlangTestCase(String description, String testSuits, String systemPropertiesFile,
Map<String, Serializable> inputs, Boolean throwsException, String result){
this.description = description;
this.testSuits = testSuits;
this.systemPropertiesFile = systemPropertiesFile;
this.inputs = inputs;
this.throwsException = throwsException;
this.result = result;
}
public String getDescription() {
return description;
}
public String getTestSuits() {
return testSuits;
}
public String getSystemPropertiesFile() {
return systemPropertiesFile;
}
public Map<String, Serializable> getInputs() {
return inputs;
}
public Boolean getThrowsException() {
return throwsException;
}
public String getResult() {
return result;
}
}
<commit_after>package org.openscore.lang.tools.build.tester.parse;
import org.apache.commons.collections4.CollectionUtils;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Created by stoneo on 3/15/2015.
**/
public class SlangTestCase {
private final String description;
private final List<String> testSuits;
private final String systemPropertiesFile;
private final Map<String, Serializable> inputs;
private final Boolean throwsException;
private final String result;
public static final String BASE_TEST_SUITE = "base";
public SlangTestCase(String description, List<String> testSuits, String systemPropertiesFile,
Map<String, Serializable> inputs, Boolean throwsException, String result){
this.description = description;
this.systemPropertiesFile = systemPropertiesFile;
if(CollectionUtils.isEmpty(testSuits)){
this.testSuits = new ArrayList<>();
this.testSuits.add(BASE_TEST_SUITE);
} else {
this.testSuits = testSuits;
}
this.inputs = inputs;
this.throwsException = throwsException;
this.result = result;
}
public String getDescription() {
return description;
}
public List<String> getTestSuits() {
return testSuits;
}
public String getSystemPropertiesFile() {
return systemPropertiesFile;
}
public Map<String, Serializable> getInputs() {
return inputs;
}
public Boolean getThrowsException() {
return throwsException;
}
public String getResult() {
return result;
}
}
|
<commit_msg>Disable powershell modules list
Add list_kbs function
<commit_before>'''
Microsoft Update files management via wusa.exe
:maintainer: Thomas Lemarchand
:platform: Windows
:depends: PowerShell
'''
# Import python libs
from __future__ import absolute_import
import logging
# Import salt libs
import salt.utils.platform
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'win_wusa'
def __virtual__():
'''
Load only on Windows
'''
if not salt.utils.platform.is_windows():
return False, 'Only available on Windows systems'
powershell_info = __salt__['cmd.shell_info'](shell='powershell', list_modules=True)
if not powershell_info['installed']:
return False, 'PowerShell not available'
return __virtualname__
def is_installed(kb):
get_hotfix_result = __salt__['cmd.powershell_all']('Get-HotFix -Id {0}'.format(kb), ignore_retcode=True)
return get_hotfix_result['retcode'] == 0
def install(path):
return __salt__['cmd.run_all']('wusa.exe {0} /quiet /norestart'.format(path), ignore_retcode=True)
def uninstall(kb):
return __salt__['cmd.run_all']('wusa.exe /uninstall /kb:{0} /quiet /norestart'.format(kb[2:]), ignore_retcode=True)
<commit_after>'''
Microsoft Update files management via wusa.exe
:maintainer: Thomas Lemarchand
:platform: Windows
:depends: PowerShell
'''
# Import python libs
from __future__ import absolute_import
import logging
# Import salt libs
import salt.utils.platform
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'win_wusa'
def __virtual__():
'''
Load only on Windows
'''
if not salt.utils.platform.is_windows():
return False, 'Only available on Windows systems'
powershell_info = __salt__['cmd.shell_info'](shell='powershell', list_modules=False)
if not powershell_info['installed']:
return False, 'PowerShell not available'
return __virtualname__
def is_installed(kb):
get_hotfix_result = __salt__['cmd.powershell_all']('Get-HotFix -Id {0}'.format(kb), ignore_retcode=True)
return get_hotfix_result['retcode'] == 0
def install(path):
return __salt__['cmd.run_all']('wusa.exe {0} /quiet /norestart'.format(path), ignore_retcode=True)
def uninstall(kb):
return __salt__['cmd.run_all']('wusa.exe /uninstall /kb:{0} /quiet /norestart'.format(kb[2:]), ignore_retcode=True)
def list_kbs():
return __salt__['cmd.powershell']('Get-HotFix')
|
<commit_msg>Add a default value fill
<commit_before>
import { registerMigration } from './migrationUtils';
import { Votes } from '../../lib/collections/votes';
import { Posts } from '../../lib/collections/posts';
import { Comments } from '../../lib/collections/comments';
registerMigration({
name: "afVoteMigration",
dateWritten: "2020-09-19",
idempotent: true,
action: async () => {
const afPosts = await Posts.find({af: true}, {}, { _id: 1}).fetch()
const afComments = await Comments.find({af: true}, {}, {_id: 1}).fetch()
console.log("Fetched all the votes and comments")
const afDocs = [...afPosts, ...afComments]
await Votes.rawCollection().bulkWrite(afDocs.map(({_id}) => ({
updateMany: {
filter: { documentId: _id },
update: {
$set: {
documentIsAf: true
}
}
}
})),
{ ordered: false });
}
});<commit_after>
import { fillDefaultValues, registerMigration } from './migrationUtils';
import { Votes } from '../../lib/collections/votes';
import { Posts } from '../../lib/collections/posts';
import { Comments } from '../../lib/collections/comments';
registerMigration({
name: "afVoteMigration",
dateWritten: "2020-09-19",
idempotent: true,
action: async () => {
await fillDefaultValues({
collection: Votes,
fieldName: "documentIsAf",
});
const afPosts = await Posts.find({af: true}, {}, { _id: 1}).fetch()
const afComments = await Comments.find({af: true}, {}, {_id: 1}).fetch()
console.log("Fetched all the votes and comments")
const afDocs = [...afPosts, ...afComments]
await Votes.rawCollection().bulkWrite(afDocs.map(({_id}) => ({
updateMany: {
filter: { documentId: _id },
update: {
$set: {
documentIsAf: true
}
}
}
})),
{ ordered: false });
}
}); |
<commit_msg>Put try/except around ASDF import in init file
<commit_before>
from . import connect
<commit_after>
try:
import asdf
from . import connect
except ImportError:
pass
|
<commit_msg>Make camera interaction more sensitive
(refs #12095)
<commit_before>
import vtk
from .. import utils
class KeyPressInteractorStyle(vtk.vtkInteractorStyleMultiTouchCamera):
"""
An interactor style for capturing key press events in VTK window.
"""
def __init__(self, parent=None, **kwargs):
self.AddObserver("KeyPressEvent", self.keyPress)
super(KeyPressInteractorStyle, self).__init__(parent, **kwargs)
def keyPress(self, obj, event): #pylint: disable=unused-argument
"""
Executes when a key is pressed.
Inputs:
obj, event: Required by VTK.
"""
key = obj.GetInteractor().GetKeySym()
if key == 'c':
print '\n'.join(utils.print_camera(self.GetCurrentRenderer().GetActiveCamera()))
<commit_after>
import vtk
from .. import utils
class KeyPressInteractorStyle(vtk.vtkInteractorStyleMultiTouchCamera):
"""
An interactor style for capturing key press events in VTK window.
"""
def __init__(self, parent=None, **kwargs):
self.AddObserver("KeyPressEvent", self.keyPress)
super(KeyPressInteractorStyle, self).__init__(parent, **kwargs)
self.SetMotionFactor(0.1*self.GetMotionFactor())
def keyPress(self, obj, event): #pylint: disable=unused-argument
"""
Executes when a key is pressed.
Inputs:
obj, event: Required by VTK.
"""
key = obj.GetInteractor().GetKeySym()
if key == 'c':
print '\n'.join(utils.print_camera(self.GetCurrentRenderer().GetActiveCamera()))
|
<commit_msg>Fix validation of news creation form
<commit_before>
import re
from wtforms import StringField, TextAreaField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ...util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ItemCreateForm(LocalizedForm):
slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')])
title = StringField('Titel', [InputRequired(), Length(max=80)])
body = TextAreaField('Text', [InputRequired(), Length(max=80)])
image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)])
class ItemUpdateForm(ItemCreateForm):
pass
<commit_after>
import re
from wtforms import StringField, TextAreaField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ...util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ItemCreateForm(LocalizedForm):
slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')])
title = StringField('Titel', [InputRequired(), Length(max=80)])
body = TextAreaField('Text', [InputRequired()])
image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)])
class ItemUpdateForm(ItemCreateForm):
pass
|
<commit_msg>Simplify using Java 8 putIfAbsent
<commit_before>package com.janosgyerik.utils.algorithm.unionfind;
import org.assertj.core.util.VisibleForTesting;
import java.util.HashMap;
import java.util.Map;
public class UnionFindImpl implements UnionFind {
private final Map<Integer, Integer> ids = new HashMap<>();
@Override
public void union(int p, int q) {
add(p);
add(q);
ids.put(root(p), root(q));
}
private void add(int p) {
if (!ids.containsKey(p)) {
ids.put(p, p);
}
}
private int root(int p) {
int root = p;
while (root != ids.get(root)) {
int parent = ids.get(ids.get(root));
ids.put(root, parent);
root = parent;
}
return root;
}
@Override
public boolean connected(int p, int q) {
return root(p) == root(q);
}
@Override
public int find(int p) {
return ids.get(p);
}
@VisibleForTesting
Map<Integer, Integer> getIds() {
return ids;
}
}
<commit_after>package com.janosgyerik.utils.algorithm.unionfind;
import org.assertj.core.util.VisibleForTesting;
import java.util.HashMap;
import java.util.Map;
public class UnionFindImpl implements UnionFind {
private final Map<Integer, Integer> ids = new HashMap<>();
@Override
public void union(int p, int q) {
add(p);
add(q);
ids.put(root(p), root(q));
}
private void add(int p) {
ids.putIfAbsent(p, p);
}
private int root(int p) {
int root = p;
while (root != ids.get(root)) {
int parent = ids.get(ids.get(root));
ids.put(root, parent);
root = parent;
}
return root;
}
@Override
public boolean connected(int p, int q) {
return root(p) == root(q);
}
@Override
public int find(int p) {
return ids.get(p);
}
@VisibleForTesting
Map<Integer, Integer> getIds() {
return ids;
}
}
|
<commit_msg>Add option to prune all stalled workers instead of just one at the time.
Signed-off-by: Alessandro Degano <55cacd5b504daf5c7cafa0b6b8e4521b86a643c4@pix4d.com>
<commit_before>package commands
import (
"fmt"
"github.com/concourse/concourse/fly/rc"
)
type PruneWorkerCommand struct {
Worker string `short:"w" long:"worker" required:"true" description:"Worker to prune"`
}
func (command *PruneWorkerCommand) Execute(args []string) error {
workerName := command.Worker
target, err := rc.LoadTarget(Fly.Target, Fly.Verbose)
if err != nil {
return err
}
err = target.Validate()
if err != nil {
return err
}
err = target.Client().PruneWorker(workerName)
if err != nil {
return err
}
fmt.Printf("pruned '%s'\n", workerName)
return nil
}
<commit_after>package commands
import (
"fmt"
"github.com/concourse/concourse/fly/commands/internal/displayhelpers"
"github.com/concourse/concourse/fly/rc"
)
type PruneWorkerCommand struct {
Worker string `short:"w" long:"worker" description:"Worker to prune"`
AllStalled bool `short:"a" long:"all-stalled" description:"Prune all stalled workers"`
}
func (command *PruneWorkerCommand) Execute(args []string) error {
if command.Worker == "" && !command.AllStalled {
displayhelpers.Failf("Either a worker name or --all-stalled are required")
}
var workersNames []string
if command.Worker != "" {
workersNames = append(workersNames, command.Worker)
}
target, err := rc.LoadTarget(Fly.Target, Fly.Verbose)
if err != nil {
return err
}
err = target.Validate()
if err != nil {
return err
}
if command.AllStalled {
workers, err := target.Client().ListWorkers()
if err != nil {
return err
}
for _, worker := range workers {
if worker.State == "stalled" {
workersNames = append(workersNames, worker.Name)
}
}
if workersNames == nil {
displayhelpers.Failf("No stalled worker found.")
}
}
for _, workerName := range workersNames {
err = target.Client().PruneWorker(workerName)
if err != nil {
return err
}
fmt.Printf("pruned '%s'\n", workerName)
}
return nil
}
|
<commit_msg>Correct version for deploy to GAE
<commit_before>package main
import (
"fmt"
"net/http"
)
func helloWorld(w http.ResponseWriter, r *http.Request) {
fmt.Fprintf(w, "Hello World!")
}
func startPage(w http.ResponseWriter, r *http.Request) {
fmt.Fprint(w, "Hello, test server started on 8080 port.\n - /helloworld - show title page\n - /showinfo - show information about this thing")
}
func showInfo(w http.ResponseWriter, r *http.Request) {
fmt.Fprint(w, "Inforamtion page for test project.\nLanguage - Go\nPlatform - Google Application Engine")
}
func init() {
http.HandleFunc("/", startPage)
http.HandleFunc("/helloworld", helloWorld)
http.HandleFunc("/showinfo", showInfo)
//Wrong code for App Enine - server cant understand what it need to show
//http.ListenAndServe(":80", nil)
}
/*
func main() {
fmt.Println("Hello, test server started on 80 port.\n - /helloworld - show title page\n - /showinfo - show information about this thing")
http.HandleFunc("/", startPage)
http.HandleFunc("/helloworld", helloWorld)
http.HandleFunc("/showinfo", showInfo)
http.ListenAndServe(":80", nil)
}
*/
//goapp serve app.yaml
//goapp deploy -application golangnode0 -version 0
<commit_after>//Command to run test version:
//goapp serve app.yaml
//Command to deploy/update application:
//goapp deploy -application golangnode0 -version 0
package main
import (
"fmt"
"net/http"
)
func helloWorld(w http.ResponseWriter, r *http.Request) {
fmt.Fprintf(w, "Hello World!")
}
func startPage(w http.ResponseWriter, r *http.Request) {
fmt.Fprint(w, "Hello, test server started on 8080 port.\n - /helloworld - show title page\n - /showinfo - show information about this thing")
}
func showInfo(w http.ResponseWriter, r *http.Request) {
fmt.Fprint(w, "Inforamtion page for test project.\nLanguage - Go\nPlatform - Google Application Engine")
}
func init() {
http.HandleFunc("/", startPage)
http.HandleFunc("/helloworld", helloWorld)
http.HandleFunc("/showinfo", showInfo)
//Wrong code for App Enine - server cant understand what it need to show
//http.ListenAndServe(":80", nil)
}
/*
func main() {
fmt.Println("Hello, test server started on 80 port.\n - /helloworld - show title page\n - /showinfo - show information about this thing")
http.HandleFunc("/", startPage)
http.HandleFunc("/helloworld", helloWorld)
http.HandleFunc("/showinfo", showInfo)
http.ListenAndServe(":80", nil)
}
*/
|
<commit_msg>Clear command registry BEFORE each test.
<commit_before>import unittest
from nymms import registry
from nymms.resources import Command, MonitoringGroup
from weakref import WeakValueDictionary
class TestRegistry(unittest.TestCase):
def tearDown(self):
# Ensure we have a fresh registry after every test
Command.registry.clear()
def test_empty_registry(self):
self.assertEqual(Command.registry, WeakValueDictionary())
def test_register_object(self):
# First test it's empty
self.assertEqual(Command.registry, WeakValueDictionary())
# Add a command
command = Command('test_command', '/bin/true')
# verify that there is only a single command in the registry
self.assertEqual(len(Command.registry), 1)
# Verify that the registered command is the same as command
self.assertIs(Command.registry[command.name], command)
def test_duplicate_register(self):
# add a command
Command('test_command', '/bin/true')
with self.assertRaises(registry.DuplicateEntryError):
Command('test_command', '/bin/true')
def test_invalid_resource_register(self):
with self.assertRaises(TypeError):
Command.registry['test'] = MonitoringGroup('test_group')
<commit_after>import unittest
from nymms import registry
from nymms.resources import Command, MonitoringGroup
from weakref import WeakValueDictionary
class TestRegistry(unittest.TestCase):
def setUp(self):
# Ensure we have a fresh registry before every test
Command.registry.clear()
def test_empty_registry(self):
self.assertEqual(Command.registry, WeakValueDictionary())
def test_register_object(self):
# First test it's empty
self.assertEqual(Command.registry, WeakValueDictionary())
# Add a command
command = Command('test_command', '/bin/true')
# verify that there is only a single command in the registry
self.assertEqual(len(Command.registry), 1)
# Verify that the registered command is the same as command
self.assertIs(Command.registry[command.name], command)
def test_duplicate_register(self):
# add a command
print Command.registry
Command('test_command', '/bin/true')
with self.assertRaises(registry.DuplicateEntryError):
Command('test_command', '/bin/true')
def test_invalid_resource_register(self):
with self.assertRaises(TypeError):
Command.registry['test'] = MonitoringGroup('test_group')
|
<commit_msg>Add an error logger to the 500 handler
<commit_before>
from flask import jsonify
from ..exceptions import ValidationError
from . import api
@api.errorhandler(ValidationError)
def bad_request(e):
"""Handler for ValidationError exceptions."""
response = jsonify({'status': 400, 'error': 'bad request',
'message': e.args[0]})
response.status_code = 400
return response
@api.app_errorhandler(404)
def not_found(e):
"""App-wide handler for HTTP 404 errors."""
response = jsonify({'status': 404, 'error': 'not found',
'message': 'invalid resource URI'})
response.status_code = 404
return response
@api.errorhandler(405)
def method_not_supported(e):
"""Handler for HTTP 405 exceptions."""
response = jsonify({'status': 405, 'error': 'method not supported',
'message': 'the method is not supported'})
response.status_code = 405
return response
@api.app_errorhandler(500)
def internal_server_error(e):
"""App-wide handler for HTTP 500 errors."""
response = jsonify({'status': 500, 'error': 'internal server error',
'message': e.args[0]})
response.status_code = 500
return response
<commit_after>
from flask import jsonify
import structlog
from ..exceptions import ValidationError
from . import api
@api.errorhandler(ValidationError)
def bad_request(e):
"""Handler for ValidationError exceptions."""
response = jsonify({'status': 400, 'error': 'bad request',
'message': e.args[0]})
response.status_code = 400
return response
@api.app_errorhandler(404)
def not_found(e):
"""App-wide handler for HTTP 404 errors."""
response = jsonify({'status': 404, 'error': 'not found',
'message': 'invalid resource URI'})
response.status_code = 404
return response
@api.errorhandler(405)
def method_not_supported(e):
"""Handler for HTTP 405 exceptions."""
response = jsonify({'status': 405, 'error': 'method not supported',
'message': 'the method is not supported'})
response.status_code = 405
return response
@api.app_errorhandler(500)
def internal_server_error(e):
"""App-wide handler for HTTP 500 errors."""
logger = structlog.get_logger()
logger.error(status=500, message=e.args[0])
response = jsonify({'status': 500, 'error': 'internal server error',
'message': e.args[0]})
response.status_code = 500
return response
|
<commit_msg>Fix Unknown argument 'compress' in diagnostic for JDK11
<commit_before>package org.radargun.config;
import java.util.List;
import static org.radargun.config.VmArgUtils.ensureArg;
import static org.radargun.config.VmArgUtils.replace;
public class FlightRecorder implements VmArg {
@Property(doc = "Start flight recording for the benchmark.", optional = false)
private boolean enabled = false;
@Property(doc = "File for the recording.")
private String filename;
@Property(doc = "Settings file with recording configuration.")
private String settings;
@Override
public void setArgs(List<String> args) {
if (!enabled)
return;
StringBuilder recordingParams = new StringBuilder("=compress=false,delay=10s,duration=24h");
if (filename != null)
recordingParams.append(",filename=").append(filename);
if (settings != null)
recordingParams.append(",settings=").append(settings);
ensureArg(args, "-XX:+UnlockCommercialFeatures");
ensureArg(args, "-XX:+FlightRecorder");
replace(args, "-XX:StartFlightRecording", recordingParams.toString());
}
public boolean isEnabled() {
return enabled;
}
public String getFilename() {
return filename;
}
public String getSettings() {
return settings;
}
}<commit_after>package org.radargun.config;
import java.util.List;
import static org.radargun.config.VmArgUtils.ensureArg;
import static org.radargun.config.VmArgUtils.replace;
public class FlightRecorder implements VmArg {
@Property(doc = "Start flight recording for the benchmark.", optional = false)
private boolean enabled = false;
@Property(doc = "File for the recording.")
private String filename;
@Property(doc = "Settings file with recording configuration.")
private String settings;
@Override
public void setArgs(List<String> args) {
if (!enabled)
return;
StringBuilder recordingParams = new StringBuilder("=delay=10s,duration=24h");
if (filename != null)
recordingParams.append(",filename=").append(filename);
if (settings != null)
recordingParams.append(",settings=").append(settings);
ensureArg(args, "-XX:+UnlockCommercialFeatures");
ensureArg(args, "-XX:+FlightRecorder");
replace(args, "-XX:StartFlightRecording", recordingParams.toString());
}
public boolean isEnabled() {
return enabled;
}
public String getFilename() {
return filename;
}
public String getSettings() {
return settings;
}
} |
<commit_msg>Fix up rust example for 1.0
<commit_before>pub fn max_array(x: &mut[int, ..65536], y: &[int, ..65536]) {
for i in range(0u, 65536) {
if y[i] > x[i] {
x[i] = y[i];
}
}
}
<commit_after>pub fn max_array(x: &mut[i32; 65536], y: &[i32; 65536]) {
for i in (0..65536) {
if y[i] > x[i] {
x[i] = y[i];
}
}
}
|
<commit_msg>feat(stackdriver): Add new metric config fields.
<commit_before>import { ICanaryMetricSetQueryConfig } from 'kayenta/domain';
export interface IStackdriverCanaryMetricSetQueryConfig extends ICanaryMetricSetQueryConfig {
metricType: string;
groupByFields: string[];
}
<commit_after>import { ICanaryMetricSetQueryConfig } from 'kayenta/domain';
export interface IStackdriverCanaryMetricSetQueryConfig extends ICanaryMetricSetQueryConfig {
metricType: string;
resourceType: string;
crossSeriesReducer: string;
perSeriesAligner: string;
groupByFields: string[];
}
|
<commit_msg>Add view_locations for plugins in core
<commit_before>
VIEW_LOCATIONS = [
"openprocurement.auctions.flash.views",
"openprocurement.auctions.core.plugins",
]
<commit_after>
VIEW_LOCATIONS = [
"openprocurement.auctions.flash.views",
]
|
<commit_msg>Support the Isles of Scilly
<commit_before>PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
}
<commit_after>PARENT_TO_CHILD_AREAS = {
'DIS': ['DIW',],
'MTD': ['MTW',],
'CTY': ['CED',],
'LBO': ['LBW',],
'CED': ['CPC',],
'UTA': ['UTW', 'UTE'],
'NIA': ['NIE',],
'COI': ['COP',],
}
CHILD_TO_PARENT_AREAS = {
'DIW': 'DIS',
'MTW': 'MTD',
'UTW': 'UTA',
'UTE': 'UTA',
'CED': 'CTY',
'LBW': 'LBO',
'CPC': 'CED',
'COP': 'COI',
}
|
<commit_msg>Update new member example to not be broken.
Took forever but better late than never.
<commit_before>import discord
class MyClient(discord.Client):
async def on_ready(self):
print('Logged in as')
print(self.user.name)
print(self.user.id)
print('------')
async def on_member_join(self, member):
guild = member.guild
await guild.default_channel.send('Welcome {0.mention} to {1.name}!'.format(member, guild))
client = MyClient()
client.run('token')
<commit_after>import discord
class MyClient(discord.Client):
async def on_ready(self):
print('Logged in as')
print(self.user.name)
print(self.user.id)
print('------')
async def on_member_join(self, member):
guild = member.guild
if guild.system_channel is not None:
to_send = 'Welcome {0.mention} to {1.name}!'.format(member, guild)
await guild.system_channel.send(to_send)
client = MyClient()
client.run('token')
|
<commit_msg>Enable Odoo blog for UCW
<commit_before>{
'name': "Bestja: UCW",
'summary': "Installation configuration for UCW",
'description': "Installation configuration for Uniwersyteckie Centrum Wolontariatu",
'author': "Laboratorium EE",
'website': "http://www.laboratorium.ee",
'version': '0.1',
'category': 'Specific Industry Applications',
'depends': [
'base',
'bestja_base',
'bestja_volunteer',
'bestja_volunteer_notes',
'bestja_account_deletion',
'bestja_organization',
'bestja_project',
'bestja_offers',
'bestja_offers_moderation',
'bestja_offers_invitations',
'bestja_offers_categorization',
'bestja_files',
'bestja_application_moderation',
'bestja_ucw_permissions',
],
'data': [
'data.xml',
],
'application': True,
}
<commit_after>{
'name': "Bestja: UCW",
'summary': "Installation configuration for UCW",
'description': "Installation configuration for Uniwersyteckie Centrum Wolontariatu",
'author': "Laboratorium EE",
'website': "http://www.laboratorium.ee",
'version': '0.1',
'category': 'Specific Industry Applications',
'depends': [
'base',
'website_blog',
'bestja_base',
'bestja_volunteer',
'bestja_volunteer_notes',
'bestja_account_deletion',
'bestja_organization',
'bestja_project',
'bestja_offers',
'bestja_offers_moderation',
'bestja_offers_invitations',
'bestja_offers_categorization',
'bestja_files',
'bestja_application_moderation',
'bestja_ucw_permissions',
],
'data': [
'data.xml',
],
'application': True,
}
|
<commit_msg>Allow POST on settings too
<commit_before>package disallow
import (
"net/http"
"github.com/rancher/apiserver/pkg/types"
"github.com/rancher/steve/pkg/attributes"
schema2 "github.com/rancher/steve/pkg/schema"
steve "github.com/rancher/steve/pkg/server"
)
var (
allowPut = map[string]bool{
"features": true,
"settings": true,
}
)
func Register(server *steve.Server) {
server.SchemaFactory.AddTemplate(schema2.Template{
Customize: func(schema *types.APISchema) {
gr := attributes.GR(schema)
if gr.Group == "management.cattle.io" || gr.Group == "project.cattle.io" {
attributes.AddDisallowMethods(schema,
http.MethodPost,
http.MethodPatch,
http.MethodDelete)
if !allowPut[gr.Resource] {
attributes.AddDisallowMethods(schema, http.MethodPut)
}
}
},
})
}
<commit_after>package disallow
import (
"net/http"
"github.com/rancher/apiserver/pkg/types"
"github.com/rancher/steve/pkg/attributes"
schema2 "github.com/rancher/steve/pkg/schema"
steve "github.com/rancher/steve/pkg/server"
)
var (
allowPost = map[string]bool{
"settings": true,
}
allowPut = map[string]bool{
"features": true,
"settings": true,
}
)
func Register(server *steve.Server) {
server.SchemaFactory.AddTemplate(schema2.Template{
Customize: func(schema *types.APISchema) {
gr := attributes.GR(schema)
if gr.Group == "management.cattle.io" || gr.Group == "project.cattle.io" {
attributes.AddDisallowMethods(schema,
http.MethodPatch,
http.MethodDelete)
if !allowPut[gr.Resource] {
attributes.AddDisallowMethods(schema, http.MethodPut)
}
if !allowPut[gr.Resource] {
attributes.AddDisallowMethods(schema, http.MethodPost)
}
}
},
})
}
|
<commit_msg>Remove random execution of requests.
<commit_before>import httpserver.WebServer;
import requests.CiphersuiteRequest;
import requests.Ecrypt2LevelRequest;
import requests.OpenPortRequest;
import java.util.Random;
public class Client {
public static void main(String[] args) {
WebServer ws = new WebServer();
Random r = new Random();
for (int i = 0; i < 10; i++) {
switch (r.nextInt(3)) {
case 0: new Thread(new CiphersuiteRequest()).start();
break;
case 1: new Thread(new Ecrypt2LevelRequest()).start();
break;
case 2: new Thread(new OpenPortRequest()).start();
break;
}
}
}
}
<commit_after>import httpserver.WebServer;
import requests.CiphersuiteRequest;
import requests.Ecrypt2LevelRequest;
import requests.OpenPortRequest;
public class Client {
public static void main(String[] args) {
WebServer ws = new WebServer();
/*
Random r = new Random();
for (int i = 0; i < 10; i++) {
switch (r.nextInt(3)) {
case 0: new Thread(new CiphersuiteRequest()).start();
break;
case 1: new Thread(new Ecrypt2LevelRequest()).start();
break;
case 2: new Thread(new OpenPortRequest()).start();
break;
}
}
*/
for (int i = 0; i < 334; i++ ) {
new Thread(new CiphersuiteRequest()).start();
}
for (int i = 0; i < 333; i++) {
new Thread(new Ecrypt2LevelRequest()).start();
}
for(int i = 0; i < 333; i++ ) {
new Thread(new OpenPortRequest()).start();
}
}
}
|
<commit_msg>Remove duplication of precondition checks.
<commit_before>package uk.ac.ebi.quickgo.annotation.validation.service;
import com.google.common.base.Preconditions;
import java.util.Objects;
/**
* Utility methods related to Database Cross Reference ids.
*
* @author Tony Wardell
* Date: 10/11/2016
* Time: 13:41
* Created with IntelliJ IDEA.
*/
public class DbCrossReferenceId {
private static final String DELIMITER = ":";
public static String db(final String idWithDb) {
Preconditions.checkArgument(Objects.nonNull(idWithDb), "The id should not be null");
Preconditions.checkArgument(idWithDb.contains(DELIMITER), "The id should contain the delimiter %s", DELIMITER);
return idWithDb.substring(0, idWithDb.indexOf(":")).trim();
}
public static String id(final String idWithDb){
Preconditions.checkArgument(Objects.nonNull(idWithDb), "The id should not be null");
Preconditions.checkArgument(idWithDb.contains(DELIMITER), "The id should contain the delimiter %s", DELIMITER);
return idWithDb.substring(idWithDb.indexOf(":") + 1).trim();
}
}
<commit_after>package uk.ac.ebi.quickgo.annotation.validation.service;
import com.google.common.base.Preconditions;
import java.util.Objects;
/**
* Utility methods related to Database Cross Reference ids.
*
* @author Tony Wardell
* Date: 10/11/2016
* Time: 13:41
* Created with IntelliJ IDEA.
*/
public class DbCrossReferenceId {
private static final String DELIMITER = ":";
public static String db(final String idWithDb) {
checkPreconditions(idWithDb);
return idWithDb.substring(0, idWithDb.indexOf(":")).trim();
}
public static String id(final String idWithDb){
checkPreconditions(idWithDb);
return idWithDb.substring(idWithDb.indexOf(":") + 1).trim();
}
private static void checkPreconditions(String idWithDb) {
Preconditions.checkArgument(Objects.nonNull(idWithDb), "The id should not be null");
Preconditions.checkArgument(idWithDb.contains(DELIMITER), "The id should contain the delimiter %s", DELIMITER);
}
}
|
<commit_msg>Make known_assertions.txt cross-machine and hopefully also cross-platform.
<commit_before>
def amiss(logPrefix):
global ignoreList
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
# map from (assertion message) to (true, if seen in the current file)
seenInCurrentFile = {}
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
seenInCurrentFile[line] = True
if not (ignore(line)):
print line
foundSomething = True
currentFile.close()
return foundSomething
def getIgnores():
    """Load non-blank, non-comment lines from known_assertions.txt into ignoreList."""
    global ignoreList
    # Use a context manager so the file handle is always closed (the original
    # leaked the open handle).
    with open("known_assertions.txt", "r") as ignoreFile:
        for line in ignoreFile:
            line = line.strip()
            if len(line) > 0 and not line.startswith("#"):
                ignoreList.append(line)
def ignore(assertion):
    """Return True when *assertion* contains any substring from ignoreList."""
    global ignoreList
    # str.find(x) != -1 is equivalent to the `in` operator for substrings.
    return any(pattern in assertion for pattern in ignoreList)
# Module-level ignore list, populated once at import time by getIgnores().
ignoreList = []
getIgnores()
# print "detect_assertions is ready (ignoring %d assertions)" % len(ignoreList)
<commit_after>
import platform
def amiss(logPrefix):
global ignoreList
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
# map from (assertion message) to (true, if seen in the current file)
seenInCurrentFile = {}
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
seenInCurrentFile[line] = True
if not (ignore(line)):
print line
foundSomething = True
currentFile.close()
return foundSomething
def getIgnores():
    """Parse known_assertions.txt into the two module-level ignore lists.

    Lines containing ", file " (assertion format) or ": file " (abort format)
    are split into a (message, path) pair so the path half can be normalised
    per platform; all other non-blank, non-comment lines are kept as plain
    substrings in simpleIgnoreList.
    """
    global simpleIgnoreList
    # Use a context manager so the file handle is always closed (the original
    # leaked the open handle).
    with open("known_assertions.txt", "r") as ignoreFile:
        for line in ignoreFile:
            line = line.strip()
            if len(line) > 0 and not line.startswith("#"):
                mpi = line.find(", file ")  # assertions use this format
                if mpi == -1:
                    mpi = line.find(": file ")  # aborts use this format
                if mpi == -1:
                    simpleIgnoreList.append(line)
                else:
                    # Keep ", file "/": file " (7 chars) with the message half;
                    # the path half gets platform-local slashes applied.
                    twoPartIgnoreList.append((line[:mpi+7], localSlashes(line[mpi+7:])))
def localSlashes(s):
    """Translate backslashes to forward slashes on Windows so entries from
    known_assertions.txt compare equal across platforms; a no-op elsewhere."""
    on_windows = platform.system() in ('Windows', 'Microsoft')
    return s.replace("\\", "/") if on_windows else s
def ignore(assertion):
    # True when the assertion text matches the ignore lists: either it
    # contains one of the simple substrings, or it contains both halves of a
    # (message, localised-path) pair from twoPartIgnoreList.
    global simpleIgnoreList
    for ig in simpleIgnoreList:
        if assertion.find(ig) != -1:
            return True
    for (part1, part2) in twoPartIgnoreList:
        if assertion.find(part1) != -1 and assertion.find(part2) != -1:
            return True
    return False
# Module-level ignore lists, populated once at import time by getIgnores().
simpleIgnoreList = []   # plain substrings (no filename part)
twoPartIgnoreList = []  # (message, localised-path) pairs
getIgnores()
#print "detect_assertions is ready (ignoring %d strings without filenames and %d strings with filenames)" % (len(simpleIgnoreList), len(twoPartIgnoreList))
|
<commit_msg>Test with K5 graph for fleury and hierholzer added.
<commit_before>package a4_p01_JS_MJ_test;
/** Placeholder for the exercise-4 test suite; test cases to be added. */
public class a4Test {
}
<commit_after>package a4_p01_JS_MJ_test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.List;
import org.jgrapht.Graph;
import org.jgrapht.WeightedGraph;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.graph.DefaultWeightedEdge;
import org.jgrapht.graph.Pseudograph;
import org.junit.BeforeClass;
import org.junit.Test;
import a1_p01_JS_MJ.GraphParser;
import a1_p01_JS_MJ.GraphvizAdapter;
import a1_p01_JS_MJ.SearchResult;
import a2_p01_JS_MJ.AttributedNode;
import a2_p01_JS_MJ.GraphGenerator;
import a3_p01_JS_MJ.Kruskal;
import a4_p01_JS_MJ.Fleury;
import a4_p01_JS_MJ.Hierholzer;
/**
 * Tests for the Fleury and Hierholzer Euler-tour algorithms using the K5 graph.
 */
public class a4Test {

    static Graph graphk5;
    static WeightedGraph<AttributedNode<String>, DefaultWeightedEdge> genGraph;

    /** Parses the K5 sample graph and generates a random weighted graph. */
    @BeforeClass
    public static void init() {
        try {
            graphk5 = GraphParser.parseGraphFile("GraphK5.gka");
        } catch(Exception ex) {
            ex.printStackTrace();
        }
        genGraph = GraphGenerator.generateAttributedWeightedGraph(150, 30);
    }

    @Test
    public void fleury_test(){
        Fleury<String> fleury = new Fleury<String>();
        SearchResult result = fleury.search((Pseudograph<String, DefaultEdge>) graphk5);
        printTestResult("Test 1: Fleury - Graph K5", result);
        System.out.println("Test Green.");
        System.out.println("");
    }

    @Test
    public void hierholzer_test(){
        Hierholzer<String> hierholzer = new Hierholzer<String>();
        SearchResult result = hierholzer.algorithm((Pseudograph<String, DefaultEdge>) graphk5);
        // Fix: label said "Fleury" (copy-paste); this is the Hierholzer run.
        printTestResult("Test 1: Hierholzer - Graph K5", result);
        System.out.println("Test Green.");
        System.out.println("");
    }

    /** Dumps the edge path, its length and the access counter of a search result. */
    public void printTestResult(String testname, SearchResult result){
        System.out.println(testname);
        System.out.print("Path: ");
        for(DefaultEdge s : (ArrayList<DefaultEdge>) result.getPath()){
            System.out.print(s+", ");
        }
        System.out.println("");
        // Fix: the original "... " + result.getPathLength()+1 concatenated the
        // literal "1" onto the string instead of adding 1 to the length.
        System.out.println("Path Length " + (result.getPathLength() + 1));
        System.out.println("Accssess Counter: " + result.getAccsessCounter());
    }
}
|
<commit_msg>Fix import errors on non-RPi platforms
<commit_before>
# Prefer the real RPi.GPIO bindings; fall back to mock implementations when
# running on non-RPi hardware (RuntimeError) or without the library installed
# (ImportError).
try:
    import RPi.GPIO
    from .raspberry_pi import *
except RuntimeError:
    print(
        '----------------------------------------------------------------------------')
    print(
        ' WARNING: RPi.GPIO can only be run on the RPi. Falling back to mock objects.')
    print(
        '----------------------------------------------------------------------------')
    # Fix: this branch previously fell through without importing any GPIO
    # implementation, leaving the package unusable on non-RPi platforms.
    from .gpio_mock import *
except ImportError:
    print('-------------------------------------------------------------------')
    print(' WARNING: RPi.GPIO library not found. Falling back to mock objects.')
    print('-------------------------------------------------------------------')
    from .gpio_mock import *
<commit_after>
# Prefer the real RPi.GPIO bindings.  Fall back to the mock implementation
# both when the library is present but not running on RPi hardware
# (RuntimeError) and when the library is missing entirely (ImportError).
try:
    import RPi.GPIO
    from .raspberry_pi import *
except RuntimeError:
    print(
        '----------------------------------------------------------------------------')
    print(
        ' WARNING: RPi.GPIO can only be run on the RPi. Falling back to mock objects.')
    print(
        '----------------------------------------------------------------------------')
    from .gpio_mock import *
except ImportError:
    print('-------------------------------------------------------------------')
    print(' WARNING: RPi.GPIO library not found. Falling back to mock objects.')
    print('-------------------------------------------------------------------')
    from .gpio_mock import *
|
<commit_msg>Fix build problem on W2008 in /xp /x86 /release mode
Change-Id: I1d51e4f9b1e0ae9f6df28a22bf13a5ab6bf55ab8
Reviewed-on: http://review.couchbase.org/28476
Reviewed-by: Trond Norbye <60edd2ef23891a753f231b0c6f161dc634079a93@gmail.com>
Tested-by: Trond Norbye <60edd2ef23891a753f231b0c6f161dc634079a93@gmail.com>
<commit_before>/* -*- Mode: C; tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- */
#ifndef CONFIG_H
#define CONFIG_H 1

#ifdef WIN32
/* Windows: winsock2.h must be included before windows.h. */
#include <winsock2.h>
#include <ws2tcpip.h>
#include <windows.h>
#else
/* C compilers without C99 fixed-width types get manual 64-bit typedefs. */
#if !defined(__cplusplus) && !defined(linux) && !defined(__GNUC__)
typedef unsigned long long uint64_t;
typedef long long int64_t;
#endif

#ifndef _POSIX_PTHREAD_SEMANTICS
#define _POSIX_PTHREAD_SEMANTICS
#endif

/* POSIX networking, process and memory-mapping headers. */
#include <sys/wait.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netdb.h>
#include <unistd.h>
#include <sys/stat.h>
#include <sys/un.h>
#include <sys/resource.h>
#include <sys/uio.h>
#include <netinet/tcp.h>
#include <arpa/inet.h>
#include <sys/mman.h>
#include <pwd.h>
#include <sys/time.h>
#include <signal.h>
#endif

/* Common section */
#include <stdlib.h>
/* NOTE(review): <inttypes.h> is included unconditionally here; toolchains
   without it (older MSVC) cannot build this branch — consider moving it
   under the POSIX side. */
#include <inttypes.h>
#include <sys/types.h>
#include <platform/platform.h>

#endif
<commit_after>/* -*- Mode: C; tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- */
#ifndef CONFIG_H
#define CONFIG_H 1

#ifdef WIN32
/* Windows: winsock2.h must be included before windows.h. */
#include <winsock2.h>
#include <ws2tcpip.h>
#include <windows.h>
#else
/* C compilers without C99 fixed-width types get manual 64-bit typedefs. */
#if !defined(__cplusplus) && !defined(linux) && !defined(__GNUC__)
typedef unsigned long long uint64_t;
typedef long long int64_t;
#endif

#ifndef _POSIX_PTHREAD_SEMANTICS
#define _POSIX_PTHREAD_SEMANTICS
#endif

/* POSIX networking, process and memory-mapping headers. */
#include <sys/wait.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netdb.h>
#include <unistd.h>
#include <sys/stat.h>
#include <sys/un.h>
#include <sys/resource.h>
#include <sys/uio.h>
#include <netinet/tcp.h>
#include <arpa/inet.h>
#include <sys/mman.h>
#include <pwd.h>
#include <sys/time.h>
#include <signal.h>
/* <inttypes.h> stays inside the POSIX branch: some Windows toolchains
   (e.g. older MSVC) do not ship it. */
#include <inttypes.h>
#endif

/* Common section */
#include <stdlib.h>
#include <sys/types.h>
#include <platform/platform.h>

#endif
|
<commit_msg>Make a buildbot using a buggy gcc happy
When specializing a template in a namespace, it has to be in a namespace
block, else gcc will get confused. Hopefully this fixes the issue.
https://gcc.gnu.org/bugzilla/show_bug.cgi?id=56480
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@365030 91177308-0d34-0410-b5e6-96231b3b80d8
<commit_before>//===- Dominators.cpp - Implementation of dominators tree for Clang CFG ---===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "clang/Analysis/Analyses/Dominators.h"
using namespace clang;
template <>
void clang::CFGDominatorTreeImpl</*IsPostDom=*/true>::anchor() {}
template <>
void clang::CFGDominatorTreeImpl</*IsPostDom=*/false>::anchor() {}
<commit_after>//===- Dominators.cpp - Implementation of dominators tree for Clang CFG ---===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "clang/Analysis/Analyses/Dominators.h"
namespace clang {
template <>
void CFGDominatorTreeImpl</*IsPostDom=*/true>::anchor() {}
template <>
void CFGDominatorTreeImpl</*IsPostDom=*/false>::anchor() {}
} // end of namespace clang
|
<commit_msg>Add some methods to the container for migration<commit_before>package info.u_team.u_team_test.init;
import info.u_team.u_team_core.containertype.UContainerType;
import info.u_team.u_team_core.util.registry.BaseRegistryUtil;
import info.u_team.u_team_test.TestMod;
import info.u_team.u_team_test.container.*;
import net.minecraft.inventory.container.ContainerType;
import net.minecraftforge.event.RegistryEvent.Register;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber.Bus;
/** Declares and registers the test mod's container (GUI) types. */
@EventBusSubscriber(modid = TestMod.MODID, bus = Bus.MOD)
public class TestContainers {

    public static final ContainerType<BasicTileEntityContainer> BASIC = new UContainerType<>("basic", BasicTileEntityContainer::new);
    public static final ContainerType<BasicEnergyCreatorContainer> BASIC_ENERGY_CREATOR = new UContainerType<>("energy_creator", BasicEnergyCreatorContainer::new);
    public static final ContainerType<BasicFluidInventoryContainer> BASIC_FLUID_INVENTORY = new UContainerType<>("fluid_inventory", BasicFluidInventoryContainer::new);

    /** Registers every generically-collected ContainerType of this mod. */
    @SubscribeEvent
    public static void register(Register<ContainerType<?>> event) {
        BaseRegistryUtil.getAllGenericRegistryEntriesAndApplyNames(TestMod.MODID, ContainerType.class).forEach(event.getRegistry()::register);
    }
}
<commit_after>package info.u_team.u_team_test.init;
import info.u_team.u_team_core.containertype.UContainerType;
import info.u_team.u_team_test.TestMod;
import info.u_team.u_team_test.container.*;
import net.minecraft.inventory.container.ContainerType;
import net.minecraftforge.eventbus.api.IEventBus;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber.Bus;
import net.minecraftforge.registries.*;
/** Declares the test mod's container (GUI) types via a DeferredRegister. */
@EventBusSubscriber(modid = TestMod.MODID, bus = Bus.MOD)
public class TestContainers {

    // Deferred register for this mod's container types.
    public static final DeferredRegister<ContainerType<?>> CONTAINER_TYPES = DeferredRegister.create(ForgeRegistries.CONTAINERS, TestMod.MODID);

    public static final ContainerType<BasicTileEntityContainer> BASIC = new UContainerType<>("basic", BasicTileEntityContainer::new);
    public static final ContainerType<BasicEnergyCreatorContainer> BASIC_ENERGY_CREATOR = new UContainerType<>("energy_creator", BasicEnergyCreatorContainer::new);
    public static final ContainerType<BasicFluidInventoryContainer> BASIC_FLUID_INVENTORY = new UContainerType<>("fluid_inventory", BasicFluidInventoryContainer::new);

    /** Hooks the deferred register into the given mod event bus. */
    public static void register(IEventBus bus) {
        CONTAINER_TYPES.register(bus);
    }
}
|
<commit_msg>Set final version of stream scenario
<commit_before>package io.scalecube.services.benchmarks.service;
import io.scalecube.services.api.ServiceMessage;
import java.util.concurrent.Callable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
/** Benchmark service endpoints used for measuring request/stream throughput. */
public class BenchmarkServiceImpl implements BenchmarkService {

    /** Fire-and-forget call: completes immediately with no payload. */
    @Override
    public Mono<Void> requestVoid(ServiceMessage request) {
        return Mono.empty();
    }

    /** Echoes the request back, stamping timestamps lazily per subscription. */
    @Override
    public Mono<ServiceMessage> requestOne(ServiceMessage message) {
        Callable<ServiceMessage> callable =
            () -> {
                // Same timestamp for both headers: no work happens in between.
                long value = System.currentTimeMillis();
                return ServiceMessage.from(message)
                    .header(SERVICE_RECV_TIME, value)
                    .header(SERVICE_SEND_TIME, value)
                    .build();
            };
        return Mono.fromCallable(callable);
    }

    /** Repeats the stamped message forever; drops on downstream backpressure. */
    @Override
    public Flux<ServiceMessage> infiniteStream(ServiceMessage message) {
        Callable<ServiceMessage> callable =
            () ->
                ServiceMessage.from(message)
                    .header(SERVICE_SEND_TIME, System.currentTimeMillis())
                    .build();
        return Mono.fromCallable(callable)
            // Dedicated single-threaded scheduler for the emission loop.
            .subscribeOn(Schedulers.newSingle("infiniteStream"))
            .repeat()
            .onBackpressureDrop();
    }
}
<commit_after>package io.scalecube.services.benchmarks.service;
import io.scalecube.services.api.ServiceMessage;
import java.util.concurrent.Callable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
/** Benchmark service endpoints used for measuring request/stream throughput. */
public class BenchmarkServiceImpl implements BenchmarkService {

    /** Fire-and-forget call: completes immediately with no payload. */
    @Override
    public Mono<Void> requestVoid(ServiceMessage request) {
        return Mono.empty();
    }

    /** Echoes the request back, stamping timestamps lazily per subscription. */
    @Override
    public Mono<ServiceMessage> requestOne(ServiceMessage message) {
        Callable<ServiceMessage> callable =
            () -> {
                // Same timestamp for both headers: no work happens in between.
                long value = System.currentTimeMillis();
                return ServiceMessage.from(message)
                    .header(SERVICE_RECV_TIME, value)
                    .header(SERVICE_SEND_TIME, value)
                    .build();
            };
        return Mono.fromCallable(callable);
    }

    /** Repeats the stamped message forever; drops on downstream backpressure. */
    @Override
    public Flux<ServiceMessage> infiniteStream(ServiceMessage message) {
        Callable<ServiceMessage> callable =
            () ->
                ServiceMessage.from(message)
                    .header(SERVICE_SEND_TIME, System.currentTimeMillis())
                    .build();
        return Mono.fromCallable(callable)
            // Shared parallel scheduler instead of a dedicated thread.
            .subscribeOn(Schedulers.parallel())
            .repeat()
            .onBackpressureDrop();
    }
}
|
<commit_msg>Tweak raw text parameter name
<commit_before>from __future__ import unicode_literals
import os
try:
from io import StringIO
except ImportError: # pragma: no cover
from StringIO import StringIO
from rock.exceptions import ConfigError
# Shell command used to execute accumulated scripts; defaults to bash.
ROCK_SHELL = (os.environ.get('ROCK_SHELL') or '/bin/bash -c').split()
# Insert argv[0] (the shell's basename) so execl receives a proper arg list.
ROCK_SHELL.insert(1, os.path.basename(ROCK_SHELL[0]))

def isexecutable(path):
    # True when path is a regular file with the execute bit set.
    return os.path.isfile(path) and os.access(path, os.X_OK)

# Python 2/3 compatibility: isstr() tests against basestring where it exists.
try:
    basestring
    def isstr(s):
        return isinstance(s, basestring)
except NameError:  # pragma: no cover
    def isstr(s):
        return isinstance(s, str)

def raw(value):
    # Double every backslash so the value survives shell interpretation.
    return value.replace('\\', '\\\\')

class Shell(object):
    """Buffers shell script text; run() replaces the current process with
    ROCK_SHELL executing the buffered script (os.execl does not return)."""
    def __init__(self):
        self.stdin = StringIO()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Execute the buffered script when the with-block exits.
        self.run()

    def run(self):
        # Validate ROCK_SHELL before exec'ing; execl never returns on success.
        if not isexecutable(ROCK_SHELL[0]):
            raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL)
        os.execl(*(ROCK_SHELL + [self.stdin.getvalue()]))

    def write(self, text):
        # Append one line of script to the pending buffer.
        self.stdin.write(text + '\n')
<commit_after>from __future__ import unicode_literals
import os
try:
from io import StringIO
except ImportError: # pragma: no cover
from StringIO import StringIO
from rock.exceptions import ConfigError
# Shell command used to execute accumulated scripts; defaults to bash.
ROCK_SHELL = (os.environ.get('ROCK_SHELL') or '/bin/bash -c').split()
# Insert argv[0] (the shell's basename) so execl receives a proper arg list.
ROCK_SHELL.insert(1, os.path.basename(ROCK_SHELL[0]))
def isexecutable(path):
    """Return True when *path* is a regular file carrying the execute bit."""
    if not os.path.isfile(path):
        return False
    return os.access(path, os.X_OK)
# Python 2/3 compatibility: isstr() tests against basestring on Python 2
# (where the bare name lookup succeeds) and against str on Python 3.
try:
    basestring
    def isstr(s):
        return isinstance(s, basestring)
except NameError:  # pragma: no cover
    def isstr(s):
        return isinstance(s, str)
def raw(text):
    """Double every backslash in *text* so it survives one level of shell
    interpretation; all other characters pass through unchanged."""
    escaped = text.replace('\\', '\\\\')
    return escaped
class Shell(object):
    """Buffers shell script text; run() replaces the current process with
    ROCK_SHELL executing the buffered script (os.execl does not return)."""
    def __init__(self):
        self.stdin = StringIO()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Execute the buffered script when the with-block exits.
        self.run()

    def run(self):
        # Validate ROCK_SHELL before exec'ing; execl never returns on success.
        if not isexecutable(ROCK_SHELL[0]):
            raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL)
        os.execl(*(ROCK_SHELL + [self.stdin.getvalue()]))

    def write(self, text):
        # Append one line of script to the pending buffer.
        self.stdin.write(text + '\n')
|
<commit_msg>Add @ComponentScan to discover GraphQLController<commit_before>package com.github.timtebeek.graphql.jpa;
import javax.persistence.EntityManager;
import org.crygier.graphql.GraphQLExecutor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/** Auto-configuration wiring a GraphQL executor onto the JPA entity manager. */
@Configuration
@SuppressWarnings("static-method")
public class GraphqlJpaAutoConfiguration {

    /**
     * Provides a default GraphQLExecutor bound to the JPA EntityManager,
     * unless the application already defines one.
     */
    @Bean
    @ConditionalOnMissingBean(GraphQLExecutor.class)
    public GraphQLExecutor graphQLExecutor(final EntityManager entityManager) {
        return new GraphQLExecutor(entityManager);
    }
}
<commit_after>package com.github.timtebeek.graphql.jpa;
import javax.persistence.EntityManager;
import org.crygier.graphql.GraphQLExecutor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
/** Auto-configuration wiring a GraphQL executor onto the JPA entity manager. */
@Configuration
// Scan this package so components such as GraphQLController are discovered.
@ComponentScan
@SuppressWarnings("static-method")
public class GraphqlJpaAutoConfiguration {

    /**
     * Provides a default GraphQLExecutor bound to the JPA EntityManager,
     * unless the application already defines one.
     */
    @Bean
    @ConditionalOnMissingBean(GraphQLExecutor.class)
    public GraphQLExecutor graphQLExecutor(final EntityManager entityManager) {
        return new GraphQLExecutor(entityManager);
    }
}
|
<commit_msg>Add README as long description.
<commit_before>import sys
from setuptools import setup, find_packages

# Import the package itself so version and description are defined in exactly
# one place (peavy/__init__.py).
peavy = __import__('peavy')

setup(
    author = 'Fairview Computing LLC',
    author_email = 'john@fairviewcomputing.com',
    classifiers = [
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: System :: Logging',
    ],
    description = peavy.__doc__,
    download_url='http://github.com/fairview/django-peavy/downloads',
    install_requires = [
        'Django>=1.3',
        'South>=0.7.2'
    ],
    license = "MIT License",
    name = 'django-peavy',
    packages = find_packages(),
    package_data = {
        'peavy': [
            'README.rst',
            'LICENSE.txt',
            'templates/*/*.html',
            'static/*/*/*',
        ],
    },
    url = 'http://github.com/fairview/django-peavy',
    version = peavy.get_version(),
    zip_safe = True,
)
<commit_after>import sys
from setuptools import setup, find_packages

# Import the package itself so version and description are defined in exactly
# one place (peavy/__init__.py).
peavy = __import__('peavy')

# Use the README as the long description shown on PyPI.
# NOTE(review): open() without an explicit encoding uses the platform default
# (consider encoding='utf-8'); also `file` shadows the Python 2 builtin.
with open('README.rst') as file:
    long_description = file.read()

setup(
    author = 'Fairview Computing LLC',
    author_email = 'john@fairviewcomputing.com',
    classifiers = [
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: System :: Logging',
    ],
    description = peavy.__doc__,
    long_description=long_description,
    download_url='http://github.com/fairview/django-peavy/downloads',
    install_requires = [
        'Django>=1.3',
        'South>=0.7.2'
    ],
    license = "MIT License",
    name = 'django-peavy',
    packages = find_packages(),
    package_data = {
        'peavy': [
            'README.rst',
            'LICENSE.txt',
            'templates/*/*.html',
            'static/*/*/*',
        ],
    },
    url = 'http://github.com/fairview/django-peavy',
    version = peavy.get_version(),
    zip_safe = True,
)
|
<commit_msg>Fix for PR2881: fix a small leak exposed by valgrind, using a ManagedStatic.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@57984 91177308-0d34-0410-b5e6-96231b3b80d8
<commit_before>//===-- PluginLoader.cpp - Implement -load command line option ------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the -load <plugin> command line option handler.
//
//===----------------------------------------------------------------------===//
#define DONT_GET_PLUGIN_LOADER_OPTION
#include "llvm/Support/PluginLoader.h"
#include "llvm/Support/Streams.h"
#include "llvm/System/DynamicLibrary.h"
#include <ostream>
#include <vector>
using namespace llvm;
// Lazily-allocated list of plugin filenames that were loaded successfully.
// NOTE(review): this pointer is intentionally never freed (leaks at exit).
static std::vector<std::string> *Plugins;

// -load handler: load the plugin permanently and remember its filename.
// A load failure is reported to stderr and the option is ignored.
void PluginLoader::operator=(const std::string &Filename) {
  if (!Plugins)
    Plugins = new std::vector<std::string>();
  std::string Error;
  if (sys::DynamicLibrary::LoadLibraryPermanently(Filename.c_str(), &Error)) {
    cerr << "Error opening '" << Filename << "': " << Error
         << "\n -load request ignored.\n";
  } else {
    Plugins->push_back(Filename);
  }
}

// Number of plugins successfully loaded so far (0 before any -load).
unsigned PluginLoader::getNumPlugins() {
  return Plugins ? Plugins->size() : 0;
}

// Filename of plugin #num; num must be < getNumPlugins().
std::string &PluginLoader::getPlugin(unsigned num) {
  assert(Plugins && num < Plugins->size() && "Asking for an out of bounds plugin");
  return (*Plugins)[num];
}
<commit_after>//===-- PluginLoader.cpp - Implement -load command line option ------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the -load <plugin> command line option handler.
//
//===----------------------------------------------------------------------===//
#define DONT_GET_PLUGIN_LOADER_OPTION
#include "llvm/Support/ManagedStatic.h"
#include "llvm/Support/PluginLoader.h"
#include "llvm/Support/Streams.h"
#include "llvm/System/DynamicLibrary.h"
#include <ostream>
#include <vector>
using namespace llvm;
// Plugin filenames loaded so far.  ManagedStatic so the vector is created on
// first use and released by llvm_shutdown() (no leak at exit).
static ManagedStatic<std::vector<std::string> > Plugins;

// -load handler: load the plugin permanently and remember its filename.
// A load failure is reported to stderr and the option is ignored.
void PluginLoader::operator=(const std::string &Filename) {
  std::string Error;
  if (sys::DynamicLibrary::LoadLibraryPermanently(Filename.c_str(), &Error)) {
    cerr << "Error opening '" << Filename << "': " << Error
         << "\n -load request ignored.\n";
  } else {
    Plugins->push_back(Filename);
  }
}

// Number of plugins loaded; checks isConstructed() to avoid instantiating the
// vector just to answer zero.
unsigned PluginLoader::getNumPlugins() {
  return Plugins.isConstructed() ? Plugins->size() : 0;
}

// Filename of plugin #num; num must be < getNumPlugins().
std::string &PluginLoader::getPlugin(unsigned num) {
  assert(Plugins.isConstructed() && num < Plugins->size() &&
         "Asking for an out of bounds plugin");
  return (*Plugins)[num];
}
|
<commit_msg>Add NewStringReader-factory to test package
<commit_before>package test
import (
"bytes"
"encoding/json"
"io"
"net/http"
"github.com/labstack/echo"
)
// testForm is a simple fixture used as a request/response payload in tests.
type testForm struct {
	Name string
	Desc string
}

// Context builds an echo.Context for the given request/response pair and,
// when r is non-nil, stores it under the "Resource" key.
func Context(req *http.Request, res http.ResponseWriter, r interface{}) (c *echo.Context) {
	c = echo.NewContext(req, echo.NewResponse(res), echo.New())
	if r != nil {
		c.Set("Resource", r)
	}
	return
}

// NewJsonReader marshals form to JSON and returns a reader over the bytes.
// Marshal errors are deliberately ignored (test-only helper).
func NewJsonReader(form interface{}) io.Reader {
	jsForm, _ := json.Marshal(form)
	return bytes.NewReader(jsForm)
}

// Form returns a fresh, empty testForm.
func Form() *testForm {
	return &testForm{}
}
<commit_after>package test
import (
"bytes"
"encoding/json"
"io"
"net/http"
"strings"
"github.com/labstack/echo"
)
// testForm is a simple fixture used as a request/response payload in tests.
type testForm struct {
	Name string
	Desc string
}

// Context builds an echo.Context for the given request/response pair and,
// when r is non-nil, stores it under the "Resource" key.
func Context(req *http.Request, res http.ResponseWriter, r interface{}) (c *echo.Context) {
	c = echo.NewContext(req, echo.NewResponse(res), echo.New())
	if r != nil {
		c.Set("Resource", r)
	}
	return
}
func NewJsonReader(form interface{}) io.Reader {
jsForm, _ := json.Marshal(form)
return bytes.NewReader(jsForm)
}
// NewStringReader wraps s in an io.Reader (shorthand for strings.NewReader).
func NewStringReader(s string) io.Reader {
	return strings.NewReader(s)
}

// Form returns a fresh, empty testForm.
func Form() *testForm {
	return &testForm{}
}
|
<commit_msg>Add test for mixed line separators
<commit_before>package org.commonmark.test;
import org.junit.Test;
/**
 * Rendering tests for unusual input: NUL characters, CR/LF line endings and
 * surrogate pairs.
 */
public class SpecialInputTest extends RenderingTestCase {

    @Test
    public void nullCharacterShouldBeReplaced() {
        // U+0000 is rendered as the replacement character U+FFFD.
        assertRendering("foo\0bar", "<p>foo\uFFFDbar</p>\n");
    }

    @Test
    public void nullCharacterEntityShouldBeReplaced() {
        assertRendering("foo&#0;bar", "<p>foo\uFFFDbar</p>\n");
    }

    @Test
    public void crLfAsLineSeparatorShouldBeParsed() {
        assertRendering("foo\r\nbar", "<p>foo\nbar</p>\n");
    }

    @Test
    public void crLfAtEndShouldBeParsed() {
        assertRendering("foo\r\n", "<p>foo</p>\n");
    }

    @Test
    public void surrogatePair() {
        assertRendering("surrogate pair: \uD834\uDD1E", "<p>surrogate pair: \uD834\uDD1E</p>\n");
    }

    @Test
    public void surrogatePairInLinkDestination() {
        assertRendering("[title](\uD834\uDD1E)", "<p><a href=\"\uD834\uDD1E\">title</a></p>\n");
    }
}
<commit_after>package org.commonmark.test;
import org.junit.Test;
/**
 * Rendering tests for unusual input: NUL characters, mixed line endings and
 * surrogate pairs.
 */
public class SpecialInputTest extends RenderingTestCase {

    @Test
    public void nullCharacterShouldBeReplaced() {
        // U+0000 is rendered as the replacement character U+FFFD.
        assertRendering("foo\0bar", "<p>foo\uFFFDbar</p>\n");
    }

    @Test
    public void nullCharacterEntityShouldBeReplaced() {
        assertRendering("foo&#0;bar", "<p>foo\uFFFDbar</p>\n");
    }

    @Test
    public void crLfAsLineSeparatorShouldBeParsed() {
        assertRendering("foo\r\nbar", "<p>foo\nbar</p>\n");
    }

    @Test
    public void crLfAtEndShouldBeParsed() {
        assertRendering("foo\r\n", "<p>foo</p>\n");
    }

    @Test
    public void mixedLineSeparators() {
        // LF, CR and CRLF separators within one document all split list items.
        assertRendering("- a\n- b\r- c\r\n- d", "<ul>\n<li>a</li>\n<li>b</li>\n<li>c</li>\n<li>d</li>\n</ul>\n");
    }

    @Test
    public void surrogatePair() {
        assertRendering("surrogate pair: \uD834\uDD1E", "<p>surrogate pair: \uD834\uDD1E</p>\n");
    }

    @Test
    public void surrogatePairInLinkDestination() {
        assertRendering("[title](\uD834\uDD1E)", "<p><a href=\"\uD834\uDD1E\">title</a></p>\n");
    }
}
|
<commit_msg>[editor] Switch to axios for the format SQL ajax request
<commit_before>// Licensed to Cloudera, Inc. under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. Cloudera, Inc. licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { simplePost } from 'api/apiUtilsV2';
import { FORMAT_SQL_API } from 'api/urls';
/**
 * Formats SQL statements via the notebook format API.
 * Returns the formatted statements, or the original input when the API gives
 * no result or (with silenceErrors set) when the request throws.
 */
export const formatSql = async (options: {
  statements: string;
  silenceErrors?: boolean;
}): Promise<string> => {
  try {
    const response = await simplePost<
      {
        formatted_statements: string;
        status: number;
      },
      { statements: string }
    >(FORMAT_SQL_API, options, {
      silenceErrors: !!options.silenceErrors,
      ignoreSuccessErrors: true
    });
    return (response && response.formatted_statements) || options.statements;
  } catch (err) {
    if (!options.silenceErrors) {
      throw err;
    }
  }
  // Graceful fallback: return the input untouched.
  return options.statements;
};
<commit_after>// Licensed to Cloudera, Inc. under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. Cloudera, Inc. licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import axios from 'axios';
// Shape of the /notebook/api/format response.
type FormatSqlApiResponse = {
  formatted_statements?: string;
  status: number;
};

const FORMAT_SQL_API_URL = '/notebook/api/format';

/**
 * Formats SQL statements via the notebook format endpoint.
 * Returns the formatted statements, or the original input when the API
 * reports failure or (with silenceErrors set) when the request throws.
 */
export const formatSql = async (options: {
  statements: string;
  silenceErrors?: boolean;
}): Promise<string> => {
  try {
    // The endpoint expects form-encoded data, not JSON.
    const params = new URLSearchParams();
    params.append('statements', options.statements);
    const response = await axios.post<FormatSqlApiResponse>(FORMAT_SQL_API_URL, params);
    if (response.data.status !== -1 && response.data.formatted_statements) {
      return response.data.formatted_statements;
    }
  } catch (err) {
    if (!options.silenceErrors) {
      throw err;
    }
  }
  // Graceful fallback: return the input untouched.
  return options.statements;
};
|
<commit_msg>Fix HBM2DLL with hibernate 5.2+
see https://stackoverflow.com/questions/44924930<commit_before>package org.ligoj.bootstrap.core.dao;
import org.hibernate.dialect.MySQL55Dialect;
/**
 * MySQL 5.5 dialect that appends "DEFAULT CHARSET=utf8" to CREATE TABLE
 * statements so generated tables are UTF-8 encoded.
 */
public class MySQL5InnoDBUtf8Dialect extends MySQL55Dialect {

    @Override
    public String getTableTypeString() {
        return super.getTableTypeString() + " DEFAULT CHARSET=utf8";
    }
}
<commit_after>package org.ligoj.bootstrap.core.dao;
import org.hibernate.dialect.MySQL55Dialect;
import org.hibernate.engine.jdbc.env.spi.NameQualifierSupport;
/**
 * MySQL 5.5 dialect that enforces UTF-8 table encoding and disables name
 * qualification.
 */
public class MySQL5InnoDBUtf8Dialect extends MySQL55Dialect {

    /** Appends "DEFAULT CHARSET=utf8" so generated tables are UTF-8 encoded. */
    @Override
    public String getTableTypeString() {
        return super.getTableTypeString() + " DEFAULT CHARSET=utf8";
    }

    /**
     * Disables catalog/schema qualification of names; works around HBM2DDL
     * issues with Hibernate 5.2+.
     */
    @Override
    public NameQualifierSupport getNameQualifierSupport() {
        return NameQualifierSupport.NONE;
    }
}
|
<commit_msg>Remove useless dependancy to java.util.Enumeration
<commit_before>package com.alexrnl.commons.database;
import java.util.Enumeration;
/**
 * Interface for the entity columns.<br />
 * Intended to be implemented by the enum whose constants represent the
 * columns of the entity in the database.
 * @author Alex
 */
public interface EntityColumn {

    /**
     * Return the name of the column field, in the Java code.<br />
     * This will later be used when dynamically accessing properties.
     * @return the name of the property in the Java code.
     */
    String getFieldName ();
}
<commit_after>package com.alexrnl.commons.database;
/**
 * Interface for the entity columns.<br />
 * Intended to be implemented by the enum whose constants represent the
 * columns of the entity in the database.
 * @author Alex
 */
public interface EntityColumn {

    /**
     * Return the name of the column field, in the Java code.<br />
     * This will later be used when dynamically accessing properties.
     * @return the name of the property in the Java code.
     */
    String getFieldName ();
}
|
<commit_msg>Add comments for types and functions to be published
<commit_before>package acgen
import (
"fmt"
"io"
"sync"
)
// Command describes a command-line program and the flags it accepts.
type Command struct {
	Name  string
	Flags []*Flag
}

// Flag holds the short/long option names, argument placeholder and help
// text of a single command-line flag.
type Flag struct {
	Short       []string // short options
	Long        []string // long options
	Arg         string   // argument's name
	Description string   // help message
}

// Generator writes a shell-completion script for c to w.
type Generator func(w io.Writer, c *Command) error

var (
	generatorsMu sync.Mutex // guards generators
	generators   = make(map[string]Generator)
)

// RegisterGenerator makes a completion generator available under name.
// It panics when the same name is registered twice.
func RegisterGenerator(name string, g Generator) {
	generatorsMu.Lock()
	defer generatorsMu.Unlock()
	if _, dup := generators[name]; dup {
		panic("RegisterGenerator called twice for generator " + name)
	}
	generators[name] = g
}

// LookGenerator returns the generator registered under name, or an error
// when no such generator exists.
func LookGenerator(name string) (g Generator, err error) {
	generatorsMu.Lock()
	defer generatorsMu.Unlock()
	if _, ok := generators[name]; !ok {
		return nil, fmt.Errorf("%s: is not supported", name)
	}
	return generators[name], nil
}
<commit_after>package acgen
import (
"fmt"
"io"
"sync"
)
// A Command represents a command which has flags.
type Command struct {
	Name  string
	Flags []*Flag
}

// A Flag represents the information of a single command-line flag.
type Flag struct {
	Short       []string // short options
	Long        []string // long options
	Arg         string   // argument's name
	Description string   // help message
}

// A Generator writes a completion script for command c to w.
type Generator func(w io.Writer, c *Command) error

var (
	generatorsMu sync.Mutex // guards generators
	generators   = make(map[string]Generator)
)

// RegisterGenerator makes a completion generator available
// by the provided name.  It panics when the same name is registered twice.
func RegisterGenerator(name string, g Generator) {
	generatorsMu.Lock()
	defer generatorsMu.Unlock()
	if _, dup := generators[name]; dup {
		panic("RegisterGenerator called twice for generator " + name)
	}
	generators[name] = g
}

// LookGenerator returns a completion generator specified by its name,
// or an error when no generator is registered under that name.
func LookGenerator(name string) (g Generator, err error) {
	generatorsMu.Lock()
	defer generatorsMu.Unlock()
	if _, ok := generators[name]; !ok {
		return nil, fmt.Errorf("%s: is not supported", name)
	}
	return generators[name], nil
}
|
<commit_msg>Add more null handling cycle tag tests
<commit_before>package com.hubspot.jinjava.lib.tag;
import static org.assertj.core.api.Assertions.assertThat;
import com.hubspot.jinjava.BaseInterpretingTest;
import org.junit.Test;
public class CycleTagTest extends BaseInterpretingTest {
@Test
public void itDefaultsNullToImage() {
String template = "{% for item in [0,1] %}{% cycle {{item}} %}{% endfor %}";
assertThat(interpreter.render(template)).isEqualTo("{{item}}{{item}}");
}
}
<commit_after>package com.hubspot.jinjava.lib.tag;
import static org.assertj.core.api.Assertions.assertThat;
import com.hubspot.jinjava.BaseInterpretingTest;
import org.junit.Test;
public class CycleTagTest extends BaseInterpretingTest {
@Test
public void itDefaultsNullToImage() {
String template = "{% for item in [0,1] %}{% cycle {{item}} %}{% endfor %}";
assertThat(interpreter.render(template)).isEqualTo("{{item}}{{item}}");
}
@Test
public void itDefaultsMultipleNullToImage() {
String template = "{% for item in [0,1] %}{% cycle {{foo}},{{bar}} %}{% endfor %}";
assertThat(interpreter.render(template)).isEqualTo("{{foo}}{{bar}}");
}
@Test
public void itDefaultsNullToImageUsingAs() {
String template =
"{% for item in [0,1] %}{% cycle {{item}} as var %}{% cycle var %}{% endfor %}";
assertThat(interpreter.render(template)).isEqualTo("{{item}}{{item}}");
}
@Test
public void itDefaultsMultipleNullToImageUsingAs() {
String template =
"{% for item in [0,1] %}{% cycle {{foo}},{{bar}} as var %}{% cycle var %}{% endfor %}";
assertThat(interpreter.render(template)).isEqualTo("{{foo}}{{bar}}");
}
}
|
<commit_msg>CommandTable: Store an array of entries rather than a map.
<commit_before>import { Command } from "./command";
export class CommandTable {
private name: string;
private inherit: CommandTable[] = [];
private commands: Map<string, Command> = new Map();
constructor (name: string, inherit: CommandTable[]) {
this.name = name;
this.inherit = inherit;
}
}
<commit_after>import { Command } from "./command";
export class CommandTableEntry {
name: string;
command: Command;
}
export class CommandTable {
private name: string;
private inherit: CommandTable[] = [];
private commands: CommandTableEntry[] = [];
constructor (name: string, inherit: CommandTable[]) {
this.name = name;
this.inherit = inherit;
}
}
|
<commit_msg>Complete unfinished code committed by mistake.
<commit_before>
def test_filter_strings(wish):
accept_names = wish
names = ['has MARK', 'does not have']
accept_pattern = '.*MARK.*'<commit_after>
def test_filter_strings_basic(wish):
filter_strings = wish
input = ['has MARK', 'does not have']
expected_ouput = ['has MARK']
accept_pattern = '.*MARK.*'
assert list(filter_strings(input, accept_pattern)) == expected_ouput
|
<commit_msg>Print times in fixed columns.
Makes it easier to compare.
<commit_before>
clock_t calc_time0,calc_time1;
double calc_time;
#define TIME_ON calc_time0=clock();
#define TIME_OFF(msg) calc_time1=clock(); \
calc_time=(double)(calc_time1-calc_time0)/CLOCKS_PER_SEC; \
std::cout<<msg<<": iterations="<<i \
<<" CPU Time="<<std::fixed<<calc_time \
<<" iter/s="<<i/calc_time<<std::endl<<std::flush;
<commit_after>
clock_t calc_time0,calc_time1;
double calc_time;
void printTime(const std::string& msg, long long iterations, double iterPerSec) {
std::stringstream ss;
ss << msg;
while (ss.tellp() < 30) {
ss << ' ';
}
ss << " iterations=" << iterations;
while (ss.tellp() < 60) {
ss << ' ';
}
ss <<" CPU Time="<<std::fixed<<calc_time;
while (ss.tellp() < 80) {
ss << ' ';
}
ss <<" iter/s="<<iterPerSec<<std::endl;
std::cout << ss.str() << std::flush;
}
#define TIME_ON calc_time0=clock();
#define TIME_OFF(msg) calc_time1=clock(); \
calc_time=(double)(calc_time1-calc_time0)/CLOCKS_PER_SEC; \
printTime(msg, i, i/calc_time);
|
<commit_msg>Return Warning instead of Err when Critical
Name constraints do officially need to be set critical, often they are not because many implementations still don't support Name Constraints. This change returns a warning instead of an error using the custom error package.<commit_before>package nameconstraints
import (
"encoding/asn1"
"crypto/x509/pkix"
"github.com/globalsign/certlint/certdata"
"github.com/globalsign/certlint/checks"
"github.com/globalsign/certlint/errors"
)
const checkName = "NameConstraints Extension Check"
var extensionOid = asn1.ObjectIdentifier{2, 5, 29, 30}
func init() {
checks.RegisterExtensionCheck(checkName, extensionOid, nil, Check)
}
// Check performs a strict verification on the extension according to the standard(s)
func Check(ex pkix.Extension, d *certdata.Data) *errors.Errors {
var e = errors.New(nil)
// NameConstraints do officially need to be set critical, often they are not
// because many implementations still don't support Name Constraints.
// TODO: Only show a warning message
if !ex.Critical {
e.Err("NameConstraints extension set non-critical")
}
// NameConstraints should only be included in CA or subordinate certificates
if !d.Cert.IsCA {
e.Err("End entity certificate should not contain a NameConstraints extension")
}
return e
}
<commit_after>package nameconstraints
import (
"encoding/asn1"
"crypto/x509/pkix"
"github.com/globalsign/certlint/certdata"
"github.com/globalsign/certlint/checks"
"github.com/globalsign/certlint/errors"
)
const checkName = "NameConstraints Extension Check"
var extensionOid = asn1.ObjectIdentifier{2, 5, 29, 30}
func init() {
checks.RegisterExtensionCheck(checkName, extensionOid, nil, Check)
}
// Check performs a strict verification on the extension according to the standard(s)
func Check(ex pkix.Extension, d *certdata.Data) *errors.Errors {
var e = errors.New(nil)
// NameConstraints do officially need to be set critical, often they are not
// because many implementations still don't support Name Constraints.
if !ex.Critical {
e.Warning("NameConstraints extension set non-critical")
}
// NameConstraints should only be included in CA or subordinate certificates
if !d.Cert.IsCA {
e.Err("End entity certificate should not contain a NameConstraints extension")
}
return e
}
|
<commit_msg>Increment head and tail properly.
<commit_before>
uint32_t packet_queue_init(packet_queue_t * queue)
{
queue->head = 0;
queue->tail = 0;
return SUCCESS;
}
bool packet_queue_is_empty(packet_queue_t * queue)
{
return queue->head == queue->tail;
}
bool packet_queue_is_full(packet_queue_t * queue)
{
return abs(queue->head - queue->tail) == PACKET_QUEUE_SIZE;
}
uint32_t packet_queue_add(packet_queue_t * queue, radio_packet_t * packet)
{
if (packet_queue_is_full(queue))
return NO_MEMORY;
memcpy(&queue->packets[0], packet, sizeof(*packet));
queue->tail++;
return SUCCESS;
}
uint32_t packet_queue_get(packet_queue_t * queue, radio_packet_t ** packet)
{
if (packet_queue_is_empty(queue))
return NOT_FOUND;
*packet = &queue->packets[queue->head];
queue->head++;
return SUCCESS;
}
<commit_after>
uint32_t packet_queue_init(packet_queue_t * queue)
{
queue->head = 0;
queue->tail = 0;
return SUCCESS;
}
bool packet_queue_is_empty(packet_queue_t * queue)
{
return queue->head == queue->tail;
}
bool packet_queue_is_full(packet_queue_t * queue)
{
return abs(queue->head - queue->tail) == PACKET_QUEUE_SIZE;
}
uint32_t packet_queue_add(packet_queue_t * queue, radio_packet_t * packet)
{
if (packet_queue_is_full(queue))
return NO_MEMORY;
memcpy(&queue->packets[queue->tail], packet, sizeof(*packet));
queue->tail = (queue->tail + 1u) % PACKET_QUEUE_SIZE;
return SUCCESS;
}
uint32_t packet_queue_get(packet_queue_t * queue, radio_packet_t ** packet)
{
if (packet_queue_is_empty(queue))
return NOT_FOUND;
*packet = &queue->packets[queue->head];
queue->head = (queue->head + 1u) % PACKET_QUEUE_SIZE;
return SUCCESS;
}
|
<commit_msg>Return 0 bits in Darwin_SecRandom::poll on SecRandomCopyBytes failure
<commit_before>/*
* Darwin SecRandomCopyBytes EntropySource
* (C) 2015 Daniel Seither (Kullo GmbH)
*
* Botan is released under the Simplified BSD License (see license.txt)
*/
#include <botan/internal/darwin_secrandom.h>
#include <Security/Security.h>
#include <Security/SecRandom.h>
namespace Botan {
/**
* Gather entropy from SecRandomCopyBytes
*/
size_t Darwin_SecRandom::poll(RandomNumberGenerator& rng)
{
secure_vector<uint8_t> buf(BOTAN_SYSTEM_RNG_POLL_REQUEST);
if(0 == SecRandomCopyBytes(kSecRandomDefault, buf.size(), buf.data()))
{
rng.add_entropy(buf.data(), buf.size());
return buf.size() * 8;
}
}
}
<commit_after>/*
* Darwin SecRandomCopyBytes EntropySource
* (C) 2015 Daniel Seither (Kullo GmbH)
*
* Botan is released under the Simplified BSD License (see license.txt)
*/
#include <botan/internal/darwin_secrandom.h>
#include <Security/Security.h>
#include <Security/SecRandom.h>
namespace Botan {
/**
* Gather entropy from SecRandomCopyBytes
*/
size_t Darwin_SecRandom::poll(RandomNumberGenerator& rng)
{
secure_vector<uint8_t> buf(BOTAN_SYSTEM_RNG_POLL_REQUEST);
if(0 == SecRandomCopyBytes(kSecRandomDefault, buf.size(), buf.data()))
{
rng.add_entropy(buf.data(), buf.size());
return buf.size() * 8;
}
return 0;
}
}
|
<commit_msg>Update crossfilter to gray/blue scheme
Same as in https://vega.github.io/editor/#/examples/vega-lite/interactive_layered_crossfilter<commit_before># category: interactive charts
import altair as alt
from vega_datasets import data
source = alt.UrlData(
data.flights_2k.url,
format={'parse': {'date': 'date'}}
)
brush = alt.selection(type='interval', encodings=['x'])
# Define the base chart, with the common parts of the
# background and highlights
base = alt.Chart().mark_bar().encode(
x=alt.X(alt.repeat('column'), type='quantitative', bin=alt.Bin(maxbins=20)),
y='count()'
).properties(
width=160,
height=130
)
# blue background with selection
background = base.add_selection(brush)
# yellow highlights on the transformed data
highlight = base.encode(
color=alt.value('goldenrod')
).transform_filter(brush)
# layer the two charts & repeat
alt.layer(
background,
highlight,
data=source
).transform_calculate(
"time",
"hours(datum.date)"
).repeat(column=["distance", "delay", "time"])
<commit_after># category: interactive charts
import altair as alt
from vega_datasets import data
source = alt.UrlData(
data.flights_2k.url,
format={'parse': {'date': 'date'}}
)
brush = alt.selection(type='interval', encodings=['x'])
# Define the base chart, with the common parts of the
# background and highlights
base = alt.Chart().mark_bar().encode(
x=alt.X(alt.repeat('column'), type='quantitative', bin=alt.Bin(maxbins=20)),
y='count()'
).properties(
width=160,
height=130
)
# gray background with selection
background = base.encode(
color=alt.value('#ddd')
).add_selection(brush)
# blue highlights on the transformed data
highlight = base.transform_filter(brush)
# layer the two charts & repeat
alt.layer(
background,
highlight,
data=source
).transform_calculate(
"time",
"hours(datum.date)"
).repeat(column=["distance", "delay", "time"])
|
<commit_msg>Use CaseBlocks to update case owner_ids
<commit_before>from django.core.management.base import BaseCommand
from casexml.apps.case.models import CommCareCase
from corehq.util.couch import iter_update, DocUpdate
class Command(BaseCommand):
help = ("Make sure all supply point cases have their owner_id set "
"to the location_id")
def handle(self, *args, **options):
def add_location(case):
if not case['location_id']:
return None
if case['owner_id'] != case['location_id']:
case['owner_id'] = case['location_id']
return DocUpdate(case)
iter_update(
CommCareCase.get_db(),
add_location,
self.get_case_ids(),
verbose=True
)
def get_case_ids(self):
return (case['id'] for case in CommCareCase.get_db().view(
'commtrack/supply_point_by_loc',
reduce=False,
include_docs=False,
).all())
<commit_after>from xml.etree import ElementTree
from django.core.management.base import BaseCommand
from casexml.apps.case.mock import CaseBlock
from casexml.apps.case.models import CommCareCase
from dimagi.utils.chunked import chunked
from dimagi.utils.couch.database import iter_docs
from corehq.apps.domain.models import Domain
from corehq.apps.hqcase.utils import submit_case_blocks
def needs_update(case):
return (case.get('location_id', None) and
case['owner_id'] != case['location_id'])
def case_block(case):
return ElementTree.tostring(CaseBlock(
create=False,
case_id=case['_id'],
owner_id=case['location_id'],
).as_xml())
def get_cases(domain):
supply_point_ids = (case['id'] for case in CommCareCase.get_db().view(
'commtrack/supply_point_by_loc',
startkey=[domain],
endkey=[domain, {}],
reduce=False,
include_docs=False,
).all())
return iter_docs(CommCareCase.get_db(), supply_point_ids)
def update_supply_points(domain):
case_blocks = (case_block(c) for c in get_cases(domain) if needs_update(c))
if case_blocks:
for chunk in chunked(case_blocks, 100):
submit_case_blocks(chunk, domain)
print "updated {} cases on domain {}".format(len(chunk), domain)
class Command(BaseCommand):
help = ("Make sure all supply point cases have their owner_id set "
"to the location_id")
def handle(self, *args, **options):
all_domains = Domain.get_all_names()
total = len(all_domains)
finished = 0
for domain in all_domains:
update_supply_points(domain)
finished += 1
if finished % 100 == 0:
print "Processed {} of {} domains".format(finished, total)
|
<commit_msg>Rewrite divider to change interval without resetting it
<commit_before>
class Divider {
public:
Divider(int interval = 1) {
setInterval(interval);
}
void setInterval(int interval) {
this->interval = interval;
clockCounter = interval - 1;
}
void tick() {
clockCounter++;
if (clockCounter == interval) {
clockCounter = 0;
}
}
bool hasClocked() {
return clockCounter == 0;
}
private:
int interval;
int clockCounter;
};
#endif
<commit_after>
class Divider {
public:
Divider(int interval = 1) {
setInterval(interval);
reset();
}
void setInterval(int interval) {
if (interval < 1) {
resetValue = 0;
}
else {
resetValue = interval - 1;
}
}
void reset() {
clockCounter = resetValue;
}
void tick() {
if (clockCounter == 0) {
clockCounter = resetValue;
}
else {
--clockCounter;
}
}
bool hasClocked() {
return clockCounter == resetValue;
}
private:
int resetValue;
int clockCounter;
};
#endif
|
<commit_msg>Print out the URL when it starts
<commit_before>package main
import (
"flag"
"fmt"
"net/http"
)
var (
port int
dir string
)
func init() {
flag.IntVar(&port, "port", 8080, "Port to run server on.")
flag.StringVar(&dir, "dir", ".", "Directory to serve.")
flag.Parse()
}
func main() {
http.ListenAndServe(fmt.Sprintf(":%d", port), http.FileServer(http.Dir(dir)))
}
<commit_after>package main
import (
"flag"
"fmt"
"net/http"
)
var (
port int
dir string
)
func init() {
flag.IntVar(&port, "port", 8080, "Port to run server on.")
flag.StringVar(&dir, "dir", ".", "Directory to serve.")
flag.Parse()
}
func main() {
fmt.Printf("Serving files from %s, running on http://localhost:%d\n", dir, port)
http.ListenAndServe(fmt.Sprintf(":%d", port), http.FileServer(http.Dir(dir)))
}
|
<commit_msg>Implement lots of new methods
Signed-off-by: Nuno Azevedo <8c1e4f9df2ead62a44323c12422702b1d8dd9c90@gmail.com>
<commit_before>import Ice.Current;
import VideoStreaming.Stream;
import java.util.ArrayList;
import java.util.List;
public class PortalI extends VideoStreaming._PortalDisp {
private List<Stream> Streams = new ArrayList<Stream>();
// Calls from Streaming Servers
public void register(Stream stream, Current current) {
Streams.add(stream);
}
public void remove(Stream stream, Current current) {
Streams.remove(stream);
}
// Calls from Clients
public List<Stream> getStreams(Current current) {
return Streams;
}
}
<commit_after>import Streaming.NotifierPrxHelper;
import Streaming.Stream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
public class PortalI extends Streaming._PortalDisp {
private Map<Stream, Long> Streams = new HashMap<Stream, Long>();
Streaming.NotifierPrx Notifier = null;
PortalI() {
Ice.ObjectPrx obj = Ice.Application.communicator().propertyToProxy("TopicManager.Proxy");
IceStorm.TopicManagerPrx manager = IceStorm.TopicManagerPrxHelper.checkedCast(obj);
if (manager == null) {
System.err.println("Invalid proxy");
return;
}
IceStorm.TopicPrx topic = null;
while (topic == null) {
try {
topic = manager.retrieve("Streams");
} catch (IceStorm.NoSuchTopic e) {
try {
topic = manager.create("Streams");
} catch (IceStorm.TopicExists ex) {
System.err.println("Temporary failure");
return;
}
}
}
Ice.ObjectPrx publisher = topic.getPublisher().ice_oneway();
Notifier = NotifierPrxHelper.uncheckedCast(publisher);
new Thread(() -> {
while (true) {
for (Stream s : Streams.keySet())
if (System.currentTimeMillis() - Streams.get(s) >= 60000)
Streams.remove(s);
try {
Thread.sleep(60000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}).start();
}
// Calls from Streaming Servers
public void register(Stream stream, Ice.Current current) {
Streams.put(stream, System.currentTimeMillis());
Notifier.inform(String.format("[%s: %s]", "New Stream", stream.getName()));
}
public void remove(Stream stream, Ice.Current current) {
Streams.remove(stream);
Notifier.inform(String.format("[%s: %s]", "Removed Stream", stream.getName()));
}
public void update(Stream stream, Ice.Current current) {
if (Streams.get(stream) != null)
Streams.put(stream, System.currentTimeMillis());
}
// Calls from Clients
public Stream get(String stream, Ice.Current current) {
for (Stream s : Streams.keySet())
if (s.name.equals(stream)) return s;
return null;
}
public List<Stream> getAll(Ice.Current current) {
return Streams.keySet().stream().collect(Collectors.toList());
}
}
|
<commit_msg>Support collecting schemas from filesystem.
<commit_before>
from abc import ABCMeta, abstractmethod
class Collector(object):
'''Collect and return schemas.'''
__metaclass__ = ABCMeta
@abstractmethod
def collect(self):
'''Yield collected schemas.
Each schema should be a Python dictionary.
'''
<commit_after>
import os
from abc import ABCMeta, abstractmethod
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
raise ImportError('Could not import json or simplejson')
class Collector(object):
'''Collect and return schemas.'''
__metaclass__ = ABCMeta
@abstractmethod
def collect(self):
'''Yield collected schemas.
Each schema should be a Python dictionary.
'''
class FilesystemCollector(Collector):
def __init__(self, paths=None, recursive=True):
'''Initialise with *paths* to search.
If *recursive* is True then all subdirectories of *paths* will also be
searched.
'''
self.paths = paths
self.recursive = recursive
if self.paths is None:
self.paths = []
super(FilesystemCollector, self).__init__()
def collect(self):
'''Yield collected schemas.'''
for path in self.paths:
for base, directories, filenames in os.walk(path):
for filename in filenames:
_, extension = os.path.splitext(filename)
if extension != '.json':
continue
filepath = os.path.join(base, filename)
with open(filepath, 'r') as file_handler:
schema = json.load(file_handler)
yield schema
if not self.recursive:
del directories[:]
|
<commit_msg>[IMP] Rename Objects to Models in module description
<commit_before>
{
"name": "Objects Graph",
"version": "0.1",
"depends": ["base"],
"author": "Smile",
"license": 'AGPL-3',
"description": """
Generate Objects Graph
Suggestions & Feedback to: corentin.pouhet-brunerie@smile.fr
""",
"website": "http://www.smile.fr",
"category": "Hidden",
"sequence": 32,
"data": [
"wizard/ir_model_graph_wizard_view.xml",
],
"demo": [],
'test': [],
"auto_install": True,
"installable": True,
"application": False,
}
<commit_after>
{
"name": "Models Graph",
"version": "0.1",
"depends": ["base"],
"author": "Smile",
"license": 'AGPL-3',
"description": """
Generate Models Graph
Suggestions & Feedback to: corentin.pouhet-brunerie@smile.fr
""",
"website": "http://www.smile.fr",
"category": "Hidden",
"sequence": 32,
"data": [
"wizard/ir_model_graph_wizard_view.xml",
],
"demo": [],
'test': [],
"auto_install": True,
"installable": True,
"application": False,
}
|
<commit_msg>Allow plotting two types against one another.
<commit_before>
import json
import sys
import argparse
from pylab import *
def main():
#Set up the command line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("input_file",
help = "name of the input file")
parser.add_argument("-y",
help = "the key to use for the function being plotted")
args = parser.parse_args()
input_file_name = args.input_file
y_key = str(args.y)
#initialize the x axis and function to be plotted
x = []
y = []
with open(input_file_name, 'r') as input_file:
for line in input_file:
if not line.rstrip() : continue
try:
obj = json.loads(line)
except ValueError:
print("Skipping invalid JSON: %s" % line)
continue
#if the parsed JSON object has the key we're looking for,
#add the key's value to the y graph and the timestamp
#to the x list
if obj['name'] == y_key:
y.append(obj['value'])
x.append(obj['timestamp'])
autoscale(True, 'both')
plot(x, y, label = y_key)
legend(loc='upper left')
show()
if __name__ == "__main__":
main()
<commit_after>
import json
import sys
import argparse
from pylab import *
def main():
#Set up the command line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("input_file",
help = "name of the input file")
parser.add_argument("-y",
help = "the key to use for the function being plotted")
parser.add_argument("-x",
help = "the key to use for the function being plotted",
default=None)
args = parser.parse_args()
input_file_name = args.input_file
y_key = str(args.y)
x_key = args.x
#initialize the x axis and function to be plotted
x = []
y = []
with open(input_file_name, 'r') as input_file:
for line in input_file:
if not line.rstrip() : continue
try:
obj = json.loads(line)
except ValueError:
print("Skipping invalid JSON: %s" % line)
continue
#if the parsed JSON object has the key we're looking for,
#add the key's value to the y graph and the timestamp
#to the x list
if obj['name'] == y_key:
y.append(obj['value'])
if x_key is None:
x.append(obj['timestamp'])
if obj['name'] == x_key:
x.append(obj['value'])
autoscale(True, 'both')
xlabel(x_key or 'timestamp')
ylabel(y_key)
plot(x, y, 'ro')
show()
if __name__ == "__main__":
main()
|
<commit_msg>Fix a packaging bug and make sure we also include templates directory.
<commit_before>
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='st2api',
version='0.4.0',
description='',
author='StackStorm',
author_email='info@stackstorm.com',
install_requires=[
"pecan",
],
test_suite='st2api',
zip_safe=False,
include_package_data=True,
packages=find_packages(exclude=['ez_setup'])
)
<commit_after>
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='st2api',
version='0.4.0',
description='',
author='StackStorm',
author_email='info@stackstorm.com',
install_requires=[
"pecan",
],
package_data={
'st2api': ['templates/*.html']
},
test_suite='st2api',
zip_safe=False,
include_package_data=True,
packages=find_packages(exclude=['ez_setup'])
)
|
<commit_msg>Add logic for grabbing file descriptor from FILE struct
<commit_before>import simuvex
from simuvex.s_type import SimTypeFd
import logging
l = logging.getLogger("simuvex.procedures.fileno")
######################################
# memset
######################################
class fileno(simuvex.SimProcedure):
#pylint:disable=arguments-differ
def run(self, f):
self.argument_types = {0: SimTypeFd()}
self.return_type = SimTypeFd()
return f
<commit_after>import simuvex
from simuvex.s_type import SimTypeFd, SimTypeTop
from . import io_file_data_for_arch
import logging
l = logging.getLogger("simuvex.procedures.fileno")
######################################
# fileno
######################################
class fileno(simuvex.SimProcedure):
#pylint:disable=arguments-differ
def run(self, f):
self.argument_types = {0: self.ty_ptr(SimTypeTop())}
self.return_type = SimTypeFd()
# Get FILE struct
io_file_data = io_file_data_for_arch(self.state.arch)
# Get the file descriptor from FILE struct
fd = self.state.se.any_int(self.state.memory.load(f + io_file_data['fd'],
4 * 8, # int
endness=self.state.arch.memory_endness))
return fd
|
<commit_msg>Move Session to Sport as an inline.
<commit_before>from django.contrib import admin
from .models import (Sport, Match, Session, CancelledSession)
@admin.register(Sport)
class SportAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(Match)
class MatchAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(CancelledSession)
class CancelledSessionAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
admin.site.register(Session)
<commit_after>from django.contrib import admin
from .models import (Sport, Match, Session, CancelledSession)
class SessionInline(admin.StackedInline):
model = Session
extra = 0
@admin.register(Sport)
class SportAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
inlines = [SessionInline,]
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(Match)
class MatchAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(CancelledSession)
class CancelledSessionAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
|
<commit_msg>Append app path to Python path in WSGI
<commit_before>import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "federez_ldap.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
<commit_after>import sys
import os
from os.path import dirname
sys.path.append(dirname(dirname(__file__)))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "federez_ldap.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
<commit_msg>Call into the discovery function and wait for someone to say hi
<commit_before>
void * start_discovery(void * args) {
syslog(LOG_INFO, "started discovery thread");
for (;;) {
continue;
}
}
<commit_after>
void * start_discovery(void * args) {
syslog(LOG_INFO, "started discovery thread");
for (;;) {
struct CL_Discovery_Transport *discovered_transport = malloc(sizeof(struct CL_Discovery_Transport));
wait_for_transport(discovered_transport);
}
}
|
<commit_msg>Add more joy using decorators
<commit_before>from flask.ext.classy import FlaskView
class AnalyserView(FlaskView):
def get(self):
return "awesome"
<commit_after>from flask.ext.classy import FlaskView
from utils.decorators import validate, require
from utils.validators import validate_url
class AnalyserView(FlaskView):
@require('url')
@validate({
'url': validate_url
})
def post(self, url):
return url
|
<commit_msg>Change data dir to ~/.local/share/classer
<commit_before>import os
def open_lastrun_file(mode='r'):
'''Open and return lastrun file.'''
# path to data directory
data_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'data')
os.makedirs(data_dir, exist_ok=True)
# path to lastrun file
lastrun_path = os.path.join(data_dir, 'lastrun.txt')
return open(lastrun_path, mode)
<commit_after>import os
def open_lastrun_file(mode='r'):
'''Open and return lastrun file.'''
# path to data directory
data_dir = os.path.expanduser('~/.local/share/classer/')
os.makedirs(data_dir, exist_ok=True)
# path to lastrun file
lastrun_path = os.path.join(data_dir, 'lastrun.txt')
return open(lastrun_path, mode)
|
<commit_msg>Put the code to create the package's long_description into a function.
<commit_before>
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def publish():
"""
Publish this package to PyPI (aka "the Cheeseshop").
"""
os.system('python setup.py sdist upload')
if sys.argv[-1] == 'publish':
publish()
sys.exit()
setup(name='pystache',
version='0.3.1',
description='Mustache for Python',
long_description=open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(),
author='Chris Wanstrath',
author_email='chris@ozmm.org',
url='http://github.com/defunkt/pystache',
packages=['pystache'],
license='MIT',
entry_points = {
'console_scripts': ['pystache=pystache.commands:main'],
},
classifiers = (
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
)
)
<commit_after>
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def publish():
"""
Publish this package to PyPI (aka "the Cheeseshop").
"""
os.system('python setup.py sdist upload')
def make_long_description():
"""
Return the long description for the package.
"""
long_description = open('README.rst').read() + '\n\n' + open('HISTORY.rst').read()
return long_description
if sys.argv[-1] == 'publish':
publish()
sys.exit()
long_description = make_long_description()
setup(name='pystache',
version='0.3.1',
description='Mustache for Python',
long_description=long_description,
author='Chris Wanstrath',
author_email='chris@ozmm.org',
url='http://github.com/defunkt/pystache',
packages=['pystache'],
license='MIT',
entry_points = {
'console_scripts': ['pystache=pystache.commands:main'],
},
classifiers = (
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
)
)
|
<commit_msg>Remove unused `--output stdout` option
<commit_before>
from utils import config
OFFSET = 2 # offset = max_x/stepsize * OFFSET
def init(output):
import matplotlib
config.mpl(matplotlib, bool(output))
from matplotlib import pyplot
globals()['plt'] = pyplot
def line_plot(xs, ys, color='red'):
plt.plot(
xs,
ys,
color=color,
linewidth=2.0
)
def legend(*args):
plt.legend(args, loc='best')
def scatter_plot(x, y, color='blue'):
plt.scatter(x, y, color=color)
def scale_x_plot(max_x, stepsize):
offset = max_x/stepsize * OFFSET
plt.axis(xmin=-offset, xmax=max_x+offset)
def scale_y_plot(max_y, stepsize):
offset = max_y/stepsize * OFFSET
plt.axis(ymin=-offset, ymax=max_y+offset)
def prepare_plot(xlabel, ylabel, title):
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.grid(True)
def display_plot(output):
if output:
if output == 'stdout':
plt.savefig(sys.stdout, format='png')
else:
plt.savefig(output)
else:
plt.show()
<commit_after>
from utils import config
OFFSET = 2 # offset = max_x/stepsize * OFFSET
def init(output):
import matplotlib
config.mpl(matplotlib, bool(output))
from matplotlib import pyplot
globals()['plt'] = pyplot
def line_plot(xs, ys, color='red'):
plt.plot(
xs,
ys,
color=color,
linewidth=2.0
)
def legend(*args):
plt.legend(args, loc='best')
def scatter_plot(x, y, color='blue'):
plt.scatter(x, y, color=color)
def scale_x_plot(max_x, stepsize):
offset = max_x/stepsize * OFFSET
plt.axis(xmin=-offset, xmax=max_x+offset)
def scale_y_plot(max_y, stepsize):
offset = max_y/stepsize * OFFSET
plt.axis(ymin=-offset, ymax=max_y+offset)
def prepare_plot(xlabel, ylabel, title):
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.grid(True)
def display_plot(output):
if output:
plt.savefig(output)
else:
plt.show()
|
<commit_msg>[cli] Update clap code to use `app_from_crate!` macro.
Signed-off-by: Fletcher Nichol <77a0fd9e8048bbacd11af4e957bc6ff03b549f49@nichol.ca>
<commit_before>extern crate clap;
extern crate names;
use names::{Generator, Name};
fn main() {
let (naming, amount) = {
let app = clap_app!(names =>
(version: &crate_version!()[..])
(author: "\nAuthor: Fletcher Nichol <fnichol@nichol.ca>\n")
(about: "A random name generator with results like `delirious-pail'.")
(@setting ColoredHelp)
(@arg amount: "Number of names to generate (default: 1)")
(@arg number: -n --number "Adds a random number to the name(s)")
);
let matches = app.get_matches();
let amount = value_t!(matches.value_of("amount"), usize).unwrap_or(1);
let naming: Name = if matches.is_present("number") {
Name::Numbered
} else {
Default::default()
};
(naming, amount)
};
let mut generator = Generator::with_naming(naming);
for _ in 0..amount {
println!("{}", generator.next().unwrap());
}
}
<commit_after>extern crate clap;
extern crate names;
use clap::{AppSettings, Arg};
use names::{Generator, Name};
/// Entry point: parse the CLI options, then print `amount` generated names.
fn main() {
    let (naming, amount) = parse_cli_args();
    let mut generator = Generator::with_naming(naming);
    for _ in 0..amount {
        println!("{}", generator.next().unwrap());
    }
}

/// Parses the command line and returns the naming scheme plus the number
/// of names to generate.
///
/// NOTE(review): `app_from_crate!` and `value_t_or_exit!` are clap macros;
/// on the 2015 edition they require `#[macro_use] extern crate clap;` --
/// confirm the crate root imports them with `#[macro_use]`.
fn parse_cli_args() -> (Name, usize) {
    const FLAG_NUMBER: &str = "number";
    const ARG_AMOUNT: &str = "amount";
    // app_from_crate! pulls name/version/author/description from Cargo.toml.
    let app = app_from_crate!()
        .name("names")
        .setting(AppSettings::ColoredHelp)
        .arg(
            Arg::with_name(FLAG_NUMBER)
                .short("n")
                .long(FLAG_NUMBER)
                .help("Adds a random number to the name(s)"),
        ).arg(
            Arg::with_name(ARG_AMOUNT)
                .help("Number of names to generate")
                .default_value("1"),
        );
    let matches = app.get_matches();
    // Exits with a clap-formatted error if the amount is not a valid usize.
    let amount = value_t_or_exit!(matches.value_of(ARG_AMOUNT), usize);
    let naming: Name = if matches.is_present(FLAG_NUMBER) {
        Name::Numbered
    } else {
        Default::default()
    };
    (naming, amount)
}
|
<commit_msg> Include posixishard as late as possible
<commit_before>/* ISC license. */
#include <string.h>
#include <utmpx.h>
#include <skalibs/posixishard.h>
#include <skalibs/allreadwrite.h>
#include <skalibs/strerr2.h>
#include <skalibs/djbunix.h>
#include "hpr.h"
#ifndef UT_LINESIZE
#define UT_LINESIZE 32
#endif
void hpr_wall (char const *s)
{
size_t n = strlen(s) ;
char tty[10 + UT_LINESIZE] = "/dev/" ;
char msg[n+1] ;
memcpy(msg, s, n) ;
msg[n++] = '\n' ;
setutxent() ;
for (;;)
{
size_t linelen ;
int fd ;
struct utmpx *utx = getutxent() ;
if (!utx) break ;
if (utx->ut_type != USER_PROCESS) continue ;
linelen = strnlen(utx->ut_line, UT_LINESIZE) ;
memcpy(tty + 5, utx->ut_line, linelen) ;
tty[5 + linelen] = 0 ;
fd = open_append(tty) ;
if (fd == -1) continue ;
allwrite(fd, msg, n) ;
fd_close(fd) ;
}
endutxent() ;
}
<commit_after>/* ISC license. */
#include <string.h>
#include <utmpx.h>
#include <skalibs/allreadwrite.h>
#include <skalibs/strerr2.h>
#include <skalibs/djbunix.h>
#include <skalibs/posixishard.h>
#include "hpr.h"
#ifndef UT_LINESIZE
#define UT_LINESIZE 32
#endif
/*
 * Writes the message s, followed by a newline, to the controlling tty of
 * every logged-in user (every USER_PROCESS entry in the utmpx database).
 * Best effort: ttys that cannot be opened are silently skipped.
 */
void hpr_wall (char const *s)
{
  size_t n = strlen(s) ;
  char tty[10 + UT_LINESIZE] = "/dev/" ;  /* "/dev/" + ut_line + NUL */
  char msg[n+1] ;
  memcpy(msg, s, n) ;
  msg[n++] = '\n' ;
  setutxent() ;  /* rewind the utmpx database before iterating */
  for (;;)
  {
    size_t linelen ;
    int fd ;
    struct utmpx *utx = getutxent() ;
    if (!utx) break ;
    if (utx->ut_type != USER_PROCESS) continue ;
    /* ut_line may not be NUL-terminated: bound the copy by UT_LINESIZE */
    linelen = strnlen(utx->ut_line, UT_LINESIZE) ;
    memcpy(tty + 5, utx->ut_line, linelen) ;
    tty[5 + linelen] = 0 ;
    fd = open_append(tty) ;
    if (fd == -1) continue ;
    allwrite(fd, msg, n) ;
    fd_close(fd) ;
  }
  endutxent() ;
}
|
<commit_msg>Add PYTEST_MD_REPORT_COLOR environment variable setting
<commit_before>
import sys
import py
if __name__ == "__main__":
sys.exit(py.test.cmdline.main())
<commit_after>
import os
import sys
import py
if __name__ == "__main__":
    # Force plain-text (uncolored) output from pytest-md-report, then run
    # the pytest command line and propagate its exit status.
    os.environ["PYTEST_MD_REPORT_COLOR"] = "text"
    sys.exit(py.test.cmdline.main())
|
<commit_msg>gluster: Return UNKNOWN status for GlusterTaskStatus
If the string value passed to GlusterTaskStatus is not
one of the enum options return UNKNOWN as the option
value.
Fixes 2 issues reported by coverity scan when converting
vdsm return value
GlusterAsyncTaskStatus.from((String)map.get(STATUS)).getJobExecutionStatus()
-- possible NPE
Change-Id: If8cd725af21639827ba19c5589fe2889e1c98c19
Signed-off-by: Sahina Bose <e470779b356412d02748affb76951ba845275668@redhat.com>
<commit_before>package org.ovirt.engine.core.common.asynctasks.gluster;
import org.ovirt.engine.core.common.job.JobExecutionStatus;
/**
* This enum represents the gluster volume async task status values returned from VDSM
*/
public enum GlusterAsyncTaskStatus {
COMPLETED("COMPLETED"),
STARTED("STARTED"),
STOPPED("STOPPED"),
FAILED("FAILED"),
UNKNOWN("UNKNOWN"),
NOT_STARTED("NOT STARTED")
;
private String statusMsg;
private GlusterAsyncTaskStatus(String status) {
statusMsg = status;
}
public String value() {
return statusMsg;
}
public static GlusterAsyncTaskStatus from(String status) {
for (GlusterAsyncTaskStatus taskStatus : values()) {
if (taskStatus.value().equalsIgnoreCase(status)) {
return taskStatus;
}
}
return null;
}
public JobExecutionStatus getJobExecutionStatus() {
switch (this) {
case COMPLETED:
return JobExecutionStatus.FINISHED;
case STARTED:
return JobExecutionStatus.STARTED;
case STOPPED:
return JobExecutionStatus.ABORTED;
case FAILED:
return JobExecutionStatus.FAILED;
case UNKNOWN:
case NOT_STARTED:
default:
return JobExecutionStatus.UNKNOWN;
}
}
}
<commit_after>package org.ovirt.engine.core.common.asynctasks.gluster;
import org.ovirt.engine.core.common.job.JobExecutionStatus;
/**
* This enum represents the gluster volume async task status values returned from VDSM
*/
public enum GlusterAsyncTaskStatus {
    COMPLETED("COMPLETED"),
    STARTED("STARTED"),
    STOPPED("STOPPED"),
    FAILED("FAILED"),
    UNKNOWN("UNKNOWN"),
    NOT_STARTED("NOT STARTED")
    ;

    // Raw status string as reported by VDSM.
    private String statusMsg;

    private GlusterAsyncTaskStatus(String status) {
        statusMsg = status;
    }

    /** Returns the VDSM status string this constant represents. */
    public String value() {
        return statusMsg;
    }

    /**
     * Maps a VDSM status string to its enum constant (case-insensitive).
     * Returns {@link #UNKNOWN} for null or unrecognized values, so callers
     * never receive null.
     */
    public static GlusterAsyncTaskStatus from(String status) {
        for (GlusterAsyncTaskStatus taskStatus : values()) {
            if (taskStatus.value().equalsIgnoreCase(status)) {
                return taskStatus;
            }
        }
        return GlusterAsyncTaskStatus.UNKNOWN;
    }

    /** Translates this gluster task status into the engine's job execution status. */
    public JobExecutionStatus getJobExecutionStatus() {
        switch (this) {
        case COMPLETED:
            return JobExecutionStatus.FINISHED;
        case STARTED:
            return JobExecutionStatus.STARTED;
        case STOPPED:
            return JobExecutionStatus.ABORTED;
        case FAILED:
            return JobExecutionStatus.FAILED;
        case UNKNOWN:
        case NOT_STARTED:
        default:
            return JobExecutionStatus.UNKNOWN;
        }
    }
}
|
<commit_msg>Add thread member for offloading disk info ops
<commit_before>// Copyright (c) 2015, Matthew Malensek.
// Distributed under the BSD 2-Clause License (see LICENSE.txt for details)
#pragma once
#include <Windows.h>
#include "OSD.h"
class NotifyIcon;
class EjectOSD : public OSD {
public:
EjectOSD();
virtual void Hide();
virtual void ProcessHotkeys(HotkeyInfo &hki);
private:
DWORD _ignoreDrives;
DWORD _latestDrive;
MeterWnd _mWnd;
NotifyIcon *_icon;
std::vector<HICON> _iconImages;
void EjectDrive(std::wstring driveLetter);
DWORD DriveLetterToMask(wchar_t letter);
wchar_t MaskToDriveLetter(DWORD mask);
virtual void OnDisplayChange();
virtual LRESULT WndProc(HWND hWnd, UINT message,
WPARAM wParam, LPARAM lParam);
};<commit_after>// Copyright (c) 2015, Matthew Malensek.
// Distributed under the BSD 2-Clause License (see LICENSE.txt for details)
#pragma once
#include <Windows.h>
#include <thread>
#include "OSD.h"
class NotifyIcon;
/// On-screen display for ejecting removable drives.
class EjectOSD : public OSD {
public:
    EjectOSD();

    virtual void Hide();
    virtual void ProcessHotkeys(HotkeyInfo &hki);

private:
    DWORD _ignoreDrives;  ///< bitmask of drive letters to skip
    DWORD _latestDrive;   ///< mask of the most recently seen drive
    MeterWnd _mWnd;
    NotifyIcon *_icon;
    std::vector<HICON> _iconImages;
    // NOTE(review): presumably used to offload disk info queries off the
    // UI thread (per the commit that added it) -- confirm in the .cpp.
    std::thread _menuThread;

    void EjectDrive(std::wstring driveLetter);
    DWORD DriveLetterToMask(wchar_t letter);
    wchar_t MaskToDriveLetter(DWORD mask);

    virtual void OnDisplayChange();
    virtual LRESULT WndProc(HWND hWnd, UINT message,
        WPARAM wParam, LPARAM lParam);
};
<commit_msg>Add string and snowflake identifier union.
<commit_before>class SnowflakeID(int):
"""
Represents a Discord Snowflake ID.
"""
pass
<commit_after>from typing import Union
class SnowflakeID(int):
    """
    Represents a Discord Snowflake ID.
    """
    pass

# A value identifying an entity: either a numeric snowflake or a plain string.
MIdentifier = Union[SnowflakeID, str]
|
<commit_msg>Remove default option for `desa`
<commit_before>from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
default_options = (
'desa:build_tests=False'
)
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
<commit_after>from conans import ConanFile, CMake
class EsappConan(ConanFile):
    """Conan recipe for building and packaging the esapp library."""

    name = 'esapp'
    version = '0.4.1'
    url = 'https://github.com/jason2506/esapp'
    license = 'BSD 3-Clause'
    author = 'Chi-En Wu'
    requires = 'desa/0.1.0@jason2506/testing'
    settings = 'os', 'compiler', 'build_type', 'arch'
    generators = 'cmake'
    exports = (
        'CMakeLists.txt',
        'cmake/*.cmake',
        'include/*.hpp'
    )

    def build(self):
        """Configure and build the project with CMake.

        NOTE(review): `self.options.enable_conan` and `self.options.build_tests`
        are referenced below, but this recipe declares no `options` attribute
        (the commit removed `default_options`) -- confirm these options are
        still defined, otherwise this raises at build time.
        """
        cmake = CMake(self.settings)
        args = []
        args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
        args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
        args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
        self.run('cmake "%s" %s %s' % (
            self.conanfile_directory,
            cmake.command_line,
            ' '.join(args)
        ))
        self.run('cmake --build .')

    def package(self):
        """Install the build artifacts into the package folder."""
        cmake = CMake(self.settings)
        self.run('cmake --build . --target install %s' % cmake.build_config)
|
<commit_msg>Add options to use bind/unbind callbacks as opposed to mount/unmount
Reviewed By: oprisnik
Differential Revision: D15122179
fbshipit-source-id: 28b734a0fb0899b4a28e9cdacb31658480149ca2
<commit_before>/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.facebook.fresco.vito.core;
import javax.annotation.Nullable;
/** Fresco experiment class with default values. Override this class to change them. */
public class FrescoExperiments {
public boolean prepareImagePipelineComponents() {
return false;
}
public boolean checkCacheInAttach() {
return true;
}
public int mountContentPoolSize() {
// Default Litho mount pool size
return 3;
}
public boolean mountContentPoolSync() {
// Default Litho mount pool sync value
return true;
}
public boolean prefetchInOnPrepare() {
return false;
}
public boolean keepRefToPrefetchDatasouce() {
return false;
}
public boolean prepareActualImageWrapperInBackground() {
return true;
}
public boolean preparePlaceholderDrawableInBackground() {
return false;
}
public boolean keepRefToMainFetchDatasouce() {
return true;
}
public @Nullable Boolean prefetchToBitmapCache() {
return null;
}
public boolean closeDatasource() {
return true;
}
public boolean cacheImageInState() {
return false;
}
public boolean shouldDiffCallerContext() {
return false;
}
public void setupPropDiffingExperiment() {}
public boolean fadeInImages() {
return false;
}
}
<commit_after>/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.facebook.fresco.vito.core;
import javax.annotation.Nullable;
/** Fresco experiment class with default values. Override this class to change them. */
public class FrescoExperiments {

  public boolean prepareImagePipelineComponents() {
    return false;
  }

  public boolean checkCacheInAttach() {
    return true;
  }

  public int mountContentPoolSize() {
    // Default Litho mount pool size
    return 3;
  }

  public boolean mountContentPoolSync() {
    // Default Litho mount pool sync value
    return true;
  }

  public boolean prefetchInOnPrepare() {
    return false;
  }

  public boolean keepRefToPrefetchDatasouce() {
    return false;
  }

  public boolean prepareActualImageWrapperInBackground() {
    return true;
  }

  public boolean preparePlaceholderDrawableInBackground() {
    return false;
  }

  public boolean keepRefToMainFetchDatasouce() {
    return true;
  }

  // Null indicates no explicit override -- confirm how callers fall back.
  public @Nullable Boolean prefetchToBitmapCache() {
    return null;
  }

  public boolean closeDatasource() {
    return true;
  }

  public boolean cacheImageInState() {
    return false;
  }

  public boolean shouldDiffCallerContext() {
    return false;
  }

  public void setupPropDiffingExperiment() {}

  public boolean fadeInImages() {
    return false;
  }

  // When true, bind/unbind callbacks are used instead of mount/unmount
  // (per the commit that introduced this flag); disabled by default.
  public boolean useBindCallbacks() {
    return false;
  }
}
|
<commit_msg>Add smarts to cope with slug clashes with other places with the same names.
<commit_before>from django.core.management.base import LabelCommand
from mapit.models import Type
from pombola.core.models import Place, PlaceKind
from django.template.defaultfilters import slugify
class Command(LabelCommand):
help = 'Copy mapit.areas to core.places'
args = '<mapit.type.code>'
def handle_label(self, mapit_type_code, **options):
# load the mapit type
mapit_type = Type.objects.get(code=mapit_type_code)
# if needed create the core placetype
placekind, created = PlaceKind.objects.get_or_create(
name=mapit_type.description,
defaults={
'slug': slugify(mapit_type.description)
}
)
# create all the places as needed for all mapit areas of that type
for area in mapit_type.areas.all():
print area.name
place, created = Place.objects.get_or_create(
name=area.name,
kind=placekind,
defaults={
'slug': slugify(area.name),
}
)
place.mapit_area = area
place.save()
<commit_after>from django.core.management.base import LabelCommand
from mapit.models import Type
from pombola.core.models import Place, PlaceKind
from django.template.defaultfilters import slugify
class Command(LabelCommand):
    help = 'Copy mapit.areas to core.places'
    args = '<mapit.type.code>'

    def handle_label(self, mapit_type_code, **options):
        """Create a core Place (and PlaceKind) for every mapit Area of the
        given mapit type, de-duplicating slugs across place kinds."""
        # load the mapit type
        mapit_type = Type.objects.get(code=mapit_type_code)
        # if needed create the core placetype
        placekind, created = PlaceKind.objects.get_or_create(
            name=mapit_type.description,
            defaults={
                'slug': slugify(mapit_type.description)
            }
        )
        # create all the places as needed for all mapit areas of that type
        for area in mapit_type.areas.all():
            # There may be a slug clash as several areas have the same name but
            # are different placekinds. Create the slug and then check to see
            # if the slug is already in use for a placekind other than ours. If
            # it is append the placekind to the slug.
            slug = slugify(area.name)
            if Place.objects.filter(slug=slug).exclude(kind=placekind).exists():
                slug = slug + '-' + placekind.slug
            print "'%s' (%s)" % (area.name, slug)
            place, created = Place.objects.get_or_create(
                name=area.name,
                kind=placekind,
                defaults={
                    'slug': slug,
                }
            )
            # Always (re)attach the mapit area, even for pre-existing places.
            place.mapit_area = area
            place.save()
|
<commit_msg>Add an option to handle language: default to en
<commit_before>package config
import (
"log"
"os"
"path/filepath"
"code.google.com/p/gcfg"
)
const Version = "0.1a"
const DEFAULT_NICKNAME = "perpetua"
const DEFAULT_USER = "perpetua"
var BASE_DIR = filepath.Join(os.ExpandEnv("$HOME"), ".perpetua")
var CONFIG_FILE = filepath.Join(BASE_DIR, "perpetua.gcfg")
var DATABASE_FILE = filepath.Join(BASE_DIR, "perpetua.sqlite3")
type Options struct {
Server struct {
Hostname string
Port uint16
UseTLS, SkipVerify bool
}
IRC struct {
Nickname, User string
Channel []string
}
}
func (o *Options) Read() {
err := gcfg.ReadFileInto(o, CONFIG_FILE)
if o.IRC.Nickname == "" {
o.IRC.Nickname = DEFAULT_NICKNAME
}
if o.IRC.User == "" {
o.IRC.User = DEFAULT_USER
}
if err != nil {
log.Fatal(err)
}
}
<commit_after>package config
import (
"log"
"os"
"path/filepath"
"code.google.com/p/gcfg"
)
const Version = "0.1a"
const DEFAULT_LANG = "en"
const DEFAULT_NICKNAME = "perpetua"
const DEFAULT_USER = "perpetua"
var BASE_DIR = filepath.Join(os.ExpandEnv("$HOME"), ".perpetua")
var CONFIG_FILE = filepath.Join(BASE_DIR, "perpetua.gcfg")
var DATABASE_FILE = filepath.Join(BASE_DIR, "perpetua.sqlite3")
// Options is used by Gcfg to store data read from CONFIG_FILE.
type Options struct {
Server struct {
Hostname string
Port uint16
UseTLS, SkipVerify bool
}
IRC struct {
Nickname, User string
Channel []string
}
I18N struct {
Lang string
}
}
// Read configuration from default config file specified by
// CONFIG_FILE and set default values for not provided entries.
// Exits the process (log.Fatal) if the file cannot be read or parsed.
func (o *Options) Read() {
	err := gcfg.ReadFileInto(o, CONFIG_FILE)
	if err != nil {
		log.Fatal(err)
	}
	// Fall back to the package defaults for any entry left empty.
	if o.IRC.Nickname == "" {
		o.IRC.Nickname = DEFAULT_NICKNAME
	}
	if o.IRC.User == "" {
		o.IRC.User = DEFAULT_USER
	}
	if o.I18N.Lang == "" {
		o.I18N.Lang = DEFAULT_LANG
	}
}
|
<commit_msg>Make the error message stand out more for the user when we reject an svn commit.
<commit_before>from django.core.management import BaseCommand, CommandError
from mysite.missions import controllers
import sys
class Command(BaseCommand):
args = '<repo_path> <txn_id>'
help = 'SVN pre-commit hook for mission repositories'
def handle(self, *args, **options):
# This management command is called from the mission svn repositories
# as the pre-commit hook. It receives the repository path and transaction
# ID as arguments, and it receives a description of applicable lock
# tokens on stdin. Its environment and current directory are undefined.
if len(args) != 2:
raise CommandError, 'Exactly two arguments are expected.'
repo_path, txn_id = args
try:
controllers.SvnCommitMission.pre_commit_hook(repo_path, txn_id)
except controllers.IncorrectPatch, e:
sys.stderr.write(str(e) + '\n\n')
raise CommandError, 'The commit failed to validate.'
<commit_after>from django.core.management import BaseCommand, CommandError
from mysite.missions import controllers
import sys
class Command(BaseCommand):
    args = '<repo_path> <txn_id>'
    help = 'SVN pre-commit hook for mission repositories'

    def handle(self, *args, **options):
        # This management command is called from the mission svn repositories
        # as the pre-commit hook. It receives the repository path and transaction
        # ID as arguments, and it receives a description of applicable lock
        # tokens on stdin. Its environment and current directory are undefined.
        if len(args) != 2:
            raise CommandError, 'Exactly two arguments are expected.'
        repo_path, txn_id = args
        try:
            controllers.SvnCommitMission.pre_commit_hook(repo_path, txn_id)
        except controllers.IncorrectPatch, e:
            # Surround the validation error with blank lines so it stands out
            # in the svn client's output, then abort the commit.
            sys.stderr.write('\n ' + str(e) + '\n\n')
            raise CommandError, 'The commit failed to validate.'
|
<commit_msg>Fix breakpoint_set_restart test for Windows
When run with the multiprocess test runner, the getchar() trick doesn't work, so ninja check-lldb would fail on this test, but running the test directly worked fine.
Differential Revision: http://reviews.llvm.org/D19035
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@266145 91177308-0d34-0410-b5e6-96231b3b80d8
<commit_before>//===-- main.cpp ------------------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#include <iostream>
#include <stdio.h>
int main(int argc, char const *argv[])
{
getchar();
printf("Set a breakpoint here.\n");
return 0;
}
<commit_after>//===-- main.cpp ------------------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#include <chrono>
#include <stdio.h>
#include <thread>
int main(int argc, char const *argv[])
{
    // Spin until the debugger flips `done` to true. This replaces the old
    // getchar() trick, which failed under the multiprocess test runner
    // where stdin is not interactive (see the commit message).
    static bool done = false;
    while (!done)
    {
        std::this_thread::sleep_for(std::chrono::milliseconds{100});
    }
    printf("Set a breakpoint here.\n");
    return 0;
}
|
<commit_msg>Use die and return instead of exiting.
<commit_before>
/* these 3 defined in makegen.c */
extern double *farrays[];
extern int sizeof_farray[];
extern int f_goto[];
int
fsize(int genno)
/* returns the size of function number genno */
{
if(!sizeof_farray[f_goto[genno]]) {
fprintf(stderr,"fsize: You haven't allocated function %d yet!\n",genno);
closesf();
}
return(sizeof_farray[f_goto[genno]]);
}
<commit_after>
/* these 3 defined in makegen.c */
extern double *farrays[];
extern int sizeof_farray[];
extern int f_goto[];
/* returns the size of function number genno */
/* Reports via die() and returns -1 when the function has not been
   allocated (its size slot is still zero). */
int
fsize(int genno)
{
	int slot = f_goto[genno];

	if (sizeof_farray[slot] == 0) {
		die("fsize", "You haven't allocated function %d yet!", genno);
		return -1;
	}
	return sizeof_farray[slot];
}
|
<commit_msg>Add test for key derivation
<commit_before>import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()<commit_after>import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
    def setUp(self):
        # Fresh anonymous session for every test.
        self.sess = Session()

    def test_public_file_download(self):
        """Download a known public file and verify its SHA-256 digest."""
        url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
        sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
        hash = hashlib.sha256()
        def verify_hash(file, chunks):
            # Fold every downloaded chunk into the digest, then compare.
            for chunk in chunks:
                hash.update(chunk)
            self.assertEqual(hash.hexdigest(), sha256)
        self.sess.download(verify_hash, url)

    def test_ephemeral_account(self):
        """An ephemeral (anonymous) account can be created and initialised."""
        sess = self.sess
        user = models.User(sess)
        user.ephemeral()
        sess.init_datastore()

    def test_key_derivation(self):
        # Known-answer test: deriving the key for "password" yields this
        # fixed 16-byte value.
        self.assertEqual(models.User.derive_key("password"), 'd\x039r^n\xbd\x13\xa2_\x00R\x12\x9f|\xb1')
|
<commit_msg>Make compatible with Bleach v2.0 and html5lib v1.0
<commit_before>import bleach
from html5lib.sanitizer import HTMLSanitizer
from markdown.postprocessors import Postprocessor
from markdown import Extension
class MyTokenizer(HTMLSanitizer):
def sanitize_token(self, token):
return token
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linkify_callbacks=[]):
super(Postprocessor, self).__init__(md)
self._callbacks = linkify_callbacks
def run(self, text):
text = bleach.linkify(text,
callbacks=self._callbacks,
tokenizer=MyTokenizer)
return text
class LinkifyExtension(Extension):
config = {'linkify_callbacks': [[], 'Callbacks to send to bleach.linkify']}
def extendMarkdown(self, md, md_globals):
md.postprocessors.add(
"linkify",
LinkifyPostprocessor(md, self.getConfig('linkify_callbacks')),
"_begin")
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
<commit_after>import bleach
from markdown.postprocessors import Postprocessor
from markdown import Extension
class LinkifyPostprocessor(Postprocessor):
    """Postprocessor that turns bare URLs in the rendered HTML into links."""

    def __init__(self, md, linkify_callbacks=None):
        # Bug fix: super(Postprocessor, self) skipped Postprocessor's own
        # __init__ (it resolves to Postprocessor's *parent*); name the
        # subclass so the direct base is initialised with `md`.
        super(LinkifyPostprocessor, self).__init__(md)
        # Bug fix: a mutable default argument ([]) is shared between all
        # instances; use None as the sentinel and build a fresh list.
        self._callbacks = linkify_callbacks if linkify_callbacks is not None else []

    def run(self, text):
        """Run bleach.linkify over the final serialized output."""
        text = bleach.linkify(text,
                              callbacks=self._callbacks)
        return text
class LinkifyExtension(Extension):
    """Markdown extension that registers the linkify postprocessor."""

    # Single config entry: callbacks forwarded verbatim to bleach.linkify.
    config = {'linkify_callbacks': [[], 'Callbacks to send to bleach.linkify']}

    def extendMarkdown(self, md, md_globals):
        # "_begin" places the postprocessor first, so later postprocessors
        # see the already-linkified HTML.
        md.postprocessors.add(
            "linkify",
            LinkifyPostprocessor(md, self.getConfig('linkify_callbacks')),
            "_begin")

def makeExtension(*args, **kwargs):
    # Entry point used by markdown.Markdown(extensions=[...]).
    return LinkifyExtension(*args, **kwargs)
|
<commit_msg>Test cases changed and minor optimization
<commit_before>import unittest
import cerebro.core.entities as en
import cerebro.core.usecases as uc
class TestUseCases(unittest.TestCase):
def setUp(self):
self.neurons_path = ["./cerebro/neurons"]
self.neuron_test = ("system check")
self.neuron_test_response = "All working properly."
self.command_args = ("arg1", "arg2")
self.test_command = en.Command(self.neuron_test, self.command_args)
self.error_test = ("asd asdasd ")
self.error_test_response = "Sorry, I could not process that."
self.error_command = en.Command(self.error_test, self.command_args)
self.total_neurons = 2
uc.get_all_neurons(self.neurons_path)
def test_get_all_neurons(self):
assert len(uc.NEURONS) == self.total_neurons
def test_neuron_execution(self):
assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response
def test_command_execution(self):
response = uc.process_command(self.test_command)
assert response == self.neuron_test_response
def test_command_execution_faliure(self):
response = uc.process_command(self.error_command)
assert response == self.error_test_response<commit_after>import unittest
import cerebro.core.entities as en
import cerebro.core.usecases as uc
class TestUseCases(unittest.TestCase):
    def setUp(self):
        # Populate the neuron registry from the bundled neurons package.
        self.neurons_path = ["./cerebro/neurons"]
        self.neuron_test = ("system check")
        self.neuron_test_response = "All working properly."
        self.command_args = ("arg1", "arg2")
        self.test_command = en.Command(self.neuron_test, self.command_args)
        self.total_neurons = 2
        uc.get_all_neurons(self.neurons_path)

    def test_get_all_neurons(self):
        # Both bundled neurons should have been registered.
        assert len(uc.NEURONS) == self.total_neurons

    def test_neuron_execution(self):
        # Neurons are callable directly from the registry by phrase.
        assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response

    def test_command_execution(self):
        response = uc.process_command(self.test_command)
        assert response == self.neuron_test_response

    def test_command_execution_faliure(self):
        # An unrecognised phrase yields the generic failure message.
        error_test = ("asd asdasd ")
        error_test_response = "Sorry, I could not process that."
        error_command = en.Command(error_test, self.command_args)
        response = uc.process_command(error_command)
        assert response == error_test_response
<commit_msg>Add automated 'gm compare' invocation
<commit_before>
import os;
import glob;
import re as regex;
testDirectory = "./test/";
def readMarkupFile(markupFilename):
f = open(markupFilename, 'r');
markup = f.read();
return markup;
def runNode(sourceFilename, destinationFilename, markupFilename):
markup = readMarkupFile(markupFilename);
cmd = "node ImageMarkupCall.js --input " + sourceFilename + " --output " + \
destinationFilename + " --markup \"" + markup + "\"";
ret = os.system(cmd);
if ret != 0:
sys.stderr.write('node-markup encountered an error while processing ' \
+ sourceFilename);
else:
print(sourceFilename + ' -> ' + destinationFilename);
for filename in os.listdir(testDirectory):
if filename.endswith(".markup"):
markupFilename = testDirectory + filename;
basename = regex.sub(r'(.+)\.markup', r'\1', filename);
sourceFilename = testDirectory + basename + '.source.jpg';
oracle = testDirectory + basename + '.node.oracle.jpg';
testFilename = testDirectory + basename + '.node.test.jpg';
runNode(sourceFilename, testFilename, markupFilename);
<commit_after>
import os;
import subprocess;
import glob;
import re as regex;
import sys;
testDirectory = "./test/";
def readMarkupFile(markupFilename):
    """Return the entire contents of the markup file as a string.

    Uses a context manager so the file handle is closed even if the read
    raises; the original opened the file and never closed it.
    """
    with open(markupFilename, 'r') as f:
        return f.read()
def compareOutputs(oracleFilename, destinationFilename):
    """Compare two images with GraphicsMagick and print gm's report.

    Uses the mean-absolute-error (mae) metric; stdout of `gm compare` is
    printed verbatim for manual inspection (stderr is not captured).
    """
    metric = "mae";
    cmd = ["gm","compare","-metric",metric,oracleFilename,destinationFilename];
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE);
    (out, err) = proc.communicate();
    print out;
def runNode(sourceFilename, destinationFilename, markupFilename):
    """Render the markup onto the source image via the node CLI.

    Returns True when the node process exits successfully.
    Relies on the module-level `import sys` for the error path.
    """
    markup = readMarkupFile(markupFilename);
    cmd = "node ImageMarkupCall.js --input " + sourceFilename + " --output " + \
        destinationFilename + " --markup \"" + markup + "\"";
    ret = os.system(cmd);
    if ret != 0:
        sys.stderr.write('node-markup encountered an error while processing ' \
            + sourceFilename);
    else:
        print(sourceFilename + ' -> ' + destinationFilename);
    return ret == 0;
# For every *.markup file in the test directory, regenerate the rendered
# image with node-markup and, only if that succeeds, diff it against the
# stored oracle image.
for filename in os.listdir(testDirectory):
    if filename.endswith(".markup"):
        markupFilename = testDirectory + filename;
        # strip the ".markup" suffix to get the test's base name
        basename = regex.sub(r'(.+)\.markup', r'\1', filename);
        sourceFilename = testDirectory + basename + '.source.jpg';
        oracleFilename = testDirectory + basename + '.node.oracle.jpg';
        testFilename = testDirectory + basename + '.node.test.jpg';
        success = runNode(sourceFilename, testFilename, markupFilename);
        if success:
            compareOutputs(oracleFilename, testFilename);
|
<commit_msg>Solve Game of Thrones 1
<commit_before>package hackerrank;
import java.util.Scanner;
public class GameOfThrones1 {
private static String isPalindrome(String str) {
// TODO:
return "YES";
}
public static void main(String[] args) {
Scanner in = new Scanner(System.in);
String str = in.next();
System.out.println(isPalindrome(str));
}
}
<commit_after>package hackerrank;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;
public class GameOfThrones1 {

    /**
     * Returns "YES" if the characters of {@code str} can be rearranged into
     * a palindrome, otherwise "NO". A string is a palindrome permutation iff
     * at most one character occurs an odd number of times.
     */
    private static String isPalindrome(String str) {
        Map<Character, Integer> counts = new HashMap<>();
        for (int i = 0; i < str.length(); i++) {
            char c = str.charAt(i);
            // One map lookup per character instead of containsKey + get + put.
            Integer seen = counts.get(c);
            counts.put(c, seen == null ? 1 : seen + 1);
        }
        int odd = 0;
        for (int count : counts.values()) {
            if (count % 2 != 0) {
                odd++;
            }
        }
        return odd > 1 ? "NO" : "YES";
    }

    public static void main(String[] args) {
        Scanner in = new Scanner(System.in);
        String str = in.next();
        System.out.println(isPalindrome(str));
    }
}
|
<commit_msg>Update run mainfile to reflect import paths
<commit_before>import ui_logic_mainwindow as uv
from PyQt4 import QtGui
import sys
def main():
app = QtGui.QApplication(sys.argv)
ex = uv.UiMainWindow()
ex.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
<commit_after>from test import ui_logic_mainwindow as uv
from PyQt4 import QtGui
import sys
def main():
    """Create the Qt application, show the main window and enter the event
    loop; the process exits with the event loop's return code."""
    app = QtGui.QApplication(sys.argv)
    ex = uv.UiMainWindow()
    ex.show()
    sys.exit(app.exec_())

if __name__ == "__main__":
    main()
|
<commit_msg>Fix time source for emscripten
<commit_before>// Copyright (c) 2013-2021 mogemimi. Distributed under the MIT license.
#include "TimeSourceEmscripten.hpp"
#include <emscripten.h>
namespace Pomdog::Detail::Emscripten {
TimePoint TimeSourceEmscripten::Now() const
{
const auto now = ::emscripten_get_now();
return TimePoint{Duration{static_cast<double>(now)}};
}
} // namespace Pomdog::Detail::Emscripten
<commit_after>// Copyright (c) 2013-2021 mogemimi. Distributed under the MIT license.
#include "TimeSourceEmscripten.hpp"
#include <emscripten.h>
#include <type_traits>
namespace Pomdog::Detail::Emscripten {
TimePoint TimeSourceEmscripten::Now() const
{
    // emscripten_get_now() reports milliseconds; multiply by 0.001 to build
    // the Duration in seconds (the missing conversion was the original bug).
    const auto now = ::emscripten_get_now();
    // Guard against the emscripten API changing its return type silently.
    static_assert(std::is_same_v<std::remove_const_t<decltype(now)>, double>);
    return TimePoint{Duration{now * 0.001}};
}
} // namespace Pomdog::Detail::Emscripten
|
<commit_msg>Add six as a dependency.
<commit_before>
import distutils.core
# Uploading to PyPI
# =================
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
version = '1.1'
distutils.core.setup(
name='linersock',
version=version,
author='Kale Kundert and Alex Mitchell',
packages=['linersock'],
url='https://github.com/kxgames/linersock',
download_url='https://github.com/kxgames/linersock/tarball/'+version,
license='LICENSE.txt',
description="A thin layer between you and your sockets that helps prevent chafing.",
long_description=open('README.rst').read(),
keywords=['nonblocking', 'socket', 'wrapper', 'library'])
<commit_after>
import distutils.core
# Uploading to PyPI
# =================
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
# Bump this (and tag the release) for every upload; it is also embedded in
# the download_url below.
version = '1.2'

distutils.core.setup(
    name='linersock',
    version=version,
    author='Kale Kundert and Alex Mitchell',
    url='https://github.com/kxgames/linersock',
    download_url='https://github.com/kxgames/linersock/tarball/'+version,
    license='LICENSE.txt',
    description="A thin layer between you and your sockets that helps prevent chafing.",
    long_description=open('README.rst').read(),
    keywords=['nonblocking', 'socket', 'wrapper', 'library'],
    packages=['linersock'],
    # Runtime dependency: six (py2/py3 compatibility helpers).
    install_requires=[
        'six',
    ],
)
|
<commit_msg>Print the Format class used<commit_before>def print_header():
import sys
from dxtbx.format.Registry import Registry
# this will do the lookup for every frame - this is strictly not needed
# if all frames are from the same instrument
for arg in sys.argv[1:]:
format = Registry.find(arg)
i = format(arg)
print 'Beam:'
print i.get_beam()
print 'Goniometer:'
print i.get_goniometer()
print 'Detector:'
print i.get_detector()
print 'Scan:'
print i.get_scan()
print 'Total Counts:'
print sum(i.get_raw_data())
if __name__ == '__main__':
print_header()
<commit_after>def print_header():
    """Print the beam, goniometer, detector and scan headers for every
    image file named on the command line, using the dxtbx format
    registry to pick a suitable reader for each file."""
    import sys
    from dxtbx.format.Registry import Registry

    # this will do the lookup for every frame - this is strictly not needed
    # if all frames are from the same instrument
    for arg in sys.argv[1:]:
        format = Registry.find(arg)
        # Report which Format subclass the registry selected for this file.
        print 'Using header reader: %s' % format.__name__
        i = format(arg)
        print 'Beam:'
        print i.get_beam()
        print 'Goniometer:'
        print i.get_goniometer()
        print 'Detector:'
        print i.get_detector()
        print 'Scan:'
        print i.get_scan()
        print 'Total Counts:'
        # assumes get_raw_data() returns something summable -- TODO confirm
        print sum(i.get_raw_data())

if __name__ == '__main__':
    print_header()
|
<commit_msg>Add new methods to StreamBuilders
<commit_before>import { IStream } from "../../src/stream/stream.i";
export class StreamBuilder {
public build(): IStream {
return <IStream> {
writeLine: (message: string) => { }
};
}
}
<commit_after>import { IStream } from "../../src/stream/stream.i";
export class StreamBuilder {
    // Builds a stub IStream whose operations are all no-ops, for tests.
    public build(): IStream {
        const stub = {
            writeLine: (message: string) => { },
            write: (message: string) => { },
            moveCursor: (x: number, y: number) => { },
            cursorTo: (x: number, y: number) => { },
            clearLine: () => { }
        };
        return <IStream> stub;
    }
}
|
<commit_msg>[core] ENHANCE: Add UserInputSource as a InputProviderSource
<commit_before>package org.museautomation.core.task.input;
import org.museautomation.core.task.state.*;
/**
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
public abstract class ResolvedInputSource
{
public abstract String getDescription();
public static class TaskStateSource extends ResolvedInputSource
{
public TaskStateSource(InterTaskState state)
{
_state = state;
}
@Override
public String getDescription()
{
return "from task state " + _state.getStateDefinitionId();
}
public InterTaskState getState()
{
return _state;
}
private final InterTaskState _state;
}
public static class InputProviderSource extends ResolvedInputSource
{
public InputProviderSource(TaskInputProvider provider)
{
_provider = provider;
}
@Override
public String getDescription()
{
return "from task input provider " + _provider.getDescription();
}
public TaskInputProvider getProvider()
{
return _provider;
}
private final TaskInputProvider _provider;
}
public static class DefaultValueSource extends ResolvedInputSource
{
@Override
public String getDescription()
{
return "from default value ";
}
}
}<commit_after>package org.museautomation.core.task.input;
import org.museautomation.core.task.state.*;
/**
 * Describes where a resolved task input value was obtained from.
 * Each concrete subclass represents one possible origin and supplies a
 * human-readable description for logs and UI.
 *
 * @author Christopher L Merrill (see LICENSE.txt for license details)
 */
public abstract class ResolvedInputSource
{
    /** @return a short human-readable description of this source */
    public abstract String getDescription();

    /** The input value was taken from an inter-task state. */
    public static class TaskStateSource extends ResolvedInputSource
    {
        public TaskStateSource(InterTaskState state)
        {
            _state = state;
        }

        @Override
        public String getDescription()
        {
            return "from task state " + _state.getStateDefinitionId();
        }

        /** @return the state the value was read from */
        public InterTaskState getState()
        {
            return _state;
        }

        private final InterTaskState _state;
    }

    /** The input value was supplied by a {@link TaskInputProvider}. */
    public static class InputProviderSource extends ResolvedInputSource
    {
        public InputProviderSource(TaskInputProvider provider)
        {
            _provider = provider;
        }

        @Override
        public String getDescription()
        {
            return "from task input provider " + _provider.getDescription();
        }

        /** @return the provider that supplied the value */
        public TaskInputProvider getProvider()
        {
            return _provider;
        }

        private final TaskInputProvider _provider;
    }

    /** The input value was entered interactively by the user. */
    @SuppressWarnings("unused") // used in UI
    public static class UserInputSource extends ResolvedInputSource
    {
        @Override
        public String getDescription()
        {
            return "from user";
        }
    }

    /** The input value fell back to the input's declared default. */
    public static class DefaultValueSource extends ResolvedInputSource
    {
        @Override
        public String getDescription()
        {
            return "from default value ";
        }
    }
}
<commit_msg>Add onError example in main activity
<commit_before>package br.com.bloder.blorm;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import br.com.bloder.blormlib.Blorm;
import br.com.bloder.blormlib.validation.Action;
import static br.com.bloder.blormlib.validation.Validations.*;
public class MainActivity extends AppCompatActivity {
private EditText editTextFilled;
private Button submit;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
editTextFilled = (EditText) findViewById(R.id.edit_text_filled);
submit = (Button) findViewById(R.id.submit);
editTextFilled.setText("Hello");
new Blorm.Builder()
.field(editTextFilled).is(filled)
.onSuccess(new Action() {
@Override
public void call() { onSuccess();
}})
.submitOn(submit);
}
private void onSuccess() {
editTextFilled.setError(null);
Toast.makeText(this, "Success", Toast.LENGTH_SHORT).show();
}
}
<commit_after>package br.com.bloder.blorm;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import br.com.bloder.blormlib.Blorm;
import br.com.bloder.blormlib.validation.Action;
import static br.com.bloder.blormlib.validation.Validations.*;
/**
 * Demo activity wiring Blorm form validation to an EditText: the field
 * must be filled when the submit button is pressed, and the outcome is
 * routed to onSuccess()/onError().
 */
public class MainActivity extends AppCompatActivity {

    private EditText editTextFilled;
    private Button submit;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        editTextFilled = (EditText) findViewById(R.id.edit_text_filled);
        submit = (Button) findViewById(R.id.submit);
        editTextFilled.setText("Hello");

        // Validate on submit: "filled" check on the text field, with
        // success and error callbacks.
        new Blorm.Builder()
            .field(editTextFilled).is(filled)
            .onSuccess(new Action() {
                @Override
                public void call() { onSuccess();
                }})
            .onError(new Action() {
                @Override
                public void call() {
                    onError();
                }
            })
            .submitOn(submit);
    }

    // Clears any prior validation error and reports success to the user.
    private void onSuccess() {
        editTextFilled.setError(null);
        Toast.makeText(this, "Success", Toast.LENGTH_SHORT).show();
    }

    // Reports a validation failure to the user.
    private void onError() {
        Toast.makeText(this, "Error", Toast.LENGTH_SHORT).show();
    }
}
|
<commit_msg>Improve error message a bit.
<commit_before>// mad - mock ad server
// (C) copyright 2015 - J.W. Janssen
package main
import (
"net"
"fmt"
"github.com/coreos/go-systemd/activation"
"github.com/coreos/go-systemd/journal"
)
type JournaldLogger struct {
}
func (l *JournaldLogger) Log(msg string, args ...interface{}) {
if journal.Enabled() {
journal.Print(journal.PriInfo, fmt.Sprintf(msg, args...))
}
}
func NewLogger() Logger {
return &JournaldLogger{}
}
func Listener() net.Listener {
listeners, err := activation.Listeners(true)
if err != nil {
panic(err)
}
if len(listeners) != 1 {
panic("Unexpected number of socket activation fds")
}
return listeners[0]
}
// EOF
<commit_after>// mad - mock ad server
// (C) copyright 2015 - J.W. Janssen
package main
import (
"fmt"
"net"
"github.com/coreos/go-systemd/activation"
"github.com/coreos/go-systemd/journal"
)
// JournaldLogger writes log messages to the systemd journal.
type JournaldLogger struct {
}

// Log formats msg with args and emits it to the journal at info
// priority. Messages are silently dropped when the journal is not
// available.
func (l *JournaldLogger) Log(msg string, args ...interface{}) {
	if journal.Enabled() {
		journal.Print(journal.PriInfo, fmt.Sprintf(msg, args...))
	}
}
// NewLogger returns a Logger backed by the systemd journal.
func NewLogger() Logger {
	return &JournaldLogger{}
}
// Listener returns the single socket handed over by systemd socket
// activation. It panics if the activation fds cannot be retrieved or
// if anything other than exactly one listener was passed in.
func Listener() net.Listener {
	listeners, err := activation.Listeners(true)
	if err != nil {
		panic(err)
	}
	if len(listeners) != 1 {
		panic(fmt.Sprintf("Unexpected number of socket activation fds, got: %d listeners, expected 1!", len(listeners)))
	}
	return listeners[0]
}
// EOF
|
<commit_msg>Fix filter() usage due to python 3 compatibility
Built-in method filter() returns a list in Python 2.x [1], but it
returns an iterator in Python 3.x [2]. To remove the difference (and
also make the code more readable), we use a list comprehension instead of filter().
[1] http://docs.python.org/2/library/functions.html#filter
[2] http://docs.python.org/3/library/functions.html#filter
Related to blueprint make-python3-compatible
Change-Id: Ifd42403309ba3a44693e0c7c856a64b861eca3e9
<commit_before>
import gc
class StringWithAttrs(str):
"""A String that can have arbitrary attributes
"""
pass
def _find_objects(t):
"""Find Objects in the GC State
This horribly hackish method locates objects of a
given class in the current python instance's garbage
collection state. In case you couldn't tell, this is
horribly hackish, but is necessary for locating all
green threads, since they don't keep track of themselves
like normal threads do in python.
:param class t: the class of object to locate
:rtype: list
:returns: a list of objects of the given type
"""
return filter(lambda o: isinstance(o, t), gc.get_objects())
<commit_after>
import gc
class StringWithAttrs(str):
    """A String that can have arbitrary attributes
    """
    # str instances cannot take attributes; subclassing gives instances
    # a __dict__ so callers can tag strings with metadata.
    pass
def _find_objects(t):
"""Find Objects in the GC State
This horribly hackish method locates objects of a
given class in the current python instance's garbage
collection state. In case you couldn't tell, this is
horribly hackish, but is necessary for locating all
green threads, since they don't keep track of themselves
like normal threads do in python.
:param class t: the class of object to locate
:rtype: list
:returns: a list of objects of the given type
"""
return [o for o in gc.get_objects() if isinstance(o, t)]
|
<commit_msg>Use absolute value for E and logscale
<commit_before>
import argparse
import matplotlib.pyplot as plt
import pandas as pd
parser = argparse.ArgumentParser(description='Plot data from output of the n-body simulation.')
parser.add_argument('--output', type=str, default='output_int.dat',
help='The output file (default %(default)s)')
args = parser.parse_args()
if __name__ == '__main__':
# Read data as CSV
data = pd.read_csv(args.output, delim_whitespace=True)
# Plot
plt.plot(data.t, data.Ep, label='$Ep$')
plt.plot(data.t, data.Ec, label='$Ec$')
plt.plot(data.t, data.E, label='$E_\mathrm{tot}$')
plt.grid('on')
plt.legend()
plt.show()
<commit_after>
import argparse
import matplotlib.pyplot as plt
import pandas as pd

# Command-line interface: the only option is the simulation output file.
parser = argparse.ArgumentParser(description='Plot data from output of the n-body simulation.')
parser.add_argument('--output', type=str, default='output_int.dat',
                    help='The output file (default %(default)s)')
args = parser.parse_args()

if __name__ == '__main__':
    # Read data as CSV
    data = pd.read_csv(args.output, delim_whitespace=True)
    # Plot
    # Absolute values allow a log scale even when an energy is negative
    # (bound systems have negative potential energy).
    plt.plot(data.t, data.Ep.abs(), label='$Ep$')
    plt.plot(data.t, data.Ec.abs(), label='$Ec$')
    plt.plot(data.t, data.E.abs(), label='$E_\mathrm{tot}$')
    plt.yscale('log')
    plt.grid('on')
    plt.legend()
    plt.show()
|
<commit_msg>Fix chainer v4 error about type_check._argname
<commit_before>from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
class Arctanh(function_node.FunctionNode):
"""Elementwise inverse hyperbolic tangent function."""
def check_type_forward(self, in_types):
type_check._argname(in_types, ('x',))
x_type, = in_types
type_check.expect(x_type.dtype.kind == 'f')
def forward(self, inputs):
self.retain_inputs((0,))
x, = inputs
xp = cuda.get_array_module(x)
y = xp.arctanh(x)
return utils.force_array(y, dtype=x.dtype),
def backward(self, indexes, grad_outputs):
x, = self.get_retained_inputs()
gy, = grad_outputs
gx = 1. / (1 - x ** 2) * gy
return gx,
def arctanh(x):
"""Elementwise inverse hyperbolic tangent function.
Args:
x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.
Returns:
~chainer.Variable: Output variable.
"""
return Arctanh().apply((x,))[0]
<commit_after>from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
class Arctanh(function_node.FunctionNode):

    """Elementwise inverse hyperbolic tangent function."""

    def check_type_forward(self, in_types):
        if hasattr(type_check, '_argname'):
            # typecheck._argname is introduced by Chainer v6; older
            # versions (e.g. v4) lack it, so only call it when present.
            type_check._argname(in_types, ('x',))
        x_type, = in_types
        # The input must be a floating-point array.
        type_check.expect(x_type.dtype.kind == 'f')

    def forward(self, inputs):
        # Retain the input; backward() needs it to compute the gradient.
        self.retain_inputs((0,))
        x, = inputs
        xp = cuda.get_array_module(x)
        y = xp.arctanh(x)
        return utils.force_array(y, dtype=x.dtype),

    def backward(self, indexes, grad_outputs):
        # d/dx arctanh(x) = 1 / (1 - x^2)
        x, = self.get_retained_inputs()
        gy, = grad_outputs
        gx = 1. / (1 - x ** 2) * gy
        return gx,
def arctanh(x):
    """Elementwise inverse hyperbolic tangent function.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    # apply() returns a tuple of outputs; this op produces exactly one.
    return Arctanh().apply((x,))[0]
|
<commit_msg>Revert "add __all__ parameter to main module"
This reverts commit bc9e574206e75b1a50bd1b8eb4bd56f96a18cf51.
<commit_before>"""A small django app around sendgrid and its webhooks"""
from utils import SendgridEmailMessage, SendgridEmailMultiAlternatives
from models import Email
from signals import email_event
__version__ = '0.1.0'
__all__ = ('SendgridEmailMessage', 'SendgridEmailMultiAlternatives', 'Email', 'email_event')
<commit_after>"""A small django app around sendgrid and its webhooks"""
__version__ = '0.1.0'
|
<commit_msg>Comment to explain numpy warnings filter
<commit_before>from __future__ import division, print_function, absolute_import
import pkg_resources
import warnings
__author__ = "Will Usher, Tom Russell"
__copyright__ = "Will Usher, Tom Russell"
__license__ = "mit"
try:
__version__ = pkg_resources.get_distribution(__name__).version
except:
__version__ = 'unknown'
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
<commit_after>from __future__ import division, print_function, absolute_import
import pkg_resources
import warnings
__author__ = "Will Usher, Tom Russell"
__copyright__ = "Will Usher, Tom Russell"
__license__ = "mit"
try:
__version__ = pkg_resources.get_distribution(__name__).version
except:
__version__ = 'unknown'
# Filter out warnings arising from some installed combinations of scipy/numpy
# - problem and fix discussed in [numpy/numpy#432](https://github.com/numpy/numpy/pull/432)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
|
<commit_msg>Include existing env vars in Build
Signed-off-by: Max Brunsfeld <8ee73e75b50cc292b5052b11f2ca25336d3e974e@pivotallabs.com>
<commit_before>package cmdtest
import (
"io/ioutil"
"os"
"os/exec"
"path/filepath"
)
func Build(mainPath string, args ...string) (string, error) {
return BuildIn(os.Getenv("GOPATH"), mainPath, args...)
}
func BuildIn(gopath string, mainPath string, args ...string) (string, error) {
if len(gopath) == 0 {
panic("$GOPATH not provided when building " + mainPath)
}
tmpdir, err := ioutil.TempDir("", "test_cmd_main")
if err != nil {
return "", err
}
executable := filepath.Join(tmpdir, filepath.Base(mainPath))
cmdArgs := append([]string{"build"}, args...)
cmdArgs = append(cmdArgs, "-o", executable, mainPath)
build := exec.Command("go", cmdArgs...)
build.Stdout = os.Stdout
build.Stderr = os.Stderr
build.Stdin = os.Stdin
build.Env = []string{"GOPATH=" + gopath}
err = build.Run()
if err != nil {
return "", err
}
return executable, nil
}
<commit_after>package cmdtest
import (
"io/ioutil"
"os"
"os/exec"
"path/filepath"
)
// Build compiles the Go package at mainPath into a temporary
// executable, using the GOPATH from the current environment, and
// returns the executable's path.
func Build(mainPath string, args ...string) (string, error) {
	return BuildIn(os.Getenv("GOPATH"), mainPath, args...)
}
// BuildIn compiles the Go package at mainPath with the given gopath,
// forwarding any extra args to `go build`. The binary is written into
// a fresh temp directory and its path returned. Panics if gopath is
// empty.
func BuildIn(gopath string, mainPath string, args ...string) (string, error) {
	if len(gopath) == 0 {
		panic("$GOPATH not provided when building " + mainPath)
	}

	tmpdir, err := ioutil.TempDir("", "test_cmd_main")
	if err != nil {
		return "", err
	}

	executable := filepath.Join(tmpdir, filepath.Base(mainPath))

	cmdArgs := append([]string{"build"}, args...)
	cmdArgs = append(cmdArgs, "-o", executable, mainPath)

	build := exec.Command("go", cmdArgs...)
	build.Stdout = os.Stdout
	build.Stderr = os.Stderr
	build.Stdin = os.Stdin
	// Inherit the full environment (PATH, HOME, GOROOT, ...) so the go
	// tool still works; only GOPATH is overridden.
	build.Env = append(os.Environ(), "GOPATH="+gopath)

	err = build.Run()
	if err != nil {
		return "", err
	}

	return executable, nil
}
|
<commit_msg>Make onoff function more versatile
<commit_before>import RPi.GPIO as GPIO
from time import sleep
def onoff(period, pin):
"""Symmetric square wave, equal time on/off"""
half_cycle = period / 2.0
GPIO.output(pin, GPIO.HIGH)
sleep(half_cycle)
GPIO.output(pin, GPIO.LOW)
sleep(half_cycle)
def strobe(freq, dur, pin):
nflashes = freq * dur
seconds_to_sleep = 1.0 / freq
# Use Raspberry-Pi board pin numbers. In other words, 11 means pin
# number 11, not GPIO 11.
GPIO.setmode(GPIO.BOARD)
GPIO.setup(pin, GPIO.OUT) # requires root?
for i in range(nflashes):
onoff(seconds_to_sleep, pin)
GPIO.cleanup()
<commit_after>
import RPi.GPIO as GPIO
from time import sleep
def onoff(ontime, offtime, pin):
    """Drive `pin` high for `ontime` seconds, then low for `offtime` seconds."""
    GPIO.output(pin, GPIO.HIGH)
    sleep(ontime)
    GPIO.output(pin, GPIO.LOW)
    sleep(offtime)
def strobe(freq, dur, pin):
    """Flash `pin` at `freq` Hz for `dur` seconds with a 50% duty cycle."""
    # NOTE(review): range() needs an integer, so freq * dur must come out
    # integral -- confirm callers never pass fractional values.
    nflashes = freq * dur
    period = 1.0 / freq

    # Use Raspberry-Pi board pin numbers. In other words, 11 means pin
    # number 11, not GPIO 11.
    GPIO.setmode(GPIO.BOARD)
    GPIO.setup(pin, GPIO.OUT) # requires root?

    for i in range(nflashes):
        onoff(period/2.0, period/2.0, pin)

    GPIO.cleanup()
|
<commit_msg>Add ioloop fixture that works with tornado 5
<commit_before>"""Py.Test fixtures"""
from tornado.httpclient import AsyncHTTPClient
from pytest import fixture
from .mocks import MockAsyncHTTPClient
@fixture
def client(io_loop, request):
"""Return mocked AsyncHTTPClient"""
before = AsyncHTTPClient.configured_class()
AsyncHTTPClient.configure(MockAsyncHTTPClient)
request.addfinalizer(lambda : AsyncHTTPClient.configure(before))
c = AsyncHTTPClient()
assert isinstance(c, MockAsyncHTTPClient)
return c
<commit_after>"""Py.Test fixtures"""
from tornado.httpclient import AsyncHTTPClient
from tornado import ioloop
from pytest import fixture
from .mocks import MockAsyncHTTPClient
@fixture
def io_loop(request):
    """Same as pytest-tornado.io_loop, adapted for tornado 5"""
    io_loop = ioloop.IOLoop()
    io_loop.make_current()

    def _close():
        # Detach the loop from the current thread before closing all of
        # its file descriptors.
        io_loop.clear_current()
        io_loop.close(all_fds=True)

    request.addfinalizer(_close)
    return io_loop
@fixture
def client(io_loop, request):
    """Return mocked AsyncHTTPClient"""
    # Swap in the mock client class for the duration of the test,
    # restoring the previous configuration afterwards.
    before = AsyncHTTPClient.configured_class()
    AsyncHTTPClient.configure(MockAsyncHTTPClient)
    request.addfinalizer(lambda : AsyncHTTPClient.configure(before))
    c = AsyncHTTPClient()
    assert isinstance(c, MockAsyncHTTPClient)
    return c
|
<commit_msg>Add optional secondary target argument
<commit_before>
class Symbol {
public:
Symbol(Opcode opcode) : opcode(opcode) {};
Symbol(Opcode opcode, unsigned int target) : opcode(opcode), target(target) {};
void execute();
private:
Opcode opcode;
unsigned int target;
};
<commit_after>
// A single instruction: an opcode plus up to two integer operands.
// Operands default to 0 when not supplied.
class Symbol {
public:
    Symbol(Opcode opcode) : opcode(opcode) {};
    Symbol(Opcode opcode, unsigned int target) : opcode(opcode), target(target) {};
    Symbol(Opcode opcode, unsigned int target, unsigned int secondary) : opcode(opcode), target(target), secondary(secondary) {};

    // Executes this instruction (defined elsewhere).
    void execute();

private:
    Opcode opcode;
    unsigned int target = 0;
    unsigned int secondary = 0;
};
|
<commit_msg>Convert also 0x80..0x9f characters to '?'
--HG--
branch : HEAD
<commit_before>/* Copyright (c) 2004 Timo Sirainen */
#include "lib.h"
#include "str.h"
#include "str-sanitize.h"
void str_sanitize_append(string_t *dest, const char *src, size_t max_len)
{
const char *p;
for (p = src; *p != '\0'; p++) {
if ((unsigned char)*p < 32)
break;
}
str_append_n(dest, src, (size_t)(p - src));
for (; *p != '\0' && max_len > 0; p++, max_len--) {
if ((unsigned char)*p < 32)
str_append_c(dest, '?');
else
str_append_c(dest, *p);
}
if (*p != '\0') {
str_truncate(dest, str_len(dest)-3);
str_append(dest, "...");
}
}
const char *str_sanitize(const char *src, size_t max_len)
{
string_t *str;
str = t_str_new(I_MIN(max_len, 256));
str_sanitize_append(str, src, max_len);
return str_c(str);
}
<commit_after>/* Copyright (c) 2004 Timo Sirainen */
#include "lib.h"
#include "str.h"
#include "str-sanitize.h"
/* Append src to dest, replacing unprintable characters with '?' and
 * appending at most max_len characters of src. The (c & 0x7f) < 32
 * test treats both 0x00..0x1f and 0x80..0x9f as unprintable. If src
 * doesn't fit, the last 3 appended characters are replaced by "...".
 *
 * NOTE(review): the truncation path assumes dest holds at least 3
 * characters by then; confirm callers never use tiny max_len. */
void str_sanitize_append(string_t *dest, const char *src, size_t max_len)
{
	const char *p;

	/* Fast path: bulk-copy the leading run of clean characters, but
	   never more than max_len of them. (Previously max_len was ignored
	   here, so a long clean prefix could exceed the limit without
	   truncation.) */
	for (p = src; *p != '\0' && max_len > 0; p++, max_len--) {
		if (((unsigned char)*p & 0x7f) < 32)
			break;
	}
	str_append_n(dest, src, (size_t)(p - src));

	for (; *p != '\0' && max_len > 0; p++, max_len--) {
		if (((unsigned char)*p & 0x7f) < 32)
			str_append_c(dest, '?');
		else
			str_append_c(dest, *p);
	}
	if (*p != '\0') {
		/* Input didn't fit: mark the truncation with an ellipsis. */
		str_truncate(dest, str_len(dest)-3);
		str_append(dest, "...");
	}
}
/* Return a sanitized, possibly truncated copy of src, allocated from
   the temporary pool. */
const char *str_sanitize(const char *src, size_t max_len)
{
	string_t *str;

	/* Pre-size the buffer, capped at 256 to avoid huge allocations. */
	str = t_str_new(I_MIN(max_len, 256));
	str_sanitize_append(str, src, max_len);
	return str_c(str);
}
|
<commit_msg>Use rectangle-point collision in button.
<commit_before>
using namespace GUI;
Button::Button(Widget* parent) : Widget(parent) {
mouseHover = false;
hasClickedCallback = false;
size = glm::vec2(64.f, 64.f);
}
Button::~Button() {
}
void Button::Update() {
double xpos = Input()->CursorX();
double ypos = Input()->CursorY();
mouseHover = xpos >= GetPosition().x && xpos < GetPosition().x + size.x && ypos >= GetPosition().y && ypos < GetPosition().y + size.y;
if (mouseHover && Input()->MousePressed(GLFW_MOUSE_BUTTON_LEFT) && hasClickedCallback) {
clickedCallback();
}
}
glm::vec2 Button::GetSize() const {
return size;
}
void Button::SetSize(const glm::vec2& size) {
this->size = size;
}
void Button::SetClickedCallback(std::function<void()> callback) {
clickedCallback = callback;
hasClickedCallback = true;
}
bool Button::GetMouseHover() const {
return mouseHover;
}
<commit_after>
using namespace GUI;
// Constructs a button with no hover state, no click callback and a
// default 64x64 size.
Button::Button(Widget* parent) : Widget(parent) {
    mouseHover = false;
    hasClickedCallback = false;
    size = glm::vec2(64.f, 64.f);
}

Button::~Button() {
}

// Updates the hover flag from the current cursor position using a
// rectangle-point collision test, and fires the clicked callback on a
// left-button press while hovered.
void Button::Update() {
    glm::vec2 mousePosition(Input()->CursorX(), Input()->CursorY());
    Physics::Rectangle rect(GetPosition(), size);
    mouseHover = rect.Collide(mousePosition);

    if (mouseHover && Input()->MousePressed(GLFW_MOUSE_BUTTON_LEFT) && hasClickedCallback) {
        clickedCallback();
    }
}

glm::vec2 Button::GetSize() const {
    return size;
}

void Button::SetSize(const glm::vec2& size) {
    this->size = size;
}

// Registers the callback invoked when the button is clicked.
void Button::SetClickedCallback(std::function<void()> callback) {
    clickedCallback = callback;
    hasClickedCallback = true;
}

bool Button::GetMouseHover() const {
    return mouseHover;
}
|
<commit_msg>TST: Update test to match API.
<commit_before>from skimage import draw
import matplotlib.pyplot as plt
import numpy as np
import photomosaic as pm
img = np.zeros((1000, 1000))
rr, cc = draw.circle(300, 500, 150)
img[rr, cc] = 1
tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3)
plt.imshow(pm.draw_tiles(img, tiles, color=0.5))
plt.savefig('test-partition.png')
<commit_after>from skimage import draw
import matplotlib
# Select a non-interactive backend so the test can run headless.
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import photomosaic as pm

# Build a binary test image containing one filled circle.
img = np.zeros((1000, 1000))
rr, cc = draw.circle(300, 500, 150)
img[rr, cc] = 1

# Partition only the masked (circle) region with up to 3 levels of
# subdivision, then render the tile layout and save it for inspection.
tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3)
plt.imshow(pm.draw_tile_layout(img, tiles, color=0.5))
plt.savefig('test-partition.png')
|