content
stringlengths 128
6.74k
|
---|
<commit_msg>Apply a blur filter automatically for each detected face<commit_before>import requests
import json
imgUrl = 'https://pixlab.io/images/m3.jpg' # Target picture we want to blur any face on
# Detect all human faces in a given image via /facedetect first and blur all of them later via /mogrify.
# https://pixlab.io/cmd?id=facedetect and https://pixlab.io/cmd?id=mogrify for additional information.
req = requests.get('https://api.pixlab.io/facedetect',params={
'img': imgUrl,
'key':'PIXLAB_API_KEY',
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
exit();
total = len(reply['faces']) # Total detected faces
print(str(total)+" faces were detected")
if total < 1:
# No faces were detected, exit immediately
exit()
# Pass the detected faces coordinates untouched to mogrify
coordinates = reply['faces']
# Call mogrify & blur the face(s)
req = requests.post('https://api.pixlab.io/mogrify',headers={'Content-Type':'application/json'},data=json.dumps({
'img': imgUrl,
'key':'PIXLAB_API_KEY',
'cord': coordinates # The field of interest
}))
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Blurred Picture URL: "+ reply['ssl_link'])
<commit_after>import requests
import json
imgUrl = 'https://pixlab.io/images/m3.jpg' # Target picture we want to blur any face on
# Detect all human faces in a given image via /facedetect first and blur all of them later via /mogrify.
# https://pixlab.io/cmd?id=facedetect and https://pixlab.io/cmd?id=mogrify for additional information.
req = requests.get('https://api.pixlab.io/facedetect',params={
'img': imgUrl,
'key':'PIXLAB_API_KEY',
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
exit();
total = len(reply['faces']) # Total detected faces
print(str(total)+" faces were detected")
if total < 1:
# No faces were detected, exit immediately
exit()
# Pass the facial coordinates for each detected face untouched to mogrify
coordinates = reply['faces']
# Call mogrify & blur the face(s)
req = requests.post('https://api.pixlab.io/mogrify',headers={'Content-Type':'application/json'},data=json.dumps({
'img': imgUrl,
'key':'PIXLAB_API_KEY',
'cord': coordinates # The field of interest
}))
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Blurred Picture URL: "+ reply['ssl_link'])
|
<commit_msg>Add JavaDoc for deprecated methods
<commit_before>package com.suse.salt.netapi.datatypes.target;
/**
* Matcher based on salt grains
*/
public class Grains extends DictionaryTarget implements Target<String> {
/**
* Creates a grains matcher
*
* @param target the targeting expression
*/
public Grains(String target) {
super(target);
}
/**
* Creates a grains matcher
*
* @param target the targeting expression
* @param delimiter the character to delimit nesting in the grain name
*/
public Grains(String target, char delimiter) {
super(target, delimiter);
}
/**
* Creates a grains matcher
*
* @param grain the grain name
* @param value the value to match
*/
public Grains(String grain, String value) {
super(grain, value);
}
/**
* Creates a grains matcher
*
* @param grain the grain name
* @param value the value to match
* @param delimiter the character to delimit nesting in the grain name
*/
public Grains(String grain, String value, char delimiter) {
super(grain, value, delimiter);
}
/**
* {@inheritDoc}
*/
@Override
public TargetType getType() { return TargetType.GRAIN; }
@Deprecated
public String getGrain() { return getKey(); }
}
<commit_after>package com.suse.salt.netapi.datatypes.target;
/**
* Matcher based on salt grains
*/
public class Grains extends DictionaryTarget implements Target<String> {
/**
* Creates a grains matcher
*
* @param target the targeting expression
*/
public Grains(String target) {
super(target);
}
/**
* Creates a grains matcher
*
* @param target the targeting expression
* @param delimiter the character to delimit nesting in the grain name
*/
public Grains(String target, char delimiter) {
super(target, delimiter);
}
/**
* Creates a grains matcher
*
* @param grain the grain name
* @param value the value to match
*/
public Grains(String grain, String value) {
super(grain, value);
}
/**
* Creates a grains matcher
*
* @param grain the grain name
* @param value the value to match
* @param delimiter the character to delimit nesting in the grain name
*/
public Grains(String grain, String value, char delimiter) {
super(grain, value, delimiter);
}
/**
* {@inheritDoc}
*/
@Override
public TargetType getType() { return TargetType.GRAIN; }
/**
* Return the grain identifier key
*
* @deprecated
* Use {@link #getKey()} instead.
*/
@Deprecated
public String getGrain() { return getKey(); }
}
|
<commit_msg>Add missing vat alias for New Zealand
Closes https://github.com/arthurdejong/python-stdnum/pull/202
<commit_before>
"""Collection of New Zealand numbers."""
<commit_after>
"""Collection of New Zealand numbers."""
# provide aliases
from stdnum.nz import ird as vat # noqa: F401
|
<commit_msg>Use pkg_resources to read README.rst
<commit_before>
from setuptools import setup, find_packages
setup(
name='basicdb',
version='0.1',
description='Basic database service',
long_description=open('README.rst', 'r').read(),
author='Soren Hansen',
author_email='soren@linux2go.dk',
url='http://github.com/sorenh/basicdb',
packages=find_packages(),
include_package_data=True,
license='Apache 2.0',
keywords='basicdb simpledb')
<commit_after>
from setuptools import setup, find_packages
import pkg_resources
setup(
name='basicdb',
version='0.1',
description='Basic database service',
long_description=pkg_resources.resource_string(__name__, "README.rst"),
author='Soren Hansen',
author_email='soren@linux2go.dk',
url='http://github.com/sorenh/basicdb',
packages=find_packages(),
include_package_data=True,
license='Apache 2.0',
keywords='basicdb simpledb')
|
<commit_msg>Add tests for .read methods
<commit_before>"""Test for `maas.client.viscera.sshkeys`."""
from .. import sshkeys
from ...testing import (
make_string_without_spaces,
TestCase,
)
from ..testing import bind
def make_origin():
return bind(sshkeys.SSHKeys, sshkeys.SSHKey)
class TestSSHKeys(TestCase):
def test__sshkeys_create(self):
""" SSHKeys.create() returns a new SSHKey. """
SSHKeys = make_origin().SSHKeys
key = make_string_without_spaces()
SSHKeys._handler.create.return_value = {
"id": 1,
"key": key,
"keysource": "",
}
SSHKeys.create(key=key)
SSHKeys._handler.create.assert_called_once_with(
key=key
)
<commit_after>"""Test for `maas.client.viscera.sshkeys`."""
import random
from .. import sshkeys
from ...testing import (
make_string_without_spaces,
TestCase,
)
from ..testing import bind
from testtools.matchers import Equals
def make_origin():
return bind(sshkeys.SSHKeys, sshkeys.SSHKey)
class TestSSHKeys(TestCase):
def test__sshkeys_create(self):
""" SSHKeys.create() returns a new SSHKey. """
SSHKeys = make_origin().SSHKeys
key = make_string_without_spaces()
SSHKeys._handler.create.return_value = {
"id": 1,
"key": key,
"keysource": "",
}
SSHKeys.create(key=key)
SSHKeys._handler.create.assert_called_once_with(
key=key
)
def test__sshkeys_read(self):
""" SSHKeys.read() returns a list of SSH keys. """
SSHKeys = make_origin().SSHKeys
keys = [
{
"id": random.randint(0, 100),
"key": make_string_without_spaces(),
"keysource": "",
}
for _ in range(3)
]
SSHKeys._handler.read.return_value = keys
ssh_keys = SSHKeys.read()
self.assertThat(len(ssh_keys), Equals(3))
class TestSSHKey(TestCase):
def test__sshkey_read(self):
""" SSHKeys.read() returns a single SSH key. """
SSHKey = make_origin().SSHKey
key_id = random.randint(0, 100)
key_dict = {
"id": key_id,
"key": make_string_without_spaces(),
"keysource": "",
}
SSHKey._handler.read.return_value = key_dict
self.assertThat(SSHKey.read(id=key_id), Equals(SSHKey(key_dict)))
|
<commit_msg>Change the ilst content (2)
<commit_before>/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* ft_trmsignalhook.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: ncoden <ncoden@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2015/05/14 17:12:55 by ncoden #+# #+# */
/* Updated: 2015/05/18 16:04:52 by ncoden ### ########.fr */
/* */
/* ************************************************************************** */
#include "libft.h"
void ft_trmsignalhook(t_trm *trm, char sig, void (*func)(void *),
void *data)
{
t_ilst_evnt *event;
if ((event = (t_ilst_evnt *)ft_ilstpushfront__(sizeof(t_ilst_evnt),
(t_ilst **)&trm->on_signal, sig)))
{
event->event.func = func;
event->event.data = data;
}
}
<commit_after>/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* ft_trmsignalhook.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: ncoden <ncoden@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2015/05/14 17:12:55 by ncoden #+# #+# */
/* Updated: 2015/05/23 23:50:08 by ncoden ### ########.fr */
/* */
/* ************************************************************************** */
#include "libft.h"
void ft_trmsignalhook(t_trm *trm, char sig, void (*func)(void *),
void *data)
{
t_ilst_evnt *event;
if ((event = (t_ilst_evnt *)ft_ilstpush__(sizeof(t_ilst_evnt),
(t_ilst **)&trm->on_signal, sig)))
{
event->event.func = func;
event->event.data = data;
}
}
|
<commit_msg>Add more tests to the local process integration test
<commit_before>from twisted.trial.unittest import TestCase
from carnifex.localprocess import LocalProcessInductor
from twisted.internet import reactor
class InductorTest(TestCase):
def test_real_run(self):
executable = 'echo'
echo_text = "hello world!"
expected_stdout = echo_text + '\n'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable, args=(executable, echo_text))
@result.addCallback
def check_result(result):
expected_result = (expected_stdout, '', 0)
self.assertEqual(result, expected_result)
return result
<commit_after>from twisted.trial.unittest import TestCase
from carnifex.localprocess import LocalProcessInductor
from twisted.internet import reactor
class InductorTest(TestCase):
def test_real_run(self):
executable = 'echo'
echo_text = "hello world!"
expected_stdout = echo_text + '\n'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable, args=(executable, echo_text))
@result.addCallback
def check_result(result):
expected_result = (expected_stdout, '', 0)
self.assertEqual(result, expected_result)
return result
def test_real_run_unknown_command(self):
executable = 'thiscommandshouldnotexist'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable)
@result.addCallback
def check_result(result):
stdout, stderr, code = result
self.assertEqual(stdout, '')
self.assertNotEqual(stderr, '')
self.assertNotEqual(code, 0)
return result
def test_getExitStatus_false(self):
inductor = LocalProcessInductor(reactor)
result = inductor.getExitStatus('false')
@result.addCallback
def check_result(result):
self.assertNotEqual(result, 0, "The 'false' command should "
"exit with a nonzero code")
return result
def test_getExitStatus_true(self):
inductor = LocalProcessInductor(reactor)
result = inductor.getExitStatus('true')
@result.addCallback
def check_result(result):
self.assertEqual(result, 0, "The 'true' command should "
"exit with code 0")
return result
|
<commit_msg>Update badge utility functions to use Polaris 2.0 Badge API.
<commit_before>import { HitStatus } from '../types';
import { BadgeDescriptor } from '@shopify/polaris/types/components/ResourceList/Item';
import { Status } from '@shopify/polaris/types/components/Badge/Badge';
// import { BadgeProps } from '@shopify/polaris';
const noTOBadge: BadgeDescriptor = {
content: 'No T.O.',
status: '' as Status
};
export const generateTOpticonBadge = (
averageScore: number | null
): BadgeDescriptor[] => {
if (!averageScore) {
return [noTOBadge];
}
const status = assignScoreColor(averageScore) as Status;
const content = generateContentString(averageScore);
return [
{
status,
content
}
];
};
const generateContentString = (average: number | null) => {
return !average ? 'No T.O.' : `${average.toFixed(2)} T.O.`;
};
const assignScoreColor = (score: number | null): Status | null => {
if (!score) {
return null;
}
if (score < 2) {
return 'warning';
} else if (score < 3) {
return 'attention';
} else if (score < 4) {
return 'info';
} else {
return 'success';
}
};
export const generateHitStatusBadge = (status: HitStatus): BadgeDescriptor => {
switch (status) {
case 'Paid':
return { content: 'Paid', status: 'success' };
case 'Approved':
case 'Pending Payment':
return { content: 'Approved', status: 'success' };
case 'Rejected':
return { content: 'Rejected', status: 'warning' };
case 'Submitted':
case 'Pending Approval':
return { content: 'Pending', status: 'info' };
default:
return { content: 'Pending', status: 'info' };
}
};
<commit_after>import { HitStatus } from '../types';
import { Status } from '@shopify/polaris/types/components/Badge/Badge';
export const assignScoreColor = (score: number): Status => {
if (score < 2) {
return 'warning';
} else if (score < 3) {
return 'attention';
} else if (score < 4) {
return 'info';
} else {
return 'success';
}
};
export const generateHitStatusBadge = (status: HitStatus): Status => {
switch (status) {
case 'Paid':
return 'success';
case 'Approved':
case 'Pending Payment':
return 'success';
case 'Rejected':
return 'warning';
case 'Submitted':
case 'Pending Approval':
return 'info';
default:
return 'info';
}
};
|
<commit_msg>Fix imports now that the serializer is in its own sub-package.
--HG--
extra : convert_revision : svn%3Aacbfec75-9323-0410-a652-858a13e371e0/trunk%40800
<commit_before>from htmlserializer import HTMLSerializer
from xhtmlserializer import XHTMLSerializer
<commit_after>
import os.path
__path__.append(os.path.dirname(__path__[0]))
from htmlserializer import HTMLSerializer
from xhtmlserializer import XHTMLSerializer
|
<commit_msg>Fix unittest for true headers..
<commit_before>from twisted.trial import unittest
from ooni.utils.txagentwithsocks import TrueHeaders
dummy_headers_dict = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB']
}
dummy_headers_dict2 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header3': ['ValueA', 'ValueB'],
}
dummy_headers_dict3 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header4': ['ValueA', 'ValueB'],
}
class TestTrueHeaders(unittest.TestCase):
def test_names_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
def test_names_not_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
th = TrueHeaders(dummy_headers_dict3)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
def test_names_match_expect_ignore(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
<commit_after>from twisted.trial import unittest
from ooni.utils.trueheaders import TrueHeaders
dummy_headers_dict = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB']
}
dummy_headers_dict2 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header3': ['ValueA', 'ValueB'],
}
dummy_headers_dict3 = {
'Header1': ['Value1', 'Value2'],
'Header2': ['ValueA', 'ValueB'],
'Header4': ['ValueA', 'ValueB'],
}
class TestTrueHeaders(unittest.TestCase):
def test_names_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
def test_names_not_match(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
th = TrueHeaders(dummy_headers_dict3)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
def test_names_match_expect_ignore(self):
th = TrueHeaders(dummy_headers_dict)
self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
|
<commit_msg>Validate length of cancelation reason supplied via form
<commit_before>
from wtforms import TextAreaField
from wtforms.validators import InputRequired
from ...util.l10n import LocalizedForm
class CancelForm(LocalizedForm):
reason = TextAreaField('Begründung', validators=[InputRequired()])
<commit_after>
from wtforms import TextAreaField
from wtforms.validators import InputRequired, Length
from ...util.l10n import LocalizedForm
class CancelForm(LocalizedForm):
reason = TextAreaField('Begründung', validators=[InputRequired(), Length(max=200)])
|
<commit_msg>Add one bigger size to arithmetic benchmark
<commit_before>import numpy as np
from dynd import nd, ndt
from benchrun import Benchmark, clock
class ArithemticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000]
def run(self, size):
a = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
b = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
start = clock()
a + b
stop = clock()
return stop - start
class NumPyArithemticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000]
def run(self, size):
a = np.random.uniform(size = size)
b = np.random.uniform(size = size)
start = clock()
a + b
stop = clock()
return stop - start
if __name__ == '__main__':
benchmark = ArithemticBenchmark()
benchmark.print_result()
benchmark = NumPyArithemticBenchmark()
benchmark.print_result()<commit_after>import numpy as np
from dynd import nd, ndt
from benchrun import Benchmark, clock
class ArithmeticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000, 100000000]
def run(self, size):
a = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
b = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
start = clock()
a + b
stop = clock()
return stop - start
class NumPyArithmeticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000, 100000000]
def run(self, size):
a = np.random.uniform(size = size)
b = np.random.uniform(size = size)
start = clock()
a + b
stop = clock()
return stop - start
if __name__ == '__main__':
benchmark = ArithmeticBenchmark()
benchmark.print_result()
benchmark = NumPyArithmeticBenchmark()
benchmark.print_result()
|
<commit_msg>Set index main view to return post ordered by updated and title field
<commit_before>from flask import render_template
from flask_classy import FlaskView
from ..models import PostModel
class Main(FlaskView):
""" Main page view. """
route_base = "/"
def index(self):
posts = PostModel.fetch()
return render_template("index.html", posts=posts)
<commit_after>from flask import render_template
from flask_classy import FlaskView
from ..models import PostModel
class Main(FlaskView):
""" Main page view. """
route_base = "/"
def index(self):
PostModel.set_query()
PostModel.query.order = ['-updated', 'title']
posts = PostModel.fetch()
return render_template("index.html", posts=posts)
|
<commit_msg>Set up library search paths for tests.
When running tests, exclusively use the libraries that are either
built-in, or are provided with the test suite.
<commit_before>import os
from skidl import *
files_at_start = set([])
def setup_function(f):
global files_at_start
files_at_start = set(os.listdir('.'))
# Make this test directory the library search paths for all ECAD tools
for tool_lib_path in lib_search_paths:
tool_lib_path = [os.path.dirname(os.path.abspath(__file__))]
default_circuit.mini_reset()
def teardown_function(f):
files_at_end = set(os.listdir('.'))
for file in files_at_end - files_at_start:
try:
os.remove(file)
except Exception:
pass
def get_filename(fn):
"""
Resolves a filename relative to the "tests" directory.
"""
abs_fn = \
fn if os.path.isabs(fn) else \
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
fn)
return os.path.realpath(abs_fn)
if __name__ == '__main__':
setup_function(None)
with open('test.txt','wb') as f:
f.write('test')
teardown_function(None)
<commit_after>import os
from skidl import *
files_at_start = set([])
def setup_function(f):
global files_at_start
files_at_start = set(os.listdir('.'))
default_circuit.mini_reset()
lib_search_paths.clear()
lib_search_paths.update({
KICAD: [".", get_filename(".")],
SKIDL: [".", get_filename("../skidl/libs")]
})
def teardown_function(f):
files_at_end = set(os.listdir('.'))
for file in files_at_end - files_at_start:
try:
os.remove(file)
except Exception:
pass
def get_filename(fn):
"""
Resolves a filename relative to the "tests" directory.
"""
abs_fn = \
fn if os.path.isabs(fn) else \
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
fn)
return os.path.realpath(abs_fn)
if __name__ == '__main__':
setup_function(None)
with open('test.txt','wb') as f:
f.write('test')
teardown_function(None)
|
<commit_msg>[olc] Fix warning in open location code library
<commit_before>
namespace openlocationcode {
const double kLatitudeMaxDegrees = 90;
const double kLongitudeMaxDegrees = 180;
CodeArea::CodeArea(double latitude_lo, double longitude_lo, double latitude_hi,
double longitude_hi, size_t code_length) {
latitude_lo_ = latitude_lo;
longitude_lo_ = longitude_lo;
latitude_hi_ = latitude_hi;
longitude_hi_ = longitude_hi;
code_length_ = code_length;
}
double CodeArea::GetLatitudeLo() const{
return latitude_lo_;
}
double CodeArea::GetLongitudeLo() const {
return longitude_lo_;
}
double CodeArea::GetLatitudeHi() const {
return latitude_hi_;
}
double CodeArea::GetLongitudeHi() const {
return longitude_hi_;
}
size_t CodeArea::GetCodeLength() const { return code_length_; }
LatLng CodeArea::GetCenter() const {
double latitude_center = std::min(latitude_lo_ + (latitude_hi_ - latitude_lo_) / 2, kLatitudeMaxDegrees);
double longitude_center = std::min(longitude_lo_ + (longitude_hi_ - longitude_lo_) / 2, kLongitudeMaxDegrees);
LatLng center = {latitude: latitude_center, longitude: longitude_center};
return center;
}
} // namespace openlocationcode<commit_after>
namespace openlocationcode {
const double kLatitudeMaxDegrees = 90;
const double kLongitudeMaxDegrees = 180;
CodeArea::CodeArea(double latitude_lo, double longitude_lo, double latitude_hi,
double longitude_hi, size_t code_length) {
latitude_lo_ = latitude_lo;
longitude_lo_ = longitude_lo;
latitude_hi_ = latitude_hi;
longitude_hi_ = longitude_hi;
code_length_ = code_length;
}
double CodeArea::GetLatitudeLo() const{
return latitude_lo_;
}
double CodeArea::GetLongitudeLo() const {
return longitude_lo_;
}
double CodeArea::GetLatitudeHi() const {
return latitude_hi_;
}
double CodeArea::GetLongitudeHi() const {
return longitude_hi_;
}
size_t CodeArea::GetCodeLength() const { return code_length_; }
LatLng CodeArea::GetCenter() const {
double latitude_center = std::min(latitude_lo_ + (latitude_hi_ - latitude_lo_) / 2, kLatitudeMaxDegrees);
double longitude_center = std::min(longitude_lo_ + (longitude_hi_ - longitude_lo_) / 2, kLongitudeMaxDegrees);
LatLng center{latitude_center, longitude_center};
return center;
}
} // namespace openlocationcode
|
<commit_msg>Remove a needless using namespace
<commit_before>
extern VALUE cMove;
using namespace osl;
#ifdef __cplusplus
extern "C" {
#endif
extern void Init_move(VALUE mOsl);
#ifdef __cplusplus
} /* extern "C" */
#endif
#endif /* RBOSL_MOVE_H */
<commit_after>
extern VALUE cMove;
#ifdef __cplusplus
extern "C" {
#endif
extern void Init_move(VALUE mOsl);
#ifdef __cplusplus
} /* extern "C" */
#endif
#endif /* RBOSL_MOVE_H */
|
<commit_msg>Make test a bit more precise.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@79073 91177308-0d34-0410-b5e6-96231b3b80d8
<commit_before>// RUN: clang-cc -emit-llvm %s -o - | not grep ptrtoint
// Make sure we generate something sane instead of a ptrtoint
union x {long long b;union x* a;} r = {.a = &r};
<commit_after>// RUN: clang-cc -emit-llvm %s -o - -triple i686-pc-linux-gnu | grep "bitcast (%0\* @r to %union.x\*), \[4 x i8\] zeroinitializer"
// Make sure we generate something sane instead of a ptrtoint
union x {long long b;union x* a;} r = {.a = &r};
|
<commit_msg>Use fmtlib's sprintf instead of tinyformat
TODO: Move to the indexed formatting strings for fmtlib
<commit_before>// Tinyformat has a number of non-annotated switch fallthrough cases
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wimplicit-fallthrough"
#endif
#include "dependencies/tinyformat/tinyformat.h"
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#include "library/strings.h"
namespace OpenApoc
{
template <typename... Args> static UString format(const UString &fmt, Args &&... args)
{
return tfm::format(fmt.cStr(), std::forward<Args>(args)...);
}
UString tr(const UString &str, const UString domain = "ufo_string");
} // namespace OpenApoc
<commit_after>
namespace OpenApoc
{
template <typename... Args> static UString format(const UString &fmt, Args &&... args)
{
return fmt::sprintf(fmt.str(), std::forward<Args>(args)...);
}
UString tr(const UString &str, const UString domain = "ufo_string");
} // namespace OpenApoc
template <> struct fmt::formatter<OpenApoc::UString> : formatter<std::string>
{
template <typename FormatContext> auto format(const OpenApoc::UString &s, FormatContext &ctx)
{
return formatter<std::string>::format(s.str(), ctx);
}
};
|
<commit_msg>Fix for building indexes for MongoDB
<commit_before>package dbServices
import (
"github.com/DanielRenne/GoCore/core/extensions"
"reflect"
)
//GetIndexes provides a way to reflect on your structure to get structs tagged with `dbIndex`.
//This function is used to generate Indexes for MongoDB and other databases.
func GetDBIndexes(x interface{}) map[string]string {
keys := make(map[string]string)
getDBIndexesRecursive(reflect.ValueOf(x), keys, "")
return keys
}
func getDBIndexesRecursive(val reflect.Value, keys map[string]string, key string) {
for i := 0; i < val.NumField(); i++ {
valueField := val.Field(i)
typeField := val.Type().Field(i)
index := typeField.Tag.Get("dbIndex")
appendKey := extensions.MakeFirstLowerCase(typeField.Name)
if !valueField.CanInterface() {
continue
}
field := valueField.Interface()
fieldval := reflect.ValueOf(field)
switch fieldval.Kind() {
case reflect.Array, reflect.Slice, reflect.Struct:
getDBIndexesRecursive(fieldval, keys, key+appendKey+".")
default:
if index != "" {
keys[key+appendKey] = index
}
}
}
}
<commit_after>package dbServices
import (
"github.com/DanielRenne/GoCore/core/extensions"
"reflect"
)
//GetIndexes provides a way to reflect on your structure to get structs tagged with `dbIndex`.
//This function is used to generate Indexes for MongoDB and other databases.
func GetDBIndexes(x interface{}) map[string]string {
keys := make(map[string]string)
getDBIndexesRecursive(reflect.ValueOf(x), keys, "")
return keys
}
func getDBIndexesRecursive(val reflect.Value, keys map[string]string, key string) {
kind := val.Kind()
if kind == reflect.Slice {
return
}
for i := 0; i < val.NumField(); i++ {
valueField := val.Field(i)
typeField := val.Type().Field(i)
index := typeField.Tag.Get("dbIndex")
appendKey := extensions.MakeFirstLowerCase(typeField.Name)
if !valueField.CanInterface() {
continue
}
field := valueField.Interface()
fieldval := reflect.ValueOf(field)
switch fieldval.Kind() {
case reflect.Array, reflect.Slice, reflect.Struct:
getDBIndexesRecursive(fieldval, keys, key+appendKey+".")
default:
if index != "" {
keys[key+appendKey] = index
}
}
}
}
|
<commit_msg>Fix CommentModel m2m null warning
<commit_before>import os
from django.db import models
from django.conf import settings
class UserModel(models.Model):
name = models.CharField(max_length=20)
upload_to = os.path.join(settings.FILE_STORAGE_DIR, 'test_serializers')
class CommentModel(models.Model):
user = models.ForeignKey(
UserModel,
related_name='comments',
on_delete=models.CASCADE,
)
users_liked = models.ManyToManyField(UserModel, blank=True, null=True)
title = models.CharField(max_length=20)
text = models.CharField(max_length=200)
attachment = models.FileField(
upload_to=upload_to, blank=True, null=True, max_length=500)
hidden_text = models.CharField(max_length=200, blank=True, null=True)
<commit_after>import os
from django.db import models
from django.conf import settings
class UserModel(models.Model):
name = models.CharField(max_length=20)
upload_to = os.path.join(settings.FILE_STORAGE_DIR, 'test_serializers')
class CommentModel(models.Model):
user = models.ForeignKey(
UserModel,
related_name='comments',
on_delete=models.CASCADE,
)
users_liked = models.ManyToManyField(UserModel, blank=True)
title = models.CharField(max_length=20)
text = models.CharField(max_length=200)
attachment = models.FileField(
upload_to=upload_to, blank=True, null=True, max_length=500)
hidden_text = models.CharField(max_length=200, blank=True, null=True)
|
<commit_msg>Make methods and class public
<commit_before>package me.walkersneps.sneps.utils;
/**
* Created by Walkersneps on 23/04/16
* in package me.walkersneps.sneps.utils
* for SnepsUtils
*/
/**
* @author Walkersneps
*/
class PrimitiveConverter {
/**
* Converts a string to an int type variable
*
* @param str the input string
*
* @return first integer found in the input string
*/
static int stringToInt (String str){
return Integer.parseInt(str);
}
/**
* Converts a string to a char type variable
*
* @param str the input string
*
* @return first character in the input string
*/
static char stringToChar (String str) {
return str.charAt(0);
}
/**
* Converts a char to a string type variable
*
* @param c the input character
*
* @return string containing the passed character
*/
static String charToString (char c) {
return Character.toString(c);
}
/**
* Converts an integer in a string type variable
*
* @param i the input integer
*
* @return string containing the passed integer
*/
static String intToString (int i) {
return Integer.toString(i);
}
} //end of class
<commit_after>package me.walkersneps.sneps.utils;
/**
* Created by Walkersneps on 23/04/16
* in package me.walkersneps.sneps.utils
* for SnepsUtils
*/
/**
* @author Walkersneps
*/
public class PrimitiveConverter {
/**
* Converts a string to an int type variable
*
* @param str the input string
*
* @return first integer found in the input string
*/
public static int stringToInt (String str){
return Integer.parseInt(str);
}
/**
* Converts a string to a char type variable
*
* @param str the input string
*
* @return first character in the input string
*/
public static char stringToChar (String str) {
return str.charAt(0);
}
/**
* Converts a char to a string type variable
*
* @param c the input character
*
* @return string containing the passed character
*/
public static String charToString (char c) {
return Character.toString(c);
}
/**
* Converts an integer in a string type variable
*
* @param i the input integer
*
* @return string containing the passed integer
*/
public static String intToString (int i) {
return Integer.toString(i);
}
} //end of class
|
<commit_msg>Add examples for new EventSource API
<commit_before>{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Monad
import Control.Concurrent (forkIO, threadDelay)
import Control.Concurrent.Chan
import Network.Wai.Handler.Warp (run)
import Network.Wai.Middleware.Gzip (gzip, def)
import Network.Wai
import Network.HTTP.Types (statusOK)
import Data.Time.Clock.POSIX
import Blaze.ByteString.Builder.Char.Utf8 (fromString)
import Network.Wai.EventSource
app :: Chan ServerEvent -> Application
app chan req =
case pathInfo req of
[] -> return $ ResponseFile statusOK [("Content-Type", "text/html")] "static/index.html" Nothing
["es"] -> eventSourceApp chan req
_ -> error "unexpected pathInfo"
source :: Chan ServerEvent -> IO ()
source chan = forever $ do
threadDelay 1000000
time <- getPOSIXTime
writeChan chan (ServerEvent Nothing Nothing [fromString . show $ time])
main :: IO ()
main = do
chan <- newChan
_ <- forkIO . source $ chan
run 8000 (gzip def $ app chan)
<commit_after>{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Monad
import Control.Concurrent (forkIO, threadDelay)
import Control.Monad.Trans (liftIO)
import Control.Concurrent.Chan
import Network.Wai.Handler.Warp (run)
import Network.Wai.Middleware.Gzip (gzip, def)
import Network.Wai
import Network.HTTP.Types (statusOK)
import Data.Time.Clock.POSIX
import Blaze.ByteString.Builder.Char.Utf8 (fromString)
import qualified Data.Conduit as C
import Network.Wai.EventSource
app :: Chan ServerEvent -> Application
app chan req =
case pathInfo req of
[] -> return $ ResponseFile statusOK [("Content-Type", "text/html")] "static/index.html" Nothing
["esold"] -> eventSourceApp chan req
["eschan"] -> eventSourceAppChan chan req
["esio"] -> eventSourceAppIO eventIO req
["essrc"] -> eventSourceAppSource eventSource req
_ -> error "unexpected pathInfo"
eventChan :: Chan ServerEvent -> IO ()
eventChan chan = forever $ do
threadDelay 1000000
time <- getPOSIXTime
writeChan chan (ServerEvent Nothing Nothing [fromString . show $ time])
eventIO :: IO ServerEvent
eventIO = do
threadDelay 1000000
time <- getPOSIXTime
return $ ServerEvent (Just $ fromString "io")
Nothing
[fromString . show $ time]
eventSource :: C.Source IO ServerEvent
eventSource = C.sourceState () (const pull)
where
pull = do
time <- liftIO $ do
threadDelay 1000000
getPOSIXTime
return $ C.StateOpen () $ ServerEvent (Just $ fromString "source")
Nothing
[fromString . show $ time]
main = do
chan <- newChan
_ <- forkIO . eventChan $ chan
run 8000 (gzip def $ app chan)
|
<commit_msg>Replace User import with call to get_user_model()
<commit_before>import factory
from django.contrib.auth.hashers import make_password
from .models import Friendship, User
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
<commit_after>import factory
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import make_password
from .models import Friendship
User = get_user_model()
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
|
<commit_msg>Make this a subclass of Thread.
<commit_before>// Copyright (c) 1999 Brian Wellington (bwelling@xbill.org)
// Portions Copyright (c) 1999 Network Associates, Inc.
package org.xbill.DNS;
import java.util.*;
import java.io.*;
import java.net.*;
/**
* A special-purpose thread used by Resolvers (both SimpleResolver and
* ExtendedResolver) to perform asynchronous queries.
*
* @author Brian Wellington
*/
class ResolveThread implements Runnable {
private Message query;
private Object id;
private ResolverListener listener;
private Resolver res;
/** Creates a new ResolveThread */
public
ResolveThread(Resolver res, Message query, Object id,
ResolverListener listener)
{
this.res = res;
this.query = query;
this.id = id;
this.listener = listener;
}
/**
* Performs the query, and executes the callback.
*/
public void
run() {
try {
Message response = res.send(query);
listener.receiveMessage(id, response);
}
catch (Exception e) {
listener.handleException(id, e);
}
}
}
<commit_after>// Copyright (c) 1999 Brian Wellington (bwelling@xbill.org)
// Portions Copyright (c) 1999 Network Associates, Inc.
package org.xbill.DNS;
import java.util.*;
import java.io.*;
import java.net.*;
/**
* A special-purpose thread used by Resolvers (both SimpleResolver and
* ExtendedResolver) to perform asynchronous queries.
*
* @author Brian Wellington
*/
class ResolveThread extends Thread {
private Message query;
private Object id;
private ResolverListener listener;
private Resolver res;
/** Creates a new ResolveThread */
public
ResolveThread(Resolver res, Message query, Object id,
ResolverListener listener)
{
this.res = res;
this.query = query;
this.id = id;
this.listener = listener;
}
/**
* Performs the query, and executes the callback.
*/
public void
run() {
try {
Message response = res.send(query);
listener.receiveMessage(id, response);
}
catch (Exception e) {
listener.handleException(id, e);
}
}
}
|
<commit_msg>Remove per patch version classifiers<commit_before>
from setuptools import setup
from setuptools import find_packages
import re
def find_version():
    """Extract the package version from cantools/version.py.

    Returns:
        The version string captured from the ``__version__ = '...'``
        assignment.

    Raises:
        AttributeError: if no ``__version__`` assignment is found
            (``re.search`` returns ``None``).
    """
    # Use a context manager so the file handle is closed deterministically
    # instead of leaking until garbage collection.
    with open('cantools/version.py', 'r') as f:
        return re.search(r"^__version__ = '(.*)'$",
                         f.read(),
                         re.MULTILINE).group(1)
setup(name='cantools',
version=find_version(),
description='CAN BUS tools.',
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='erik.moqvist@gmail.com',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
keywords=['can', 'can bus', 'dbc', 'kcd', 'automotive'],
url='https://github.com/eerimoq/cantools',
packages=find_packages(exclude=['tests']),
python_requires='>=3.6',
install_requires=[
'bitstruct>=6.0.0',
'python-can>=2.2.0',
'textparser>=0.21.1',
'diskcache',
'argparse_addons',
],
test_suite="tests",
entry_points = {
'console_scripts': ['cantools=cantools.__init__:_main']
})
<commit_after>
from setuptools import setup
from setuptools import find_packages
import re
def find_version():
    """Extract the package version from cantools/version.py.

    Returns:
        The version string captured from the ``__version__ = '...'``
        assignment.

    Raises:
        AttributeError: if no ``__version__`` assignment is found
            (``re.search`` returns ``None``).
    """
    # Use a context manager so the file handle is closed deterministically
    # instead of leaking until garbage collection.
    with open('cantools/version.py', 'r') as f:
        return re.search(r"^__version__ = '(.*)'$",
                         f.read(),
                         re.MULTILINE).group(1)
setup(name='cantools',
version=find_version(),
description='CAN BUS tools.',
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='erik.moqvist@gmail.com',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
],
keywords=['can', 'can bus', 'dbc', 'kcd', 'automotive'],
url='https://github.com/eerimoq/cantools',
packages=find_packages(exclude=['tests']),
python_requires='>=3.6',
install_requires=[
'bitstruct>=6.0.0',
'python-can>=2.2.0',
'textparser>=0.21.1',
'diskcache',
'argparse_addons',
],
test_suite="tests",
entry_points = {
'console_scripts': ['cantools=cantools.__init__:_main']
})
|
<commit_msg>Clean up mocking done by udev tests when finished.
<commit_before>
import unittest
import mock
class UdevTest(unittest.TestCase):
def setUp(self):
import blivet.udev
blivet.udev.os = mock.Mock()
blivet.udev.log = mock.Mock()
def test_udev_get_device(self):
import blivet.udev
devices = blivet.udev.global_udev.list_devices(subsystem="block")
for device in devices:
self.assertNotEqual(blivet.udev.get_device(device.sys_path), None)
def udev_settle_test(self):
import blivet.udev
blivet.udev.util = mock.Mock()
blivet.udev.settle()
self.assertTrue(blivet.udev.util.run_program.called)
def udev_trigger_test(self):
import blivet.udev
blivet.udev.util = mock.Mock()
blivet.udev.trigger()
self.assertTrue(blivet.udev.util.run_program.called)
if __name__ == "__main__":
unittest.main()
<commit_after>
import unittest
import mock
class UdevTest(unittest.TestCase):
def setUp(self):
import blivet.udev
self._blivet_os = blivet.udev.os
self._blivet_log = blivet.udev.log
self._blivet_util = blivet.udev.util
blivet.udev.os = mock.Mock()
blivet.udev.log = mock.Mock()
blivet.udev.util = mock.Mock()
def tearDown(self):
import blivet.udev
blivet.udev.log = self._blivet_log
blivet.udev.os = self._blivet_os
blivet.udev.util = self._blivet_util
def test_udev_get_device(self):
import blivet.udev
devices = blivet.udev.global_udev.list_devices(subsystem="block")
for device in devices:
self.assertNotEqual(blivet.udev.get_device(device.sys_path), None)
def udev_settle_test(self):
import blivet.udev
blivet.udev.settle()
self.assertTrue(blivet.udev.util.run_program.called)
def udev_trigger_test(self):
import blivet.udev
blivet.udev.trigger()
self.assertTrue(blivet.udev.util.run_program.called)
if __name__ == "__main__":
unittest.main()
|
<commit_msg>Change DataCenters from struct to slice
<commit_before>package clcgo
import (
"encoding/json"
"errors"
"fmt"
)
type DataCenters struct {
DataCenters []DataCenter
}
type DataCenter struct {
ID string
Name string
}
const DataCentersURL = APIRoot + "/datacenters/%s"
func (d DataCenters) URL(a string) (string, error) {
return fmt.Sprintf(DataCentersURL, a), nil
}
func (d *DataCenters) Unmarshal(j []byte) error {
return json.Unmarshal(j, &d.DataCenters)
}
type DataCenterCapabilities struct {
DataCenter DataCenter `json:"-"`
Templates []struct {
Name string
Description string
}
}
const DataCenterCapabilitiesURL = DataCentersURL + "/%s/deploymentCapabilities"
func (d DataCenterCapabilities) URL(a string) (string, error) {
if d.DataCenter.ID == "" {
return "", errors.New("Need a DataCenter with an ID")
}
return fmt.Sprintf(DataCenterCapabilitiesURL, a, d.DataCenter.ID), nil
}
func (d *DataCenterCapabilities) Unmarshal(j []byte) error {
return json.Unmarshal(j, &d)
}
<commit_after>package clcgo
import (
"encoding/json"
"errors"
"fmt"
)
type DataCenters []DataCenter
type DataCenter struct {
ID string
Name string
}
const DataCentersURL = APIRoot + "/datacenters/%s"
func (d DataCenters) URL(a string) (string, error) {
return fmt.Sprintf(DataCentersURL, a), nil
}
func (d *DataCenters) Unmarshal(j []byte) error {
return json.Unmarshal(j, &d)
}
type DataCenterCapabilities struct {
DataCenter DataCenter `json:"-"`
Templates []struct {
Name string
Description string
}
}
const DataCenterCapabilitiesURL = DataCentersURL + "/%s/deploymentCapabilities"
func (d DataCenterCapabilities) URL(a string) (string, error) {
if d.DataCenter.ID == "" {
return "", errors.New("Need a DataCenter with an ID")
}
return fmt.Sprintf(DataCenterCapabilitiesURL, a, d.DataCenter.ID), nil
}
func (d *DataCenterCapabilities) Unmarshal(j []byte) error {
return json.Unmarshal(j, &d)
}
|
<commit_msg>Create HTTP API to download the file
Using 'application/octet-stream' as the content-type of the response
forces the browser to download the file rather than opening it
inline.
<commit_before>package fr.alecharp.picshare.resource;
import fr.alecharp.picshare.domain.Event;
import fr.alecharp.picshare.service.EventService;
import net.codestory.http.annotations.Get;
import net.codestory.http.templating.Model;
import net.codestory.http.templating.ModelAndView;
import javax.inject.Inject;
import java.util.Optional;
/**
* @author Adrien Lecharpentier
*/
public class EventResource {
private final EventService eventService;
@Inject
public EventResource(EventService eventService) {
this.eventService = eventService;
}
@Get("/event/:id")
public ModelAndView dashboard(String id) {
Optional<Event> event = eventService.get(id);
return event.isPresent() ?
ModelAndView.of("event/display.html", "event", event.get()) :
ModelAndView.of("404.html");
}
}
<commit_after>package fr.alecharp.picshare.resource;
import fr.alecharp.picshare.domain.Event;
import fr.alecharp.picshare.service.EventService;
import fr.alecharp.picshare.service.PictureService;
import net.codestory.http.Response;
import net.codestory.http.annotations.Get;
import net.codestory.http.annotations.Prefix;
import net.codestory.http.templating.Model;
import net.codestory.http.templating.ModelAndView;
import javax.inject.Inject;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Optional;
/**
* @author Adrien Lecharpentier
*/
@Prefix("/event")
public class EventResource {
private final EventService eventService;
private final PictureService pictureService;
@Inject
public EventResource(EventService eventService, PictureService pictureService) {
this.eventService = eventService;
this.pictureService = pictureService;
}
@Get("/:id")
public ModelAndView dashboard(String id) {
Optional<Event> event = eventService.get(id);
return event.isPresent() ?
ModelAndView.of("event/display.html", "event", event.get()) :
ModelAndView.of("404.html");
}
@Get("/:id/:picture")
public void download(String id, String picture, Response resp) throws IOException {
resp.setHeader("Content-Type", "application/octet-stream");
Files.copy(pictureService.getPicture(id, picture), resp.outputStream());
}
}
|
<commit_msg>Fix routeName attr not being passed into pages
<commit_before>import Mithril from 'mithril';
/**
* Generates a route resolver for a given component.
* In addition to regular route resolver functionality:
* - It provide the current route name as an attr
* - It sets a key on the component so a rerender will be triggered on route change.
*/
export default class DefaultResolver {
component: Mithril.Component;
routeName: string;
constructor(component, routeName) {
this.component = component;
this.routeName = routeName;
}
/**
* When a route change results in a changed key, a full page
* rerender occurs. This method can be overriden in subclasses
* to prevent rerenders on some route changes.
*/
makeKey() {
return this.routeName + JSON.stringify(m.route.param());
}
onmatch(args, requestedPath, route) {
return this.component;
}
render(vnode) {
return [{ ...vnode, routeName: this.routeName, key: this.makeKey() }];
}
}
<commit_after>import Mithril from 'mithril';
/**
* Generates a route resolver for a given component.
* In addition to regular route resolver functionality:
* - It provide the current route name as an attr
* - It sets a key on the component so a rerender will be triggered on route change.
*/
export default class DefaultResolver {
component: Mithril.Component;
routeName: string;
constructor(component, routeName) {
this.component = component;
this.routeName = routeName;
}
/**
* When a route change results in a changed key, a full page
* rerender occurs. This method can be overriden in subclasses
* to prevent rerenders on some route changes.
*/
makeKey() {
return this.routeName + JSON.stringify(m.route.param());
}
makeAttrs(vnode) {
return {
...vnode.attrs,
routeName: this.routeName,
};
}
onmatch(args, requestedPath, route) {
return this.component;
}
render(vnode) {
return [{ ...vnode, attrs: this.makeAttrs(vnode), key: this.makeKey() }];
}
}
|
<commit_msg>Add back the stable_build variable, which is needed in the _revert_settings() function. Whoops.
<commit_before>from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
<commit_after>from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
stable_build = run("readlink /var/www/live.%s.%s" % (repo, branch))
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
|
<commit_msg>Update 0.6.4
- Fixed exception error
<commit_before>
import requests
import os
def get_apod():
    """Download NASA's Astronomy Picture of the Day into ./APODs.

    Returns the absolute path of the saved image file, or None when the
    request fails or the picture is an animated gif (gifs are skipped).
    """
    os.makedirs("APODs", exist_ok=True)
    try:
        apod_data = requests.get("https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY").json()
        image_url = apod_data["url"]
        if image_url.endswith(".gif"):
            # Animated pictures are not supported; bail out (returns None).
            return
        image_data = requests.get(image_url, stream=True)
    # NOTE(review): requests.get does not raise HTTPError unless
    # raise_for_status() is called — verify this handler is reachable.
    except requests.HTTPError:
        return
    # Stream the image to disk in 100 kB chunks to bound memory use.
    with open(os.path.join("APODs", os.path.basename(image_url)), "wb") as imagefile:
        for chunk in image_data.iter_content(100000):
            imagefile.write(chunk)
    return os.path.abspath((os.path.join("APODs", os.path.basename(image_url))))
<commit_after>
import requests
import os
def get_apod():
    """Download NASA's Astronomy Picture of the Day into ./APODs.

    Returns the absolute path of the saved image file, or None when the
    request fails or the picture is an animated gif (gifs are skipped
    via a raised TypeError).
    """
    os.makedirs("APODs", exist_ok=True)
    try:
        apod_data = requests.get("https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY").json()
        image_url = apod_data["url"]
        if image_url.endswith(".gif"):
            raise TypeError
        image_data = requests.get(image_url, stream=True)
    # BUG FIX: `except (A or B)` evaluates `A or B` first, which yields A,
    # so TypeError was never caught here. A tuple catches both types.
    except (requests.HTTPError, TypeError):
        return
    # Stream the image to disk in 100 kB chunks to bound memory use.
    with open(os.path.join("APODs", os.path.basename(image_url)), "wb") as imagefile:
        for chunk in image_data.iter_content(100000):
            imagefile.write(chunk)
    return os.path.abspath((os.path.join("APODs", os.path.basename(image_url))))
|
<commit_msg>Fix wrong animation of boss arm
Signed-off-by: Matheus Miranda <6faf3a473c1bf7863f2eb74400dd6142ec07131e@gmail.com>
<commit_before>
using namespace engine;
void Level::load(){
for(auto game_object : objects){
std::cout << "Loading " << game_object->name << std::endl;
game_object->load();
for(auto hit : game_object->get_hitboxes()){
hit->initialize();
}
}
}
void Level::free(){
for(auto game_object : objects){
std::cout << "Freeing" << game_object->name << std::endl;
game_object->free();
}
EventHandler::listeners.clear();
}
void Level::draw(){
for(auto game_object : objects){
if(game_object->is_active()){
game_object->draw();
}
}
}
<commit_after>
using namespace engine;
void Level::load(){
for(auto game_object : objects){
std::cout << "Loading " << game_object->name << std::endl;
game_object->load();
for(auto hit : game_object->get_hitboxes()){
hit->initialize();
}
}
}
void Level::free(){
for(auto game_object : objects){
std::cout << "Freeing" << game_object->name << std::endl;
game_object->free();
}
EventHandler::listeners.clear();
}
// Draw every active object in the level, in insertion order.
void Level::draw(){
	for(auto game_object : objects){
		if(game_object->is_active()){
			// HACK: the boss' left arm is forced onto its dedicated
			// animation by name before drawing. Consider moving this
			// into the object's own update logic.
			if(game_object->name == "arm_left"){
				game_object->set_actual_animation(game_object->animations["left_arm"]);
			}
			game_object->draw();
		}
	}
}
|
<commit_msg>Revert "homedir: add cgo or osusergo buildtag constraints for unix"
TL;DR: there is no way to do this right.
We do know that in some combination of build tags set (or unset),
linker flags, environment variables, and libc implementation,
this package won't work right. In fact, there is one specific
combination:
1. `CGO_ENABLED=1` (or unset)
2. static binary is being built (e.g. `go build` is run with `-extldflags -static`)
3. `go build` links the binary against glibc
4. `osusergo` is not set
This particular combination results in the following legitimate linker warning:
> cgo_lookup_unix.go: warning: Using 'getpwuid_r' in statically linked applications requires at runtime the shared libraries from the glibc version used for linking
If this warning is ignored and the resulting binary is used on a system
with files from a different glibc version (or without those files), it
could result in a segfault.
The commit being reverted tried to guard against such possibility,
but the problem is, we can only use build tags to account for items
1 and 4 from the above list, while items 2 and 3 do not result in
any build tags being set or unset, making this guard excessive.
Remove it.
This reverts commit 023b072288eab3c9e768d4aeeb917f27f06034c7.
Signed-off-by: Kir Kolyshkin <3a017b8ddb3f9cf3e4a59978b004111bdeb97f08@gmail.com>
<commit_before>// +build !windows,cgo !windows,osusergo
package homedir // import "github.com/docker/docker/pkg/homedir"
import (
"os"
"os/user"
)
// Key returns the env var name for the user's home dir based on
// the platform being run on
func Key() string {
return "HOME"
}
// Get returns the home directory of the current user with the help of
// environment variables depending on the target operating system.
// Returned path should be used with "path/filepath" to form new paths.
// If compiling statically, ensure the osusergo build tag is used.
// If needing to do nss lookups, do not compile statically.
func Get() string {
home := os.Getenv(Key())
if home == "" {
if u, err := user.Current(); err == nil {
return u.HomeDir
}
}
return home
}
// GetShortcutString returns the string that is shortcut to user's home directory
// in the native shell of the platform running on.
func GetShortcutString() string {
return "~"
}
<commit_after>// +build !windows
package homedir // import "github.com/docker/docker/pkg/homedir"
import (
"os"
"os/user"
)
// Key returns the env var name for the user's home dir based on
// the platform being run on
func Key() string {
return "HOME"
}
// Get returns the home directory of the current user with the help of
// environment variables depending on the target operating system.
// Returned path should be used with "path/filepath" to form new paths.
//
// The HOME environment variable takes precedence; the user database
// (os/user) is consulted only when HOME is empty or unset.
//
// If compiling statically, ensure the osusergo build tag is used.
// If needing to do nss lookups, do not compile statically.
func Get() string {
	home := os.Getenv(Key())
	if home == "" {
		if u, err := user.Current(); err == nil {
			return u.HomeDir
		}
	}
	return home
}
// GetShortcutString returns the string that is shortcut to user's home directory
// in the native shell of the platform running on.
func GetShortcutString() string {
return "~"
}
|
<commit_msg>Fix 'path' function: use main's file as project root
<commit_before>import os
def setting(name):
return name.upper()
def path(subpath):
    """Return an absolute path for *subpath* relative to this module's directory."""
    here = os.path.dirname(os.path.realpath(__file__))
    return os.path.join(here, subpath)
<commit_after>import os
def setting(name):
    """Return the canonical (upper-cased) form of a setting name."""
    return name.upper()
def path(subpath):
    """Return an absolute path for *subpath* relative to the project root.

    The project root is taken to be the directory of the program's entry
    script (``__main__``), not of this module.
    """
    # Imported lazily so the entry module is fully resolved by call time.
    # NOTE(review): __main__ has no __file__ in a REPL or frozen
    # executable — confirm callers never run in those contexts.
    import __main__
    project_root = os.path.dirname(os.path.realpath(__main__.__file__))
    return os.path.join(project_root, subpath)
|
<commit_msg>Fix English language strings for the new line length
<commit_before>
const Language LANG_ENGLISH = {
.hours = {
"one",
"two",
"three",
"four",
"five",
"six",
"seven",
"eight",
"nine",
"ten",
"eleven",
"twelve"
},
.phrases = {
"*$1 o'clock ",
"five past *$1 ",
"ten past *$1 ",
"quarter past *$1 ",
"twenty past *$1 ",
"twenty five past *$1 ",
"half past *$1 ",
"twenty five to *$2 ",
"twenty to *$2 ",
"quarter to *$2 ",
"ten to *$2 ",
"five to *$2 "
},
#ifdef PBL_PLATFORM_CHALK
.greetings = {
"Good morning ",
"Good day ",
"Good evening ",
"Good night "
},
#else
.greetings = {
"Good mor- ning ",
"Good day ",
"Good even- ing ",
"Good night "
},
#endif
.connection_lost = "Where is your phone? "
};
<commit_after>
const Language LANG_ENGLISH = {
.hours = {
"one",
"two",
"three",
"four",
"five",
"six",
"seven",
"eight",
"nine",
"ten",
"eleven",
"twelve"
},
#ifdef PBL_PLATFORM_CHALK
.phrases = {
"*$1 o'clock ",
"five past *$1 ",
"ten past *$1 ",
"quarter past *$1 ",
"twenty past *$1 ",
"twenty five past *$1 ",
"half past *$1 ",
"twenty five to *$2 ",
"twenty to *$2 ",
"quarter to *$2 ",
"ten to *$2 ",
"five to *$2 "
},
.greetings = {
"Good morning ",
"Good day ",
"Good evening ",
"Good night "
},
#else
.phrases = {
"*$1 o'clock ",
"five past *$1 ",
"ten past *$1 ",
"quarter past *$1 ",
"twenty past *$1 ",
"twenty five past *$1 ",
"half past *$1 ",
"twenty five to *$2 ",
"twenty to *$2 ",
"quarter to *$2 ",
"ten to *$2 ",
"five to *$2 "
},
.greetings = {
"Good mor- ning ",
"Good day ",
"Good even- ing ",
"Good night "
},
#endif
.connection_lost = "Where is your phone? "
};
|
<commit_msg>Remove start when running tests
<commit_before>
use daedalos::println;
#[no_mangle]
pub extern "C" fn _start() -> ! {
println!("Hello, world!");
loop {}
}
#[cfg(not(test))]
use core::panic::PanicInfo;
#[cfg(not(test))]
#[panic_handler]
fn panic(info: &PanicInfo) -> ! {
println!("{}", info);
loop {}
}
<commit_after>use daedalos::println;
#[cfg(not(test))]
#[no_mangle]
pub extern "C" fn _start() -> ! {
println!("Hello, world!");
loop {}
}
#[cfg(not(test))]
use core::panic::PanicInfo;
#[cfg(not(test))]
#[panic_handler]
fn panic(info: &PanicInfo) -> ! {
println!("{}", info);
loop {}
}
|
<commit_msg>Add download url and long description
<commit_before>from distutils.core import setup
setup(
requires=['beautifulsoup4', 'requests'],
name='python-ninegag',
version='0.1',
py_modules=['pyninegag'],
url='https://github.com/sashgorokhov/python-ninegag',
license='MIT',
author='sashgorokhov',
author_email='sashgorokhov@gmail.com',
description='Python library to get stuff from 9gag.com'
)
<commit_after>from distutils.core import setup
with open('README.md') as file:
long_description = file.read()
setup(
requires=['beautifulsoup4', 'requests'],
name='python-ninegag',
version='0.1',
py_modules=['pyninegag'],
url='https://github.com/sashgorokhov/python-ninegag',
download_url='https://github.com/sashgorokhov/python-ninegag/archive/master.zip',
long_description=long_description,
license='MIT License',
author='sashgorokhov',
author_email='sashgorokhov@gmail.com',
description='Python library to get stuff from 9gag.com'
)
|
<commit_msg>Include fixed values as defaults
I'm not a big fan of this approach, but it avoids a good bit of code duplication
<commit_before>from gettext import gettext as _
class Fixed:
    _("A mixin that ensures the presence of a predetermined value")
    def __init__(self, value, *args, **kwargs):
        # Remember the only value this field is ever allowed to carry.
        self.value = value
        super(Fixed, self).__init__(*args, **kwargs)
    def encode(self, value):
        # Ignore whatever the caller supplied and encode the fixed value.
        return super(Fixed, self).encode(self.value)
    def decode(self, value):
        decoded = super(Fixed, self).decode(value)
        if decoded == self.value:
            return decoded
        # Anything other than the fixed value is a contract violation.
        raise ValueError(_("Expected %r, got %r" % (self.value, decoded)))
<commit_after>from gettext import gettext as _
class Fixed:
    _("A mixin that ensures the presence of a predetermined value")
    def __init__(self, value, *args, **kwargs):
        # Remember the canonical value; encode/decode enforce it below.
        self.value = value
        # Pass the value in as a default as well, to make
        # sure it goes through when no value was supplied
        super(Fixed, self).__init__(*args, default=value, **kwargs)
    def encode(self, value):
        # Always encode the fixed value, ignoring the caller's argument
        return super(Fixed, self).encode(self.value)
    def decode(self, value):
        value = super(Fixed, self).decode(value)
        # Check that the decoded value matches what it should be
        if value != self.value:
            raise ValueError(_("Expected %r, got %r" % (self.value, value)))
        return value
|
<commit_msg>Change server default option values
<commit_before>package main
import (
"flag"
"net"
"github.com/hnakamur/rdirsync"
"google.golang.org/grpc"
"google.golang.org/grpc/credentials"
"google.golang.org/grpc/grpclog"
)
func main() {
var enableTLS bool
flag.BoolVar(&enableTLS, "enable-tls", false, "enable TLS")
var certFile string
flag.StringVar(&certFile, "cert-file", "../../ssl/server/server.crt", "TLS cert file")
var keyFile string
flag.StringVar(&keyFile, "key-file", "../../ssl/server/server.key", "TLS key file")
var addr string
flag.StringVar(&addr, "addr", ":10000", "server listen address")
flag.Parse()
lis, err := net.Listen("tcp", addr)
if err != nil {
grpclog.Fatal(err)
}
var opts []grpc.ServerOption
if enableTLS {
creds, err := credentials.NewServerTLSFromFile(certFile, keyFile)
if err != nil {
grpclog.Fatalf("Failed to generate credentials %v", err)
}
opts = []grpc.ServerOption{grpc.Creds(creds)}
}
grpcServer := grpc.NewServer(opts...)
rdirsync.RegisterNewRDirSyncServer(grpcServer)
grpcServer.Serve(lis)
}
<commit_after>package main
import (
"flag"
"net"
"github.com/hnakamur/rdirsync"
"google.golang.org/grpc"
"google.golang.org/grpc/credentials"
"google.golang.org/grpc/grpclog"
)
func main() {
var enableTLS bool
flag.BoolVar(&enableTLS, "enable-tls", false, "enable TLS")
var certFile string
flag.StringVar(&certFile, "cert-file", "server.crt", "TLS cert file")
var keyFile string
flag.StringVar(&keyFile, "key-file", "server.key", "TLS key file")
var addr string
flag.StringVar(&addr, "addr", ":10000", "server listen address")
flag.Parse()
lis, err := net.Listen("tcp", addr)
if err != nil {
grpclog.Fatal(err)
}
var opts []grpc.ServerOption
if enableTLS {
creds, err := credentials.NewServerTLSFromFile(certFile, keyFile)
if err != nil {
grpclog.Fatalf("Failed to generate credentials %v", err)
}
opts = []grpc.ServerOption{grpc.Creds(creds)}
}
grpcServer := grpc.NewServer(opts...)
rdirsync.RegisterNewRDirSyncServer(grpcServer)
grpcServer.Serve(lis)
}
|
<commit_msg>Remove broadcast from the client pool
<commit_before>package server
import (
"errors"
"sync"
)
type ClientPool struct {
mutex sync.RWMutex
clients map[string]*Client
}
func NewClientPool() *ClientPool {
p := new(ClientPool)
p.clients = make(map[string]*Client)
return p
}
func (cp *ClientPool) Add(c *Client) error {
cp.mutex.Lock()
defer cp.mutex.Unlock()
if _, ok := cp.clients[c.Name]; ok {
return errors.New("Client with this name already exists")
}
cp.clients[c.Name] = c
return nil
}
func (cp *ClientPool) Remove(c *Client) {
cp.mutex.Lock()
defer cp.mutex.Unlock()
delete(cp.clients, c.Name)
}
func (cp *ClientPool) Broadcast(sender *Client, m []byte) {
cp.mutex.RLock()
defer cp.mutex.RUnlock()
for _, client := range cp.clients {
client.Send(sender, m)
}
}
<commit_after>package server
import (
"errors"
"sync"
)
type ClientPool struct {
mutex sync.RWMutex
clients map[string]*Client
}
func NewClientPool() *ClientPool {
p := new(ClientPool)
p.clients = make(map[string]*Client)
return p
}
func (cp *ClientPool) Add(c *Client) error {
cp.mutex.Lock()
defer cp.mutex.Unlock()
if _, ok := cp.clients[c.Name]; ok {
return errors.New("Client with this name already exists")
}
cp.clients[c.Name] = c
return nil
}
func (cp *ClientPool) Remove(c *Client) {
cp.mutex.Lock()
defer cp.mutex.Unlock()
delete(cp.clients, c.Name)
}
|
<commit_msg>Test for character range regex introduced in bfgex 1.1.1
<commit_before>package br.com.six2six.fixturefactory.function;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertTrue;
import org.junit.Test;
import br.com.six2six.fixturefactory.function.impl.RegexFunction;
public class RegexFunctionTest {
@Test
public void regexString() {
String pattern = "\\w{8}";
String value = new RegexFunction(pattern).generateValue();
assertNotNull("Generated string can not be null", value);
assertTrue(String.format("Generated string no match with %s", pattern), value.matches(pattern));
}
@Test
public void regexNumber() {
String pattern = "\\d{3,8}";
String value = new RegexFunction(pattern).generateValue();
assertNotNull("Generated number can not be null", value);
assertTrue(String.format("Generated number no match with %s", pattern), value.matches(pattern));
}
@Test
public void regexPhoneNumber() {
String pattern = "(\\d{2})-(\\d{4})-(\\d{4})";
String value = new RegexFunction(pattern).generateValue();
assertNotNull("Generated phone number can not be null", value);
assertTrue(String.format("Generated phone number no match with %s", pattern), value.matches(pattern));
}
}
<commit_after>package br.com.six2six.fixturefactory.function;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertTrue;
import org.junit.Test;
import br.com.six2six.fixturefactory.function.impl.RegexFunction;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import java.util.Arrays;
@RunWith(Parameterized.class)
public class RegexFunctionTest {
@Parameters(name= "{index}: regex {0}={1}")
public static Iterable<String[]> data() {
return Arrays.asList(new String[][]{
{"String", "\\w{8}"},
{"Number", "\\d{3,8}"},
{"Phone number", "(\\d{2})-(\\d{4})-(\\d{4})"},
{"MD5", "[0-9a-f]{32}"}
});
}
private String regexName;
private String pattern;
public RegexFunctionTest(String regexName, String pattern) {
this.regexName = regexName;
this.pattern = pattern;
}
@Test
public void testAgainstRegex() {
String value = new RegexFunction(pattern).generateValue();
assertNotNull(String.format("Generated %s can not be null", regexName), regexName);
assertTrue(String.format("Generated %s (%s) no match with %s", regexName, value, pattern), value.matches(pattern));
}
}
|
<commit_msg>Remove end limit for product payer promo
<commit_before>import { isWithinInterval } from 'date-fns';
import { BLACK_FRIDAY, PRODUCT_PAYER } from '../constants';
import { Subscription } from '../interfaces';
import { hasMailPlus, hasMailProfessional, hasVpnBasic, hasVpnPlus, hasAddons } from './subscription';
export const isBlackFridayPeriod = () => {
return isWithinInterval(new Date(), { start: BLACK_FRIDAY.START, end: BLACK_FRIDAY.END });
};
export const isCyberMonday = () => {
return isWithinInterval(new Date(), { start: BLACK_FRIDAY.CYBER_START, end: BLACK_FRIDAY.CYBER_END });
};
export const isProductPayerPeriod = () => {
return isWithinInterval(new Date(), { start: PRODUCT_PAYER.START, end: PRODUCT_PAYER.END });
};
export const isProductPayer = (subscription: Subscription) => {
if (!subscription) {
return false;
}
const couponCode = subscription?.CouponCode || '';
const isPaying = hasMailPlus(subscription) || hasVpnBasic(subscription) || hasVpnPlus(subscription);
const noPro = !hasMailProfessional(subscription);
const noBundle = !(hasMailPlus(subscription) && hasVpnPlus(subscription));
const noBFCoupon = ![BLACK_FRIDAY.COUPON_CODE].includes(couponCode);
const noAddons = !hasAddons(subscription);
return isPaying && noPro && noBundle && noBFCoupon && noAddons;
};
<commit_after>import { isWithinInterval, isAfter } from 'date-fns';
import { BLACK_FRIDAY, PRODUCT_PAYER } from '../constants';
import { Subscription } from '../interfaces';
import { hasMailPlus, hasMailProfessional, hasVpnBasic, hasVpnPlus, hasAddons } from './subscription';
export const isBlackFridayPeriod = () => {
return isWithinInterval(new Date(), { start: BLACK_FRIDAY.START, end: BLACK_FRIDAY.END });
};
export const isCyberMonday = () => {
return isWithinInterval(new Date(), { start: BLACK_FRIDAY.CYBER_START, end: BLACK_FRIDAY.CYBER_END });
};
export const isProductPayerPeriod = () => {
return isAfter(new Date(), PRODUCT_PAYER.START);
};
export const isProductPayer = (subscription: Subscription) => {
if (!subscription) {
return false;
}
const couponCode = subscription?.CouponCode || '';
const isPaying = hasMailPlus(subscription) || hasVpnBasic(subscription) || hasVpnPlus(subscription);
const noPro = !hasMailProfessional(subscription);
const noBundle = !(hasMailPlus(subscription) && hasVpnPlus(subscription));
const noBFCoupon = ![BLACK_FRIDAY.COUPON_CODE].includes(couponCode);
const noAddons = !hasAddons(subscription);
return isPaying && noPro && noBundle && noBFCoupon && noAddons;
};
|
<commit_msg>Add throughput metrics to benchmark.
<commit_before>package raft
import (
"bytes"
"encoding/json"
"testing"
)
func BenchmarkAppendEntriesEncoding(b *testing.B) {
req, _ := createTestAppendEntriesRequest(2000)
for i := 0; i < b.N; i++ {
var buf bytes.Buffer
json.NewEncoder(&buf).Encode(req)
}
}
func BenchmarkAppendEntriesDecoding(b *testing.B) {
req, buf := createTestAppendEntriesRequest(2000)
for i := 0; i < b.N; i++ {
json.NewDecoder(bytes.NewReader(buf)).Decode(req)
}
}
func createTestAppendEntriesRequest(entryCount int) (*AppendEntriesRequest, []byte) {
entries := make([]*LogEntry, 0)
for i := 0; i < entryCount; i++ {
entries = append(entries, newLogEntry(nil, 1, 2, &joinCommand{Name: "localhost:1000"}))
}
req := newAppendEntriesRequest(1, "leader", 1, 1, entries, 1)
buf, _ := json.Marshal(req)
return req, buf
}
<commit_after>package raft
import (
"bytes"
"encoding/json"
"testing"
)
func BenchmarkAppendEntriesEncoding(b *testing.B) {
req, tmp := createTestAppendEntriesRequest(2000)
for i := 0; i < b.N; i++ {
var buf bytes.Buffer
json.NewEncoder(&buf).Encode(req)
}
b.SetBytes(int64(len(tmp)))
}
func BenchmarkAppendEntriesDecoding(b *testing.B) {
req, buf := createTestAppendEntriesRequest(2000)
for i := 0; i < b.N; i++ {
json.NewDecoder(bytes.NewReader(buf)).Decode(req)
}
b.SetBytes(int64(len(buf)))
}
func createTestAppendEntriesRequest(entryCount int) (*AppendEntriesRequest, []byte) {
entries := make([]*LogEntry, 0)
for i := 0; i < entryCount; i++ {
entries = append(entries, newLogEntry(nil, 1, 2, &joinCommand{Name: "localhost:1000"}))
}
req := newAppendEntriesRequest(1, "leader", 1, 1, entries, 1)
buf, _ := json.Marshal(req)
return req, buf
}
|
<commit_msg>Convert try! to ? operator.
<commit_before>use std::env;
use std::fs::{copy, create_dir_all, read_dir};
use std::path::{Path, PathBuf};
use std::io;
fn main() {
let res_dir_source = Path::new(&env::var("CARGO_MANIFEST_DIR").unwrap()).join("resources/");
let res_dir_target = Path::new(&env::var("OUT_DIR").unwrap()).join("../../../resources/");
//copies all resource files to "target/NAME/resources". Prints out any errors if failed.
if let Err(io_error) = add_resources(&res_dir_source, &res_dir_target) {
println!("OS Error: {}", io_error);
}
}
///Recursively copy all files in dir given by source_path to dir given by target path
///WARNING! Overwrites files with same name
fn add_resources(source_path: &PathBuf, target_path: &PathBuf) -> io::Result<()> {
match read_dir(source_path) {
Ok(entry_iter) => {
try!(create_dir_all(target_path));
for entry in entry_iter {
let entry = try!(entry);
let source_path = entry.path();
let target_path = target_path.join(entry.file_name());
try!(add_resources(&source_path, &target_path));
}
}
Err(_) => {
try!(copy(&source_path, &target_path));
}
}
Ok(())
}
<commit_after>use std::env;
use std::fs::{copy, create_dir_all, read_dir};
use std::path::{Path, PathBuf};
use std::io;
fn main() {
let res_dir_source = Path::new(&env::var("CARGO_MANIFEST_DIR").unwrap()).join("resources/");
let res_dir_target = Path::new(&env::var("OUT_DIR").unwrap()).join("../../../resources/");
//copies all resource files to "target/NAME/resources". Prints out any errors if failed.
if let Err(io_error) = add_resources(&res_dir_source, &res_dir_target) {
println!("OS Error: {}", io_error);
}
}
///Recursively copy all files in dir given by source_path to dir given by target path
///WARNING! Overwrites files with same name
fn add_resources(source_path: &PathBuf, target_path: &PathBuf) -> io::Result<()> {
match read_dir(source_path) {
Ok(entry_iter) => {
create_dir_all(target_path)?;
for entry in entry_iter {
let entry = entry?;
let source_path = entry.path();
let target_path = target_path.join(entry.file_name());
add_resources(&source_path, &target_path)?;
}
}
Err(_) => {
copy(&source_path, &target_path)?;
}
}
Ok(())
}
|
<commit_msg>Fix an incorrect postcode for Reading
<commit_before>from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = "RDG"
addresses_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
stations_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
elections = ["2022-05-05"]
csv_delimiter = "\t"
def address_record_to_dict(self, record):
if record.addressline6 in [
"RG30 4RX",
"RG1 3NF",
"RG4 8ES",
"RG2 7PS",
]:
return None
return super().address_record_to_dict(record)
<commit_after>from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = "RDG"
addresses_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
stations_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
elections = ["2022-05-05"]
csv_delimiter = "\t"
def address_record_to_dict(self, record):
if record.property_urn.strip().lstrip("0") == "310088234":
record = record._replace(addressline6="RG1 1SN")
if record.addressline6 in [
"RG30 4RX",
"RG4 8ES",
"RG2 7PS",
]:
return None
return super().address_record_to_dict(record)
|
<commit_msg>Return a 404 if the package was not found
<commit_before>"""Package blueprint."""
import os
import magic
from flask import Blueprint, current_app, make_response, render_template
blueprint = Blueprint('packages', __name__, url_prefix='/packages')
@blueprint.route('')
def foo():
return 'ok'
@blueprint.route('/<package_type>/<letter>/<name>/<version>',
methods=['GET', 'HEAD'])
def packages(package_type, letter, name, version):
"""Get the contents of a package."""
filepath = os.path.join(current_app.config['BASEDIR'], name.lower(),
version.lower())
if os.path.isfile(filepath):
with open(filepath, 'rb') as egg:
mimetype = magic.from_file(filepath, mime=True)
contents = egg.read()
return make_response(contents, 200, {'Content-Type': mimetype})
<commit_after>"""Package blueprint."""
import os
import magic
from flask import Blueprint, current_app, make_response, render_template
blueprint = Blueprint('packages', __name__, url_prefix='/packages')
@blueprint.route('')
def foo():
return 'ok'
@blueprint.route('/<package_type>/<letter>/<name>/<version>',
methods=['GET', 'HEAD'])
def packages(package_type, letter, name, version):
"""Get the contents of a package."""
filepath = os.path.join(current_app.config['BASEDIR'], name.lower(),
version.lower())
if os.path.isfile(filepath):
with open(filepath, 'rb') as egg:
mimetype = magic.from_file(filepath, mime=True)
contents = egg.read()
return make_response(contents, 200, {'Content-Type': mimetype})
return make_response('Package not found', 404)
|
<commit_msg>Include ASEditableTextNode in framework header.
<commit_before>/* Copyright (c) 2014-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
#import <AsyncDisplayKit/ASDisplayNode.h>
#import <AsyncDisplayKit/ASDisplayNodeExtras.h>
#import <AsyncDisplayKit/ASControlNode.h>
#import <AsyncDisplayKit/ASImageNode.h>
#import <AsyncDisplayKit/ASTextNode.h>
#import <AsyncDisplayKit/ASBasicImageDownloader.h>
#import <AsyncDisplayKit/ASMultiplexImageNode.h>
#import <AsyncDisplayKit/ASNetworkImageNode.h>
#import <AsyncDisplayKit/ASTableView.h>
#import <AsyncDisplayKit/ASCollectionView.h>
#import <AsyncDisplayKit/ASCellNode.h>
<commit_after>/* Copyright (c) 2014-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
#import <AsyncDisplayKit/ASDisplayNode.h>
#import <AsyncDisplayKit/ASDisplayNodeExtras.h>
#import <AsyncDisplayKit/ASControlNode.h>
#import <AsyncDisplayKit/ASImageNode.h>
#import <AsyncDisplayKit/ASTextNode.h>
#import <AsyncDisplayKit/ASEditableTextNode.h>
#import <AsyncDisplayKit/ASBasicImageDownloader.h>
#import <AsyncDisplayKit/ASMultiplexImageNode.h>
#import <AsyncDisplayKit/ASNetworkImageNode.h>
#import <AsyncDisplayKit/ASTableView.h>
#import <AsyncDisplayKit/ASCollectionView.h>
#import <AsyncDisplayKit/ASCellNode.h>
|
<commit_msg>Fix trove classifier for pypi
<commit_before>
from setuptools import setup, find_packages
__author__ = "Nitrax <nitrax@lokisec.fr>"
__copyright__ = "Copyright 2017, Legobot"
description = 'Lego providing networking tools'
name = 'legos.nettools'
setup(
name=name,
version='0.1.0',
namespace_packages=name.split('.')[:-1],
license='MIT',
description=description,
author='Nitrax',
url='https://github.com/Legobot/' + name,
install_requires=['legobot>=1.1.4,<=2.0.0',
'python-whois',
'urllib3',
'bandit==1.3.0',
'flake8==3.2.1',
'pytest==3.0.5'
],
classifiers=[
'License :: MIT',
'Programming Language :: Python :: 3'
],
packages=find_packages()
)
<commit_after>
from setuptools import setup, find_packages
__author__ = "Nitrax <nitrax@lokisec.fr>"
__copyright__ = "Copyright 2017, Legobot"
description = 'Lego providing networking tools'
name = 'legos.nettools'
setup(
name=name,
version='0.1.0',
namespace_packages=name.split('.')[:-1],
license='MIT',
description=description,
author='Nitrax',
url='https://github.com/Legobot/' + name,
install_requires=['legobot>=1.1.4,<=2.0.0',
'python-whois',
'urllib3',
'bandit==1.3.0',
'flake8==3.2.1',
'pytest==3.0.5'
],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3'
],
packages=find_packages()
)
|
<commit_msg>Fix type error if input is int
<commit_before>from email import utils
import re
import time
import urllib
def digits(s):
if not s:
return ''
return re.sub('[^0-9]', '', s)
def floatformat(num, num_decimals):
return "%.{}f".format(num_decimals) % num
def strftime(datetime, formatstr):
"""
Uses Python's strftime with some tweaks
"""
return datetime.strftime(formatstr).lstrip("0").replace(" 0", " ")
def strip_frc(s):
if not s:
return ''
return s[3:]
def urlencode(s):
return urllib.quote(s.encode('utf8'))
def rfc2822(datetime):
tt = datetime.timetuple()
timestamp = time.mktime(tt)
return utils.formatdate(timestamp)
# def slugify(s):
# """
# Use Django's slugify method
# """
# return defaultfilters.slugify(s)
<commit_after>from email import utils
import re
import time
import urllib
def digits(s):
if not s:
return ''
if type(s) is int:
return s
return re.sub('[^0-9]', '', s)
def floatformat(num, num_decimals):
return "%.{}f".format(num_decimals) % num
def strftime(datetime, formatstr):
"""
Uses Python's strftime with some tweaks
"""
return datetime.strftime(formatstr).lstrip("0").replace(" 0", " ")
def strip_frc(s):
if not s:
return ''
return s[3:]
def urlencode(s):
return urllib.quote(s.encode('utf8'))
def rfc2822(datetime):
tt = datetime.timetuple()
timestamp = time.mktime(tt)
return utils.formatdate(timestamp)
# def slugify(s):
# """
# Use Django's slugify method
# """
# return defaultfilters.slugify(s)
|
<commit_msg>Make navbar footer really stick to bottom
<commit_before>import {Component} from "@angular/core";
@Component({
selector: 'application-component',
templateUrl: '/static/components/application/application.html',
styles: [
`#navbar_botom {margin-bottom: 0; border-radius: 0;}`,
`#buffer {height: 70px}`
]
})
export class ApplicationComponent {
}
<commit_after>import {Component} from "@angular/core";
@Component({
selector: 'application-component',
templateUrl: '/static/components/application/application.html',
styles: [
`#navbar_botom {margin-bottom: 0; border-radius: 0; position: fixed; bottom: 0; left: 0; right: 0;}`,
`#buffer {height: 70px}`
]
})
export class ApplicationComponent {
}
|
<commit_msg>Print line after creating each csv
<commit_before>import csv
from datetime import date
from scraper.draft_scraper import scrape
CSV_FILE = 'datasets/%s_nbadraft.csv'
for year in range(1947, date.today().year):
draft = scrape(year)
header = [key for key in draft[1].keys()]
with open(CSV_FILE % year, 'w', newline='') as outfile:
dw = csv.DictWriter(outfile, header)
dw.writeheader()
dw.writerows([row for index, row in draft.items()])
<commit_after>import csv
from datetime import date
from scraper.draft_scraper import scrape
CSV_FILE = 'datasets/%s_nbadraft.csv'
for year in range(1947, date.today().year):
draft = scrape(year)
header = [key for key in draft[1].keys()]
with open(CSV_FILE % year, 'w', newline='') as outfile:
dw = csv.DictWriter(outfile, header)
dw.writeheader()
dw.writerows([row for index, row in draft.items()])
print('Data processed for %s.' % year)
|
<commit_msg>Rewrite the script in a package fasshion.
<commit_before>
import urllib2
import json
import re
# album_url = 'http://www.ximalaya.com/7712455/album/6333174'
album_url = 'http://www.ximalaya.com/7712455/album/4474664'
headers = {'User-Agent': 'Safari/537.36'}
resp = urllib2.urlopen(urllib2.Request(album_url, headers=headers))
ids = re.search('sound_ids=\"(.*)\"', resp.read()).group(1).split(',')
for ind, f in enumerate(ids):
url = 'http://www.ximalaya.com/tracks/{}.json'.format(f)
resp = urllib2.urlopen(urllib2.Request(url, headers=headers))
data = json.loads(resp.read())
output = data['title'] + data['play_path_64'][-4:]
print output, data['play_path_64']
with open(output, 'wb') as local:
local.write(urllib2.urlopen(data['play_path_64']).read())
<commit_after>
from urllib2 import urlopen, Request
import json
import re
class XmlyDownloader(object):
def __init__(self):
self.headers = {'User-Agent': 'Safari/537.36'}
def getIDs(self, url):
resp = urlopen(Request(url, headers=self.headers))
return re.search('sound_ids=\"(.*)\"', resp.read()).group(1).split(',')
def download_file(self, ID):
url = 'http://www.ximalaya.com/tracks/{}.json'.format(ID)
resp = urlopen(Request(url, headers=self.headers))
data = json.loads(resp.read())
output = data['title'] + data['play_path_64'][-4:]
print output, data['play_path_64']
with open(output, 'wb') as local:
local.write(urlopen(data['play_path_64']).read())
def download_album(self, album_url):
for ID in self.getIDs(album_url):
self.download_file(ID)
if __name__ == '__main__':
album_url = 'http://www.ximalaya.com/7712455/album/4474664'
xmly = XmlyDownloader()
xmly.download_album(album_url)
|
<commit_msg>Fix pip installs of astropy-helpers<commit_before>
import sys
from distutils.version import LooseVersion
import setuptools
from setuptools import setup
if LooseVersion(setuptools.__version__) < '30.3':
sys.stderr.write("ERROR: setuptools 30.3 or later is required by astropy-helpers\n")
sys.exit(1)
from astropy_helpers.version_helpers import generate_version_py # noqa
version = generate_version_py()
setup(version=version)
<commit_after>
import os
import sys
from distutils.version import LooseVersion
import setuptools
from setuptools import setup
if LooseVersion(setuptools.__version__) < '30.3':
sys.stderr.write("ERROR: setuptools 30.3 or later is required by astropy-helpers\n")
sys.exit(1)
# Need to add current directory to be able to import astropy-helpers
# despite PEP517/518 build isolation
sys.path.append(os.path.abspath("."))
from astropy_helpers.version_helpers import generate_version_py # noqa
version = generate_version_py()
setup(version=version)
|
<commit_msg>Check for an existing handler before registering default host-meta handler.
<commit_before>from django.db import models
from django.db.models.signals import post_save
import mimetypes
import wellknown
#
# create default host-meta handler
#
from wellknown.resources import HostMeta
wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml')
#
# resource model
#
class Resource(models.Model):
path = models.CharField(max_length=128)
content = models.TextField(blank=True)
content_type = models.CharField(max_length=128, blank=True)
class Meta:
ordering = ('path',)
def __unicode__(self):
return self.path
def save(self, **kwargs):
self.path = self.path.strip('/')
if not self.content_type:
self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain'
super(Resource, self).save(**kwargs)
#
# update resources when models are saved
#
def save_handler(sender, **kwargs):
reg = kwargs['instance']
wellknown.register(
reg.path,
content=reg.content,
content_type=reg.content_type,
update=True
)
post_save.connect(save_handler, sender=Resource)
<commit_after>from django.db import models
from django.db.models.signals import post_save
import mimetypes
import wellknown
#
# create default host-meta handler
#
from wellknown.resources import HostMeta
try:
wellknown.register('host-meta', handler=HostMeta(),
content_type='application/xrd+xml')
except ValueError:
pass
#
# resource model
#
class Resource(models.Model):
path = models.CharField(max_length=128)
content = models.TextField(blank=True)
content_type = models.CharField(max_length=128, blank=True)
class Meta:
ordering = ('path',)
def __unicode__(self):
return self.path
def save(self, **kwargs):
self.path = self.path.strip('/')
if not self.content_type:
self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain'
super(Resource, self).save(**kwargs)
#
# update resources when models are saved
#
def save_handler(sender, **kwargs):
reg = kwargs['instance']
wellknown.register(
reg.path,
content=reg.content,
content_type=reg.content_type,
update=True
)
post_save.connect(save_handler, sender=Resource)
|
<commit_msg>Fix issue with missing export
<commit_before>Cypress.Commands.add('login', (username = 'admin') => {
window.localStorage.setItem('USER', JSON.stringify({ username }));
});
<commit_after>Cypress.Commands.add('login', (username = 'admin') => {
window.localStorage.setItem('USER', JSON.stringify({ username }));
});
export {};
|
<commit_msg>Add daysAgo func to template
<commit_before>package main
import (
"html/template"
"log"
"os"
"time"
)
// Must method parses the template to ensure no errors;
// New method creates and returns a new template
var report = template.Must(template.New("issuelist").Parse(IssueList))
func main() {
result, err := SearchIssues(os.Args[1:])
if err != nil {
log.Fatal(err)
}
if err := report.Execute(os.Stdout, result); err != nil {
log.Fatal(err)
}
}
func daysAgo(t time.Time) int {
return int(time.Since(t).Hours() / 24)
}
<commit_after>package main
import (
"html/template"
"log"
"os"
"time"
)
// Must method parses the template to ensure no errors;
// New method creates and returns a new template
var report = template.Must(template.New("issuelist").
// Funcs adds daysAgo as a function accessible inside the template
Funcs(template.FuncMap{"daysAgo": daysAgo}).Parse(IssueList))
func main() {
result, err := SearchIssues(os.Args[1:])
if err != nil {
log.Fatal(err)
}
if err := report.Execute(os.Stdout, result); err != nil {
log.Fatal(err)
}
}
func daysAgo(t time.Time) int {
return int(time.Since(t).Hours() / 24)
}
|
<commit_msg>Revert "refactor: remove unused imports."
This reverts commit ceb37d26a48d7d56b3d0ded0376eb9498ce7ef6f.
<commit_before>from python_hosts.exception import (HostsException, HostsEntryException,
InvalidIPv4Address, InvalidComment)
from python_hosts.hosts import Hosts
from python_hosts.utils import is_readable, is_ipv4, is_ipv6
<commit_after>from python_hosts.hosts import Hosts, HostsEntry
from python_hosts.utils import is_readable, is_ipv4, is_ipv6, valid_hostnames
from python_hosts.exception import (HostsException, HostsEntryException,
InvalidIPv4Address, InvalidIPv6Address,
InvalidComment)
|
<commit_msg>Set Heartbeat interval to 5 mins
<commit_before>package net.vexelon.currencybg.srv;
import javax.ws.rs.core.MediaType;
/**
* Global constants
*
*/
public final class Defs {
/*
* Database parameters
*/
public static final String DB_DRIVER = "com.mysql.jdbc.Driver";
public static final String DB_NAME = "currencybg";
public static final String DB_HOST = System.getenv("OPENSHIFT_MYSQL_DB_HOST");
public static final String DB_PORT = System.getenv("OPENSHIFT_MYSQL_DB_PORT");
public static final String DB_CONNECTION = String.format("jdbc:mysql://%s:%s/%s", DB_HOST, DB_PORT, DB_NAME);
public static final String DB_USER = System.getenv("OPENSHIFT_MYSQL_DB_USERNAME");
public static final String DB_PASSWORD = System.getenv("OPENSHIFT_MYSQL_DB_PASSWORD");
public static final String DATEFORMAT_ISO_8601 = "yyyy-MM-dd'T'HH:mmZ";
/*
* Currency update parameters
*/
public static final long UPDATE_FIRST_INTERVAL = 10; // 10 seconds
public static final long UPDATES_PERIODIC_INTERVAL = 6 * 60 * 60; // 6 hours
/*
* HTTP API specific
*/
public static final String HEADER_APIKEY = "APIKey";
public static final String API_JSON_CONTENT_TYPE = MediaType.APPLICATION_JSON_TYPE + ";charset=utf-8";
public static final String DATETIME_FORMAT = "yyyy-MM-dd";
}<commit_after>package net.vexelon.currencybg.srv;
import javax.ws.rs.core.MediaType;
/**
* Global constants
*
*/
public final class Defs {
/*
* Database parameters
*/
public static final String DB_DRIVER = "com.mysql.jdbc.Driver";
public static final String DB_NAME = "currencybg";
public static final String DB_HOST = System.getenv("OPENSHIFT_MYSQL_DB_HOST");
public static final String DB_PORT = System.getenv("OPENSHIFT_MYSQL_DB_PORT");
public static final String DB_CONNECTION = String.format("jdbc:mysql://%s:%s/%s", DB_HOST, DB_PORT, DB_NAME);
public static final String DB_USER = System.getenv("OPENSHIFT_MYSQL_DB_USERNAME");
public static final String DB_PASSWORD = System.getenv("OPENSHIFT_MYSQL_DB_PASSWORD");
public static final String DATEFORMAT_ISO_8601 = "yyyy-MM-dd'T'HH:mmZ";
/*
* Currency update parameters
*/
public static final long UPDATE_FIRST_INTERVAL = 10; // 10 seconds
public static final long UPDATES_PERIODIC_INTERVAL = 5 * 60; // 5 minutes
/*
* HTTP API specific
*/
public static final String HEADER_APIKEY = "APIKey";
public static final String API_JSON_CONTENT_TYPE = MediaType.APPLICATION_JSON_TYPE + ";charset=utf-8";
public static final String DATETIME_FORMAT = "yyyy-MM-dd";
} |
<commit_msg>Format parsed speed data for use in Chart.js
<commit_before>import os
import re
import json
class Parser(object):
"""Parse output from Speedtest CLI into JSON"""
def parse_all(self):
records = []
for file in os.listdir("data"):
if file.endswith(".speedtest.txt"):
records.append(self.parse("data/" + file))
return json.dumps(records)
def parse(self, file):
input = open(file, "r")
data = input.read()
input.close()
timestamp = re.search(r'Speed Test Ran at: (.*)', data)
ping = re.search(r'Ping: (.*)', data)
download = re.search(r'Download: (.*)', data)
upload = re.search(r'Upload: (.*)', data)
record = {}
if timestamp:
record["timestamp"] = timestamp.group(1)
if ping:
record["result"] = "success"
record["ping"] = ping.group(1)
record["download"] = download.group(1)
record["upload"] = upload.group(1)
else:
record["result"] = "failure"
return record
parser = Parser()
print parser.parse_all()
<commit_after>import os
import re
import json
class Parser(object):
"""Parse output from Speedtest CLI into JSON"""
def parse_all(self):
# needs:
# labels (timestamps)
# data (ping/dl/ul speed)
records = []
labels = []
download_speeds = []
for file in os.listdir("data"):
if file.endswith(".speedtest.txt"):
records.append(self.parse("data/" + file))
for record in records:
labels.append(record["timestamp"])
if record["result"] == "success":
download_speeds.append(record["download"])
datasets = [{"label":"Download Speeds", "data":download_speeds}]
summary = {}
summary["labels"] = labels
summary["datasets"] = datasets
return json.dumps(summary)
def parse(self, file):
input = open(file, "r")
data = input.read()
input.close()
timestamp = re.search(r'Speed Test Ran at: (.*)', data)
ping = re.search(r'Ping: (.*)', data)
download = re.search(r'Download: (.*) Mbit/s', data)
upload = re.search(r'Upload: (.*)', data)
record = {}
if timestamp:
record["timestamp"] = timestamp.group(1)
if ping:
record["result"] = "success"
record["ping"] = ping.group(1)
record["download"] = download.group(1)
record["upload"] = upload.group(1)
else:
record["result"] = "failure"
return record
parser = Parser()
print parser.parse_all()
|
<commit_msg>Use the correct decoder for the test.
<commit_before>from hyper.http20.hpack import Decoder
from binascii import unhexlify
class TestHPACKDecoderIntegration(object):
def test_can_decode_a_story(self, story):
d = Decoder()
for case in story['cases']:
d.header_table_size = case['header_table_size']
decoded_headers = d.decode(unhexlify(case['wire']))
# The correct headers are a list of dicts, which is annoying.
correct_headers = {(item[0], item[1]) for header in case['headers'] for item in header.items()}
assert correct_headers == decoded_headers
<commit_after>from hyper.http20.hpack import Decoder
from hyper.http20.huffman import HuffmanDecoder
from hyper.http20.huffman_constants import REQUEST_CODES, REQUEST_CODES_LENGTH
from binascii import unhexlify
class TestHPACKDecoderIntegration(object):
def test_can_decode_a_story(self, story):
d = Decoder()
if story['context'] == 'request':
d.huffman_coder = HuffmanDecoder(REQUEST_CODES, REQUEST_CODES_LENGTH)
for case in story['cases']:
d.header_table_size = case['header_table_size']
decoded_headers = d.decode(unhexlify(case['wire']))
# The correct headers are a list of dicts, which is annoying.
correct_headers = {(item[0], item[1]) for header in case['headers'] for item in header.items()}
assert correct_headers == decoded_headers
|
<commit_msg>Reformat code. Added methods to pretty print messages.
<commit_before>import os
import sys
import jd2chm_log
import jd2chm_conf
logging = None
config = None
def getAppDir():
if hasattr(sys, "frozen"): # py2exe
return os.path.dirname(sys.executable)
return os.path.dirname(sys.argv[0])
def getLogging(level=2):
global logging
if not logging:
logging = jd2chm_log.Jd2chmLogging(level)
return logging
def getLog():
"""Faciliate sharing the logger accross the different modules."""
return getLogging().logger
def getConf():
global config
if not config:
config = jd2chm_conf.Jd2chmConfig()
config.init()
return config<commit_after>import os
import sys
import shutil
import jd2chm_log as log
import jd2chm_conf as conf
import jd2chm_const as const
logging = None
config = None
def get_app_dir():
if hasattr(sys, "frozen"): # py2exe
return os.path.dirname(sys.executable)
return os.path.dirname(sys.argv[0])
def get_logging(level=2):
global logging
if not logging:
logging = log.Jd2chmLogging(level)
return logging
def get_log():
"""Facilitate sharing the logger across the different modules."""
return get_logging().logger
def get_conf():
global config
if not config:
config = conf.Jd2chmConfig()
config.init()
return config
def term_width():
return shutil.get_terminal_size((const.DEFAULT_TERM_WIDTH,
const.DEFAULT_TERM_HEIGHT)).columns - const.TERM_MARGIN
def center(line, max_line=0):
"""Center a padded string based on the width of the terminal.
If max_line is provided for justified text, line shorter than max_line
will only be padded on the left side.
"""
width = term_width()
left_margin = (width - max_line) / 2
if len(line) < max_line:
return (' ' * int(left_margin)) + line
return line.center(width, ' ')
def print_center_block(text, max_line=0):
"""Print a block of text centered on the terminal."""
for line in text.split('\n'):
print(center(line, max_line))
|
<commit_msg>Make sure this works the first time you run it
<commit_before>'''
Setup script that:
/pyquic:
- compiles pyquic
- copies py_quic into base directory so that we can use the module directly
'''
import os
import shutil
class temp_cd():
def __init__(self, temp_dir):
self._temp_dir = temp_dir
self._return_dir = os.path.dirname(os.path.realpath(__file__))
def __enter__(self):
os.chdir(self._temp_dir)
def __exit__(self, type, value, traceback):
os.chdir(self._return_dir)
def setup_pyquic():
with temp_cd('pyquic/py_quic'):
os.system('make')
shutil.rmtree('quic/py_quic')
shutil.copytree('pyquic/py_quic', 'quic/py_quic')
def clean_pyquic():
shutil.rmtree('py_quic')
os.system('git submodule update --checkout --remote -f')
if __name__ == "__main__":
setup_pyquic()
<commit_after>'''
Setup script that:
/pyquic:
- compiles pyquic
- copies py_quic into base directory so that we can use the module directly
'''
import os
import shutil
class temp_cd():
def __init__(self, temp_dir):
self._temp_dir = temp_dir
self._return_dir = os.path.dirname(os.path.realpath(__file__))
def __enter__(self):
os.chdir(self._temp_dir)
def __exit__(self, type, value, traceback):
os.chdir(self._return_dir)
def setup_pyquic():
with temp_cd('pyquic/py_quic'):
os.system('make')
if os.path.exists('quic/py_quic'):
shutil.rmtree('quic/py_quic')
shutil.copytree('pyquic/py_quic', 'quic/py_quic')
def clean_pyquic():
shutil.rmtree('py_quic')
os.system('git submodule update --checkout --remote -f')
if __name__ == "__main__":
setup_pyquic()
|
<commit_msg>Update readstat_lseek header signature on Windows
<commit_before>
int readstat_open(const char *filename);
int readstat_close(int fd);
#ifdef _AIX
off64_t readstat_lseek(int fildes, off64_t offset, int whence);
#else
off_t readstat_lseek(int fildes, off_t offset, int whence);
#endif
readstat_error_t readstat_update_progress(int fd, size_t file_size,
readstat_progress_handler progress_handler, void *user_ctx);
<commit_after>
int readstat_open(const char *filename);
int readstat_close(int fd);
#if defined _WIN32 || defined __CYGWIN__
_off64_t readstat_lseek(int fildes, _off64_t offset, int whence);
#elif defined _AIX
off64_t readstat_lseek(int fildes, off64_t offset, int whence);
#else
off_t readstat_lseek(int fildes, off_t offset, int whence);
#endif
readstat_error_t readstat_update_progress(int fd, size_t file_size,
readstat_progress_handler progress_handler, void *user_ctx);
|
<commit_msg>Fix sublime icon pathing by using "/" instead of os.path.join
<commit_before>import sublime
from os import path
_plugin_name = "Git Conflict Resolver"
_icon_folder = path.join(_plugin_name, "gutter")
_icons = {
"ours": "ours",
"ancestor": "ancestor",
"theirs": "theirs"
}
def get(group):
base = ""
extension = ""
if int(sublime.version()) < 3000:
base = path.join("..", _icon_folder)
else:
base = path.join("Packages", _icon_folder)
extension = ".png"
return path.join(base, _icons[group] + extension)
<commit_after>import sublime
_plugin_name = "Git Conflict Resolver"
_icon_folder = "/".join([_plugin_name, "gutter"])
_icons = {
"ours": "ours",
"ancestor": "ancestor",
"theirs": "theirs"
}
def get(group):
base = ""
extension = ""
if int(sublime.version()) < 3000:
base = "/".join(["..", _icon_folder])
else:
base = "/".join(["Packages", _icon_folder])
extension = ".png"
return "/".join([base, _icons[group] + extension])
|
<commit_msg>Split text by \t, and added lists of bacteria and diseases
<commit_before>class parseFileBacteriaList:
'Class for read and print information from text file'
bacteriaName = []
fileName = ""
def __init__(self,fileName):
self.fileName = fileName
def readFile(self):
file = open(self.fileName).readlines()
for linia in file:
print linia<commit_after>class parseFileBacteriaList:
'Class for read and print information from text file'
bacteriaName = []
fileName = ""
def __init__(self,fileName):
self.fileName = fileName
def readFile(self):
file = open(self.fileName).readlines()
listBacteria = []
listDeseases = []
for linia in file:
line = linia.split("\t")
listBacteria.append(line[0])
listDeseases.append(line[1])
print listBacteria
print listDeseases |
<commit_msg>Fix out of bounds exception when printing crash states.
<commit_before>
namespace fs_testing {
using std::ostream;
using std::string;
using std::to_string;
ostream& PermuteTestResult::PrintCrashState(ostream& os) const {
for (unsigned int i = 0; i < crash_state.size() - 1; ++i) {
os << to_string(crash_state.at(i)) << ", ";
}
os << to_string(crash_state.back());
return os;
}
} // namespace fs_testing
<commit_after>
namespace fs_testing {
using std::ostream;
using std::string;
using std::to_string;
ostream& PermuteTestResult::PrintCrashState(ostream& os) const {
if (crash_state.size() == 0) {
return os;
}
for (unsigned int i = 0; i < crash_state.size() - 1; ++i) {
os << to_string(crash_state.at(i)) << ", ";
}
os << to_string(crash_state.back());
return os;
}
} // namespace fs_testing
|
<commit_msg>Fix flake8: E402 module level import not at top of file
<commit_before>
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "nau_timetable.settings")
application = get_wsgi_application()
from whitenoise.django import DjangoWhiteNoise
application = DjangoWhiteNoise(application)
<commit_after>
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "nau_timetable.settings")
application = DjangoWhiteNoise(get_wsgi_application())
|
<commit_msg>Add citation utils to init
<commit_before>
from . import base_manager, cache_manager, citation_utils, database_io, make_json_serializable, models, query_manager
from .base_manager import *
from .cache_manager import *
from .database_io import *
from .models import *
from .query_manager import *
__all__ = (
base_manager.__all__ +
cache_manager.__all__ +
citation_utils.__all__ +
database_io.__all__ +
models.__all__ +
query_manager.__all__
)
<commit_after>
from . import base_manager, cache_manager, citation_utils, database_io, make_json_serializable, models, query_manager
from .base_manager import *
from .cache_manager import *
from .citation_utils import *
from .database_io import *
from .models import *
from .query_manager import *
__all__ = (
base_manager.__all__ +
cache_manager.__all__ +
citation_utils.__all__ +
database_io.__all__ +
models.__all__ +
query_manager.__all__
)
|
<commit_msg>Read the description for changeHit in the viewspot class to know how to use it properly
<commit_before>
/**
* Write a description of interface Spot here.
*
* @author (your name)
* @version (a version number or a date)
*/
public class ViewSpot
{
private boolean hit;
public ViewSpot()
{
hit=false;
}
public void changeHit()
{
if (hit==false)
{
hit=true;
}
}
}
<commit_after>/**
* Write a description of interface Spot here.
*
* @Basit Malik
* @version (a version number or a date)
*/
public class ViewSpot
{
private boolean hit;
public ViewSpot()
{
hit=false;
}
public boolean changeHit()//The else with a return of false is so that we can prompt the player to
//choose again if the method returns false so they cannot choose the
//same spot twice
{
if (hit==false)
{
hit=true;
return true;
}
else
{
return false;
}
}
}
|
<commit_msg>Fix install per \@8BitAce's PR
<commit_before>from setuptools import setup, find_packages
setup(
name="voltron",
version="0.1",
author="snare",
author_email="snare@ho.ax",
description=("A UI for GDB & LLDB"),
license="Buy snare a beer",
keywords="voltron gdb lldb",
url="https://github.com/snarez/voltron",
packages=find_packages(),
install_requires=['scruffington', 'flask', 'blessed', 'pygments', 'requests_unixsocket'],
data_files=['dbgentry.py'],
package_data={'voltron': ['config/*']},
install_package_data=True,
entry_points={
'console_scripts': ['voltron=voltron:main']
},
zip_safe=False
)
<commit_after>from setuptools import setup, find_packages
setup(
name="voltron",
version="0.1",
author="snare",
author_email="snare@ho.ax",
description=("A UI for GDB & LLDB"),
license="Buy snare a beer",
keywords="voltron gdb lldb",
url="https://github.com/snarez/voltron",
packages=['voltron'],
install_requires=['scruffington', 'flask', 'blessed', 'pygments', 'requests_unixsocket'],
data_files=['dbgentry.py'],
package_data={'voltron': ['config/*']},
install_package_data=True,
entry_points={
'console_scripts': ['voltron=voltron:main']
},
zip_safe=False
)
|
<commit_msg>Improve test cases for lms::Endian
<commit_before>
TEST(Endian, uint16) {
using lms::Endian;
ASSERT_EQ(uint16_t(0xCAFEu), Endian::betoh(Endian::htobe(uint16_t(0xCAFEu))));
ASSERT_EQ(uint16_t(0xCAFEu), Endian::letoh(Endian::htole(uint16_t(0xCAFEu))));
}
TEST(Endian, uint32) {
using lms::Endian;
ASSERT_EQ(uint32_t(0xDEADBEEFu), Endian::betoh(Endian::htobe(uint32_t(0xDEADBEEFu))));
ASSERT_EQ(uint32_t(0xDEADBEEFu), Endian::letoh(Endian::htole(uint32_t(0xDEADBEEFu))));
}
TEST(Endian, uint64) {
using lms::Endian;
ASSERT_EQ(uint64_t(0xFEEDCAFEDEADBEEFu), Endian::betoh(Endian::htobe(uint64_t(0xFEEDCAFEDEADBEEFu))));
ASSERT_EQ(uint64_t(0xFEEDCAFEDEADBEEFu), Endian::letoh(Endian::htole(uint64_t(0xFEEDCAFEDEADBEEFu))));
}
<commit_after>
TEST(Endian, uint16) {
using lms::Endian;
ASSERT_EQ(0xFECAu, Endian::letoh(Endian::htobe(uint16_t(0xCAFEu))));
ASSERT_EQ(0xFECAu, Endian::betoh(Endian::htole(uint16_t(0xCAFEu))));
}
TEST(Endian, uint32) {
using lms::Endian;
ASSERT_EQ(0xEFBEADDEu, Endian::letoh(Endian::htobe(uint32_t(0xDEADBEEFu))));
ASSERT_EQ(0xEFBEADDEu, Endian::betoh(Endian::htole(uint32_t(0xDEADBEEFu))));
}
TEST(Endian, uint64) {
using lms::Endian;
ASSERT_EQ(0xEFBEADDEFECAEDFEu, Endian::letoh(Endian::htobe(0xFEEDCAFEDEADBEEFu)));
ASSERT_EQ(0xEFBEADDEFECAEDFEu, Endian::betoh(Endian::htole(0xFEEDCAFEDEADBEEFu)));
}
|
<commit_msg>Add pytest-watch to console scripts to match the name.
<commit_before>import os
from setuptools import setup, find_packages
def read(filename):
with open(os.path.join(os.path.dirname(__file__), filename)) as f:
return f.read()
setup(
name='pytest-watch',
version='3.1.0',
description='Local continuous test runner with pytest and watchdog.',
long_description=read('README.md'),
author='Joe Esposito',
author_email='joe@joeyespo.com',
url='http://github.com/joeyespo/pytest-watch',
license='MIT',
platforms='any',
packages=find_packages(),
install_requires=read('requirements.txt').splitlines(),
entry_points={
'console_scripts': [
'py.test.watch = pytest_watch.command:main',
'ptw = pytest_watch.command:main',
]
},
)
<commit_after>import os
from setuptools import setup, find_packages
def read(filename):
with open(os.path.join(os.path.dirname(__file__), filename)) as f:
return f.read()
setup(
name='pytest-watch',
version='3.1.0',
description='Local continuous test runner with pytest and watchdog.',
long_description=read('README.md'),
author='Joe Esposito',
author_email='joe@joeyespo.com',
url='http://github.com/joeyespo/pytest-watch',
license='MIT',
platforms='any',
packages=find_packages(),
install_requires=read('requirements.txt').splitlines(),
entry_points={
'console_scripts': [
'py.test.watch = pytest_watch.command:main',
'pytest-watch = pytest_watch.command:main',
'ptw = pytest_watch.command:main',
]
},
)
|
<commit_msg>Allow loading of different box sizes
<commit_before>"""Little module to find the path of a Cosmo box simulation"""
import os.path as path
base=path.expanduser("~/data/Cosmo/")
def get_name(sim, ff=True):
"""Get the directory for a simulation"""
halo = "Cosmo"+str(sim)+"_V6"
if ff:
halo=path.join(halo,"L25n512/output")
else:
halo=path.join(halo,"L25n256")
return path.join(base, halo)
<commit_after>"""Little module to find the path of a Cosmo box simulation"""
import os.path as path
base=path.expanduser("~/data/Cosmo/")
def get_name(sim, ff=True, box=25):
"""Get the directory for a simulation"""
halo = "Cosmo"+str(sim)+"_V6"
if ff:
halo=path.join(halo,"L"+str(box)+"n512/output")
else:
halo=path.join(halo,"L"+str(box)+"256")
return path.join(base, halo)
|
<commit_msg>Fix 'Expression has changed' issue
<commit_before>import {
Directive,
ContentChild,
EventEmitter,
Output,
Input,
AfterViewInit
} from '@angular/core';
import { ViewChild } from '@angular/core';
import { MatMenuTrigger } from '@angular/material';
@Directive({
selector: '[q-grid-menu-trigger]'
})
export class MenuTriggerDirective implements AfterViewInit {
@ContentChild(MatMenuTrigger) public trigger: MatMenuTrigger;
@Output('q-grid-menu-trigger') public onClose = new EventEmitter<any>();
constructor() {}
ngAfterViewInit() {
this.trigger.openMenu();
this.trigger.menuClosed.subscribe(() => {
if (this.onClose) {
setTimeout(() => this.onClose.emit(), 10);
}
});
}
}
<commit_after>import {
Directive,
ContentChild,
EventEmitter,
Output,
Input,
AfterViewInit
} from '@angular/core';
import { MatMenuTrigger } from '@angular/material';
@Directive({
selector: '[q-grid-menu-trigger]'
})
export class MenuTriggerDirective implements AfterViewInit {
@ContentChild(MatMenuTrigger) public trigger: MatMenuTrigger;
@Output('q-grid-menu-trigger') public onClose = new EventEmitter<any>();
constructor() {}
ngAfterViewInit() {
Promise.resolve(null).then(() => this.trigger.openMenu());
this.trigger.menuClosed.subscribe(() => {
if (this.onClose) {
setTimeout(() => this.onClose.emit(), 10);
}
});
}
}
|
<commit_msg>Fix typing for price field component.
<commit_before>import * as React from 'react';
import { defaultCurrency } from '@waldur/core/services';
import { connectPlanComponents } from './utils';
export const PriceField = connectPlanComponents(props => (
<div className="form-control-static">
{defaultCurrency(props.total)}
</div>
));
<commit_after>import * as React from 'react';
import { defaultCurrency } from '@waldur/core/services';
import { connectPlanComponents } from './utils';
export const PriceField = connectPlanComponents((props: {total: number}) => (
<div className="form-control-static">
{defaultCurrency(props.total)}
</div>
));
|
<commit_msg>Change path location of django q
<commit_before>import os
import sys
import django
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath)
VERSION = (0, 9, 2)
default_app_config = 'django_q.apps.DjangoQConfig'
# root imports will slowly be deprecated.
# please import from the relevant sub modules
if django.VERSION[:2] < (1, 9):
from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size
from .models import Task, Schedule, Success, Failure
from .cluster import Cluster
from .status import Stat
from .brokers import get_broker
__all__ = ['conf', 'cluster', 'models', 'tasks']
<commit_after>import django
# myPath = os.path.dirname(os.path.abspath(__file__))
# sys.path.insert(0, myPath)
VERSION = (0, 9, 2)
default_app_config = 'django_q.apps.DjangoQConfig'
# root imports will slowly be deprecated.
# please import from the relevant sub modules
if django.VERSION[:2] < (1, 9):
from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size
from .models import Task, Schedule, Success, Failure
from .cluster import Cluster
from .status import Stat
from .brokers import get_broker
__all__ = ['conf', 'cluster', 'models', 'tasks']
|
<commit_msg>Update test codes to fit test target.<commit_before>package net.folab.fo.jast;
import static org.junit.Assert.*;
import static org.hamcrest.CoreMatchers.*;
import net.folab.fo.bytecode.ByteArrayClassLoader;
import net.folab.fo.jast.AstVisitor;
import net.folab.fo.jast.AstWriter;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class AstWriterTest {
@Before
public void setUp() throws Exception {
}
@After
public void tearDown() throws Exception {
}
@Test
public void testSetName() throws InstantiationException, IllegalAccessException {
AstVisitor av;
Class<?> generatedClass;
byte[] bytecode;
String name;
Object obj;
// - - -
name = "MainClass";
av = new AstWriter();
new ClassDeclaration(name).accept(av);
bytecode = av.toByteArray();
generatedClass = defineClass(name, bytecode);
assertThat(generatedClass.getName(), is(name));
obj = generatedClass.newInstance();
assertThat(obj, is(not(nullValue())));
// - - -
name = "foo.MainClass";
av = new AstWriter();
new ClassDeclaration(name).accept(av);
bytecode = av.toByteArray();
generatedClass = defineClass(name, bytecode);
assertThat(generatedClass.getName(), is(name));
obj = generatedClass.newInstance();
assertThat(obj, is(not(nullValue())));
}
public static Class<?> defineClass(String name, byte[] bytecode) {
return new ByteArrayClassLoader().defineClass(name, bytecode);
}
}
<commit_after>package net.folab.fo.jast;
import static org.junit.Assert.*;
import static org.hamcrest.CoreMatchers.*;
import net.folab.fo.bytecode.ByteArrayClassLoader;
import net.folab.fo.jast.AstWriter;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class AstWriterTest {
private AstWriter writer;
@Before
public void setUp() throws Exception {
writer = new AstWriter();
}
@After
public void tearDown() throws Exception {
}
@Test
public void testSetName() throws InstantiationException, IllegalAccessException {
Class<?> generatedClass;
byte[] bytecode;
String name;
Object obj;
// - - -
name = "MainClass";
new ClassDeclaration(name).accept(writer);
bytecode = writer.toByteArray();
generatedClass = defineClass(name, bytecode);
assertThat(generatedClass.getName(), is(name));
obj = generatedClass.newInstance();
assertThat(obj, is(not(nullValue())));
// - - -
name = "foo.MainClass";
writer = new AstWriter();
new ClassDeclaration(name).accept(writer);
bytecode = writer.toByteArray();
generatedClass = defineClass(name, bytecode);
assertThat(generatedClass.getName(), is(name));
obj = generatedClass.newInstance();
assertThat(obj, is(not(nullValue())));
}
public static Class<?> defineClass(String name, byte[] bytecode) {
return new ByteArrayClassLoader().defineClass(name, bytecode);
}
}
|
<commit_msg>Update the example with latest interface
Update the example with the latest interface of the function "convert"<commit_before>import torch
import torch.nn as nn
import torch.nn.functional as F
from onnx_coreml import convert
# Step 0 - (a) Define ML Model
class small_model(nn.Module):
def __init__(self):
super(small_model, self).__init__()
self.fc1 = nn.Linear(768, 256)
self.fc2 = nn.Linear(256, 10)
def forward(self, x):
y = F.relu(self.fc1(x))
y = F.softmax(self.fc2(y))
return y
# Step 0 - (b) Create model or Load from dist
model = small_model()
dummy_input = torch.randn(768)
# Step 1 - PyTorch to ONNX model
torch.onnx.export(model, dummy_input, './small_model.onnx')
# Step 2 - ONNX to CoreML model
mlmodel = convert(model='./small_model.onnx', target_ios='13')
# Save converted CoreML model
mlmodel.save('small_model.mlmodel')
<commit_after>import torch
import torch.nn as nn
import torch.nn.functional as F
from onnx_coreml import convert
# Step 0 - (a) Define ML Model
class small_model(nn.Module):
def __init__(self):
super(small_model, self).__init__()
self.fc1 = nn.Linear(768, 256)
self.fc2 = nn.Linear(256, 10)
def forward(self, x):
y = F.relu(self.fc1(x))
y = F.softmax(self.fc2(y))
return y
# Step 0 - (b) Create model or Load from dist
model = small_model()
dummy_input = torch.randn(768)
# Step 1 - PyTorch to ONNX model
torch.onnx.export(model, dummy_input, './small_model.onnx')
# Step 2 - ONNX to CoreML model
mlmodel = convert(model='./small_model.onnx', minimum_ios_deployment_target='13')
# Save converted CoreML model
mlmodel.save('small_model.mlmodel')
|
<commit_msg>Add QueueingEventHandler and BlockingEventHandler types
<commit_before>package controllers
import "k8s.io/client-go/tools/cache"
var (
KeyFunc = cache.DeletionHandlingMetaNamespaceKeyFunc
)
<commit_after>package controllers
import (
"reflect"
"k8s.io/apimachinery/pkg/util/runtime"
"k8s.io/client-go/tools/cache"
"k8s.io/client-go/util/workqueue"
)
var (
KeyFunc = cache.DeletionHandlingMetaNamespaceKeyFunc
)
type QueuingEventHandler struct {
Queue workqueue.RateLimitingInterface
}
func (q *QueuingEventHandler) Enqueue(obj interface{}) {
key, err := KeyFunc(obj)
if err != nil {
runtime.HandleError(err)
return
}
q.Queue.Add(key)
}
func (q *QueuingEventHandler) OnAdd(obj interface{}) {
q.Enqueue(obj)
}
func (q *QueuingEventHandler) OnUpdate(old, new interface{}) {
if reflect.DeepEqual(old, new) {
return
}
q.Enqueue(new)
}
func (q *QueuingEventHandler) OnDelete(obj interface{}) {
q.Enqueue(obj)
}
type BlockingEventHandler struct {
WorkFunc func(obj interface{})
}
func (b *BlockingEventHandler) Enqueue(obj interface{}) {
b.WorkFunc(obj)
}
func (b *BlockingEventHandler) OnAdd(obj interface{}) {
b.WorkFunc(obj)
}
func (b *BlockingEventHandler) OnUpdate(old, new interface{}) {
if reflect.DeepEqual(old, new) {
return
}
b.WorkFunc(new)
}
func (b *BlockingEventHandler) OnDelete(obj interface{}) {
b.WorkFunc(obj)
}
|
<commit_msg>Upgrade to using twp.sqlachemy3 3.1.0 which hosts search itself
thus needing a change to setup.py requirements
<commit_before>AUTHOR = 'Chris Dent'
AUTHOR_EMAIL = 'cdent@peermore.com'
NAME = 'tiddlywebplugins.mysql3'
DESCRIPTION = 'MySQL-based store for tiddlyweb'
VERSION = '3.0.13' # don't forget to update __init__.py too
import os
from setuptools import setup, find_packages
setup(
namespace_packages = ['tiddlywebplugins'],
name = NAME,
version = VERSION,
description = DESCRIPTION,
long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author = AUTHOR,
author_email = AUTHOR_EMAIL,
url = 'http://pypi.python.org/pypi/%s' % NAME,
platforms = 'Posix; MacOS X; Windows',
packages = find_packages(exclude=['test']),
install_requires = ['setuptools',
'tiddlyweb>=1.4.2',
'tiddlywebplugins.sqlalchemy3>=3.0.15',
'sqlalchemy>=0.7.0',
'MySQL-python',
'pyparsing<2.0.0',
],
zip_safe = False,
license = 'BSD'
)
<commit_after>AUTHOR = 'Chris Dent'
AUTHOR_EMAIL = 'cdent@peermore.com'
NAME = 'tiddlywebplugins.mysql3'
DESCRIPTION = 'MySQL-based store for tiddlyweb'
VERSION = '3.0.13' # don't forget to update __init__.py too
import os
from setuptools import setup, find_packages
setup(
namespace_packages = ['tiddlywebplugins'],
name = NAME,
version = VERSION,
description = DESCRIPTION,
long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author = AUTHOR,
author_email = AUTHOR_EMAIL,
url = 'http://pypi.python.org/pypi/%s' % NAME,
platforms = 'Posix; MacOS X; Windows',
packages = find_packages(exclude=['test']),
install_requires = ['setuptools',
'tiddlyweb>=1.4.2',
'tiddlywebplugins.sqlalchemy3>=3.1.0',
'MySQL-python',
],
zip_safe = False,
license = 'BSD'
)
|
<commit_msg>Fix amount of space we allocate for non-1 byte types
<commit_before>//! Importers for RAW volume files. A raw volume file is the raw bytes of
//! volume data in little endian, with X increasing first, then Y, then Z.
use std::io::BufReader;
use std::io::prelude::*;
use std::fs::File;
use std::path::Path;
use std::mem;
use std::iter;
use num::{self, NumCast};
use vol::Volume;
use vec3::Vec3i;
/// Import a RAW volume file with the set dimensions. The volume data is
/// expected to be of type T which should be a primitive scalar type
pub fn import<T: NumCast>(path: &Path, dims: Vec3i) -> Volume {
let mut f = match File::open(&path) {
Ok(f) => BufReader::new(f),
Err(e) => panic!("Error opening volume {}", e),
};
let mut data: Vec<_> = iter::repeat(0u8).take((dims.x * dims.y * dims.z) as usize).collect();
f.read_exact(&mut data[..]).expect("Failed to read entire RAW volume");
let data: Vec<f32> = data.chunks(mem::size_of::<T>())
.map(|x| unsafe { mem::transmute_copy::<u8, T>(&x[0]) })
.map(|x| num::cast(x).unwrap()).collect();
let mut volume = Volume::new(dims);
volume.set_region(&data[..], Vec3i::broadcast(0), dims);
volume
}
<commit_after>//! Importers for RAW volume files. A raw volume file is the raw bytes of
//! volume data in little endian, with X increasing first, then Y, then Z.
use std::io::BufReader;
use std::io::prelude::*;
use std::fs::File;
use std::path::Path;
use std::mem;
use std::iter;
use num::{self, NumCast};
use vol::Volume;
use vec3::Vec3i;
/// Import a RAW volume file with the set dimensions. The volume data is
/// expected to be of type T which should be a primitive scalar type
pub fn import<T: NumCast>(path: &Path, dims: Vec3i) -> Volume {
let mut f = match File::open(&path) {
Ok(f) => BufReader::new(f),
Err(e) => panic!("Error opening volume {}", e),
};
let mut data: Vec<_> = iter::repeat(0u8)
.take((dims.x * dims.y * dims.z) as usize * mem::size_of::<T>()).collect();
f.read_exact(&mut data[..]).expect("Failed to read entire RAW volume");
let data: Vec<f32> = data.chunks(mem::size_of::<T>())
.map(|x| unsafe { mem::transmute_copy::<u8, T>(&x[0]) })
.map(|x| num::cast(x).unwrap()).collect();
let mut volume = Volume::new(dims);
volume.set_region(&data[..], Vec3i::broadcast(0), dims);
volume
}
|
<commit_msg>Add fields to Message to support the ticker channel
<commit_before>package gdax
type Message struct {
Type string `json:"type"`
ProductId string `json:"product_id"`
TradeId int `json:"trade_id,number"`
OrderId string `json:"order_id"`
Sequence int64 `json:"sequence,number"`
MakerOrderId string `json:"maker_order_id"`
TakerOrderId string `json:"taker_order_id"`
Time Time `json:"time,string"`
RemainingSize float64 `json:"remaining_size,string"`
NewSize float64 `json:"new_size,string"`
OldSize float64 `json:"old_size,string"`
Size float64 `json:"size,string"`
Price float64 `json:"price,string"`
Side string `json:"side"`
Reason string `json:"reason"`
OrderType string `json:"order_type"`
Funds float64 `json:"funds,string"`
NewFunds float64 `json:"new_funds,string"`
OldFunds float64 `json:"old_funds,string"`
Message string `json:"message"`
Bids [][]string `json:"bids,omitempty"`
Asks [][]string `json:"asks,omitempty"`
Changes [][]string `json:"changes,omitempty"`
}
<commit_after>package gdax
type Message struct {
Type string `json:"type"`
ProductId string `json:"product_id"`
TradeId int `json:"trade_id,number"`
OrderId string `json:"order_id"`
Sequence int64 `json:"sequence,number"`
MakerOrderId string `json:"maker_order_id"`
TakerOrderId string `json:"taker_order_id"`
Time Time `json:"time,string"`
RemainingSize float64 `json:"remaining_size,string"`
NewSize float64 `json:"new_size,string"`
OldSize float64 `json:"old_size,string"`
Size float64 `json:"size,string"`
Price float64 `json:"price,string"`
Side string `json:"side"`
Reason string `json:"reason"`
OrderType string `json:"order_type"`
Funds float64 `json:"funds,string"`
NewFunds float64 `json:"new_funds,string"`
OldFunds float64 `json:"old_funds,string"`
Message string `json:"message"`
Bids [][]string `json:"bids,omitempty"`
Asks [][]string `json:"asks,omitempty"`
Changes [][]string `json:"changes,omitempty"`
LastSize float64 `json:"last_size,string"`
BestBid float64 `json:"best_bid,string"`
BestAsk float64 `json:"best_ask,string"`
}
|
<commit_msg>Add documentation for NSURL category
<commit_before>//
// NSURL+Pinmark.h
// Pinmark
//
// Created by Kyle Stevens on 12/24/13.
// Copyright (c) 2013 kilovolt42. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface NSURL (Pinmark)
- (NSDictionary *)queryParameters;
@end
<commit_after>//
// NSURL+Pinmark.h
// Pinmark
//
// Created by Kyle Stevens on 12/24/13.
// Copyright (c) 2013 kilovolt42. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface NSURL (Pinmark)
/**
* Creates a dictionary of the query parameters in which fields are keys for the corresponding values.
* Does not support array parameters at this time.
*
* @return A dictionary of query parameters.
*/
- (NSDictionary *)queryParameters;
@end
|
<commit_msg>Make host and port configurable thru envvars
<commit_before>
from website.app import init_app
app = init_app('website.settings', set_backends=True, routes=True)
if __name__ == '__main__':
app.run(host='127.0.0.1', port=5000)
<commit_after>import os
from website.app import init_app
app = init_app('website.settings', set_backends=True, routes=True)
if __name__ == '__main__':
host = os.environ.get('OSF_HOST', None)
port = os.environ.get('OSF_PORT', None)
app.run(host=host, port=port)
|
<commit_msg>Add Element admin view in admin url
<commit_before>from .views import AdminHandler, CubeHandler, ConnectionHandler
INCLUDE_URLS = [
(r"/admin", AdminHandler),
(r"/admin/connection", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
]
<commit_after>from .views import AdminHandler, CubeHandler, ConnectionHandler
from .views import ElementHandler
INCLUDE_URLS = [
(r"/admin", AdminHandler),
(r"/admin/connection", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/element/?(?P<slug>[\w-]+)?", ElementHandler),
]
|
<commit_msg>Add support for DummyTSDB backend.
<commit_before>from __future__ import absolute_import
from sentry.tsdb.base import BaseTSDB
class DummyTSDB(BaseTSDB):
"""
A no-op time-series storage.
"""
def incr(self, model, key, timestamp=None, count=1):
pass
def get_range(self, model, keys, start, end, rollup=None):
return dict((k, []) for k in keys)
<commit_after>from __future__ import absolute_import
from sentry.tsdb.base import BaseTSDB
class DummyTSDB(BaseTSDB):
"""
A no-op time-series storage.
"""
def incr(self, model, key, timestamp=None, count=1):
pass
def get_range(self, model, keys, start, end, rollup=None):
return dict((k, []) for k in keys)
def record(self, model, key, values, timestamp=None):
pass
def get_distinct_counts_series(self, model, keys, start, end=None, rollup=None):
return {k: [] for k in keys}
def get_distinct_counts_totals(self, model, keys, start, end=None, rollup=None):
return {k: 0 for k in keys}
|
<commit_msg>Add css style to Dashboard sidebar item<commit_before>package org.panifex.platform.module.dashboard.impl;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.aries.blueprint.annotation.Bean;
import org.apache.aries.blueprint.annotation.Service;
import org.panifex.platform.module.api.sidebar.Sidebar;
import org.panifex.platform.module.api.sidebar.DefaultSidebarCommand;
import org.panifex.platform.module.api.sidebar.SidebarItem;
@Bean(id = DashboardSidebar.ID)
@Service(interfaces = Sidebar.class)
public class DashboardSidebar implements Sidebar {
public final static String ID = "org.panifex.platform.module.dashboard.impl.DashboardSidebar";
private Collection<SidebarItem> sidebarItems = new ArrayList<>();
/**
* Initializes Dashboard sidebar items;
*/
public DashboardSidebar() {
// create dashboard sidebar item
DefaultSidebarCommand dashboardItem = new DefaultSidebarCommand(
"Dashboard",
DashboardContent.ID,
0);
// add item to list
sidebarItems.add(dashboardItem);
}
@Override
public Collection<SidebarItem> getSidebarItems() {
return sidebarItems;
}
}
<commit_after>package org.panifex.platform.module.dashboard.impl;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.aries.blueprint.annotation.Bean;
import org.apache.aries.blueprint.annotation.Service;
import org.panifex.platform.module.api.sidebar.Sidebar;
import org.panifex.platform.module.api.sidebar.DefaultSidebarCommand;
import org.panifex.platform.module.api.sidebar.SidebarItem;
@Bean(id = DashboardSidebar.ID)
@Service(interfaces = Sidebar.class)
public class DashboardSidebar implements Sidebar {
public final static String ID = "org.panifex.platform.module.dashboard.impl.DashboardSidebar";
private Collection<SidebarItem> sidebarItems = new ArrayList<>();
/**
* Initializes Dashboard sidebar items;
*/
public DashboardSidebar() {
// create dashboard sidebar item
DefaultSidebarCommand dashboardItem = new DefaultSidebarCommand(
"Dashboard",
DashboardContent.ID,
0);
dashboardItem.setIconSclass("glyphicon glyphicon-home");
// add item to list
sidebarItems.add(dashboardItem);
}
@Override
public Collection<SidebarItem> getSidebarItems() {
return sidebarItems;
}
}
|
<commit_msg>Include Python-slugify to begin using their slugging function
<commit_before>from setuptools import setup, find_packages
setup(
name='dataset',
version='0.3.13',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.8.1',
'sqlalchemy-migrate >= 0.7',
"argparse >= 1.2.1",
"PyYAML >= 3.10"
],
tests_require=[],
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
<commit_after>from setuptools import setup, find_packages
setup(
name='dataset',
version='0.3.13',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.8.1',
'sqlalchemy-migrate >= 0.7',
"argparse >= 1.2.1",
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
],
tests_require=[],
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
|
<commit_msg>Add Print Statement For Easier Debugging
<commit_before>
import urllib2
import logging
HOST='http://continuous-deployment-python.appspot.com'
response = urllib2.urlopen("{}/get_author/ulysses".format(HOST))
html = response.read()
assert(html == "James Joyce")
<commit_after>
import urllib2
import logging
HOST='http://continuous-deployment-python.appspot.com'
response = urllib2.urlopen("{}/get_author/ulysses".format(HOST))
html = response.read()
print(html)
assert(html == "James Joyce")
|
<commit_msg>Remove buggy log message if prctl is missing
- it's not that important that you need to be informed of it, and
importing logging may cause cyclic dependencies/other problems
<commit_before>import threading
try:
import prctl
def set_thread_name(name): prctl.set_name(name)
def _thread_name_hack(self):
set_thread_name(self.name)
threading.Thread.__bootstrap_original__(self)
threading.Thread.__bootstrap_original__ = threading.Thread._Thread__bootstrap
threading.Thread._Thread__bootstrap = _thread_name_hack
except ImportError:
log('WARN: prctl module is not installed. You will not be able to see thread names')
def set_thread_name(name): pass
class StoppableThread(object):
def initStop(self):
self.stop = threading.Event()
self._stopped = False
def stopThread(self):
self._stopped = True
self.stop.set()
<commit_after>import threading
try:
import prctl
def set_thread_name(name): prctl.set_name(name)
def _thread_name_hack(self):
set_thread_name(self.name)
threading.Thread.__bootstrap_original__(self)
threading.Thread.__bootstrap_original__ = threading.Thread._Thread__bootstrap
threading.Thread._Thread__bootstrap = _thread_name_hack
except ImportError:
def set_thread_name(name): pass
class StoppableThread(object):
def initStop(self):
self.stop = threading.Event()
self._stopped = False
def stopThread(self):
self._stopped = True
self.stop.set()
|
<commit_msg>Add ordering for profiles by name
<commit_before>from django.contrib.auth.models import User
from django.views.generic import ListView, UpdateView
from .forms import ProfileForm
from .models import UserProfile
class ProfilesList(ListView):
context_object_name = "profiles"
template_name = "profiles/list.html"
queryset = User.objects.filter(profile__show=True)
class ProfilesListOrganizers(ListView):
context_object_name = "organizers"
template_name = "profiles/organizers.html"
queryset = UserProfile.user_organizers()
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = "profiles/edit.html"
success_url = "/"
def get_object(self, queryset=None):
return UserProfile.objects.get(user=self.request.user)
<commit_after>from django.contrib.auth.models import User
from django.views.generic import ListView, UpdateView
from .forms import ProfileForm
from .models import UserProfile
class ProfilesList(ListView):
context_object_name = "profiles"
template_name = "profiles/list.html"
queryset = User.objects.filter(profile__show=True)
class ProfilesListOrganizers(ListView):
context_object_name = "organizers"
template_name = "profiles/organizers.html"
queryset = UserProfile.user_organizers()
ordering = ["profile__display_name"]
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = "profiles/edit.html"
success_url = "/"
def get_object(self, queryset=None):
return UserProfile.objects.get(user=self.request.user)
|
<commit_msg>Add initial tweet extraction / load query term from config file
<commit_before>import json
# Import the necessary methods from "twitter" library
from twitter import Twitter, OAuth, TwitterHTTPError, TwitterStream
# Import "ConfigParser" library to load settings ("configparser" in python 3)
from ConfigParser import SafeConfigParser
# Load API variables from settings.cfg file
parser = SafeConfigParser()
parser.read('settings.cfg')
settings_dict = dict(parser.items('twitter_settings'))
oauth = OAuth(settings_dict['access_token'], settings_dict['access_secret'], settings_dict['consumer_key'], settings_dict['consumer_secret'])
# Initiate the connection to Twitter REST API
twitter = Twitter(auth=oauth)
# Search for latest tweets about query term
print twitter.search.tweets(q='satan')<commit_after>import json
# Import the necessary methods from "twitter" library
from twitter import Twitter, OAuth, TwitterHTTPError, TwitterStream
# Import "ConfigParser" library to load settings ("configparser" in python 3)
from ConfigParser import SafeConfigParser
# Load API variables from settings.cfg file
parser = SafeConfigParser()
parser.read('settings.cfg')
settings_dict = dict(parser.items('twitter_settings'))
oauth = OAuth(settings_dict['access_token'], settings_dict['access_secret'], settings_dict['consumer_key'], settings_dict['consumer_secret'])
# Initiate the connection to Twitter REST API
twitter = Twitter(auth=oauth)
# Load query term from configuration file
query_term = parser.get('query_settings','query_term')
# Search for latest tweets about query term
# Tweets has components 'search_metadata' and 'statuses' - we want the latter
tweets = twitter.search.tweets(q=query_term)['statuses']
# Extract tweetID, username and text of tweets returned from search
for tweet in tweets:
print tweet['id_str']
print tweet['user']['screen_name']
print tweet['text'] |
<commit_msg>Add shared layer 4 routing method
<commit_before>// Copyright 2020 Oath Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.config.provision.zone;
/**
* The routing methods supported by a zone.
*
* @author mpolden
*/
public enum RoutingMethod {
/** Routing happens through shared routing layer */
shared,
/** Routing happens through a dedicated layer 4 load balancer */
exclusive,
}
<commit_after>// Copyright 2020 Oath Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.config.provision.zone;
/**
* The routing methods supported by a zone.
*
* @author mpolden
*/
public enum RoutingMethod {
/** Routing happens through shared routing layer */
shared,
/** Routing happens through a dedicated layer 4 load balancer */
exclusive,
/** Routing happens through a shared layer 4 load balancer */
shared_layer_4
}
|
<commit_msg>Fix some things in evidence sources
<commit_before>from util import pklload
from collections import defaultdict
import indra.tools.assemble_corpus as ac
if __name__ == '__main__':
# Load cached Statements just before going into the model
stmts = pklload('pysb_stmts')
# Start a dictionary for source counts
sources_count = defaultdict(int)
# Count statements according to sources of evidence
for stmt in stmts:
sources = tuple(sorted(list(set([ev.source_api for ev in stmt.evidence]))))
sources_count[sources] += 1
# Statements from databases only
db_only = 0
# Statements from reading only
reading_only = 0
# Statements from databases and reading
mixture = 0
# Database sources
dbs = set(['bel', 'biopax', 'phosphosite', 'signor'])
# Reader sources
readers = set(['reach', 'trips', 'sparser', 'r3'])
for k, v in sources_count.items():
d = set(k).intersection(dbs)
r = set(k).intersection(readers)
if d and r:
mixture += v
if d and not r:
db_only += v
if r and not d:
reading_only += v
for k, v in sorted(sources_count.items(), key=lambda x: (len(x[1]), x[1])):
sources_str = ','.join(k)
line_str = sources_str + ',' + str(v)
<commit_after>from util import pklload
from collections import defaultdict
import indra.tools.assemble_corpus as ac
if __name__ == '__main__':
# Load cached Statements just before going into the model
stmts = pklload('pysb_stmts')
# Start a dictionary for source counts
sources_count = defaultdict(int)
# Count statements according to sources of evidence
for stmt in stmts:
sources = tuple(sorted(list(set([ev.source_api for ev in stmt.evidence]))))
sources_count[sources] += 1
# Statements from databases only
db_only = 0
# Statements from reading only
reading_only = 0
# Statements from databases and reading
mixture = 0
# Database sources
dbs = set(['bel', 'biopax', 'phosphosite', 'signor'])
# Reader sources
readers = set(['reach', 'trips', 'sparser', 'r3'])
for k, v in sources_count.items():
d = set(k).intersection(dbs)
r = set(k).intersection(readers)
if d and r:
mixture += v
if d and not r:
db_only += v
if r and not d:
reading_only += v
for k, v in sorted(sources_count.items(), key=lambda x: (len(x[0]), ','.join(sorted(x[0])))):
sources_str = ','.join(k)
line_str = sources_str + '\t' + str(v)
print(line_str)
|
<commit_msg>Fix word_count test import paths.
<commit_before>
from __future__ import unicode_literals
import unittest
from manoseimas.scrapy import textutils
class WordCountTest(unittest.TestCase):
def test_get_word_count(self):
word_count = textutils.get_word_count('Žodžiai, lietuviškai.')
self.assertEqual(word_count, 2)
def test_get_words(self):
words = textutils.get_words('Žodžiai, lietuviškai.')
self.assertEqual(words, ['Žodžiai', 'lietuviškai'])
<commit_after>
from __future__ import unicode_literals
import unittest
from manoseimas.common.utils import words
class WordCountTest(unittest.TestCase):
def test_get_word_count(self):
word_count = words.get_word_count('Žodžiai, lietuviškai.')
self.assertEqual(word_count, 2)
def test_get_words(self):
words_list = words.get_words('Žodžiai, lietuviškai.')
self.assertEqual(words_list, ['Žodžiai', 'lietuviškai'])
|
<commit_msg>Load utils in cms contrib package.
<commit_before>
__all__ = ["CMSJobDashboard", "BundleCMSSW"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
<commit_after>
__all__ = ["CMSJobDashboard", "BundleCMSSW", "Site", "lfn_to_pfn"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
from law.contrib.cms.util import Site, lfn_to_pfn
|
<commit_msg>Call randomise if no results
<commit_before>export class IsaacGenerator {
private _count: number;
constructor() {
this._count = 0;
}
public getValue(): number {
return 0;
}
private _randomise(): void {
}
}
<commit_after>export class IsaacGenerator {
private _count: number;
constructor() {
this._count = 0;
}
public getValue(): number {
if (this._count === 0) {
this._randomise();
}
return 0;
}
private _randomise(): void {
}
}
|
<commit_msg>Clean session data when logout
<commit_before>import { Component, OnInit } from '@angular/core';
import { RoutingService } from './../shared/routing/routing.service';
@Component({
selector: 'ts-logout',
template: ''
})
export class LogoutComponent implements OnInit {
constructor(
private routingService: RoutingService
) { }
ngOnInit(): void {
console.log('TODO: really logout');
this.routingService.toLogin();
}
}<commit_after>import { Component, OnInit } from '@angular/core';
import { RoutingService } from './../shared/routing/routing.service';
import { SessionService } from './../shared/session/session.service';
@Component({
selector: 'ts-logout',
template: ''
})
export class LogoutComponent implements OnInit {
constructor(
private routingService: RoutingService,
private sessionService: SessionService
) { }
ngOnInit(): void {
this.sessionService.clean();
this.routingService.toLogin();
}
} |
<commit_msg>Refactor use shallow clone instead of deep clone
<commit_before>import { AttributeTypesAction, AttributeTypesActions, setAttributeTypes, UpdateAttributeTypeAction } from "./attributeTypes.actions"
import { AttributeTypes } from "../../../../codeCharta.model"
const clone = require("rfdc")()
export function attributeTypes(state: AttributeTypes = setAttributeTypes().payload, action: AttributeTypesAction): AttributeTypes {
switch (action.type) {
case AttributeTypesActions.SET_ATTRIBUTE_TYPES:
return clone(action.payload)
case AttributeTypesActions.UPDATE_ATTRIBUTE_TYPE:
return updateAttributeType(state, action)
default:
return state
}
}
function updateAttributeType(state: AttributeTypes, action: UpdateAttributeTypeAction) {
const copy = clone(state)
if (copy[action.payload.category]) {
copy[action.payload.category][action.payload.name] = action.payload.type
}
return copy
}
<commit_after>import { AttributeTypesAction, AttributeTypesActions, setAttributeTypes, UpdateAttributeTypeAction } from "./attributeTypes.actions"
import { AttributeTypes } from "../../../../codeCharta.model"
const clone = require("rfdc")()
export function attributeTypes(state: AttributeTypes = setAttributeTypes().payload, action: AttributeTypesAction): AttributeTypes {
switch (action.type) {
case AttributeTypesActions.SET_ATTRIBUTE_TYPES:
return clone(action.payload)
case AttributeTypesActions.UPDATE_ATTRIBUTE_TYPE:
return updateAttributeType(state, action)
default:
return state
}
}
function updateAttributeType(state: AttributeTypes, action: UpdateAttributeTypeAction): AttributeTypes {
return { ...state, [action.payload.category]: { ...state[action.payload.category], [action.payload.name]: action.payload.type } }
}
|
<commit_msg>Delete unnecessary file in 'media' folder
<commit_before>from io import BytesIO
from PIL import Image
from django.core.files.base import ContentFile
from django.db import transaction
from django.db.utils import IntegrityError
from django.test import TestCase
from .factory import PhotoFactory
class ModelTest(TestCase):
def setUp(self):
self.photo = PhotoFactory()
def tearDown(self):
self.photo.delete()
def test_save(self):
self.assertTrue(self.photo.md5sum)
# prepare new upload image
thumb = Image.new('RGB', (1024, 768), 'red') # the same size and color as self.photo.image
thumb_io = BytesIO()
thumb.save(thumb_io, format='JPEG')
# prevent the purposefully-thrown exception from breaking the entire unittest's transaction
with transaction.atomic():
self.assertRaisesMessage(IntegrityError, "UNIQUE constraint failed: loader_photo.md5sum",
PhotoFactory, image = ContentFile(thumb_io.getvalue(), "test.jpg"),
name = "Uploaded Photo 1")
# no problems with the new different image
up_photo = PhotoFactory(name = "Uploaded Photo 1") # new blue image
self.assertNotEqual(up_photo.md5sum, self.photo.md5sum)
<commit_after>from io import BytesIO
import os
from django.core.files.base import ContentFile
from django.core.files.storage import default_storage
from django.db import transaction
from django.db.utils import IntegrityError
from django.test import TestCase
from PIL import Image
from .factory import PhotoFactory
class ModelTest(TestCase):
def setUp(self):
self.photo = PhotoFactory()
self.up_photo = None
def tearDown(self):
# delete created files from media/photos folder
self.photo.delete()
self.up_photo.delete()
def test_save(self):
self.assertTrue(self.photo.md5sum)
# prepare new upload image
thumb = Image.new('RGB', (1024, 768), 'red') # the same size and color as self.photo.image
thumb_io = BytesIO()
thumb.save(thumb_io, format='JPEG')
# prevent the purposefully-thrown exception from breaking the entire unittest's transaction
with transaction.atomic():
self.assertRaisesMessage(IntegrityError, "UNIQUE constraint failed: loader_photo.md5sum",
PhotoFactory, image = ContentFile(thumb_io.getvalue(), "test.jpg"),
name = "Uploaded Photo 1", thumbnail = None # we won't generate thumbnail image
)
path = default_storage.path(name="photos/test.jpg")
default_storage.delete(path) # remove photo created in 'media' folder
# no problems with the new different image
self.up_photo = PhotoFactory(name = "Uploaded Photo 1") # new blue image
self.assertNotEqual(self.up_photo.md5sum, self.photo.md5sum)
|
End of preview. Expand
in Dataset Viewer.
README.md exists but content is empty.
Use the Edit dataset card button to edit it.
- Downloads last month
- 0