content
stringlengths 128
6.74k
|
---|
<commit_msg>Use js-cookies to get cookie
<commit_before>/**
* Created by rtorres on 9/25/16.
*/
export function getCookie(cname: string): string {
let name: string = cname + '=';
let ca: string[] = document.cookie.split(';');
for(var i = 0; i <ca.length; i++) {
let c: any = ca[i];
while (c.charAt(0)==' ') {
c = c.substring(1);
}
if (c.indexOf(name) == 0) {
return c.substring(name.length,c.length);
}
}
return null;
}<commit_after>/**
* Created by rtorres on 9/25/16.
*/
import * as Cookies from 'js-cookie';
export function getCookie(cname: string): string {
return Cookies.get(cname);
} |
<commit_msg>Use multiple WebKit web processes.
A new feature for WebKit2GTK+ 2.4, we can now specify to use multiple
webkit web processes rather than all webviews sharing the same
process.
If a tab's web process is not shared with any other's (although
currently unimplemented, this may happen when a tab is opened by
another webview, either through javascript or middle-clicking a
hyperlink), killing or crashing that web process only results in one
broken tab rather than crashing the entire browser (WebKit1) or
breaking every tab (WebKit2GTK+ <= 2.2).
<commit_before>// Copyright (c) 2014 Josh Rickmar.
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package main
import (
"github.com/conformal/gotk3/gtk"
"runtime"
)
const HomePage HTMLPageDescription = "https://www.duckduckgo.com/lite"
const (
defaultWinWidth = 1024
defaultWinHeight = 768
)
// RunGUI initializes GTK, creates the toplevel window and all child widgets,
// opens the pages for the default session, and runs the Glib main event loop.
// This function blocks until the toplevel window is destroyed and the event
// loop exits.
func RunGUI() {
gtk.Init(nil)
window, _ := gtk.WindowNew(gtk.WINDOW_TOPLEVEL)
window.Connect("destroy", func() {
gtk.MainQuit()
})
window.SetDefaultGeometry(defaultWinWidth, defaultWinHeight)
window.Show()
session := []PageDescription{HomePage}
pm := NewPageManager(session)
window.Add(pm)
pm.Show()
gtk.Main()
}
func main() {
runtime.GOMAXPROCS(runtime.NumCPU())
RunProfiler("localhost:7070")
RunGUI()
}
<commit_after>// Copyright (c) 2014 Josh Rickmar.
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package main
import (
"runtime"
"github.com/conformal/gotk3/gtk"
"github.com/jrick/go-webkit2/wk2"
)
const HomePage HTMLPageDescription = "https://www.duckduckgo.com/lite"
const (
defaultWinWidth = 1024
defaultWinHeight = 768
)
// RunGUI initializes GTK, creates the toplevel window and all child widgets,
// opens the pages for the default session, and runs the Glib main event loop.
// This function blocks until the toplevel window is destroyed and the event
// loop exits.
func RunGUI() {
gtk.Init(nil)
window, _ := gtk.WindowNew(gtk.WINDOW_TOPLEVEL)
window.Connect("destroy", func() {
gtk.MainQuit()
})
window.SetDefaultGeometry(defaultWinWidth, defaultWinHeight)
window.Show()
wc := wk2.DefaultWebContext()
wc.SetProcessModel(wk2.ProcessModelMultipleSecondaryProcesses)
session := []PageDescription{HomePage}
pm := NewPageManager(session)
window.Add(pm)
pm.Show()
gtk.Main()
}
func main() {
runtime.GOMAXPROCS(runtime.NumCPU())
RunProfiler("localhost:7070")
RunGUI()
}
|
<commit_msg>Remove the hardcode from the settings.
<commit_before>from django.conf import settings as django_settings
from django.utils.translation import ugettext_lazy as _
def settings(request):
if not getattr(django_settings, "SOCIAL", None):
return {}
return {
"SOCIAL_FACEBOOK": django_settings.SOCIAL.get("FACEBOOK", ""),
"SOCIAL_TWITTER": django_settings.SOCIAL.get("TWITTER", ""),
"SOCIAL_GITHUB_REPO": django_settings.SOCIAL.get("GITHUB_REPO", ""),
"GOOGLE_ANALYTICS_ID": django_settings.SOCIAL.get("GOOGLE_ANALYTICS_ID", ""),
"SITE_TITLE": _("People's Archive of Rural India")
}
<commit_after>from django.conf import settings as django_settings
from django.utils.translation import ugettext_lazy as _
def settings(request):
if not getattr(django_settings, "SOCIAL", None):
return {}
return {
"SOCIAL_FACEBOOK": django_settings.SOCIAL.get("FACEBOOK", ""),
"SOCIAL_TWITTER": django_settings.SOCIAL.get("TWITTER", ""),
"SOCIAL_GITHUB_REPO": django_settings.SOCIAL.get("GITHUB_REPO", ""),
"GOOGLE_ANALYTICS_ID": django_settings.SOCIAL.get("GOOGLE_ANALYTICS_ID", ""),
"SITE_TITLE": django_settings.SITE_TITLE
}
|
<commit_msg>Reorder ENV urls redis providers.
<commit_before>import os
from django.conf import settings
SESSION_REDIS_HOST = getattr(
settings,
'SESSION_REDIS_HOST',
'127.0.0.1'
)
SESSION_REDIS_PORT = getattr(
settings,
'SESSION_REDIS_PORT',
6379
)
SESSION_REDIS_DB = getattr(
settings,
'SESSION_REDIS_DB',
0
)
SESSION_REDIS_PREFIX = getattr(
settings,
'SESSION_REDIS_PREFIX',
'django_sessions'
)
SESSION_REDIS_PASSWORD = getattr(
settings,
'SESSION_REDIS_PASSWORD',
None
)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings,
'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH',
None
)
SESSION_REDIS_URL = getattr(
settings,
'SESSION_REDIS_URL',
None
)
SESSION_REDIS_CONNECTION_POOL = getattr(
settings,
'SESSION_REDIS_CONNECTION_POOL',
None
)
if SESSION_REDIS_URL is None:
# redis clouds ENV variables
SESSION_REDIS_ENV_URLS = getattr(
settings,
'SESSION_REDIS_ENV_URLS', (
'REDISTOGO_URL',
'OPENREDIS_URL',
'REDISGREEN_URL',
'MYREDIS_URL',
'REDISCLOUD_URL'
)
)
for url in SESSION_REDIS_ENV_URLS:
redis_env_url = os.environ.get(url)
if redis_env_url:
SESSION_REDIS_URL = redis_env_url
break
<commit_after>import os
from django.conf import settings
SESSION_REDIS_HOST = getattr(
settings,
'SESSION_REDIS_HOST',
'127.0.0.1'
)
SESSION_REDIS_PORT = getattr(
settings,
'SESSION_REDIS_PORT',
6379
)
SESSION_REDIS_DB = getattr(
settings,
'SESSION_REDIS_DB',
0
)
SESSION_REDIS_PREFIX = getattr(
settings,
'SESSION_REDIS_PREFIX',
'django_sessions'
)
SESSION_REDIS_PASSWORD = getattr(
settings,
'SESSION_REDIS_PASSWORD',
None
)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings,
'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH',
None
)
SESSION_REDIS_URL = getattr(
settings,
'SESSION_REDIS_URL',
None
)
SESSION_REDIS_CONNECTION_POOL = getattr(
settings,
'SESSION_REDIS_CONNECTION_POOL',
None
)
if SESSION_REDIS_URL is None:
# redis clouds ENV variables
SESSION_REDIS_ENV_URLS = getattr(
settings,
'SESSION_REDIS_ENV_URLS', (
'REDISCLOUD_URL'
'REDISTOGO_URL',
'OPENREDIS_URL',
'REDISGREEN_URL',
'MYREDIS_URL',
)
)
for url in SESSION_REDIS_ENV_URLS:
redis_env_url = os.environ.get(url)
if redis_env_url:
SESSION_REDIS_URL = redis_env_url
break
|
<commit_msg>Replace TODO with comment explaining why it wasn't possible
<commit_before>
import io
from .platform import PY3
def file_writer(path=None, encoding='UTF-8', newline=None):
if path:
f = io.open(path, 'w', encoding=encoding, newline=newline)
else:
f = io.StringIO(newline=newline)
if PY3:
return f
# TODO: Consider removing this and using u'' or `from __future__ import
# unicode_literals` everywhere.
write = f.write
f.write = lambda text: write(unicode(text))
return f
def binary_file_writer(path=None):
if path:
return io.open(path, 'wb')
f = io.BytesIO()
getvalue = f.getvalue
f.getvalue = lambda encoding='UTF-8': getvalue().decode(encoding)
return f
<commit_after>
import io
from .platform import PY3
def file_writer(path=None, encoding='UTF-8', newline=None):
if path:
f = io.open(path, 'w', encoding=encoding, newline=newline)
else:
f = io.StringIO(newline=newline)
if PY3:
return f
# These streams require written text to be Unicode. We don't want to add
# `u` prefix to all our strings in Python 2, and cannot really use
# `unicode_literals` either because many other Python 2 APIs accept only
# byte strings.
write = f.write
f.write = lambda text: write(unicode(text))
return f
def binary_file_writer(path=None):
if path:
return io.open(path, 'wb')
f = io.BytesIO()
getvalue = f.getvalue
f.getvalue = lambda encoding='UTF-8': getvalue().decode(encoding)
return f
|
<commit_msg>Add some extra process names in case someone using the library needs to perform/monitor extra loading processes.<commit_before>
package uk.me.mjt.ch.status;
public enum MonitoredProcess {
LOAD_NODES, LOAD_WAYS, INDEX_MAP_DATA, VALIDATE_DATA
}
<commit_after>
package uk.me.mjt.ch.status;
public enum MonitoredProcess {
LOAD_NODES, LOAD_WAYS, INDEX_MAP_DATA, VALIDATE_DATA, EXTRA_PROCESS_1, EXTRA_PROCESS_2
}
|
<commit_msg>Fix build on non-windows platforms.
<commit_before>
namespace dux {
#ifdef _WIN32
ThreadChecker::ThreadChecker() : thread_id_(GetCurrentThreadId()) {}
bool ThreadChecker::IsCreationThreadCurrent() const {
return GetCurrentThreadId() == thread_id_;
}
#endif
} // namespace dux
<commit_after>
namespace dux {
ThreadChecker::ThreadChecker() : thread_id_(GetCurrentThreadId()) {}
bool ThreadChecker::IsCreationThreadCurrent() const {
return GetCurrentThreadId() == thread_id_;
}
} // namespace dux
#endif
|
<commit_msg>Change name of compare function in test grid
<commit_before>import pytest
from aimaPy.grid import *
compare = lambda x, y: all([elm_x == y[i] for i, elm_x in enumerate(x)])
def test_distance():
assert distance((1, 2), (5, 5)) == 5.0
def test_distance_squared():
assert distance_squared((1, 2), (5, 5)) == 25.0
def test_clip():
list_ = [clip(x, 0, 1) for x in [-1, 0.5, 10]]
res = [0, 0.5, 1]
assert compare(list_, res)
def test_vector_clip():
assert vector_clip((-1, 10), (0, 0), (9, 9)) == (0, 9)
if __name__ == '__main__':
pytest.main()
<commit_after>import pytest
from aimaPy.grid import *
compare_list = lambda x, y: all([elm_x == y[i] for i, elm_x in enumerate(x)])
def test_distance():
assert distance((1, 2), (5, 5)) == 5.0
def test_distance_squared():
assert distance_squared((1, 2), (5, 5)) == 25.0
def test_clip():
list_ = [clip(x, 0, 1) for x in [-1, 0.5, 10]]
res = [0, 0.5, 1]
assert compare_list(list_, res)
def test_vector_clip():
assert vector_clip((-1, 10), (0, 0), (9, 9)) == (0, 9)
if __name__ == '__main__':
pytest.main()
|
<commit_msg>Return only english tweet and print the body of the tweet for analysis via other tools
<commit_before>import os
from utils import Reader
if __name__ == '__main__':
working_directory = os.getcwd()
files = Reader.read_directory(working_directory)
for f in files:
tweets = Reader.read_file(f)
for tweet in tweets:
print '{}, {}'.format(tweet.verb(), tweet.timestamp())
<commit_after>import os
from utils import Reader
if __name__ == '__main__':
working_directory = os.getcwd()
files = Reader.read_directory(working_directory)
for f in files:
tweets = Reader.read_file(f)
eng_tweets = filter(lambda t: t.language() == 'en', tweets)
for tweet in tweets:
print '{}, {}, {}'.format(tweet.verb(), tweet.timestamp(), tweet.body())
|
<commit_msg>Change parameters power_curve and power_output to attributes
<commit_before>
__copyright__ = "Copyright oemof developer group"
__license__ = "GPLv3"
import numpy as np
class WindFarm(object):
"""
"""
def __init__(self, wind_farm_name, wind_turbine_fleet, coordinates,
power_curve=None, power_output=None):
self.wind_farm_name = wind_farm_name
self.wind_turbine_fleet = wind_turbine_fleet
self.coordinates = coordinates
self.power_curve = power_curve
self.power_output = power_output
# def wind_park_p_curve(self):
# p_curve = np.sum([self.wind_turbines[i].power_curve
# for i in range(len(self.wind_turbines))], axis=0)
# return p_curve
<commit_after>
__copyright__ = "Copyright oemof developer group"
__license__ = "GPLv3"
import numpy as np
class WindFarm(object):
"""
def __init__(self, wind_farm_name, wind_turbine_fleet, coordinates):
self.wind_farm_name = wind_farm_name
self.wind_turbine_fleet = wind_turbine_fleet
self.coordinates = coordinates
self.power_curve = None
self.power_output = None
# def wind_park_p_curve(self):
# p_curve = np.sum([self.wind_turbines[i].power_curve
# for i in range(len(self.wind_turbines))], axis=0)
# return p_curve
|
<commit_msg>Fix bug in admin user editing
Fixes KeyError when creating or editing a UniqueEmailUser in the admin
interface.
<commit_before>from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from .models import UniqueEmailUser
class UniqueEmailUserCreationForm(UserCreationForm):
"""
A form that creates a UniqueEmailUser.
"""
def __init__(self, *args, **kargs):
super(UniqueEmailUserCreationForm, self).__init__(*args, **kargs)
del self.fields['username']
class Meta:
model = UniqueEmailUser
fields = ("email",)
class UniqueEmailUserChangeForm(UserChangeForm):
"""
A form for updating a UniqueEmailUser.
"""
def __init__(self, *args, **kargs):
super(UniqueEmailUserChangeForm, self).__init__(*args, **kargs)
del self.fields['username']
class Meta:
model = UniqueEmailUser
fields = ("email",)
<commit_after>from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from .models import UniqueEmailUser
class UniqueEmailUserCreationForm(UserCreationForm):
"""
A form that creates a UniqueEmailUser.
"""
class Meta:
model = UniqueEmailUser
fields = ("email",)
class UniqueEmailUserChangeForm(UserChangeForm):
"""
A form for updating a UniqueEmailUser.
"""
class Meta:
model = UniqueEmailUser
fields = ("email",)
|
<commit_msg>Store password in session after successful login.
<commit_before>from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
<commit_after>from django.shortcuts import render_to_response, render
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from forms import LoginForm
def index(request):
return render_to_response('index.html', {})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
user = form.cleaned_data['user']
if user is not None and user.is_active:
request.session['password'] = form.cleaned_data['password']
login(request, user)
return HttpResponseRedirect('/')
else:
form = LoginForm()
return render(request, 'login.html', {
'form': form,
})
|
<commit_msg>Make regex a None property
<commit_before>""" Class to encapsulate a message """
import logging
class Message:
""" A message object """
def __init__(self, text, user, room, connector):
""" Create object with minimum properties """
self.text = text
self.user = user
self.room = room
self.connector = connector
def respond(self, text):
""" Respond to this message using the connector it was created by """
self.text = text
self.connector.respond(self)
<commit_after>""" Class to encapsulate a message """
import logging
class Message:
""" A message object """
def __init__(self, text, user, room, connector):
""" Create object with minimum properties """
self.text = text
self.user = user
self.room = room
self.connector = connector
self.regex = None
def respond(self, text):
""" Respond to this message using the connector it was created by """
self.text = text
self.connector.respond(self)
|
<commit_msg>Remove redundant Scan ID printing (there is another one elsewhere)
<commit_before>import logging
# metadata set at startup
RE.md['owner'] = 'xf11id'
RE.md['beamline_id'] = 'CHX'
# removing 'custom' as it is raising an exception in 0.3.2
# gs.RE.md['custom'] = {}
def print_scanid(name, doc):
if name == 'start':
print('Scan ID:', doc['scan_id'])
print('Unique ID:', doc['uid'])
def print_md(name, doc):
if name == 'start':
print('Metadata:\n', repr(doc))
RE.subscribe(print_scanid)
#from eiger_io.fs_handler import LazyEigerHandler
#db.fs.register_handler("AD_EIGER", LazyEigerHandler)
<commit_after>import logging
# metadata set at startup
RE.md['owner'] = 'xf11id'
RE.md['beamline_id'] = 'CHX'
# removing 'custom' as it is raising an exception in 0.3.2
# gs.RE.md['custom'] = {}
def print_md(name, doc):
if name == 'start':
print('Metadata:\n', repr(doc))
RE.subscribe(print_scanid)
#from eiger_io.fs_handler import LazyEigerHandler
#db.fs.register_handler("AD_EIGER", LazyEigerHandler)
|
<commit_msg>Fix user not defined error for not logged in users
<commit_before>from django import forms
from django_fixmystreet.fixmystreet.models import FMSUser, getLoggedInUserId
from django.contrib.auth.models import User
from django.conf import settings
from django.utils.translation import ugettext_lazy
from django.contrib.sessions.models import Session
class ManagersChoiceField (forms.fields.ChoiceField):
def __init__(self, *args, **kwargs):
# assemble the opt groups.
choices = []
choices.append(('', ugettext_lazy("Select a manager")))
currentUserOrganisationId = FMSUser.objects.get(pk=getLoggedInUserId(Session.objects.all()[0].session_key)).organisation
managers = FMSUser.objects.filter(manager=True)
managers = managers.filter(organisation_id=currentUserOrganisationId)
for manager in managers:
choices.append((manager.pk,manager.first_name+manager.last_name))
super(ManagersChoiceField,self).__init__(choices,*args,**kwargs)
def clean(self, value):
super(ManagersChoiceField,self).clean(value)
try:
model = FMSUser.objects.get(pk=value)
except FMSUser.DoesNotExist:
raise ValidationError(self.error_messages['invalid_choice'])
return model
class ManagersListForm(forms.Form):
manager=ManagersChoiceField(label="")<commit_after>from django import forms
from django_fixmystreet.fixmystreet.models import FMSUser, getLoggedInUserId
from django.contrib.auth.models import User
from django.conf import settings
from django.utils.translation import ugettext_lazy
from django.contrib.sessions.models import Session
from django.contrib.auth.decorators import login_required
class ManagersChoiceField (forms.fields.ChoiceField):
def __init__(self, *args, **kwargs):
choices = []
choices.append(('', ugettext_lazy("Select a manager")))
currentUserOrganisationId = 1
if Session.objects.all()[0].session_key:
currentUserOrganisationId = FMSUser.objects.get(pk=getLoggedInUserId(Session.objects.all()[0].session_key)).organisation
managers = FMSUser.objects.filter(manager=True)
managers = managers.filter(organisation_id=currentUserOrganisationId)
for manager in managers:
choices.append((manager.pk,manager.first_name+manager.last_name))
super(ManagersChoiceField,self).__init__(choices,*args,**kwargs)
def clean(self, value):
super(ManagersChoiceField,self).clean(value)
try:
model = FMSUser.objects.get(pk=value)
except FMSUser.DoesNotExist:
raise ValidationError(self.error_messages['invalid_choice'])
return model
class ManagersListForm(forms.Form):
manager=ManagersChoiceField(label="") |
<commit_msg>Bring down the server on (some?) uncaught errors.
I added an errback to the LoopingCall for gameStateManager.tick, so
it'll be called if any exception gets raised out of one of those calls.
The errback just prints a traceback and then brings down the server,
ensuring that other clients get disconnected as well.
This is at least some progress on #31, though it's hard to know if the
issue is really fully fixed.
<commit_before>from twisted.internet.task import LoopingCall
from twisted.python import log as twistedLog
from src.shared import config
from src.server.game_state_manager import GameStateManager
from src.server.networking import runServer, ConnectionManager
from src.server.stdio import setupStdio
def main(args):
connections = ConnectionManager()
gameStateManager = GameStateManager(connections)
connections.setGameStateHandler(gameStateManager)
setupStdio(gameStateManager)
loop = LoopingCall(gameStateManager.tick)
deferred = loop.start(config.TICK_LENGTH)
deferred.addErrback(twistedLog.err)
runServer(args.port, connections)
<commit_after>from twisted.internet import reactor
from twisted.internet.task import LoopingCall
from twisted.python import log as twistedLog
from src.shared import config
from src.server.game_state_manager import GameStateManager
from src.server.networking import runServer, ConnectionManager
from src.server.stdio import setupStdio
def unhandledError(reason):
twistedLog.err(reason, "Aborting due to unhandled error.")
reactor.stop()
def main(args):
connections = ConnectionManager()
gameStateManager = GameStateManager(connections)
connections.setGameStateHandler(gameStateManager)
setupStdio(gameStateManager)
loop = LoopingCall(gameStateManager.tick)
deferred = loop.start(config.TICK_LENGTH)
deferred.addErrback(unhandledError)
runServer(args.port, connections)
|
<commit_msg>Support preview mode (don't link, just print matches found)
<commit_before>package main
import (
"fmt"
"os"
"path/filepath"
"github.com/gmcnaughton/gofindhdr/findhdr"
)
func main() {
inpath := "/Users/gmcnaughton/Pictures/Photos Library.photoslibrary/Masters/2017/02"
// inpath := "./test"
outpath := "./out"
optlink := true
// Create output folder
_ = os.Mkdir(outpath, 0755)
count := 0
findhdr.Find(inpath, func(hdr *findhdr.Hdr) {
for _, image := range hdr.Images() {
count++
link := filepath.Join(outpath, image.Info.Name())
if optlink {
fmt.Println("Linking", link)
err := os.Link(image.Path, link)
if os.IsExist(err) {
fmt.Printf("Skipping %s (file exists)\n", link)
} else if err != nil {
fmt.Printf("Error linking %s\n", link)
fmt.Println(err)
}
} else {
fmt.Println(hdr)
}
}
fmt.Println()
})
fmt.Printf("Found %d hdrs.\n", count)
}
<commit_after>package main
import (
"fmt"
"os"
"path/filepath"
"github.com/gmcnaughton/gofindhdr/findhdr"
)
func main() {
// inpath := "/Users/gmcnaughton/Pictures/Photos Library.photoslibrary/Masters/2017/02"
inpath := "./test"
outpath := "./out"
optlink := false
// Create output folder
if optlink {
err := os.Mkdir(outpath, 0755)
if err != nil && !os.IsExist(err) {
fmt.Println("Error creating out directory", err)
}
}
count := 0
findhdr.Find(inpath, func(hdr *findhdr.Hdr) {
count++
if optlink {
for _, image := range hdr.Images() {
link := filepath.Join(outpath, image.Info.Name())
fmt.Println("Linking", link)
err := os.Link(image.Path, link)
if os.IsExist(err) {
fmt.Println("Skipping", err)
} else if err != nil {
fmt.Println("Error linking", err)
}
}
} else {
fmt.Println(hdr)
}
})
fmt.Printf("Found %d hdrs.\n", count)
}
|
<commit_msg>Use Noise in Burgers demo
<commit_before>from phi.flow import *
domain = Domain([64, 64], boundaries=PERIODIC)
world.add(BurgersVelocity(domain, velocity=lambda s: math.randfreq(s) * 2), physics=Burgers())
show(App('Burgers Equation in %dD' % len(domain.resolution), framerate=5))
<commit_after>from phi.flow import *
domain = Domain([64, 64], boundaries=PERIODIC, box=box[0:100, 0:100])
world.add(BurgersVelocity(domain, velocity=Noise(channels=domain.rank) * 2), physics=Burgers())
show(App('Burgers Equation in %dD' % len(domain.resolution), framerate=5))
|
<commit_msg>Add patch decorator to test_green() function
<commit_before>from greengraph.map import Map
import numpy as np
from nose.tools import assert_equal
import yaml
def test_green():
size = (10,10)
zoom = 10
lat = 50
lon = 50
satellite = True
testMap = Map(lat,lon,satellite,zoom,size)
threshold = 1
trueArray = np.ones(size,dtype=bool)
falseArray = np.zeros(size,dtype=bool)
def assert_images_equal(r,g,b,checkArray):
testPixels = np.dstack((r,g,blue))
testMap.pixels = testPixels
np.testing.assert_array_equal(testMap.green(threshold),checkArray)
green = np.ones(size)
red = np.ones(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
blue = np.zeros(size)
assert_images_equal(red,green,blue,falseArray)
red = np.zeros(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
blue = np.zeros(size)
assert_images_equal(red,green,blue,trueArray)
<commit_after>from greengraph.map import Map
import numpy as np
from nose.tools import assert_equal
from mock import patch
import os
@patch('requests.get')
@patch('matplotlib.image.imread')
@patch('StringIO.StringIO')
def test_green(mock_get,mock_imread,mock_StringIO):
def assert_images_equal(r,g,b,checkArray):
testMap.pixels = np.dstack((r,g,b))
np.testing.assert_array_equal(testMap.green(threshold),checkArray)
lat = 50
lon = 50
testMap = Map(lat,lon)
size = (400,400)
trueArray = np.ones(size,dtype=bool)
falseArray = np.zeros(size,dtype=bool)
threshold = 1
#Check the returned array is false everywhere when the value of the green pixels is identical to the values of the red and blue pixels
green = np.ones(size)
red = np.ones(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
#Check the returned array is false everywhere when the value of the green pixels is greater than the value of the blue pixels but less than the value of the red pixels
blue = np.zeros(size)
assert_images_equal(red,green,blue,falseArray)
#As above but with red and blue pixels switched
red = np.zeros(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
#Check the returned array is true everywhere when the value of the green pixels is greater than the value of the red and blue pixels
blue = np.zeros(size)
assert_images_equal(red,green,blue,trueArray)
|
<commit_msg>Print errors in js building to browser console.
<commit_before>class $mol_build_server extends $mol_server {
expressGenerator() {
return ( req : any , res : any , next : () => void )=> {
try {
return this.generator( req.url ) || next()
} catch( error ) {
$mol_atom_restore( error )
throw error
}
}
}
build() : $mol_build {
return null
}
@ $mol_prop()
generator( path : string ) {
var matched = path.match( /^((?:\/\w+)+)\/-\/(\w+(?:.\w+)+)$/ )
if( !matched ) return null
var build = this.build()
var [ path , path , bundle ] = matched
path = build.root().resolve( path ).path()
build.bundle({ path , bundle })
return <void> null
}
port() {
return 80
}
}
<commit_after>class $mol_build_server extends $mol_server {
expressGenerator() {
return ( req : any , res : any , next : () => void )=> {
try {
return this.generator( req.url ) || next()
} catch( error ) {
$mol_atom_restore( error )
if( req.url.match( /\.js$/ ) ) {
console.error( error )
res.send( `console.error( ${ JSON.stringify( error.message ) } )` ).end()
} else {
throw error
}
}
}
}
build() : $mol_build {
return null
}
@ $mol_prop()
generator( path : string ) {
var matched = path.match( /^((?:\/\w+)+)\/-\/(\w+(?:.\w+)+)$/ )
if( !matched ) return null
var build = this.build()
var [ path , path , bundle ] = matched
path = build.root().resolve( path ).path()
build.bundle({ path , bundle })
return <void> null
}
port() {
return 80
}
}
|
<commit_msg>Clean up Node delete handler.
<commit_before>from astral.api.handlers.base import BaseHandler
from astral.models.node import Node
from astral.api.client import NodesAPI
import sys
import logging
log = logging.getLogger(__name__)
class NodeHandler(BaseHandler):
def delete(self, node_uuid=None):
"""Remove the requesting node from the list of known nodes,
unregistering the from the network.
"""
if node_uuid:
node = Node.get_by(uuid=node_uuid)
else:
node = Node.me()
temp = node
node.delete()
if node_uuid:
node = Node.get_by(uuid=node_uuid)
else:
node = Node.me()
node.delete()
closest_supernode = Node.closest_supernode()
if closest_supernode:
log.info("Notifying closest supernode %s that %s was deleted",
closest_supernode, node)
NodesAPI(closest_supernode.absolute_url()).unregister(node)
sys.exit()
if temp == Node.me():
# TODO kind of a shortcut to shutting down, but should be a bit more
# formal
GenerateConsoleCtrlEvent(CTRL_C_EVENT, 0)
"""shut = Daemon()
shut.stop()
raise KeyboardInterrupt"""
if node == Node.me():
# TODO kind of a shortcut to shutting down, but should be a bit more
# formal
raise KeyboardInterrupt
<commit_after>from astral.api.handlers.base import BaseHandler
from astral.models.node import Node
from astral.api.client import NodesAPI
import logging
log = logging.getLogger(__name__)
class NodeHandler(BaseHandler):
def delete(self, node_uuid=None):
"""Remove the requesting node from the list of known nodes,
unregistering the from the network.
"""
if not node_uuid:
log.info("Shutting down because of request from %s",
self.request.remote_ip)
self.stop()
return
node = Node.get_by(uuid=node_uuid)
closest_supernode = Node.closest_supernode()
if closest_supernode:
log.info("Notifying closest supernode %s that %s was deleted",
closest_supernode, node)
NodesAPI(closest_supernode.absolute_url()).unregister(node)
node.delete()
|
<commit_msg>Revert "Updating ambient light to 2.0 API."
This reverts commit 9a9df3a205dcebfcb358b60ef11df54698ec6dce.
<commit_before>
typedef struct {
int intensity;
bool fired;
} ambient_light_data_t;
// callback for synchronous reads
static void ambient_light_cb(int intensity,
__attribute__ ((unused)) int unused1,
__attribute__ ((unused)) int unused2, void* ud) {
ambient_light_data_t* result = (ambient_light_data_t*)ud;
result->intensity = intensity;
result->fired = true;
}
int ambient_light_read_intensity_sync(int* lux_value) {
int err;
ambient_light_data_t result = {0};
result.fired = false;
err = ambient_light_subscribe(ambient_light_cb, (void*)(&result));
if (err < TOCK_SUCCESS) {
return err;
}
err = ambient_light_start_intensity_reading();
if (err < TOCK_SUCCESS) {
return err;
}
yield_for(&result.fired);
*lux_value = result.intensity;
return TOCK_SUCCESS;
}
int ambient_light_subscribe(subscribe_cb callback, void* userdata) {
return subscribe(DRIVER_NUM_AMBIENT_LIGHT, 0, callback, userdata);
}
int ambient_light_start_intensity_reading(void) {
return command(DRIVER_NUM_AMBIENT_LIGHT, 1, 0, 0);
}
<commit_after>
typedef struct {
int intensity;
bool fired;
} ambient_light_data_t;
// internal callback for faking synchronous reads
static void ambient_light_cb(int intensity,
__attribute__ ((unused)) int unused1,
__attribute__ ((unused)) int unused2, void* ud) {
ambient_light_data_t* result = (ambient_light_data_t*)ud;
result->intensity = intensity;
result->fired = true;
}
int ambient_light_read_intensity_sync(int* lux_value) {
int err;
ambient_light_data_t result = {0};
result.fired = false;
err = ambient_light_subscribe(ambient_light_cb, (void*)(&result));
if (err < TOCK_SUCCESS) {
return err;
}
err = ambient_light_start_intensity_reading();
if (err < TOCK_SUCCESS) {
return err;
}
yield_for(&result.fired);
*lux_value = result.intensity;
return TOCK_SUCCESS;
}
int ambient_light_subscribe(subscribe_cb callback, void* userdata) {
return subscribe(DRIVER_NUM_AMBIENT_LIGHT, 0, callback, userdata);
}
int ambient_light_start_intensity_reading(void) {
return command(DRIVER_NUM_AMBIENT_LIGHT, 1, 0, 0);
}
|
<commit_msg>Add geo info to status update
<commit_before>'''
A simple twitter client that posts current weather to twitter
'''
import tweepy
import json
from urllib2 import urlopen
import os
root =os.path.dirname(os.path.abspath(__file__))
conf = json.loads(file(root+'/twitterconfig.json').read())
auth = tweepy.OAuthHandler(conf['consumerkey'], conf['consumersecret'])
auth.set_access_token(conf['accesstoken'], conf['accesssecret'])
api = tweepy.API(auth)
w = json.loads(urlopen(conf['apiurl']).read())[0]
api.update_status('%(outtemp).1f °C, %(windspeed).1f m/s vind, %(rain).1f mm nedbør' %w);
<commit_after>'''
A simple twitter client that posts current weather to twitter
'''
import tweepy
import json
from urllib2 import urlopen
import os
root = os.path.dirname(os.path.abspath(__file__))
conf = json.loads(file(root+'/twitterconfig.json').read())
auth = tweepy.OAuthHandler(conf['consumerkey'], conf['consumersecret'])
auth.set_access_token(conf['accesstoken'], conf['accesssecret'])
api = tweepy.API(auth)
w = json.loads(urlopen(conf['apiurl']).read())[0]
api.update_status('%(outtemp).1f °C, %(windspeed).1f m/s vind, %(rain).1f mm nedbør' %w,lat=conf['lat'],long=conf['long'])
|
<commit_msg>Extend interfaces with new parsing functions
<commit_before>
class IElfRelocator;
enum class Endianness { Big, Little };
class CArchitecture
{
public:
virtual void AssembleOpcode(const std::wstring& name, const std::wstring& args) = 0;
virtual bool AssembleDirective(const std::wstring& name, const std::wstring& args) = 0;
virtual void NextSection() = 0;
virtual void Pass2() = 0;
virtual void Revalidate() = 0;
virtual int GetWordSize() = 0;
virtual IElfRelocator* getElfRelocator() = 0;
virtual Endianness getEndianness() = 0;
};
class CInvalidArchitecture: public CArchitecture
{
public:
virtual void AssembleOpcode(const std::wstring& name, const std::wstring& args);
virtual bool AssembleDirective(const std::wstring& name, const std::wstring& args);
virtual void NextSection();
virtual void Pass2();
virtual void Revalidate();
virtual int GetWordSize();
virtual IElfRelocator* getElfRelocator();
virtual Endianness getEndianness() { return Endianness::Little; };
};
extern CInvalidArchitecture InvalidArchitecture;
<commit_after>
class IElfRelocator;
class CAssemblerCommand;
class Tokenizer;
enum class Endianness { Big, Little };
class CArchitecture
{
public:
virtual void AssembleOpcode(const std::wstring& name, const std::wstring& args) = 0;
virtual bool AssembleDirective(const std::wstring& name, const std::wstring& args) = 0;
virtual CAssemblerCommand* parseDirective(Tokenizer& tokenizer) { return nullptr; };
virtual CAssemblerCommand* parseOpcode(Tokenizer& tokenizer) { return nullptr; };
virtual void NextSection() = 0;
virtual void Pass2() = 0;
virtual void Revalidate() = 0;
virtual int GetWordSize() = 0;
virtual IElfRelocator* getElfRelocator() = 0;
virtual Endianness getEndianness() = 0;
};
class CInvalidArchitecture: public CArchitecture
{
public:
virtual void AssembleOpcode(const std::wstring& name, const std::wstring& args);
virtual bool AssembleDirective(const std::wstring& name, const std::wstring& args);
virtual void NextSection();
virtual void Pass2();
virtual void Revalidate();
virtual int GetWordSize();
virtual IElfRelocator* getElfRelocator();
virtual Endianness getEndianness() { return Endianness::Little; };
};
extern CInvalidArchitecture InvalidArchitecture;
|
<commit_msg>Decrease patience for each layerwise trainer.
<commit_before>
import matplotlib.pyplot as plt
import theanets
from utils import load_mnist, plot_layers, plot_images
train, valid, _ = load_mnist()
e = theanets.Experiment(
theanets.Autoencoder,
layers=(784, 256, 64, 36, 64, 256, 784),
train_batches=100,
tied_weights=True,
)
e.train(train, valid, optimize='layerwise')
e.train(train, valid)
plot_layers([e.network.get_weights(i) for i in (1, 2, 3)], tied_weights=True)
plt.tight_layout()
plt.show()
valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
<commit_after>
import matplotlib.pyplot as plt
import theanets
from utils import load_mnist, plot_layers, plot_images
train, valid, _ = load_mnist()
e = theanets.Experiment(
theanets.Autoencoder,
layers=(784, 256, 64, 36, 64, 256, 784),
train_batches=100,
tied_weights=True,
)
e.train(train, valid, optimize='layerwise', patience=1, min_improvement=0.1)
e.train(train, valid)
plot_layers([e.network.get_weights(i) for i in (1, 2, 3)], tied_weights=True)
plt.tight_layout()
plt.show()
valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
|
<commit_msg>Add support for colored output in embeds
<commit_before>from discord import Embed
def build_embed(ctx, desc: str, title: str = ''):
name = ctx.message.server.me.nick if ctx.message.server.me.nick is not None else ctx.bot.user.name
embed = Embed(
title=title,
description=desc
)
embed.set_author(name=name, icon_url=ctx.bot.user.avatar_url)
return embed
<commit_after>from enum import IntEnum
from discord import Embed
class OpStatus(IntEnum):
SUCCESS = 0x2ECC71,
FAILURE = 0xc0392B,
WARNING = 0xf39C12
def build_embed(ctx, desc: str, title: str = '', status: OpStatus = OpStatus.SUCCESS) -> Embed:
name = ctx.message.server.me.nick if ctx.message.server.me.nick is not None else ctx.bot.user.name
embed = Embed(
title=title,
description=desc,
color=status.value if status is not None else OpStatus.WARNING
)
embed.set_author(name=name, icon_url=ctx.bot.user.avatar_url)
return embed
|
<commit_msg>MINOR: Print debug info to stderr
<commit_before>
import argparse
from datetime import timedelta
from subprocess import Popen
from time import time, sleep
def red(text):
RED = '\033[91m'
END = '\033[0m'
return RED + text + END
class HowLong(object):
def __init__(self):
parser = argparse.ArgumentParser(description='Time a process')
parser.add_argument('-i', type=float, nargs='?', metavar='interval',
help='the timer interval, defaults to 1 second')
parser.add_argument('command', metavar='C', type=str, nargs='+',
help='a valid command')
self.parsed_args = parser.parse_args()
self.timer_interval = self.parsed_args.i if self.parsed_args.i else 1
self.readable_command = " ".join(self.parsed_args.command)
def run(self):
print("Running", self.readable_command)
process = Popen(self.parsed_args.command)
start_time = time()
while process.poll() is None:
sleep(self.timer_interval)
elapsed_time = (time() - start_time) * 1000
print(red(str(timedelta(milliseconds=elapsed_time))))
print("Finished", self.readable_command)
def howlong():
HowLong().run()
if __name__ == "__main__": howlong()
<commit_after>from __future__ import print_function
import sys
import argparse
from datetime import timedelta
from subprocess import Popen
from time import time, sleep
def red(text):
RED = '\033[91m'
END = '\033[0m'
return RED + text + END
def log(*args):
print(*args, file=sys.stderr)
sys.stderr.flush()
class HowLong(object):
def __init__(self):
parser = argparse.ArgumentParser(description='Time a process')
parser.add_argument('-i', type=float, nargs='?', metavar='interval',
help='the timer interval, defaults to 1 second')
parser.add_argument('command', metavar='C', type=str, nargs='+',
help='a valid command')
self.parsed_args = parser.parse_args()
self.timer_interval = self.parsed_args.i if self.parsed_args.i else 1
self.readable_command = " ".join(self.parsed_args.command)
def run(self):
log("Running", self.readable_command)
process = Popen(self.parsed_args.command)
start_time = time()
while process.poll() is None:
sleep(self.timer_interval)
elapsed_time = (time() - start_time) * 1000
log(red(str(timedelta(milliseconds=elapsed_time))))
log("Finished", self.readable_command)
def howlong():
HowLong().run()
if __name__ == "__main__": howlong()
|
<commit_msg>[flexbe_core] Add method to set a custom execute rate for states
<commit_before>import rospy
from flexbe_core.core.lockable_state import LockableState
class LoopbackState(LockableState):
"""
A state that can refer back to itself.
It periodically transitions to itself while no other outcome is fulfilled.
"""
_loopback_name = 'loopback'
def __init__(self, *args, **kwargs):
self._rate = rospy.Rate(10)
# add loopback outcome
if len(args) > 0 and type(args[0]) is list:
# need this ugly check for list type, because first argument in CBState is the callback
args[0].append(self._loopback_name)
else:
outcomes = kwargs.get('outcomes', [])
outcomes.append(self._loopback_name)
kwargs['outcomes'] = outcomes
super(LoopbackState, self).__init__(*args, **kwargs)
self.__execute = self.execute
self.execute = self._loopback_execute
def _loopback_execute(self, *args, **kwargs):
result = self.__execute(*args, **kwargs)
if result is None or result == 'None':
result = self._loopback_name
return result
<commit_after>import rospy
from flexbe_core.core.lockable_state import LockableState
class LoopbackState(LockableState):
"""
A state that can refer back to itself.
It periodically transitions to itself while no other outcome is fulfilled.
"""
_loopback_name = 'loopback'
def __init__(self, *args, **kwargs):
self._rate = rospy.Rate(10)
# add loopback outcome
if len(args) > 0 and type(args[0]) is list:
# need this ugly check for list type, because first argument in CBState is the callback
args[0].append(self._loopback_name)
else:
outcomes = kwargs.get('outcomes', [])
outcomes.append(self._loopback_name)
kwargs['outcomes'] = outcomes
super(LoopbackState, self).__init__(*args, **kwargs)
self.__execute = self.execute
self.execute = self._loopback_execute
def _loopback_execute(self, *args, **kwargs):
result = self.__execute(*args, **kwargs)
if result is None or result == 'None':
result = self._loopback_name
return result
def set_rate(self, rate):
"""
Set the execution rate of this state,
i.e., the rate with which the execute method is being called.
Note: The rate is best-effort,
a rospy.Rate does not guarantee real-time properties.
@type label: float
@param label: The desired rate in Hz.
"""
self._rate = rospy.Rate(rate)
|
<commit_msg>Add two utility functions to HaxeDummyNode to detect whether a node is a dummy node.
<commit_before>/*
* Copyright 2000-2013 JetBrains s.r.o.
* Copyright 2014-2014 AS3Boyan
* Copyright 2014-2014 Elias Ku
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.plugins.haxe.lang.psi.impl;
import com.intellij.lang.ASTFactory;
import com.intellij.lang.ASTNode;
import com.intellij.psi.TokenType;
import com.intellij.psi.impl.source.tree.LazyParseableElement;
import com.intellij.psi.tree.ILazyParseableElementType;
/**
* Created by ebishton on 9/22/14.
*/
public class HaxeDummyASTNode extends LazyParseableElement {
HaxeDummyASTNode(String text) {
super(TokenType.DUMMY_HOLDER, "DUMMY");
}
}
<commit_after>/*
* Copyright 2000-2013 JetBrains s.r.o.
* Copyright 2014-2014 AS3Boyan
* Copyright 2014-2014 Elias Ku
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.plugins.haxe.lang.psi.impl;
import com.intellij.lang.ASTFactory;
import com.intellij.lang.ASTNode;
import com.intellij.psi.TokenType;
import com.intellij.psi.impl.source.tree.LazyParseableElement;
import com.intellij.psi.tree.ILazyParseableElementType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Created by ebishton on 9/22/14.
*/
public class HaxeDummyASTNode extends LazyParseableElement {
HaxeDummyASTNode(String text) {
super(TokenType.DUMMY_HOLDER, text);
}
public static boolean isDummyNode(@NotNull ASTNode node) {
return ! (node instanceof HaxeDummyASTNode);
}
public static boolean isUsableNode(@Nullable ASTNode node) {
return null != node && !isDummyNode(node);
}
}
|
<commit_msg>Allow use of alternate Django test cases
<commit_before>from __future__ import absolute_import
from django.test import TestCase as dTestCase
from django.test.runner import DiscoverRunner
from snapshottest.reporting import reporting_lines
from .unittest import TestCase as uTestCase
class TestRunner(DiscoverRunner):
separator1 = "=" * 70
separator2 = "-" * 70
def __init__(self, snapshot_update=False, **kwargs):
super(TestRunner, self).__init__(**kwargs)
TestCase.snapshot_should_update = snapshot_update
@classmethod
def add_arguments(cls, parser):
super(TestRunner, cls).add_arguments(parser)
parser.add_argument(
'--snapshot-update', default=False, action='store_true',
dest='snapshot_update', help='Update the snapshots automatically.',
)
def run_tests(self, test_labels, extra_tests=None, **kwargs):
result = super(TestRunner, self).run_tests(
test_labels=test_labels,
extra_tests=extra_tests,
**kwargs
)
self.print_report()
return result
def print_report(self):
print("\n" + self.separator1)
print('SnapshotTest summary')
print(self.separator2)
for line in reporting_lines('python manage.py test'):
print(line)
print(self.separator1)
class TestCase(uTestCase, dTestCase):
pass
<commit_after>from __future__ import absolute_import
from django.test import TestCase as dTestCase
from django.test import SimpleTestCase as dSimpleTestCase
from django.test.runner import DiscoverRunner
from snapshottest.reporting import reporting_lines
from .unittest import TestCase as uTestCase
class TestRunner(DiscoverRunner):
separator1 = "=" * 70
separator2 = "-" * 70
def __init__(self, snapshot_update=False, **kwargs):
super(TestRunner, self).__init__(**kwargs)
uTestCase.snapshot_should_update = snapshot_update
@classmethod
def add_arguments(cls, parser):
super(TestRunner, cls).add_arguments(parser)
parser.add_argument(
'--snapshot-update', default=False, action='store_true',
dest='snapshot_update', help='Update the snapshots automatically.',
)
def run_tests(self, test_labels, extra_tests=None, **kwargs):
result = super(TestRunner, self).run_tests(
test_labels=test_labels,
extra_tests=extra_tests,
**kwargs
)
self.print_report()
return result
def print_report(self):
print("\n" + self.separator1)
print('SnapshotTest summary')
print(self.separator2)
for line in reporting_lines('python manage.py test'):
print(line)
print(self.separator1)
class TestCase(uTestCase, dTestCase):
pass
class SimpleTestCase(uTestCase, dSimpleTestCase):
pass
|
<commit_msg>Rename test to be correct
<commit_before>package com.example.activity;
import android.app.Activity;
import org.junit.runner.RunWith;
import org.robolectric.Robolectric;
import org.robolectric.RobolectricTestRunner;
import static org.junit.Assert.assertTrue;
@RunWith(RobolectricTestRunner.class)
public class MainActivityTest {
@org.junit.Test
public void testSomething() throws Exception {
Activity activity = Robolectric.setupActivity(MainActivity.class);
assertTrue(activity.getTitle().toString().equals("Deckard"));
}
}
<commit_after>package com.example.activity;
import android.app.Activity;
import org.junit.runner.RunWith;
import org.robolectric.Robolectric;
import org.robolectric.RobolectricTestRunner;
import static org.junit.Assert.assertTrue;
@RunWith(RobolectricTestRunner.class)
public class MainActivityTest {
@org.junit.Test
public void titleIsCorrect() throws Exception {
Activity activity = Robolectric.setupActivity(MainActivity.class);
assertTrue(activity.getTitle().toString().equals("Deckard"));
}
}
|
<commit_msg>Reset message into empty string of sinonAssertion
<commit_before>properties = ["SinonSpy", "SinonStub", "SinonMock"]
production_properties = ["spy", "stub", "mock"]
def _clear_item_in_queue(queue):
for item in reversed(queue):
item.restore()
def sinontest(f):
def fn(*args, **kwargs):
ret = f(*args, **kwargs)
# handle production mode (called by sinon.py)
for prop in production_properties:
if "sinon" in f.__globals__ and prop in dir(f.__globals__["sinon"]):
_clear_item_in_queue(getattr(f.__globals__["sinon"], prop)._queue)
# handle unittest (direct use)
for prop in properties:
if prop in f.__globals__.keys():
_clear_item_in_queue(f.__globals__[prop]._queue)
return ret
return fn
class SinonSandbox(object):
def __init__(self):
pass
def create(self, config=None):
pass
def spy(self):
pass
def stub(self):
pass
def mock(self):
pass
def restore(self):
pass
<commit_after>properties = ["SinonSpy", "SinonStub", "SinonMock", "SinonAssertion"]
production_properties = ["spy", "stub", "mock", "assert"]
def _clear_assertion_message(obj):
setattr(obj, "message", "")
def _clear_item_in_queue(queue):
for item in reversed(queue):
item.restore()
def sinontest(f):
def fn(*args, **kwargs):
ret = f(*args, **kwargs)
# handle production mode (called by sinon.py)
for prop in production_properties:
if "sinon" in f.__globals__ and prop in dir(f.__globals__["sinon"]):
if prop == "assert":
_clear_assertion_message(getattr(f.__globals__["sinon"], prop))
else:
_clear_item_in_queue(getattr(f.__globals__["sinon"], prop)._queue)
# handle unittest (direct use)
for prop in properties:
if prop in f.__globals__.keys():
if prop == "SinonAssertion":
_clear_assertion_message(f.__globals__[prop])
else:
_clear_item_in_queue(f.__globals__[prop]._queue)
return ret
return fn
class SinonSandbox(object):
def __init__(self):
pass
def create(self, config=None):
pass
def spy(self):
pass
def stub(self):
pass
def mock(self):
pass
def restore(self):
pass
|
<commit_msg>Fix Kickasstorrents by using one of many mirrors.
<commit_before>from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
PAGES_TO_FETCH = 1
def get_popular(self):
names = []
for page in range(Provider.PAGES_TO_FETCH):
url = "https://kat.cr/usearch/category%%3Ahighres-movies/%s/" % page
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
<commit_after>from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
PAGES_TO_FETCH = 3
def get_popular(self):
names = []
base = "https://kickasstorrents.to/highres-movies/"
# New mirrors can be found at https://thekickasstorrents.com/
for page in range(Provider.PAGES_TO_FETCH):
if page == 0:
url = base
else:
url = base + "%s/" % (page + 1)
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
|
<commit_msg>Return suffix array indices from burrows wheeler
<commit_before>
def burrows_wheeler(text):
"""Returns the burrows wheeler transform of <text>.
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append(text[i:] + text[:i])
all_permutations.sort()
return "".join([w[-1] for w in all_permutations])
<commit_after>
def burrows_wheeler(text):
"""Calculates the burrows wheeler transform of <text>.
returns the burrows wheeler string and the suffix array indices
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append((text[i:] + text[:i],i))
all_permutations.sort()
bw_l = [] # burrows wheeler as list
sa_i = [] # suffix array indices
for w,j in all_permutations:
bw_l.append(w[-1])
sa_i.append(j)
return "".join(bw_l), sa_i
|
<commit_msg>Add test for cli 'add' command
<commit_before>
"""Test CLI interface."""
import os
from .fixtures import record
assert record
def run(cmd):
"""Helper to test running a CLI command."""
os.system('python -m billabong ' + cmd)
def test_cli(record):
"""Test main supported CLI commands."""
ID = record['id']
run('ls')
run('blobs')
run('info ' + ID)
run('search txt')
run('check')
run('push')
run('pull')
run('echo ' + ID)
run('status')
run('version')
<commit_after>
"""Test CLI interface."""
import os
from .fixtures import record
assert record
def run(cmd):
"""Helper to test running a CLI command."""
os.system('python -m billabong ' + cmd)
def test_cli(record):
"""Test main supported CLI commands."""
ID = record['id']
run('ls')
run('records')
run('blobs')
run('info ' + ID)
run('info ' + ID + ' --no-color')
run('search txt')
run('check')
run('push')
run('pull')
run('echo ' + ID)
run('status')
run('version')
run('add hello.txt')
|
<commit_msg>Add support for accessing the response exception
<commit_before>package org.jboss.shamrock.example.testutils;
import java.io.InputStream;
import javax.json.JsonReader;
public interface URLResponse {
int statusCode();
String asString();
InputStream asInputStream();
JsonReader asJsonReader();
}
<commit_after>package org.jboss.shamrock.example.testutils;
import java.io.IOException;
import java.io.InputStream;
import javax.json.JsonReader;
public interface URLResponse {
int statusCode();
IOException exception();
String asString();
InputStream asInputStream();
JsonReader asJsonReader();
}
|
<commit_msg>Add @Alir3z4 as maintainer info
<commit_before>from setuptools import setup, find_packages
setup(
name = "sanitize",
version = "0.33",
description = "Bringing sanitiy to world of messed-up data",
long_description=open('README.md').read(),
author = "Aaron Swartz",
author_email = "me@aaronsw.com",
url='http://www.aaronsw.com/2002/sanitize/',
license=open('LICENCE').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.3',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2'
],
license='BSD',
packages=find_packages(),
py_modules=['sanitize'],
include_package_data=True,
zip_safe=False,
)
<commit_after>from setuptools import setup, find_packages
setup(
name = "sanitize",
version = "0.33",
description = "Bringing sanitiy to world of messed-up data",
long_description=open('README.md').read(),
author = "Aaron Swartz",
author_email = "me@aaronsw.com",
maintainer='Alireza Savand',
maintainer_email='alireza.savand@gmail.com',
url='http://www.aaronsw.com/2002/sanitize/',
license=open('LICENCE').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.3',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2'
],
license='BSD',
packages=find_packages(),
py_modules=['sanitize'],
include_package_data=True,
zip_safe=False,
)
|
<commit_msg>Change WeareableListener tag and implement onMessageReceived
<commit_before>package com.metroveu.metroveu;
import android.app.Service;
import android.content.Intent;
import android.os.IBinder;
import android.util.Log;
import com.google.android.gms.wearable.DataEvent;
import com.google.android.gms.wearable.DataEventBuffer;
import com.google.android.gms.wearable.DataMap;
import com.google.android.gms.wearable.DataMapItem;
import com.google.android.gms.wearable.WearableListenerService;
public class WearConnector extends WearableListenerService {
@Override
public void onDataChanged(DataEventBuffer dataEvents) {
for (DataEvent dataEvent : dataEvents) {
if (dataEvent.getType() == DataEvent.TYPE_CHANGED) {
DataMap dataMap = DataMapItem.fromDataItem(dataEvent.getDataItem()).getDataMap();
String path = dataEvent.getDataItem().getUri().getPath();
if (path.equals("/lines")) {
int number = dataMap.getInt("number");
Log.v("wear", Integer.toString(number));
}
}
}
}
}
<commit_after>package com.metroveu.metroveu;
import android.util.Log;
import com.google.android.gms.wearable.DataEvent;
import com.google.android.gms.wearable.DataEventBuffer;
import com.google.android.gms.wearable.DataMap;
import com.google.android.gms.wearable.DataMapItem;
import com.google.android.gms.wearable.MessageEvent;
import com.google.android.gms.wearable.WearableListenerService;
public class WearConnector extends WearableListenerService {
public void onMessageReceived(MessageEvent messageEvent) {
System.out.println("Message Received on Phone on launch of wear homepage");
if(messageEvent.getPath().equals("/lines")) {
//sendSavedDeals(); //fetch from db and make a datamap object using PutDataRequest
Log.v("JOAN", "Message received");
}
else {
Log.v("JOAN", "Wrong path");
}
}
@Override
public void onDataChanged(DataEventBuffer dataEvents) {
for (DataEvent dataEvent : dataEvents) {
if (dataEvent.getType() == DataEvent.TYPE_CHANGED) {
DataMap dataMap = DataMapItem.fromDataItem(dataEvent.getDataItem()).getDataMap();
String path = dataEvent.getDataItem().getUri().getPath();
if (path.equals("/lines")) {
int number = dataMap.getInt("number");
Log.v("JOAN", Integer.toString(number));
}
}
}
}
}
|
<commit_msg>Fix IllegalArgumentException in Random.nextInt (thanks to Prabakar)
<commit_before>/*
* Copyright 2012 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.planner.core.heuristic.selector.common.iterator;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
/**
* This {@link Iterator} does not shuffle and is never ending.
* @param <S>
*/
public class CachedListRandomIterator<S> implements Iterator<S> {
protected final List<S> cachedList;
protected final Random workingRandom;
public CachedListRandomIterator(List<S> cachedList, Random workingRandom) {
this.cachedList = cachedList;
this.workingRandom = workingRandom;
}
public boolean hasNext() {
return true;
}
public S next() {
int index = workingRandom.nextInt(cachedList.size());
return cachedList.get(index);
}
public void remove() {
throw new UnsupportedOperationException("Remove is not supported.");
}
}
<commit_after>/*
* Copyright 2012 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.planner.core.heuristic.selector.common.iterator;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
/**
* This {@link Iterator} does not shuffle and is never ending.
* @param <S>
*/
public class CachedListRandomIterator<S> implements Iterator<S> {
protected final List<S> cachedList;
protected final Random workingRandom;
protected final boolean notEmpty;
public CachedListRandomIterator(List<S> cachedList, Random workingRandom) {
this.cachedList = cachedList;
this.workingRandom = workingRandom;
notEmpty = !cachedList.isEmpty();
}
public boolean hasNext() {
return notEmpty;
}
public S next() {
int index = workingRandom.nextInt(cachedList.size());
return cachedList.get(index);
}
public void remove() {
throw new UnsupportedOperationException("Remove is not supported.");
}
}
|
<commit_msg>Cache: Index tasks by uuid as well as line number
<commit_before>import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
return task
def __iter__(self):
# iterated_cache = {
while self.cache.keys():
for key in list(self.cache.keys()):
task = self.cache[key]
if all([t.line_number not in self.cache.keys()
for t in task.add_dependencies]):
del self.cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
<commit_after>import copy
import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.uuid_cache = dict()
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
if task.uuid:
self.uuid_cache[task.uuid] = task
return task
def __iter__(self):
iterated_cache = copy.copy(self.cache)
while iterated_cache.keys():
for key in list(iterated_cache.keys()):
task = iterated_cache[key]
if all([t.line_number not in iterated_cache.keys()
for t in task.add_dependencies]):
del iterated_cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
|
<commit_msg>Revert "add ability to order a domain D by the result of a function F and an Ordering O on the image of F"
Use guava
This reverts commit d39b479c87eef9ce0f17d45b1885994f1a9f7fa4.
<commit_before>package com.bbn.bue.common;
import com.google.common.base.Function;
import com.google.common.collect.Ordering;
public final class OrderingUtils {
private OrderingUtils() {
throw new UnsupportedOperationException();
}
/**
* Gets a function which maps any iterable to its minimum according to the supplied {@link
* com.google.common.collect.Ordering}. If no such minimum exists for an input, it will throw an
* exception as specified in {@link Ordering#min(Iterable)}.
*/
public static <T> Function<Iterable<T>, T> minFunction(final Ordering<T> ordering) {
return new Function<Iterable<T>, T>() {
@Override
public T apply(Iterable<T> input) {
return ordering.min(input);
}
};
}
/**
* Gets a function which maps any iterable to its maximum according to the supplied {@link
* com.google.common.collect.Ordering}. If no such maximum exists for an input, it will throw an
* exception as specified in {@link Ordering#max(Iterable)}.
*/
public static <T> Function<Iterable<T>, T> maxFunction(final Ordering<T> ordering) {
return new Function<Iterable<T>, T>() {
@Override
public T apply(Iterable<T> input) {
return ordering.max(input);
}
};
}
/**
* Orders Ts in some domain by their image under F using the onFunctionResultOrdering
*/
public static <T,V> Ordering<T> onResultOf(final Function<T, V> F, final Ordering<V> onFunctionResult) {
return new Ordering<T>() {
@Override
public int compare(final T t, final T t1) {
return onFunctionResult.compare(F.apply(t), F.apply(t1));
}
};
}
}
<commit_after>package com.bbn.bue.common;
import com.google.common.base.Function;
import com.google.common.collect.Ordering;
public final class OrderingUtils {
private OrderingUtils() {
throw new UnsupportedOperationException();
}
/**
* Gets a function which maps any iterable to its minimum according to the supplied {@link
* com.google.common.collect.Ordering}. If no such minimum exists for an input, it will throw an
* exception as specified in {@link Ordering#min(Iterable)}.
*/
public static <T> Function<Iterable<T>, T> minFunction(final Ordering<T> ordering) {
return new Function<Iterable<T>, T>() {
@Override
public T apply(Iterable<T> input) {
return ordering.min(input);
}
};
}
/**
* Gets a function which maps any iterable to its maximum according to the supplied {@link
* com.google.common.collect.Ordering}. If no such maximum exists for an input, it will throw an
* exception as specified in {@link Ordering#max(Iterable)}.
*/
public static <T> Function<Iterable<T>, T> maxFunction(final Ordering<T> ordering) {
return new Function<Iterable<T>, T>() {
@Override
public T apply(Iterable<T> input) {
return ordering.max(input);
}
};
}
}
|
<commit_msg>Correct Java Doc on disconnect() method
Also remove unused imports<commit_before>package org.eclipse.scanning.api.event.core;
import java.util.Map;
import org.eclipse.scanning.api.event.EventException;
public interface ITopicConnection extends IURIConnection {
/**
* The string topic to publish events on for this manager.
* The events will be beans which serialize to JSON.
*
* @return
*/
public String getTopicName();
/**
* Sets the scan topic, causing the connection to be made to that topic and throwing an
* exception if the connection cannot be made.
* @param topic
* @throws EventException
*/
public void setTopicName(String topic) throws EventException;
/**
* Call to disconnect all publishers and subscribers when the connection goes down.
* @throws EventException
*/
public void disconnect() throws EventException;
}
<commit_after>package org.eclipse.scanning.api.event.core;
import org.eclipse.scanning.api.event.EventException;
public interface ITopicConnection extends IURIConnection {
/**
* The string topic to publish events on for this manager.
* The events will be beans which serialize to JSON.
*
* @return
*/
public String getTopicName();
/**
* Sets the scan topic, causing the connection to be made to that topic and throwing an
* exception if the connection cannot be made.
* @param topic
* @throws EventException
*/
public void setTopicName(String topic) throws EventException;
/**
* Call to disconnect this publisher or subscriber. Typically used in a try-finally block.
* @throws EventException
*/
public void disconnect() throws EventException;
}
|
<commit_msg>Make helper functions full `@deploy`s so they support global pyinfra kwargs.
<commit_before>from .configure import configure_kubeconfig, configure_kubernetes_component
from .install import install_kubernetes
def deploy_kubernetes_master(etcd_nodes):
# Install server components
install_kubernetes(components=(
'kube-apiserver', 'kube-scheduler', 'kube-controller-manager',
))
# Configure the API server, passing in our etcd nodes
configure_kubernetes_component('kube-apiserver', etcd_nodes=etcd_nodes)
configure_kubernetes_component('kube-scheduler')
configure_kubernetes_component('kube-controller-manager')
def deploy_kubernetes_node(master_address):
# Install node components
install_kubernetes(components=(
'kubelet', 'kube-proxy',
))
# Setup the kubeconfig for kubelet & kube-proxy to use
configure_kubeconfig(master_address)
configure_kubernetes_component('kubelet')
configure_kubernetes_component('kube-proxy')
<commit_after>from pyinfra.api import deploy
from .configure import configure_kubeconfig, configure_kubernetes_component
from .install import install_kubernetes
@deploy('Deploy Kubernetes master')
def deploy_kubernetes_master(
state, host,
etcd_nodes,
):
# Install server components
install_kubernetes(components=(
'kube-apiserver', 'kube-scheduler', 'kube-controller-manager',
))
# Configure the API server, passing in our etcd nodes
configure_kubernetes_component('kube-apiserver', etcd_nodes=etcd_nodes)
configure_kubernetes_component('kube-scheduler')
configure_kubernetes_component('kube-controller-manager')
@deploy('Deploy Kubernetes node')
def deploy_kubernetes_node(
state, host,
master_address,
):
# Install node components
install_kubernetes(components=(
'kubelet', 'kube-proxy',
))
# Setup the kubeconfig for kubelet & kube-proxy to use
configure_kubeconfig(master_address)
configure_kubernetes_component('kubelet')
configure_kubernetes_component('kube-proxy')
|
<commit_msg>ADD: Test for map that check objectgroup
<commit_before>
TEST_F(TiledFixture, map_attributes_should_be_parsed)
{
ASSERT_EQ("1.0", _parsedMap->get_version());
ASSERT_EQ("1.0.3", _parsedMap->get_tiledVersion());
ASSERT_EQ(Orientation::ORTOGNAL, _parsedMap->get_orientation());
ASSERT_EQ(RenderOrder::RIGHT_DOWN, _parsedMap->get_renderOrder());
ASSERT_EQ(10, _parsedMap->get_width());
ASSERT_EQ(10, _parsedMap->get_height());
ASSERT_EQ(32, _parsedMap->get_tileWidth());
ASSERT_EQ(32, _parsedMap->get_tileHeight());
}
<commit_after>
TEST_F(TiledFixture, map_attributes_should_be_parsed)
{
ASSERT_EQ("1.0", _parsedMap->get_version());
ASSERT_EQ("1.0.3", _parsedMap->get_tiledVersion());
ASSERT_EQ(Orientation::ORTOGNAL, _parsedMap->get_orientation());
ASSERT_EQ(RenderOrder::RIGHT_DOWN, _parsedMap->get_renderOrder());
ASSERT_EQ(10, _parsedMap->get_width());
ASSERT_EQ(10, _parsedMap->get_height());
ASSERT_EQ(32, _parsedMap->get_tileWidth());
ASSERT_EQ(32, _parsedMap->get_tileHeight());
}
TEST_F(TiledFixture, map_should_have_TestObject_parsed)
{
auto group = _parsedMap->getObjectGroup("TestObject");
ASSERT_EQ(group.get_name(), "TestObject");
}
|
<commit_msg>Use a generator for rendering, and pass nodelist unwrapped
<commit_before>
import ast
from . import parse
class Template:
def __init__(self, raw):
self.raw = raw
self.root = parse.parse(raw)
code = ast.Expression(
body=ast.ListComp(
elt=ast.Call(
func=ast.Name(id='str', ctx=ast.Load()),
args=[
ast.Call(
func=ast.Attribute(
value=ast.Name(id='x', ctx=ast.Load()),
attr='render',
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
], keywords=[], starargs=None, kwargs=None
),
], keywords=[], starargs=None, kwargs=None
),
generators=[
ast.comprehension(
target=ast.Name(id='x', ctx=ast.Store()),
iter=ast.Name(id='nodelist', ctx=ast.Load()),
ifs=[]
),
]
)
)
ast.fix_missing_locations(code)
self.code = compile(code, filename='<template>', mode='eval')
def render(self, context):
global_ctx = {
'nodelist': self.root.nodelist,
'context': dict(context),
}
return ''.join(eval(self.code, global_ctx, {}))
<commit_after>
import ast
from . import parse
class Template:
def __init__(self, raw):
self.raw = raw
self.nodelist = parse.parse(raw)
code = ast.Expression(
body=ast.GeneratorExp(
elt=ast.Call(
func=ast.Name(id='str', ctx=ast.Load()),
args=[
ast.Call(
func=ast.Attribute(
value=ast.Name(id='x', ctx=ast.Load()),
attr='render',
ctx=ast.Load()
),
args=[ ast.Name(id='context', ctx=ast.Load()), ],
keywords=[], starargs=None, kwargs=None
),
],
keywords=[], starargs=None, kwargs=None
),
generators=[
ast.comprehension(
target=ast.Name(id='x', ctx=ast.Store()),
iter=ast.Name(id='nodelist', ctx=ast.Load()),
ifs=[]
),
]
)
)
ast.fix_missing_locations(code)
self.code = compile(code, filename='<template>', mode='eval')
def render(self, context):
global_ctx = {
'nodelist': self.nodelist,
'context': dict(context),
}
return ''.join(eval(self.code, global_ctx, {}))
|
<commit_msg>Add stdio to this file becasue we use FILE.
It starts failing with FILE, identifier not found, when
you remove the include for logging.h, which is included
in scoped_handle_win.h
Review URL: http://codereview.chromium.org/16461
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@7441 0039d316-1c4b-4281-b951-d872f2087c98
<commit_before>// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef BASE_SCOPED_HANDLE_H_
#define BASE_SCOPED_HANDLE_H_
#include "base/basictypes.h"
#if defined(OS_WIN)
#include "base/scoped_handle_win.h"
#endif
class ScopedStdioHandle {
public:
ScopedStdioHandle()
: handle_(NULL) { }
explicit ScopedStdioHandle(FILE* handle)
: handle_(handle) { }
~ScopedStdioHandle() {
Close();
}
void Close() {
if (handle_) {
fclose(handle_);
handle_ = NULL;
}
}
FILE* get() const { return handle_; }
FILE* Take() {
FILE* temp = handle_;
handle_ = NULL;
return temp;
}
void Set(FILE* newhandle) {
Close();
handle_ = newhandle;
}
private:
FILE* handle_;
DISALLOW_EVIL_CONSTRUCTORS(ScopedStdioHandle);
};
#endif // BASE_SCOPED_HANDLE_H_
<commit_after>// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef BASE_SCOPED_HANDLE_H_
#define BASE_SCOPED_HANDLE_H_
#include <stdio.h>
#include "base/basictypes.h"
#if defined(OS_WIN)
#include "base/scoped_handle_win.h"
#endif
class ScopedStdioHandle {
public:
ScopedStdioHandle()
: handle_(NULL) { }
explicit ScopedStdioHandle(FILE* handle)
: handle_(handle) { }
~ScopedStdioHandle() {
Close();
}
void Close() {
if (handle_) {
fclose(handle_);
handle_ = NULL;
}
}
FILE* get() const { return handle_; }
FILE* Take() {
FILE* temp = handle_;
handle_ = NULL;
return temp;
}
void Set(FILE* newhandle) {
Close();
handle_ = newhandle;
}
private:
FILE* handle_;
DISALLOW_EVIL_CONSTRUCTORS(ScopedStdioHandle);
};
#endif // BASE_SCOPED_HANDLE_H_
|
<commit_msg>Add LabeledImageDataset to datasets module
<commit_before>from chainer.datasets import cifar
from chainer.datasets import dict_dataset
from chainer.datasets import image_dataset
from chainer.datasets import mnist
from chainer.datasets import ptb
from chainer.datasets import sub_dataset
from chainer.datasets import tuple_dataset
DictDataset = dict_dataset.DictDataset
ImageDataset = image_dataset.ImageDataset
SubDataset = sub_dataset.SubDataset
TupleDataset = tuple_dataset.TupleDataset
get_cross_validation_datasets = sub_dataset.get_cross_validation_datasets
get_cross_validation_datasets_random = (
sub_dataset.get_cross_validation_datasets_random)
split_dataset = sub_dataset.split_dataset
split_dataset_random = sub_dataset.split_dataset_random
# examples
get_cifar10 = cifar.get_cifar10
get_cifar100 = cifar.get_cifar100
get_mnist = mnist.get_mnist
get_ptb_words = ptb.get_ptb_words
get_ptb_words_vocabulary = ptb.get_ptb_words_vocabulary
<commit_after>from chainer.datasets import cifar
from chainer.datasets import dict_dataset
from chainer.datasets import image_dataset
from chainer.datasets import mnist
from chainer.datasets import ptb
from chainer.datasets import sub_dataset
from chainer.datasets import tuple_dataset
DictDataset = dict_dataset.DictDataset
ImageDataset = image_dataset.ImageDataset
LabeledImageDataset = image_dataset.LabeledImageDataset
SubDataset = sub_dataset.SubDataset
TupleDataset = tuple_dataset.TupleDataset
get_cross_validation_datasets = sub_dataset.get_cross_validation_datasets
get_cross_validation_datasets_random = (
sub_dataset.get_cross_validation_datasets_random)
split_dataset = sub_dataset.split_dataset
split_dataset_random = sub_dataset.split_dataset_random
# examples
get_cifar10 = cifar.get_cifar10
get_cifar100 = cifar.get_cifar100
get_mnist = mnist.get_mnist
get_ptb_words = ptb.get_ptb_words
get_ptb_words_vocabulary = ptb.get_ptb_words_vocabulary
|
<commit_msg>Fix Pi-hole docker tag (alpine was deprecated)
<commit_before>package pihole
type Service struct{}
func (s Service) UserData() string {
return `
- name: pihole-etc-host.service
command: start
content: |
[Unit]
Description=pihole /etc/hosts entry
ConditionFirstBoot=true
[Service]
User=root
Type=oneshot
ExecStart=/bin/sh -c "echo 1.1.1.1 pi.hole >> /etc/hosts"
- name: pihole.service
command: start
content: |
[Unit]
Description=pihole
After=docker.service,dummy-interface.service
[Service]
User=core
Restart=always
TimeoutStartSec=0
KillMode=none
EnvironmentFile=/etc/environment
ExecStartPre=-/usr/bin/docker kill pihole
ExecStartPre=-/usr/bin/docker rm pihole
ExecStartPre=/usr/bin/docker pull diginc/pi-hole:alpine
ExecStart=/usr/bin/docker run --name pihole --net=host -e ServerIP=1.1.1.1 -e WEBPASSWORD=dosxvpn diginc/pi-hole:alpine
ExecStop=/usr/bin/docker stop pihole`
}
<commit_after>package pihole
type Service struct{}
func (s Service) UserData() string {
return `
- name: pihole-etc-host.service
command: start
content: |
[Unit]
Description=pihole /etc/hosts entry
ConditionFirstBoot=true
[Service]
User=root
Type=oneshot
ExecStart=/bin/sh -c "echo 1.1.1.1 pi.hole >> /etc/hosts"
- name: pihole.service
command: start
content: |
[Unit]
Description=pihole
After=docker.service,dummy-interface.service
[Service]
User=core
Restart=always
TimeoutStartSec=0
KillMode=none
EnvironmentFile=/etc/environment
ExecStartPre=-/usr/bin/docker kill pihole
ExecStartPre=-/usr/bin/docker rm pihole
ExecStartPre=/usr/bin/docker pull diginc/pi-hole:latest
ExecStart=/usr/bin/docker run --name pihole --net=host -e ServerIP=1.1.1.1 -e WEBPASSWORD=dosxvpn diginc/pi-hole:latest
ExecStop=/usr/bin/docker stop pihole`
}
|
<commit_msg>Add documentation for 8ball command
<commit_before>
from plugins.util import command
from random import choice
@command("8ball", "8-ball")
def eightball(m):
"""Returns 8-ball advice."""
with open(m.bot.base_path + '/plugins/responses/8ball.txt', 'r') as replies:
lines = replies.read().splitlines()
m.bot.private_message(m.location, choice(lines))<commit_after>
from plugins.util import command
from random import choice
@command("8ball", "8-ball")
def eightball(m):
"""Returns 8-ball advice."""
#- !8ball [question]
#-
#- ```irc
#- < GorillaWarfare> !8ball
#- < GorillaBot> Most likely.
#- ```
#-
#- Returns a magic 8 ball response.
with open(m.bot.base_path + '/plugins/responses/8ball.txt', 'r') as replies:
lines = replies.read().splitlines()
m.bot.private_message(m.location, choice(lines)) |
<commit_msg>Use a package in base Python distro for test
I was too ambitious -- pymt_hydrotrend isn't a default on Windows.
Using os.getcwd() should be less fragile.
<commit_before>from bmi_tester.bmipytest import load_component
entry_point = 'pymt_hydrotrend.bmi:Hydrotrend'
module_name, cls_name = entry_point.split(":")
def test_component_is_string():
component = load_component(entry_point)
assert isinstance(component, str)
def test_component_is_classname():
component = load_component(entry_point)
assert component == cls_name
<commit_after>from bmi_tester.bmipytest import load_component
entry_point = 'os:getcwd'
module_name, cls_name = entry_point.split(":")
def test_component_is_string():
component = load_component(entry_point)
assert isinstance(component, str)
def test_component_is_classname():
component = load_component(entry_point)
assert component == cls_name
|
<commit_msg>Allow user to read the name of the host
<commit_before>package se.arbetsformedlingen.venice.probe;
import java.util.Objects;
public class Host {
private String host;
Host(String host) {
this.host = host;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Host host1 = (Host) o;
return Objects.equals(host, host1.host);
}
@Override
public int hashCode() {
return Objects.hash(host);
}
@Override
public String toString() {
return "Host{" +
"host='" + host + '\'' +
'}';
}
}
<commit_after>package se.arbetsformedlingen.venice.probe;
import java.util.Objects;
public class Host {
private String host;
Host(String host) {
this.host = host;
}
String getName() {
return host;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Host host1 = (Host) o;
return Objects.equals(host, host1.host);
}
@Override
public int hashCode() {
return Objects.hash(host);
}
@Override
public String toString() {
return "Host{" +
"host='" + host + '\'' +
'}';
}
}
|
<commit_msg>Use node id (not project id) to create component Subscriptions
<commit_before>from framework.auth.decorators import must_be_logged_in
from model import Subscription
from flask import request
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from modularodm.storage.mongostorage import KeyExistsException
@must_be_logged_in
def subscribe(auth, **kwargs):
user = auth.user
pid = kwargs.get('pid')
subscriptions = request.json
for event in subscriptions:
if event == 'comment_replies':
category = user._id
else:
category = pid
event_id = category + "_" + event
# Create subscription or find existing
for notification_type in subscriptions[event]:
if subscriptions[event][notification_type]:
try:
s = Subscription(_id=event_id)
s.object_id = category
s.event_name = event
s.save()
except KeyExistsException:
s = Subscription.find_one(Q('_id', 'eq', event_id))
s.object_id = category
s.event_name = event
s.save()
# Add user to list of subscribers
if notification_type not in s._fields:
setattr(s, notification_type, [])
s.save()
if user not in getattr(s, notification_type):
getattr(s, notification_type).append(user)
s.save()
else:
try:
s = Subscription.find_one(Q('_id', 'eq', event_id))
if user in getattr(s, notification_type):
getattr(s, notification_type).remove(user)
s.save()
except NoResultsFound:
pass
return {}<commit_after>from framework.auth.decorators import must_be_logged_in
from model import Subscription
from flask import request
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from modularodm.storage.mongostorage import KeyExistsException
@must_be_logged_in
def subscribe(auth, **kwargs):
user = auth.user
pid = kwargs.get('pid')
nid = kwargs.get('nid')
subscriptions = request.json
for event in subscriptions:
if event == 'comment_replies':
category = user._id
else:
category = nid if nid else pid
event_id = category + "_" + event
# Create subscription or find existing
for notification_type in subscriptions[event]:
if subscriptions[event][notification_type]:
try:
s = Subscription(_id=event_id)
s.object_id = category
s.event_name = event
s.save()
except KeyExistsException:
s = Subscription.find_one(Q('_id', 'eq', event_id))
s.object_id = category
s.event_name = event
s.save()
# Add user to list of subscribers
if notification_type not in s._fields:
setattr(s, notification_type, [])
s.save()
if user not in getattr(s, notification_type):
getattr(s, notification_type).append(user)
s.save()
else:
try:
s = Subscription.find_one(Q('_id', 'eq', event_id))
if user in getattr(s, notification_type):
getattr(s, notification_type).remove(user)
s.save()
except NoResultsFound:
pass
return {} |
<commit_msg>Fix the package repetition bug
<commit_before>package controllers
package controllers
import (
"github.com/astaxie/beego"
)
type ImageController struct {
beego.Controller
}
func (this *ImageController) Prepare() {
this.Ctx.Output.Context.ResponseWriter.Header().Set("X-Docker-Registry-Version", beego.AppConfig.String("Version"))
this.Ctx.Output.Context.ResponseWriter.Header().Set("X-Docker-Registry-Standalone", beego.AppConfig.String("Standalone"))
}
func (this *ImageController) GETPrivateLayer() {
}
func (this *ImageController) GETLayer() {
}
func (this *ImageController) PUTLayer() {
}
func (this *ImageController) PUTChecksum() {
}
func (this *ImageController) GETPrivateJSON() {
}
func (this *ImageController) GETJSON() {
}
func (this *ImageController) GETAncestry() {
}
func (this *ImageController) PUTJSON() {
}
func (this *ImageController) GETPrivateFiles() {
}
func (this *ImageController) GETFiles() {
}<commit_after>package controllers
import (
"github.com/astaxie/beego"
)
type ImageController struct {
beego.Controller
}
func (this *ImageController) Prepare() {
this.Ctx.Output.Context.ResponseWriter.Header().Set("X-Docker-Registry-Version", beego.AppConfig.String("Version"))
this.Ctx.Output.Context.ResponseWriter.Header().Set("X-Docker-Registry-Standalone", beego.AppConfig.String("Standalone"))
}
func (this *ImageController) GETPrivateLayer() {
}
func (this *ImageController) GETLayer() {
}
func (this *ImageController) PUTLayer() {
}
func (this *ImageController) PUTChecksum() {
}
func (this *ImageController) GETPrivateJSON() {
}
func (this *ImageController) GETJSON() {
}
func (this *ImageController) GETAncestry() {
}
func (this *ImageController) PUTJSON() {
}
func (this *ImageController) GETPrivateFiles() {
}
func (this *ImageController) GETFiles() {
}
|
<commit_msg>Refactor Project to use new mixins and methods
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy import event
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from warehouse import db
from warehouse.databases.mixins import UUIDPrimaryKeyMixin
from warehouse.database.types import CIText
class Project(UUIDPrimaryKeyMixin, db.Model):
__tablename__ = "projects"
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
# Create the Trigger to fill in normalized (TODO: Move this to Alembic?)
event.listen(Project.__table__, "after_create", db.DDL("""
CREATE OR REPLACE FUNCTION normalize_name() RETURNS trigger AS
$body$
BEGIN
new.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
RETURN new;
end;
$body$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""))
<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.ext.declarative import declared_attr
from warehouse import db
from warehouse.database.mixins import UUIDPrimaryKeyMixin, TimeStampedMixin
from warehouse.database.schema import TableDDL
from warehouse.database.types import CIText
from warehouse.database.utils import table_args
class Project(UUIDPrimaryKeyMixin, TimeStampedMixin, db.Model):
__tablename__ = "projects"
__table_args__ = declared_attr(table_args((
TableDDL("""
CREATE OR REPLACE FUNCTION normalize_name()
RETURNS trigger AS $$
BEGIN
NEW.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
return NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""),
)))
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
|
<commit_msg>Add unit tests for functions call and check and fold for module player
<commit_before>import unittest
from pypoker import Player
from pypoker import Table
class PlayerTestCase(unittest.TestCase):
'''
Tests for the Player class
'''
def setUp(self):
self.player = Player('usman', 1000, None)
def test_player_initialization(self):
self.assertEqual([self.player.player_name, self.player.chips], ['usman', 1000])
def test_player_check(self):
pass
def test_player_call(self):
pass
def test_player_fold(self):
pass
def test_player_bet(self):
pass
def test_player_go_all_in(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()<commit_after>import unittest
from pypoker import Player
from pypoker import Table
class PlayerTestCase(unittest.TestCase):
'''
Tests for the Player class
'''
def setUp(self):
self.player = Player('usman', 1000, None)
self.table = Table(50,100,2,10,100,1000)
self.table.add_player('bob',1000)
self.table.add_player('jane',1000)
self.table.add_player('dylan',1000)
self.table.add_player('john',1000)
self.table.start_game()
def test_player_initialization(self):
self.assertEqual([self.player.player_name, self.player.chips], ['usman', 1000])
def test_player_check(self):
self.table.players[1].call()
self.table.players[2].call()
self.table.players[3].call()
self.table.players[0].call()
self.table.players[1].check()
self.assertEqual(self.table.players[1].chips, 900)
def test_player_call(self):
self.table.players[1].call()
self.assertEqual(self.table.players[1].chips, 900)
def test_player_fold(self):
self.table.players[1].call()
self.table.players[2].call()
self.table.players[3].call()
self.table.players[0].call()
self.table.players[1].fold()
self.assertTrue(self.table.players[1].folded)
def test_player_bet(self):
pass
def test_player_go_all_in(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main() |
<commit_msg>Switch to more obvious imports
<commit_before>from __future__ import absolute_import
from __future__ import division
import collections
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
versions = self.client.package_releases(name, True)
else:
versions = [version]
for version in versions:
item = self.client.release_data(name, version)
url = self.client.release_urls(item["name"], item["version"])
if isinstance(url, collections.Mapping):
urls = [url]
elif isinstance(url, collections.Iterable):
urls = url
else:
raise RuntimeError("Do not understand the type returned by release_urls")
item.update({"files": urls})
yield item
class BulkProcessor(BaseProcessor):
def process(self):
pass
<commit_after>from __future__ import absolute_import
from __future__ import division
import collections
import slumber
import slumber.exceptions
import xmlrpc2.client
class BaseProcessor(object):
def __init__(self, index, warehouse, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
wargs, wkwargs = warehouse
self.client = xmlrpc2.client.Client(index)
self.warehouse = slumber.API(*wargs, **wkwargs)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
versions = self.client.package_releases(name, True)
else:
versions = [version]
for version in versions:
item = self.client.release_data(name, version)
url = self.client.release_urls(item["name"], item["version"])
if isinstance(url, collections.Mapping):
urls = [url]
elif isinstance(url, collections.Iterable):
urls = url
else:
raise RuntimeError("Do not understand the type returned by release_urls")
item.update({"files": urls})
yield item
class BulkProcessor(BaseProcessor):
def process(self):
pass
|
<commit_msg>Add a toplevel warning against legacy keras.preprocessing utilities
PiperOrigin-RevId: 434866390
<commit_before>"""Provides keras data preprocessing utils to pre-process tf.data.Datasets before they are fed to the model."""
from keras import backend
from keras.preprocessing import image
from keras.preprocessing import sequence
from keras.preprocessing import text
from keras.preprocessing import timeseries
from keras.utils import all_utils as utils
<commit_after>from keras import backend
from keras.preprocessing import image
from keras.preprocessing import sequence
from keras.preprocessing import text
from keras.preprocessing import timeseries
from keras.utils import all_utils as utils
|
<commit_msg>Use a better monad instance of Either
<commit_before>module Network.HTTP.Authentication.Basic (
Credentials(..),
parseCredentials,
) where
import Control.Monad
import Data.ByteString.Base64
import qualified Data.ByteString.Char8 as B
import Text.ParserCombinators.Parsec
data Credentials = Credentials { getUsername :: String
, getPassword :: String
}
instance Show Credentials where
show (Credentials user pass) = let concat' = mconcat [user, ":", pass]
encoded = (encode . B.pack) concat'
in "Basic " ++ B.unpack encoded
parseCredentials :: String -> Either ParseError Credentials
parseCredentials content = do
decoded <- parse parseBasicCredentials "(unknown)" content
let (user, pass) = break (/= ':') decoded
return $ Credentials user pass
parseBasicCredentials = string "Basic" >> space >> base64String
base64Char = oneOf ("+/" ++ ['0'..'9'] ++ ['A'..'Z'] ++ ['a'..'z'])
base64String = do
encoded <- many1 base64Char
either (fail "not a base64 string") (return . B.unpack) $ decode (B.pack encoded)
<commit_after>module Network.HTTP.Authentication.Basic (
Credentials(..),
parseCredentials,
) where
import Control.Monad
import Control.Monad.Except
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Char8 as B
import Text.ParserCombinators.Parsec
data Credentials = Credentials { getUsername :: String
, getPassword :: String
} deriving (Show)
encode :: Credentials -> String
encode c = let concat' = mconcat [getUsername c, ":", getPassword c]
encoded = (B.unpack . B64.encode . B.pack) concat'
in "Basic " ++ encoded
parseCredentials :: String -> Either ParseError Credentials
parseCredentials = parse parseBasicCredentials "(unknown)"
parseBasicCredentials = do
decoded <- string "Basic" >> space >> base64String
let pair = break (== ':') decoded
case pair of
(user, ':':pass) -> return $ Credentials user pass
_ -> fail "not a username password pair"
base64Char = oneOf ("+/=" ++ ['0'..'9'] ++ ['A'..'Z'] ++ ['a'..'z'])
base64String = do
encoded <- many1 base64Char
either fail (return . B.unpack) $ B64.decode (B.pack encoded)
|
<commit_msg>process: Fix a test on Windows
<commit_before>extern crate futures;
extern crate tokio_core;
extern crate tokio_process;
use std::env;
use std::sync::mpsc::channel;
use std::sync::{Once, ONCE_INIT};
use std::thread;
use tokio_core::{Loop, LoopHandle};
use tokio_process::Command;
static INIT: Once = ONCE_INIT;
fn init() {
INIT.call_once(|| {
let (tx, rx) = channel();
thread::spawn(move || {
let mut lp = Loop::new().unwrap();
let cmd = exit(&lp.handle());
let mut child = lp.run(cmd.spawn()).unwrap();
drop(child.kill());
lp.run(child).unwrap();
tx.send(()).unwrap();
drop(lp.run(futures::empty::<(), ()>()));
});
rx.recv().unwrap();
});
}
fn exit(handle: &LoopHandle) -> Command {
let mut me = env::current_exe().unwrap();
me.pop();
me.push("exit");
Command::new(me, handle)
}
#[test]
fn simple() {
init();
let mut lp = Loop::new().unwrap();
let mut cmd = exit(&lp.handle());
cmd.arg("2");
let mut child = lp.run(cmd.spawn()).unwrap();
let id = child.id();
assert!(id > 0);
let status = lp.run(&mut child).unwrap();
assert_eq!(status.code(), Some(2));
assert_eq!(child.id(), id);
assert!(child.kill().is_ok());
}
<commit_after>extern crate futures;
extern crate tokio_core;
extern crate tokio_process;
use std::env;
use std::sync::mpsc::channel;
use std::sync::{Once, ONCE_INIT};
use std::thread;
use tokio_core::{Loop, LoopHandle};
use tokio_process::Command;
static INIT: Once = ONCE_INIT;
fn init() {
INIT.call_once(|| {
let (tx, rx) = channel();
thread::spawn(move || {
let mut lp = Loop::new().unwrap();
let cmd = exit(&lp.handle());
let mut child = lp.run(cmd.spawn()).unwrap();
drop(child.kill());
lp.run(child).unwrap();
tx.send(()).unwrap();
drop(lp.run(futures::empty::<(), ()>()));
});
rx.recv().unwrap();
});
}
fn exit(handle: &LoopHandle) -> Command {
let mut me = env::current_exe().unwrap();
me.pop();
me.push("exit");
Command::new(me, handle)
}
#[test]
fn simple() {
init();
let mut lp = Loop::new().unwrap();
let mut cmd = exit(&lp.handle());
cmd.arg("2");
let mut child = lp.run(cmd.spawn()).unwrap();
let id = child.id();
assert!(id > 0);
let status = lp.run(&mut child).unwrap();
assert_eq!(status.code(), Some(2));
assert_eq!(child.id(), id);
drop(child.kill());
}
|
<commit_msg>Update @foal/ajv with PreHook type.
<commit_before>import { Hook, HttpResponseBadRequest, ObjectType } from '@foal/core';
import * as Ajv from 'ajv';
const defaultInstance = new Ajv();
export function validate(schema: ObjectType, ajv = defaultInstance): Hook {
const isValid = ajv.compile(schema);
return ctx => {
if (!isValid(ctx.body)) {
return new HttpResponseBadRequest(isValid.errors as Ajv.ErrorObject[]);
}
};
}
<commit_after>import { HttpResponseBadRequest, ObjectType, PreHook } from '@foal/core';
import * as Ajv from 'ajv';
const defaultInstance = new Ajv();
export function validate(schema: ObjectType, ajv = defaultInstance): PreHook {
const isValid = ajv.compile(schema);
return ctx => {
if (!isValid(ctx.body)) {
return new HttpResponseBadRequest(isValid.errors as Ajv.ErrorObject[]);
}
};
}
|
<commit_msg>Add a new 'count' utility function
<commit_before>
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder", "convert_to_od", "counter_to_iterable"]
import collections
import itertools
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
def convert_to_od(mapping, order):
"""Convert mapping to an OrderedDict instance using order."""
return collections.OrderedDict([(i, mapping[i]) for i in order])
def counter_to_iterable(counter):
"""Convert a counter to an iterable / iterator."""
for item in itertools.starmap(itertools.repeat, counter):
yield from item
<commit_after>
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder", "convert_to_od",
"counter_to_iterable", "count"]
import collections
import itertools
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
def convert_to_od(mapping, order):
"""Convert mapping to an OrderedDict instance using order."""
return collections.OrderedDict([(i, mapping[i]) for i in order])
def counter_to_iterable(counter):
"""Convert a counter to an iterable / iterator."""
for item in itertools.starmap(itertools.repeat, counter):
yield from item
def count(iterable):
"""Yield (item, count) two-tuples of the iterable."""
seen = []
full = list(iterable)
for item in full:
if item in seen:
continue
seen.append(item)
yield (item, full.count(item))
|
<commit_msg>Add defcon implementation of group lookup methods.
<commit_before>import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
<commit_after>import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _get_base_side1KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide1Groups")
def _get_base_side2KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide2Groups")
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
|
<commit_msg>Fix in post_syncdb workflow registration
<commit_before>from django.db.models.signals import post_syncdb
from gasistafelice.gas.workflow_data import workflow_dict
def init_workflows(app, created_models, verbosity, **kwargs):
app_label = app.__name__.split('.')[-2]
if app_label == 'workflows' and created_models: # `worklows` app was syncronized for the first time
# now that all necessary tables are in the DB, we can register our workflows
for name, w in workflow_dict.items():
w.register_workflow()
if verbosity == 2:
# give some feedback to the user
print "Workflow %s was successfully registered." % name
return
post_syncdb.connect(init_workflows)
<commit_after>from django.db.models.signals import post_syncdb
from gasistafelice.gas.workflow_data import workflow_dict
def init_workflows(app, created_models, verbosity, **kwargs):
app_label = app.__name__.split('.')[-2]
if app_label == 'workflows' and "Workflow" in created_models: # `worklows` app was syncronized for the first time
# now that all necessary tables are in the DB, we can register our workflows
for name, w in workflow_dict.items():
w.register_workflow()
if verbosity == 2:
# give some feedback to the user
print "Workflow %s was successfully registered." % name
return
post_syncdb.connect(init_workflows)
|
<commit_msg>Add getStorageObj() and delete(key) methods
<commit_before>class WebStorage {
private storageObj: Storage;
public constructor(storageObj: Storage) {
this.storageObj = storageObj;
}
public get(key: string): string {
if (!this.isCompatible()) { return; }
return this.storageObj.getItem(key);
}
public getObj(key: string): any {
if (!this.isCompatible()) { return; }
try {
return JSON.parse(this.get(key));
} catch (e) {
console.log('Invalid JSON: ' + e.message);
}
}
public set(key: string, value: string): void {
if (!this.isCompatible()) { return; }
this.storageObj.setItem(key, value);
}
public setObj(key: string, value: Object): void {
if (!this.isCompatible()) { return; }
try {
this.set(key, JSON.stringify(value));
} catch (e) {
console.log('Invalid JSON: ' + e.message);
}
}
private isCompatible(): boolean {
if (typeof(Storage) !== 'undefined') {
return true;
} else {
console.log('Your browser does not support Web Storage.');
return false;
}
}
}
<commit_after>class WebStorage {
private storageObj: Storage;
public constructor(storageObj: Storage) {
this.storageObj = storageObj;
}
public getStorageObj(): Storage {
return this.storageObj;
}
public get(key: string): string {
if (!this.isCompatible()) {return;}
return this.storageObj.getItem(key);
}
public getObj(key: string): any {
if (!this.isCompatible()) {return;}
try {
return JSON.parse(this.get(key));
} catch (e) {
console.log('Invalid JSON: ' + e.message);
}
}
public set(key: string, value: string): void {
if (!this.isCompatible()) {return;}
this.storageObj.setItem(key, value);
}
public setObj(key: string, value: Object): void {
if (!this.isCompatible()) {return;}
try {
this.set(key, JSON.stringify(value));
} catch (e) {
console.log('Invalid JSON: ' + e.message);
}
}
public delete(key: string): void {
if (!this.isCompatible()) {return;}
this.storageObj.removeItem(key);
}
private isCompatible(): boolean {
if (typeof(Storage) !== 'undefined') {
return true;
} else {
console.log('Your browser does not support Web Storage.');
return false;
}
}
}
|
<commit_msg>Set booked_by for cloned/additional showings
<commit_before>from django import forms
import cube.diary.models
class DiaryIdeaForm(forms.ModelForm):
class Meta(object):
model = cube.diary.models.DiaryIdea
class EventForm(forms.ModelForm):
class Meta(object):
model = cube.diary.models.Event
# Ensure soft wrapping is set for textareas:
widgets = {
'copy': forms.Textarea(attrs={'wrap':'soft'}),
'copy_summary': forms.Textarea(attrs={'wrap':'soft'}),
'terms': forms.Textarea(attrs={'wrap':'soft'}),
'notes': forms.Textarea(attrs={'wrap':'soft'}),
}
class ShowingForm(forms.ModelForm):
class Meta(object):
model = cube.diary.models.Showing
# Exclude these for now:
exclude = ('event', 'extra_copy', 'extra_copy_summary', 'booked_by')
class NewShowingForm(forms.ModelForm):
# Same as Showing, but without the role field
class Meta(object):
model = cube.diary.models.Showing
# Exclude these for now:
exclude = ('event', 'extra_copy', 'extra_copy_summary', 'booked_by', 'roles')
<commit_after>from django import forms
import cube.diary.models
class DiaryIdeaForm(forms.ModelForm):
class Meta(object):
model = cube.diary.models.DiaryIdea
class EventForm(forms.ModelForm):
class Meta(object):
model = cube.diary.models.Event
# Ensure soft wrapping is set for textareas:
widgets = {
'copy': forms.Textarea(attrs={'wrap':'soft'}),
'copy_summary': forms.Textarea(attrs={'wrap':'soft'}),
'terms': forms.Textarea(attrs={'wrap':'soft'}),
'notes': forms.Textarea(attrs={'wrap':'soft'}),
}
class ShowingForm(forms.ModelForm):
class Meta(object):
model = cube.diary.models.Showing
# Exclude these for now:
exclude = ('event', 'extra_copy', 'extra_copy_summary', 'booked_by')
class NewShowingForm(forms.ModelForm):
# Same as Showing, but without the role field
class Meta(object):
model = cube.diary.models.Showing
# Exclude these for now:
exclude = ('event', 'extra_copy', 'extra_copy_summary', 'roles')
|
<commit_msg>Check if the summary have diaries
<commit_before>package main
import (
"flag"
"fmt"
"log"
"time"
"github.com/publicgov/spain-boe-reader/net"
"github.com/publicgov/spain-boe-reader/params"
"github.com/publicgov/spain-boe-reader/summary"
)
var currentDate string
func main() {
// parse command line argument
flag.StringVar(¤tDate, "date", defaultTime(), "BOE publication date in format YYYYMMDD")
flag.Parse()
// create the URL for the day
p := params.Params{
SummaryType: "BOE",
ItemType: "S",
Date: currentDate,
}
// make the network request
client := net.New(p)
summary := client.MakeRequest()
// print basic info
log.Println(showBasicInfo(summary))
}
func defaultTime() string {
time := time.Now().UTC()
time.Format("2006-01-02")
return fmt.Sprintf("%d%02d%02d", time.Year(), time.Month(), time.Day())
}
func showBasicInfo(b summary.BoeSummary) string {
return fmt.Sprintf("Date(%s) Found %d diaries with %d sections",
b.Meta.PublicationDate, len(b.Diaries), b.SectionsSize())
}
<commit_after>package main
import (
"flag"
"fmt"
"log"
"time"
"github.com/publicgov/spain-boe-reader/net"
"github.com/publicgov/spain-boe-reader/params"
"github.com/publicgov/spain-boe-reader/summary"
)
var currentDate string
func main() {
// parse command line argument
flag.StringVar(¤tDate, "date", defaultTime(), "BOE publication date in format YYYYMMDD")
flag.Parse()
// create the URL for the day
p := params.Params{
SummaryType: "BOE",
ItemType: "S",
Date: currentDate,
}
// make the network request
client := net.New(p)
summary := client.MakeRequest()
if len(summary.Diaries) == 0 {
log.Println("No diaries found for date", currentDate)
return
}
// print basic info
log.Println(showBasicInfo(summary))
}
func defaultTime() string {
time := time.Now().UTC()
time.Format("2006-01-02")
return fmt.Sprintf("%d%02d%02d", time.Year(), time.Month(), time.Day())
}
func showBasicInfo(b summary.BoeSummary) string {
return fmt.Sprintf("Date(%s) Found %d diaries with %d sections",
b.Meta.PublicationDate, len(b.Diaries), b.SectionsSize())
}
|
<commit_msg>Remove oauth_provider as that's the eggname for django-oauth-plus.
<commit_before>
from distutils.core import setup
setup(name='SpaceScout-Server',
version='1.0',
description='REST Backend for SpaceScout',
install_requires=[
'Django>=1.7,<1.8',
'mock<=1.0.1',
'oauth2<=1.5.211',
'oauth_provider',
'Pillow',
'pyproj',
'pytz',
'South',
'simplejson>=2.1',
'django-oauth-plus<=2.2.5',
'phonenumbers'
],
)
<commit_after>
from distutils.core import setup
setup(name='SpaceScout-Server',
version='1.0',
description='REST Backend for SpaceScout',
install_requires=[
'Django>=1.7,<1.8',
'mock<=1.0.1',
'oauth2<=1.5.211',
'Pillow',
'pyproj',
'pytz',
'South',
'simplejson>=2.1',
'django-oauth-plus<=2.2.5',
'phonenumbers'
],
)
|
<commit_msg>Add comment explaining why use choose bestaudio for audio downloads.
<commit_before>import os
from youtube_dl import YoutubeDL
from youtube_dl import MaxDownloadsReached
def download(url, audio_only):
"""Downloads the youtube video from the url
Args:
url: The youtube URL pointing to the video to download.
audio_only: True if we only want to download the best audio.
Returns:
A (file name, video title) tuple.
The file name is ONLY the file name, and does not include the file path.
"""
downloader = YoutubeDL()
downloader.add_default_info_extractors()
downloader.params['outtmpl'] = os.path.join(os.getcwd(),
'temp/%(id)s.%(ext)s')
downloader.params['verbose'] = True
downloader.params['cachedir'] = None
downloader.params['noplaylist'] = True
downloader.params['max_downloads'] = 1
if audio_only:
downloader.params['format'] = 'bestaudio'
try:
info = downloader.extract_info(url)
except MaxDownloadsReached:
info = downloader.extract_info(url, download=False)
file_name = downloader.prepare_filename(info)
file_name = file_name.encode('ascii', 'ignore')
title = info.get('title', os.path.basename(file_name))
return file_name, title
<commit_after>import os
from youtube_dl import YoutubeDL
from youtube_dl import MaxDownloadsReached
def download(url, audio_only):
"""Downloads the youtube video from the url
Args:
url: The youtube URL pointing to the video to download.
audio_only: True if we only want to download the best audio.
Returns:
A (file name, video title) tuple.
The file name is ONLY the file name, and does not include the file path.
"""
downloader = YoutubeDL()
downloader.add_default_info_extractors()
downloader.params['outtmpl'] = os.path.join(os.getcwd(),
'temp/%(id)s.%(ext)s')
downloader.params['verbose'] = True
downloader.params['cachedir'] = None
downloader.params['noplaylist'] = True
downloader.params['max_downloads'] = 1
# We don't really have to do this, but YoutubeDL sometimes has a problem
# combining the video and audio portions of webm files, so this is a good
# workaround since we really only care about the audio part.
if audio_only:
downloader.params['format'] = 'bestaudio'
try:
info = downloader.extract_info(url)
except MaxDownloadsReached:
info = downloader.extract_info(url, download=False)
file_name = downloader.prepare_filename(info)
file_name = file_name.encode('ascii', 'ignore')
title = info.get('title', os.path.basename(file_name))
return file_name, title
|
<commit_msg>Fix for createsuperuser not creating a custom User model, and hence breaking AdminSite logins on setup
Since we're using a custom Auth Backend as well, we trap the authenticate() call.
If we can't find the requested user in our custom User, we fall back to checking the vanilla Django Users.
If we find one there, and if the password is valid, we create a new custom User, and clone all information off the vanilla User.
Then we delete the vanilla User and save the custom User in its place, finally returning the custom User instance and completing the authenticate() call successfully.
<commit_before>from django.conf import settings
from django.contrib.auth.backends import ModelBackend
from django.core.exceptions import ImproperlyConfigured
from django.db.models import get_model
class UserModelBackend(ModelBackend):
def authenticate(self, username=None, password=None):
try:
user = self.user_class.objects.get(username=username)
if user.check_password(password):
return user
except self.user_class.DoesNotExist:
return None
def get_user(self, user_id):
try:
return self.user_class.objects.get(pk=user_id)
except self.user_class.DoesNotExist:
return None
@property
def user_class(self):
if not hasattr(self, '_user_class'):
self._user_class = get_model(*settings.CUSTOM_USER_MODEL.split('.', 2))
if not self._user_class:
raise ImproperlyConfigured('Could not get custom user model')
return self._user_class<commit_after>from django.conf import settings
from django.contrib.auth.backends import ModelBackend
from django.core.exceptions import ImproperlyConfigured
from django.db.models import get_model
from django.contrib.auth.models import User as oUser
from hado.models import User as cUser
class UserModelBackend(ModelBackend):
def authenticate(self, username=None, password=None):
try:
user = self.user_class.objects.get(username=username)
if user.check_password(password):
return user
except self.user_class.DoesNotExist:
try:
ouser = oUser.objects.get(username=username)
u = cUser()
if ouser.check_password(password):
u.password = ouser.password
else:
return None # Abort
# Clone the User
u.id = ouser.id
u.username = ouser.username
u.first_name = ouser.first_name
u.last_name = ouser.last_name
u.email = ouser.email
u.is_active = ouser.is_active
u.is_staff = ouser.is_staff
u.is_superuser = ouser.is_superuser
u.last_login = ouser.last_login
u.date_joined = ouser.date_joined
# Perform the switch over
ouser.delete()
u.save()
return u
except oUser.DoesNotExist:
return None
def get_user(self, user_id):
try:
return self.user_class.objects.get(pk=user_id)
except self.user_class.DoesNotExist:
return None
@property
def user_class(self):
if not hasattr(self, '_user_class'):
self._user_class = get_model(*settings.CUSTOM_USER_MODEL.split('.', 2))
if not self._user_class:
raise ImproperlyConfigured('Could not get custom user model')
return self._user_class |
<commit_msg>Include <SDL.h> in the baked VMs, so Windows works.
<commit_before>
int main(int argc, char **argv)
{
printf("#include <stdint.h>\n"
"#include \"mako-vm.h\"\n"
"char *argv0;\n"
"int32_t mem[] = {");
while(!feof(stdin)) {
uint8_t buf[4];
int n = fread(buf, sizeof *buf, 4, stdin);
if(ferror(stdin)) goto onerr;
if(n == 0) break;
if(n != 4) {
fprintf(stderr, "%s: The file was invalid.\n", argv[0]);
exit(1);
}
printf("0x%04X,", (int32_t)buf[0] << 24 | (int32_t)buf[1] << 16 | (int32_t)buf[2] << 8 | (int32_t)buf[3]);
}
printf("0};\n"
"int main(int argc, char **argv)\n"
"{\n"
"\targv0 = argv[0];\n"
"\trun_vm(mem);\n"
"}\n");
exit(0);
onerr:
perror(argv[0]);
exit(1);
}
<commit_after>
int main(int argc, char **argv)
{
printf("#include <stdint.h>\n"
"#include <SDL.h>\n"
"#include \"mako-vm.h\"\n"
"char *argv0;\n"
"int32_t mem[] = {");
while(!feof(stdin)) {
uint8_t buf[4];
int n = fread(buf, sizeof *buf, 4, stdin);
if(ferror(stdin)) goto onerr;
if(n == 0) break;
if(n != 4) {
fprintf(stderr, "%s: The file was invalid.\n", argv[0]);
exit(1);
}
printf("0x%04X,", (int32_t)buf[0] << 24 | (int32_t)buf[1] << 16 | (int32_t)buf[2] << 8 | (int32_t)buf[3]);
}
printf("0};\n"
"int main(int argc, char **argv)\n"
"{\n"
"\targv0 = argv[0];\n"
"\trun_vm(mem);\n"
"}\n");
exit(0);
onerr:
perror(argv[0]);
exit(1);
}
|
<commit_msg>Make it possible to convert an entire directory at once
$ ts-node tests/main.ts tests/cases/**.in.hledger
<commit_before>import * as fs from 'fs';
import build from './build';
import * as parseArgs from 'minimist';
for (let arg of parseArgs(process.argv.slice(2))._) {
console.log(build(fs.readFileSync(arg, {encoding:'utf8'})));
}
<commit_after>import * as fs from 'fs';
import build from './build';
import * as parseArgs from 'minimist';
for (let inFile of parseArgs(process.argv.slice(2))._) {
let outFile = inFile.substring(0, inFile.length - ".in.hledger".length) + ".want";
let converted = build(fs.readFileSync(inFile, {encoding:'utf8'}));
fs.writeFileSync(outFile, converted, {encoding: 'utf8'});
}
|
<commit_msg>Add some padding between sections in settings
Reviewed By: priteshrnandgaonkar
Differential Revision: D18085724
fbshipit-source-id: d874a21399e86f0079bf1cc86d4b83be6ce5a5d7
<commit_before>/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @format
*/
import {FlexColumn, styled, FlexRow, ToggleButton} from 'flipper';
import React from 'react';
const IndentedSection = styled(FlexColumn)({
paddingLeft: 50,
});
const GreyedOutOverlay = styled('div')({
backgroundColor: '#EFEEEF',
borderRadius: 4,
opacity: 0.6,
height: '100%',
position: 'absolute',
left: 0,
right: 0,
});
export default function ToggledSection(props: {
label: string;
toggled: boolean;
onChange?: (value: boolean) => void;
children?: React.ReactNode;
// Whether to disallow interactions with this toggle
frozen?: boolean;
}) {
return (
<FlexColumn>
<FlexRow>
<ToggleButton
label={props.label}
onClick={() => props.onChange && props.onChange(!props.toggled)}
toggled={props.toggled}
/>
{props.frozen && <GreyedOutOverlay />}
</FlexRow>
<IndentedSection>
{props.children}
{props.toggled || props.frozen ? null : <GreyedOutOverlay />}
</IndentedSection>
</FlexColumn>
);
}
<commit_after>/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @format
*/
import {FlexColumn, styled, FlexRow, ToggleButton} from 'flipper';
import React from 'react';
const IndentedSection = styled(FlexColumn)({
paddingLeft: 50,
paddingBottom: 10,
});
const GreyedOutOverlay = styled('div')({
backgroundColor: '#EFEEEF',
borderRadius: 4,
opacity: 0.6,
height: '100%',
position: 'absolute',
left: 0,
right: 0,
});
export default function ToggledSection(props: {
label: string;
toggled: boolean;
onChange?: (value: boolean) => void;
children?: React.ReactNode;
// Whether to disallow interactions with this toggle
frozen?: boolean;
}) {
return (
<FlexColumn>
<FlexRow>
<ToggleButton
label={props.label}
onClick={() => props.onChange && props.onChange(!props.toggled)}
toggled={props.toggled}
/>
{props.frozen && <GreyedOutOverlay />}
</FlexRow>
<IndentedSection>
{props.children}
{props.toggled || props.frozen ? null : <GreyedOutOverlay />}
</IndentedSection>
</FlexColumn>
);
}
|
<commit_msg>Update the project Development Status
<commit_before>"""Package Keysmith."""
import codecs
import os.path
import setuptools # type: ignore
import keysmith # This project only depends on the standard library.
def read(*parts):
"""Read a file in this repository."""
here = os.path.abspath(os.path.dirname(__file__))
with codecs.open(os.path.join(here, *parts), 'r') as file_:
return file_.read()
ENTRY_POINTS = {
'console_scripts': [
'{name}={module}:{function}'.format(
name=keysmith.CONSOLE_SCRIPT,
module=keysmith.__name__,
function=keysmith.main.__name__,
),
],
}
if __name__ == '__main__':
setuptools.setup(
name=keysmith.__name__,
version=keysmith.__version__,
description='Passphrase Generator',
long_description=read('README.rst'),
author='David Tucker',
author_email='david@tucker.name',
license='BSD 3-Clause License',
url='https://github.com/dmtucker/keysmith',
python_requires='~=3.5',
py_modules=[keysmith.__name__],
entry_points=ENTRY_POINTS,
keywords='diceware generator keygen passphrase password',
classifiers=['Development Status :: 5 - Production/Stable'],
)
<commit_after>"""Package Keysmith."""
import codecs
import os.path
import setuptools # type: ignore
import keysmith # This project only depends on the standard library.
def read(*parts):
"""Read a file in this repository."""
here = os.path.abspath(os.path.dirname(__file__))
with codecs.open(os.path.join(here, *parts), 'r') as file_:
return file_.read()
ENTRY_POINTS = {
'console_scripts': [
'{name}={module}:{function}'.format(
name=keysmith.CONSOLE_SCRIPT,
module=keysmith.__name__,
function=keysmith.main.__name__,
),
],
}
if __name__ == '__main__':
setuptools.setup(
name=keysmith.__name__,
version=keysmith.__version__,
description='Passphrase Generator',
long_description=read('README.rst'),
author='David Tucker',
author_email='david@tucker.name',
license='BSD 3-Clause License',
url='https://github.com/dmtucker/keysmith',
python_requires='~=3.5',
py_modules=[keysmith.__name__],
entry_points=ENTRY_POINTS,
keywords='diceware generator keygen passphrase password',
classifiers=['Development Status :: 7 - Inactive'],
)
|
<commit_msg>Return of basic S3 tests.
<commit_before>
extern crate env_logger;
#[macro_use]
extern crate log;
extern crate time;
#[macro_use]
extern crate rusoto;
use rusoto::{DefaultCredentialsProvider, Region};
use rusoto::s3::{S3Error, S3Helper};
#[test]
fn all_s3_tests() {
let _ = env_logger::init();
info!("s3 integration tests starting up.");
let mut s3 = S3Helper::new(DefaultCredentialsProvider::new().unwrap(), Region::UsWest2);
match s3_list_buckets_tests(&mut s3) {
Ok(_) => { info!("Everything worked for S3 list buckets."); },
Err(err) => { info!("Got error in s3 list buckets: {}", err); }
}
}
fn s3_list_buckets_tests(s3: &mut S3Helper<DefaultCredentialsProvider>) -> Result<(), S3Error> {
let response = try!(s3.list_buckets());
info!("Got list of buckets: {:?}", response);
for q in response.buckets {
info!("Existing bucket: {:?}", q.name);
}
Ok(())
}
<commit_after>
extern crate env_logger;
#[macro_use]
extern crate log;
extern crate time;
#[macro_use]
extern crate rusoto;
use std::io::Read;
use std::fs::File;
use rusoto::{DefaultCredentialsProvider, Region};
use rusoto::s3::S3Helper;
#[test]
fn list_buckets_tests() {
let _ = env_logger::init();
let s3 = S3Helper::new(DefaultCredentialsProvider::new().unwrap(), Region::UsWest2);
let response = s3.list_buckets().unwrap();
info!("Got list of buckets: {:?}", response);
for q in response.buckets {
info!("Existing bucket: {:?}", q.name);
}
}
#[test]
fn put_object_test() {
let s3 = S3Helper::new(DefaultCredentialsProvider::new().unwrap(), Region::UsWest2);
let mut f = File::open("tests/sample-data/no_credentials").unwrap();
let mut contents : Vec<u8> = Vec::new();
match f.read_to_end(&mut contents) {
Err(why) => panic!("Error opening file to send to S3: {}", why),
Ok(_) => {
s3.put_object("rusototester", "no_credentials", &contents).unwrap();
}
}
}
// Dependent on the file being there or it'll break.
#[test]
fn get_object_test() {
let s3 = S3Helper::new(DefaultCredentialsProvider::new().unwrap(), Region::UsWest2);
s3.get_object("rusototester", "no_credentials2").unwrap();
}
#[test]
fn delete_object_test() {
let s3 = S3Helper::new(DefaultCredentialsProvider::new().unwrap(), Region::UsWest2);
s3.delete_object("rusototester", "no_credentials").unwrap();
}
|
<commit_msg>Create 2D list of Neurons in NeuronNetwork's init
<commit_before>class Neuron:
pass
class NeuronNetwork:
neurons = []
<commit_after>class Neuron:
pass
class NeuronNetwork:
neurons = []
def __init__(self, rows, columns):
self.neurons = []
for row in xrange(rows):
self.neurons.append([])
for column in xrange(columns):
self.neurons[row].append(Neuron())
|
<commit_msg>Use sys.executable instead of harcoded python path
Fixes issue when running in a virtualenv and in non-standard python
installations.
<commit_before>
import twitter_rss
import time
import subprocess
import config
# Launch web server
p = subprocess.Popen(['/usr/bin/python2', config.INSTALL_DIR + 'server.py'])
# Update the feeds
try:
while 1:
print 'Updating ALL THE FEEDS!'
try:
with open(config.XML_DIR + 'user/user.txt', 'r') as usernames:
for user in usernames:
twitter_rss.UserTweetGetter(user)
usernames.close()
with open(config.XML_DIR + 'htag/htag.txt', 'r') as hashtags:
for htag in hashtags:
twitter_rss.HashtagTweetGetter(user)
hashtags.close()
except IOError:
print 'File could not be read'
time.sleep(config.TIMER)
except (KeyboardInterrupt, SystemExit):
p.kill() # kill the subprocess
print '\nKeyboardInterrupt catched -- Finishing program.'<commit_after>
import twitter_rss
import time
import subprocess
import config
import sys
# Launch web server
p = subprocess.Popen([sys.executable, config.INSTALL_DIR + 'server.py'])
# Update the feeds
try:
while 1:
print 'Updating ALL THE FEEDS!'
try:
with open(config.XML_DIR + 'user/user.txt', 'r') as usernames:
for user in usernames:
twitter_rss.UserTweetGetter(user)
usernames.close()
with open(config.XML_DIR + 'htag/htag.txt', 'r') as hashtags:
for htag in hashtags:
twitter_rss.HashtagTweetGetter(user)
hashtags.close()
except IOError:
print 'File could not be read'
time.sleep(config.TIMER)
except (KeyboardInterrupt, SystemExit):
p.kill() # kill the subprocess
print '\nKeyboardInterrupt catched -- Finishing program.'
|
<commit_msg>Make multiton creation thread safe and fix some code style.
<commit_before>/**
* @license
* Copyright 2017 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.core;
import java.util.Map;
import java.util.HashMap;
public class MultitonInfo<T>
implements Axiom, ContextFactory<T>
{
Map<Object, T> instanceMap = new HashMap<Object, T>();
String name;
PropertyInfo p;
public MultitonInfo(String name, PropertyInfo p) {
this.name = name;
this.p = p;
}
public String getName() {
return name;
}
public T getInstance(Map<String, Object> args, X x) {
Object key = args.get(p.getName());
if ( ! instanceMap.containsKey(key) ) {
try {
Class<T> type = (Class<T>)p.getClassInfo().getObjClass();
T obj = type.newInstance();
((ContextAware)obj).setX(x);
for (Map.Entry<String, Object> entry : args.entrySet()) {
((FObject)obj).setProperty(entry.getKey(), entry.getValue());
}
instanceMap.put(key, obj);
} catch (java.lang.Exception e) {
e.printStackTrace();
return null;
}
}
return instanceMap.get(key);
}
}
<commit_after>/**
* @license
* Copyright 2017 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.core;
import java.util.Map;
import java.util.HashMap;
public class MultitonInfo<T>
implements Axiom, ContextFactory<T>
{
Map<Object, T> instanceMap = new HashMap<Object, T>();
String name;
PropertyInfo p;
public MultitonInfo(String name, PropertyInfo p) {
this.name = name;
this.p = p;
}
public String getName() {
return name;
}
public synchronized T getInstance(Map<String, Object> args, X x) {
Object key = args.get(p.getName());
if ( ! instanceMap.containsKey(key) ) {
try {
Class<T> type = (Class<T>)p.getClassInfo().getObjClass();
T obj = type.newInstance();
((ContextAware)obj).setX(x);
for ( Map.Entry<String, Object> entry : args.entrySet() ) {
((FObject)obj).setProperty(entry.getKey(), entry.getValue());
}
instanceMap.put(key, obj);
} catch (java.lang.Exception e) {
e.printStackTrace();
return null;
}
}
return instanceMap.get(key);
}
}
|
<commit_msg>Make the FunctionLogger a context manager
<commit_before>from __future__ import absolute_import
import inspect
import os
import sys
import psutil
from collections import namedtuple
from functools import wraps
from pikos.abstract_monitors import AbstractMonitor
__all__ = [
'FunctionLogger',
'FunctionRecord',
]
FunctionRecord = namedtuple('FunctionRecord',
['type', 'filename', 'lineNo', 'function'])
class FunctionLogger(AbstractMonitor):
_fields = FunctionRecord._fields
def __init__(self, recorder):
''' Initialize the logger class.
Parameters
----------
function : callable
The callable to profile
output : str
The file in which to store profiling results.
'''
super(FunctionLogger, self).__init__(None)
self._recorder = recorder
self._process = None
self._old_profile_function = None
def __call__(self, function):
self._item = function
@wraps(function)
def wrapper(*args, **kwds):
return self.run(*args, **kwds)
return wrapper
def setup(self):
self._recorder.prepare(self._fields)
self._process = psutil.Process(os.getpid())
self._old_profile_function = sys.getprofile()
sys.setprofile(self.on_function_event)
def teardown(self):
sys.setprofile(self._old_profile_function)
self._process = None
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
filename, lineno, function, _, _ = \
inspect.getframeinfo(frame, context=0)
if event.startswith('c_'):
function = arg.__name__
record = FunctionRecord(event, filename, lineno, function)
self._recorder.record(record)
<commit_after>from __future__ import absolute_import
import inspect
from collections import namedtuple
from pikos._profile_functions import ProfileFunctions
from pikos._trace_functions import TraceFunctions
__all__ = [
'FunctionLogger',
'FunctionRecord',
]
FunctionRecord = namedtuple('FunctionRecord',
['type', 'filename', 'lineNo', 'function'])
class FunctionLogger(object):
_fields = FunctionRecord._fields
def __init__(self, recorder):
self._recorder = recorder
self._profiler = ProfileFunctions()
def __enter__(self):
self._recorder.prepare(self._fields)
self._profiler.set(self.on_function_event)
def __exit__(self, exc_type, exc_val, exc_tb):
self._profiler.unset()
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
filename, lineno, function, _, _ = \
inspect.getframeinfo(frame, context=0)
if event.startswith('c_'):
function = arg.__name__
record = FunctionRecord(event, filename, lineno, function)
self._recorder.record(record)
|
<commit_msg>Remove back authorization for snippets
<commit_before>package org.crunchytorch.coddy.snippet.api;
import org.crunchytorch.coddy.snippet.elasticsearch.entity.SnippetEntity;
import org.crunchytorch.coddy.snippet.service.SnippetService;
import org.crunchytorch.coddy.user.filter.AuthorizationFilter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import java.util.List;
@Component
@AuthorizationFilter
@Path("/snippet")
public class Snippet {
@Autowired
private SnippetService snippetService;
@GET
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public List<SnippetEntity> getSnippets(@DefaultValue("0") @QueryParam("from") final int from,
@DefaultValue("10") @QueryParam("size") final int size) {
return snippetService.getEntity(from, size);
}
@GET
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Path("{id}")
public SnippetEntity getSnippet(@PathParam("id") String id) {
return snippetService.getSnippet(id);
}
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public void create(SnippetEntity snippet) {
snippetService.create(snippet);
}
}
<commit_after>package org.crunchytorch.coddy.snippet.api;
import org.crunchytorch.coddy.snippet.elasticsearch.entity.SnippetEntity;
import org.crunchytorch.coddy.snippet.service.SnippetService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import java.util.List;
@Component
@Path("/snippet")
public class Snippet {
@Autowired
private SnippetService snippetService;
@GET
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public List<SnippetEntity> getSnippets(@DefaultValue("0") @QueryParam("from") final int from,
@DefaultValue("10") @QueryParam("size") final int size) {
return snippetService.getEntity(from, size);
}
@GET
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Path("{id}")
public SnippetEntity getSnippet(@PathParam("id") String id) {
return snippetService.getSnippet(id);
}
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public void create(SnippetEntity snippet) {
snippetService.create(snippet);
}
}
|
<commit_msg>Implement naïve solution (TODO: replace with better use of iterators)
<commit_before>use std::io;
use std::io::prelude::*;
fn main() {
let stdin = io::stdin();
for line in stdin.lock().lines() {
println!("{}", line.unwrap());
}
}
<commit_after>use std::io;
use std::io::prelude::*;
fn main() {
let stdin = io::stdin();
let mut total = 0;
for line in stdin.lock().lines() {
let l = line.unwrap();
let split: Vec<i32> = l.split('x').map(|s| s.parse::<i32>().unwrap()).collect();
total += sqfootage(split[0], split[1], split[2]);
}
println!("Total is {} square feet.", total)
}
fn sqfootage(x:i32, y:i32, z:i32) -> i32 {
let side1 = x * y;
let side2 = y * z;
let side3 = z * x;
let slack = vmin(vec![side1, side2, side3]);
return (2 * side1) + (2 * side2) + (2 * side3) + slack;
}
fn vmin(v:Vec<i32>) -> i32 {
let mut m = v[0];
for e in v {
if e < m {
m = e;
}
}
m
}
#[test]
fn sqfootage_computes_area_with_slack() {
assert_eq!(58, sqfootage(2, 3, 4));
assert_eq!(43, sqfootage(1, 1, 10));
}
#[test]
fn min_works() {
assert_eq!(1, vmin(vec![1]));
assert_eq!(1, vmin(vec![2, 1]));
assert_eq!(1, vmin(vec![1, 2, 3]));
assert_eq!(1, vmin(vec![2, 1, 3]));
}
|
<commit_msg>Convert Portal to a class component and perform cleanup when navigating
<commit_before>// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
// See the LICENCE file in the repository root for full licence text.
import { ReactNode } from 'react';
import { createPortal } from 'react-dom';
export const Portal = ({children}: { children: ReactNode }) => createPortal(children, document.body);
<commit_after>// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
// See the LICENCE file in the repository root for full licence text.
import { PureComponent, ReactNode } from 'react';
import { createPortal } from 'react-dom';
interface Props {
children: ReactNode;
}
export class Portal extends PureComponent<Props> {
private readonly container: HTMLElement;
private readonly uuid: string;
constructor(props: Props) {
super(props);
this.uuid = osu.uuid();
this.container = document.createElement('div');
}
addPortal = () => document.body.appendChild(this.container);
componentDidMount() {
this.addPortal();
$(document).on(`turbolinks:before-cache.${this.uuid}`, () => {
this.removePortal();
});
}
componentWillUnmount = () => $(document).off(`turbolinks:before-cache.${this.uuid}`);
removePortal = () => document.body.removeChild(this.container);
render = () => createPortal(this.props.children, this.container);
}
|
<commit_msg>Add Content-Type to allowed default headers
<commit_before>package org.col.dw.cors;
import org.hibernate.validator.constraints.NotEmpty;
public class CorsConfiguration {
public static final String ANY_ORIGIN = "*";
@NotEmpty
public String origins = ANY_ORIGIN;
@NotEmpty
public String methods = "OPTIONS, GET, POST, PUT, DELETE";
@NotEmpty
public String headers = "Authorization";
public boolean anyOrigin() {
return ANY_ORIGIN.equals(origins);
}
}<commit_after>package org.col.dw.cors;
import org.hibernate.validator.constraints.NotEmpty;
public class CorsConfiguration {
public static final String ANY_ORIGIN = "*";
@NotEmpty
public String origins = ANY_ORIGIN;
@NotEmpty
public String methods = "OPTIONS, HEAD, GET, POST, PUT, DELETE";
@NotEmpty
public String headers = "Authorization, Content-Type";
public boolean anyOrigin() {
return ANY_ORIGIN.equals(origins);
}
} |
<commit_msg>Fix import bug in Python 3<commit_before>import version
__version__ = version.__version__
<commit_after>from .version import __version__
__version__ = version.__version__
|
<commit_msg>Remove test that should not be here
<commit_before>
using namespace clt::filesystem;
using namespace clt::filesystem::factories;
using namespace clt::filesystem::entities;
using namespace clt::filesystem::entities::exceptions;
using namespace application::parameters;
int main(int argc, char* argv[]) {
if (argc < 2)
{
// TODO: real arguments management
printf("not enough argument, need the path of your executable");
return EXIT_FAILURE;
}
ApplicationParametersManager parametersManager(std::make_unique<ApplicationParametersBuilder>(), std::make_unique<ApplicationParametersReader>());
parametersManager.start(argc, argv);
ApplicationParameters parameters = parametersManager.getParameters();
EntityFactory entityFactory;
try
{
Executable executable = entityFactory.createExecutable(parameters.getExecutablePath());
executable.execute();
}
catch (InvalidExecutablePathException exception) {
std::cout << "Invalid executable path : " << std::endl << "\t - " << exception.getDescription() << std::endl;
}
return EXIT_SUCCESS;
}
<commit_after>
using namespace clt::filesystem;
using namespace clt::filesystem::factories;
using namespace clt::filesystem::entities;
using namespace clt::filesystem::entities::exceptions;
using namespace application::parameters;
int main(int argc, char* argv[]) {
ApplicationParametersManager parametersManager(std::make_unique<ApplicationParametersBuilder>(), std::make_unique<ApplicationParametersReader>());
parametersManager.start(argc, argv);
ApplicationParameters parameters = parametersManager.getParameters();
EntityFactory entityFactory;
try
{
Executable executable = entityFactory.createExecutable(parameters.getExecutablePath());
executable.execute();
}
catch (InvalidExecutablePathException exception) {
std::cout << "Invalid executable path : " << std::endl << "\t - " << exception.getDescription() << std::endl;
}
return EXIT_SUCCESS;
}
|
<commit_msg>Check if optional click callback is defined in steps component.
<commit_before>import * as classNames from 'classnames';
import * as React from 'react';
import { Step } from './Step';
import './StepsList.scss';
interface StepsListProps {
choices: string[];
value: string;
onClick?(step: string): void;
disabled?: boolean;
}
export const StepsList = (props: StepsListProps) => {
const stepIndex = props.choices.indexOf(props.value);
return (
<div className={classNames({disabled: props.disabled}, 'shopping-cart-steps')}>
{props.choices.map((title, index) => (
<Step
key={index}
title={`${index + 1}. ${title}`}
complete={stepIndex > index}
active={stepIndex === index}
onClick={() => props.onClick(title)}
/>
))}
</div>
);
};
<commit_after>import * as classNames from 'classnames';
import * as React from 'react';
import { Step } from './Step';
import './StepsList.scss';
interface StepsListProps {
choices: string[];
value: string;
onClick?(step: string): void;
disabled?: boolean;
}
export const StepsList = (props: StepsListProps) => {
const stepIndex = props.choices.indexOf(props.value);
return (
<div className={classNames({disabled: props.disabled}, 'shopping-cart-steps')}>
{props.choices.map((title, index) => (
<Step
key={index}
title={`${index + 1}. ${title}`}
complete={stepIndex > index}
active={stepIndex === index}
onClick={() => props.onClick && props.onClick(title)}
/>
))}
</div>
);
};
|
<commit_msg>Improve description of dronekit on PyPi
<commit_before>from setuptools import setup, Extension
import platform
version = '2.1.0'
setup(name='dronekit',
zip_safe=True,
version=version,
description='Python language bindings for the DroneApi',
long_description='Python language bindings for the DroneApi',
url='https://github.com/dronekit/dronekit-python',
author='3D Robotics',
install_requires=[
'pymavlink>=1.1.62',
'requests>=2.5.0,<=2.99999',
],
author_email='tim@3drobotics.com, kevinh@geeksville.com',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering',
],
license='apache',
packages=[
'dronekit', 'dronekit.cloud', 'dronekit.test'
],
ext_modules=[])
<commit_after>from setuptools import setup, Extension
import platform
version = '2.1.0'
setup(name='dronekit',
zip_safe=True,
version=version,
description='Developer Tools for Drones.',
long_description='Python API for communication and control of drones over MAVLink.',
url='https://github.com/dronekit/dronekit-python',
author='3D Robotics',
install_requires=[
'pymavlink>=1.1.62',
'requests>=2.5.0,<=2.99999',
],
author_email='tim@3drobotics.com, kevinh@geeksville.com',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering',
],
license='apache',
packages=[
'dronekit', 'dronekit.cloud', 'dronekit.test'
],
ext_modules=[])
|
<commit_msg>Update playback extension so that javascript functions return consistent (but not constant) values in an attempt to preserve the functionality but improve compatibility of the extension.
Review URL: http://codereview.chromium.org/62165
git-svn-id: dd90618784b6a4b323ea0c23a071cb1c9e6f2ac7@13688 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
<commit_before>// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "config.h"
#include "webkit/extensions/v8/playback_extension.h"
namespace extensions_v8 {
const char* kPlaybackExtensionName = "v8/PlaybackMode";
v8::Extension* PlaybackExtension::Get() {
v8::Extension* extension = new v8::Extension(
kPlaybackExtensionName,
"(function () {"
" var orig_date = Date;"
" Math.random = function() {"
" return 0.5;"
" };"
" Date.__proto__.now = function() {"
" return new orig_date(1204251968254);"
" };"
" Date = function() {"
" return Date.now();"
" };"
"})()");
return extension;
}
} // namespace extensions_v8
<commit_after>// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "config.h"
#include "webkit/extensions/v8/playback_extension.h"
namespace extensions_v8 {
const char* kPlaybackExtensionName = "v8/PlaybackMode";
v8::Extension* PlaybackExtension::Get() {
v8::Extension* extension = new v8::Extension(
kPlaybackExtensionName,
"(function () {"
" var orig_date = Date;"
" var x = 0;"
" var time_seed = 1204251968254;"
" Math.random = function() {"
" x += .1;"
" return (x % 1);"
" };"
" Date.__proto__.now = function() {"
" time_seed += 50;"
" return new orig_date(time_seed);"
" };"
" Date = function() {"
" return Date.now();"
" };"
"})()");
return extension;
}
} // namespace extensions_v8
|
<commit_msg>Add filter for rejecting practice topic
<commit_before>class Filterable:
no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)
identity_filter = lambda filterable: True
@staticmethod
def combine_filters( *filters ):
return lambda filterable: all([fil( filterable ) for fil in filters])
<commit_after>class Filterable:
no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)
practice_topic_reject_filter = lambda filterable: filterable.topic.record_id != str(367)
identity_filter = lambda filterable: True
@staticmethod
def combine_filters( *filters ):
return lambda filterable: all([fil( filterable ) for fil in filters])
|
<commit_msg>Add description to rst plugin
<commit_before>import os
import fnmatch
import restructuredtext_lint
DEFAULTS = {
'files': '*.rst',
}
def make_message(error):
return '%s %s:%s %s\n' % (
error.type, error.source, error.line, error.message,
)
def check(file_staged_for_commit, options):
basename = os.path.basename(file_staged_for_commit.path)
if not fnmatch.fnmatch(basename, options.rst_files):
return True
errors = restructuredtext_lint.lint(
file_staged_for_commit.contents,
file_staged_for_commit.path,
)
if errors:
print('\n'.join(make_message(e) for e in errors))
return False
else:
return True
<commit_after>"""Check that files contains valid ReStructuredText."""
import os
import fnmatch
import restructuredtext_lint
DEFAULTS = {
'files': '*.rst',
}
def make_message(error):
return '%s %s:%s %s\n' % (
error.type, error.source, error.line, error.message,
)
def check(file_staged_for_commit, options):
basename = os.path.basename(file_staged_for_commit.path)
if not fnmatch.fnmatch(basename, options.rst_files):
return True
errors = restructuredtext_lint.lint(
file_staged_for_commit.contents,
file_staged_for_commit.path,
)
if errors:
print('\n'.join(make_message(e) for e in errors))
return False
else:
return True
|
<commit_msg>Use StoreConfiguration to determine gadget render info<commit_before>package uk.ac.edukapp.renderer;
public class GadgetRenderer {
private static GadgetRenderer renderer = new GadgetRenderer();
private GadgetRenderer() {
}
public static GadgetRenderer getInstance() {
return renderer;
}
public String render(String uri, int width, int height) {
String html = "";
html += "<iframe src=\"http://widgets.open.ac.uk:8080/shindig/gadgets/ifr?url="
+ uri + "\"";
html += " width=\"" + "500" + "\"";
html += " height=\"" + "300" + "\"";
html += "></iframe>";
return html;
}
public String render(String uri) {
return render(uri, 500, 300);
}
}
<commit_after>package uk.ac.edukapp.renderer;
import uk.ac.edukapp.server.configuration.StoreConfiguration;
public class GadgetRenderer {
private static GadgetRenderer renderer = new GadgetRenderer();
private GadgetRenderer() {
}
public static GadgetRenderer getInstance() {
return renderer;
}
public String render(String uri, int width, int height) {
String html = "";
html += "<iframe src=\""+StoreConfiguration.getInstance().getShindigLocation()+"/gadgets/ifr?url="
+ uri + "\"";
html += " width=\"" + "500" + "\"";
html += " height=\"" + "300" + "\"";
html += "></iframe>";
return html;
}
public String render(String uri) {
return render(uri, 500, 300);
}
}
|
<commit_msg>Remove a bad typo from reicast
<commit_before>import Command
#~ import reicastControllers
import recalboxFiles
from generators.Generator import Generator
import ppssppControllers
import shutil
import os.path
import ConfigParser
class PPSSPPGenerator(Generator):
# Main entry of the module
# Configure fba and return a command
def generate(self, system, rom, playersControllers):
if not system.config['configfile']:
# Write emu.cfg to map joysticks, init with the default emu.cfg
Config = ConfigParser.ConfigParser()
Config.read(recalboxFiles.reicastConfigInit)
section = "input"
# For each pad detected
for index in playersControllers :
controller = playersControllers[index]
# we only care about player 1
if controller.player != "1":
continue
ppssppControllers.generateControllerConfig(controller)
# the command to run
#~ commandArray = [recalboxFiles.ppssppBin, rom, "--escape-exit"]
commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom]
return Command.Command(videomode=system.config['videomode'], array=commandArray, env={"XDG_CONFIG_HOME":recalboxFiles.CONF, "SDL_VIDEO_GL_DRIVER": "/usr/lib/libGLESv2.so"}, delay=1)
<commit_after>import Command
#~ import reicastControllers
import recalboxFiles
from generators.Generator import Generator
import ppssppControllers
import shutil
import os.path
import ConfigParser
class PPSSPPGenerator(Generator):
# Main entry of the module
# Configure fba and return a command
def generate(self, system, rom, playersControllers):
if not system.config['configfile']:
for index in playersControllers :
controller = playersControllers[index]
# we only care about player 1
if controller.player != "1":
continue
ppssppControllers.generateControllerConfig(controller)
break
# the command to run
commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom]
# The next line is a reminder on how to quit PPSSPP with just the HK
#commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom, "--escape-exit"]
return Command.Command(videomode=system.config['videomode'], array=commandArray, env={"XDG_CONFIG_HOME":recalboxFiles.CONF, "SDL_VIDEO_GL_DRIVER": "/usr/lib/libGLESv2.so"}, delay=1)
|
<commit_msg>Fix code for review comments
<commit_before>"""Functions to retrieve pending list and invoke Graph Sync."""
import f8a_jobs.defaults as configuration
import requests
import traceback
import logging
logger = logging.getLogger(__name__)
def _api_call(url, params={}):
try:
logger.info("API Call for url: %s, params: %s" % (url, params))
r = requests.get(url, params=params)
if r is None:
logger.error("Returned response is: %s" % r)
raise Exception("Empty response found")
result = {"data": r.json()}
except Exception:
logger.error(traceback.format_exc())
result = {"error": "Failed to retrieve data from Data Model Importer backend"}
return result
def fetch_pending(params={}):
"""Invoke Pending Graph Sync APIs for given parameters."""
url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending")
return _api_call(url, params)
def invoke_sync(params={}):
"""Invoke Graph Sync APIs to sync for given parameters."""
url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all")
return _api_call(url, params)
<commit_after>"""Functions to retrieve pending list and invoke Graph Sync."""
import f8a_jobs.defaults as configuration
import requests
import traceback
import logging
from urllib.parse import urljoin
logger = logging.getLogger(__name__)
def _api_call(url, params=None):
params = params or {}
try:
logger.info("API Call for url: %s, params: %s" % (url, params))
r = requests.get(url, params=params)
r.raise_for_status()
result = {"data": r.json()}
except requests.exceptions.HTTPError:
logger.error(traceback.format_exc())
result = {"error": "Failed to retrieve data from Data Model Importer backend"}
return result
def fetch_pending(params=None):
params = params or {}
"""Invoke Pending Graph Sync APIs for given parameters."""
url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending")
return _api_call(url, params)
def invoke_sync(params=None):
params = params or {}
"""Invoke Graph Sync APIs to sync for given parameters."""
url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all")
return _api_call(url, params)
|
<commit_msg>Remove unnecessary imports in example
<commit_before>extern crate xswag_base as base;
use base::code::{SrcOffset, BytePos, Span, FileMap};
use base::diag;
const TEXT: &'static str = "no-content";
fn main() {
let file = FileMap::new("src/examples/dummy.txt", TEXT);
file.find_lines();
let e = diag::Report::simple_error(
"Oh noes, this `Report` contains a dummy span :(",
Span::dummy()
).with_span_note(
"neither does this note :/",
Span::dummy()
);
base::diag::print(&e, &file, diag::PrintOptions::default());
}
<commit_after>extern crate xswag_base as base;
use base::code::{Span, FileMap};
use base::diag;
const TEXT: &'static str = "no-content";
fn main() {
let file = FileMap::new("src/examples/dummy.txt", TEXT);
file.find_lines();
let e = diag::Report::simple_error(
"Oh noes, this `Report` contains a dummy span :(",
Span::dummy()
).with_span_note(
"neither does this note :/",
Span::dummy()
);
base::diag::print(&e, &file, diag::PrintOptions::default());
}
|
<commit_msg>Solve the xcffib chicken and egg problem
See https://groups.google.com/forum/#!topic/python-cffi/NgGybV5LLMs for more
info.
<commit_before>
import os
import sys
import subprocess
from setuptools import setup, find_packages
if not os.path.exists('./xcffib'):
print("It looks like you need to generate the binding.")
print("please run 'make xcffib' or 'make check'.")
sys.exit(1)
# version = subprocess.check_output(['git', 'describe', '--tags'])
import xcffib
setup(
name="xcffib",
version="prerelease",
description="A drop in replacement for xpyb, an XCB python binding",
keywords="xcb xpyb cffi x11 x windows",
license="MIT",
install_requires=['six', 'cffi>=0.8.2'],
packages=['xcffib'],
zip_safe=False,
ext_modules=[xcffib.ffi.verifier.get_extension()],
)
<commit_after>
import os
import sys
import subprocess
from setuptools import setup, find_packages
from distutils.command.build import build
if not os.path.exists('./xcffib'):
print("It looks like you need to generate the binding.")
print("please run 'make xcffib' or 'make check'.")
sys.exit(1)
# Stolen from http://github.com/xattr/xattr, which is also MIT licensed.
class cffi_build(build):
"""This is a shameful hack to ensure that cffi is present when
we specify ext_modules. We can't do this eagerly because
setup_requires hasn't run yet.
"""
def finalize_options(self):
import xcffib
self.distribution.ext_modules = [xcffib.ffi.verifier.get_extension()]
build.finalize_options(self)
# version = subprocess.check_output(['git', 'describe', '--tags'])
dependencies = ['six', 'cffi>=0.8.2']
setup(
name="xcffib",
version="prerelease",
description="A drop in replacement for xpyb, an XCB python binding",
keywords="xcb xpyb cffi x11 x windows",
license="MIT",
url="http://github.com/tych0/xcffib",
author="Tycho Andersen",
author_email="tycho@tycho.ws",
install_requires=dependencies,
setup_requires=dependencies,
packages=['xcffib'],
zip_safe=False,
cmdclass={'build': cffi_build},
)
|
<commit_msg>Remove footer print and make file PEP8 compliant
<commit_before>from flask.ext.login import current_user
from viaduct.models.page import Page, PagePermission, PageRevision
from viaduct import db
from flask import request, url_for, render_template
from viaduct.models.group import Group
class PageAPI:
@staticmethod
def remove_page(path):
page = Page.query.filter(Page.path==path).first()
if not page:
return False
for rev in page.revisions.all():
db.session.delete(rev)
for perm in page.permissions.all():
db.session.delete(perm)
db.session.commit()
db.session.delete(page)
db.session.commit()
return True
@staticmethod
def get_footer():
footer = Page.query.filter(Page.path == 'footer').first()
if not footer:
footer = Page('footer')
if footer.revisions.count() > 0:
revision = footer.revisions.order_by(PageRevision.id.desc()).first()
exists = True
else:
revision = PageRevision(footer, current_user,
'', '<b> No footer found </b>'
'', True)
exists = False
print vars(footer)
return render_template('page/get_footer.htm', footer_revision=revision, footer=footer, exists=exists)<commit_after>from flask.ext.login import current_user
from viaduct.models.page import Page, PageRevision
from viaduct import db
from flask import render_template
class PageAPI:
@staticmethod
def remove_page(path):
page = Page.query.filter(Page.path == path).first()
if not page:
return False
for rev in page.revisions.all():
db.session.delete(rev)
for perm in page.permissions.all():
db.session.delete(perm)
db.session.commit()
db.session.delete(page)
db.session.commit()
return True
@staticmethod
def get_footer():
footer = Page.query.filter(Page.path == 'footer').first()
if not footer:
footer = Page('footer')
if footer.revisions.count() > 0:
revision = footer.revisions.order_by(PageRevision.id.desc()).\
first()
exists = True
else:
revision = PageRevision(footer, current_user, '',
'<b> No footer found </b>' '', True)
exists = False
return render_template('page/get_footer.htm', footer_revision=revision,
footer=footer, exists=exists)
|
<commit_msg>Fix broken test: it was testing the old way of validating the reconsent command.
<commit_before>package edu.northwestern.bioinformatics.studycalendar.web.schedule;
import edu.northwestern.bioinformatics.studycalendar.service.StudyService;
import edu.northwestern.bioinformatics.studycalendar.testing.StudyCalendarTestCase;
import gov.nih.nci.cabig.ctms.lang.DateTools;
import gov.nih.nci.cabig.ctms.lang.NowFactory;
import static org.easymock.EasyMock.expect;
import java.util.Calendar;
public class ScheduleReconsentCommandTest extends StudyCalendarTestCase {
private ScheduleReconsentCommand command;
private StudyService studyService;
private NowFactory nowFactory;
protected void setUp() throws Exception {
super.setUp();
studyService = registerMockFor(StudyService.class);
nowFactory = registerMockFor(NowFactory.class);
command = new ScheduleReconsentCommand(studyService, nowFactory);
}
public void testValidate() throws Exception {
command.setStartDate(DateTools.createTimestamp(2005, Calendar.AUGUST, 3));
expect(nowFactory.getNow()).andReturn(DateTools.createDate(2007, Calendar.AUGUST, 3)).times(2);
replayMocks();
command.validate(null);
verifyMocks();
assertSameDay("Expected Date different than actual", DateTools.createDate(2007, Calendar.AUGUST, 3), command.getStartDate());
}
}
<commit_after>package edu.northwestern.bioinformatics.studycalendar.web.schedule;
import edu.northwestern.bioinformatics.studycalendar.service.StudyService;
import edu.northwestern.bioinformatics.studycalendar.testing.StudyCalendarTestCase;
import gov.nih.nci.cabig.ctms.lang.DateTools;
import gov.nih.nci.cabig.ctms.lang.NowFactory;
import static org.easymock.EasyMock.expect;
import org.springframework.validation.Errors;
import org.springframework.validation.BindException;
import java.util.Calendar;
public class ScheduleReconsentCommandTest extends StudyCalendarTestCase {
private ScheduleReconsentCommand command;
private StudyService studyService;
private NowFactory nowFactory;
protected void setUp() throws Exception {
super.setUp();
studyService = registerMockFor(StudyService.class);
nowFactory = registerMockFor(NowFactory.class);
command = new ScheduleReconsentCommand(studyService, nowFactory);
}
public void testValidate() throws Exception {
BindException errors = new BindException(command, "startDate");
command.setStartDate(DateTools.createTimestamp(2005, Calendar.AUGUST, 3));
expect(nowFactory.getNow()).andReturn(DateTools.createDate(2007, Calendar.AUGUST, 3));
replayMocks();
command.validate(errors);
verifyMocks();
assertEquals("There should be one error: ", 1, errors.getAllErrors().size());
}
}
|
<commit_msg>Add Python 3 trove classifiers.
<commit_before>
from setuptools import setup
setup(
name='anytop',
version='0.2.1',
description='Streaming frequency distribution viewer.',
long_description=open('README.rst').read(),
author='Lars Yencken',
author_email='lars@yencken.org',
url='http://github.com/larsyencken/anytop',
entry_points={
'console_scripts': [
'anytop = anytop.top:main',
'anyhist = anytop.histogram:main',
],
},
packages=['anytop'],
license='ISC',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
],
test_suite='tests',
)
<commit_after>
from setuptools import setup
setup(
name='anytop',
version='0.2.1',
description='Streaming frequency distribution viewer.',
long_description=open('README.rst').read(),
author='Lars Yencken',
author_email='lars@yencken.org',
url='http://github.com/larsyencken/anytop',
entry_points={
'console_scripts': [
'anytop = anytop.top:main',
'anyhist = anytop.histogram:main',
],
},
packages=['anytop'],
license='ISC',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
"Programming Language :: Python :: 3",
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
test_suite='tests',
)
|
<commit_msg>Fix demographics resolve form formatting.
<commit_before>from django.forms import ModelForm
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit, Layout, Fieldset
from . import models
class DemographicsForm(ModelForm):
class Meta:
model = models.Demographics
exclude = ['patient', 'creation_date']
def __init__(self, *args, **kwargs):
super(DemographicsForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_method = 'post'
self.helper.form_class = 'form-horizontal'
self.helper.label_class = 'col-lg-2'
self.helper.field_class = 'col-lg-8'
self.helper.layout = Layout(
Fieldset('Medical',
'has_insurance',
'ER_visit_last_year',
'last_date_physician_visit',
'chronic_condition'),
Fieldset('Social',
'lives_alone',
'dependents',
'resource_access',
'transportation'),
Fieldset('Employment',
'currently_employed',
'education_level',
'work_status',
'annual_income')
)
self.helper.add_input(Submit('submit', 'Submit'))
<commit_after>from django.forms import ModelForm
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit, Layout, Fieldset
from . import models
class DemographicsForm(ModelForm):
class Meta:
model = models.Demographics
exclude = ['patient', 'creation_date']
def __init__(self, *args, **kwargs):
super(DemographicsForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_method = 'post'
self.helper.layout = Layout(
Fieldset('Medical',
'has_insurance',
'ER_visit_last_year',
'last_date_physician_visit',
'chronic_condition'),
Fieldset('Social',
'lives_alone',
'dependents',
'resource_access',
'transportation'),
Fieldset('Employment',
'currently_employed',
'education_level',
'work_status',
'annual_income')
)
self.helper.add_input(Submit('submit', 'Submit'))
|
<commit_msg>Add default clause to switch
<commit_before>/*
* Part of HTTPP.
*
* Distributed under the 3-clause BSD licence (See LICENCE.TXT file at the
* project root).
*
* Copyright (c) 2013 Thomas Sanchez. All rights reserved.
*
*/
#include "httpp/http/HttpCode.hpp"
// Return the default human-readable reason phrase for an HTTP status code.
// Unrecognized codes yield "Unknown"; without the default clause the switch
// could fall off the end of this value-returning function, which is
// undefined behavior in C++.
std::string HTTPP::HTTP::getDefaultMessage(HttpCode code)
{
    switch (code)
    {
    case HttpCode::Continue:
        return "Continue";
    case HttpCode::Ok:
        return "Ok";
    case HttpCode::Created:
        return "Created";
    case HttpCode::Accepted:
        return "Accepted";
    case HttpCode::NoContent:
        return "NoContent";
    case HttpCode::MultipleChoice:
        return "MultipleChoice";
    case HttpCode::MovedPermentaly:
        return "MovedPermentaly";
    case HttpCode::MovedTemporarily:
        return "MovedTemporarily";
    case HttpCode::NotModified:
        return "NotModified";
    case HttpCode::BadRequest:
        return "BadRequest";
    case HttpCode::Unauthorized:
        return "Unauthorized";
    case HttpCode::Forbidden:
        return "Forbidden";
    case HttpCode::NotFound:
        return "NotFound";
    case HttpCode::InternalServerError:
        return "InternalServerError";
    case HttpCode::NotImplemented:
        return "NotImplemented";
    case HttpCode::BadGateway:
        return "BadGateway";
    case HttpCode::ServiceUnavailable:
        return "ServiceUnavailable";
    case HttpCode::HttpVersionNotSupported:
        return "HttpVersionNotSupported";
    default:
        return "Unknown";
    }
}
<commit_after>/*
* Part of HTTPP.
*
* Distributed under the 3-clause BSD licence (See LICENCE.TXT file at the
* project root).
*
* Copyright (c) 2013 Thomas Sanchez. All rights reserved.
*
*/
#include "httpp/http/HttpCode.hpp"
// Return the default human-readable reason phrase for an HTTP status code.
// The default clause handles unrecognized codes and also guarantees every
// control path returns a value (falling off the end of a value-returning
// function is undefined behavior in C++).
std::string HTTPP::HTTP::getDefaultMessage(HttpCode code)
{
    switch (code)
    {
    default:
        return "Unknown";
    case HttpCode::Continue:
        return "Continue";
    case HttpCode::Ok:
        return "Ok";
    case HttpCode::Created:
        return "Created";
    case HttpCode::Accepted:
        return "Accepted";
    case HttpCode::NoContent:
        return "NoContent";
    case HttpCode::MultipleChoice:
        return "MultipleChoice";
    case HttpCode::MovedPermentaly:
        return "MovedPermentaly";
    case HttpCode::MovedTemporarily:
        return "MovedTemporarily";
    case HttpCode::NotModified:
        return "NotModified";
    case HttpCode::BadRequest:
        return "BadRequest";
    case HttpCode::Unauthorized:
        return "Unauthorized";
    case HttpCode::Forbidden:
        return "Forbidden";
    case HttpCode::NotFound:
        return "NotFound";
    case HttpCode::InternalServerError:
        return "InternalServerError";
    case HttpCode::NotImplemented:
        return "NotImplemented";
    case HttpCode::BadGateway:
        return "BadGateway";
    case HttpCode::ServiceUnavailable:
        return "ServiceUnavailable";
    case HttpCode::HttpVersionNotSupported:
        return "HttpVersionNotSupported";
    }
}
|
<commit_msg>Mark the py.test test as not to be run in nose.
<commit_before>"""Tests of the test-runner plugins."""
import py
import unittest
from nose.plugins import PluginTester
from coverage.runners.noseplugin import Coverage
class TestCoverage(PluginTester, unittest.TestCase):
    """Test the nose plugin."""
    # PluginTester configuration: the command-line flag that activates the
    # plugin, the plugin instances to register, and extra nose arguments.
    activate = '--with-coverage' # enables the plugin
    plugins = [Coverage()]
    args = ['--cover-report=report']
    @py.test.mark.skipif(True) # "requires nose test runner"
    def test_output(self):
        # self.output is the captured nose run output provided by PluginTester.
        assert "Processing Coverage..." in self.output, (
            "got: %s" % self.output)
    def makeSuite(self):
        # Suite run by PluginTester; a deliberately failing test case so the
        # plugin has something to report on.
        class TC(unittest.TestCase):
            def runTest(self):
                raise ValueError("Coverage down")
        return unittest.TestSuite([TC()])
pytest_plugins = ['pytester']
def test_functional(testdir):
    """Test the py.test plugin."""
    # Create a throwaway test module in a temp dir (pytester fixture).
    testdir.makepyfile("""
        def f():
            x = 42
        def test_whatever():
            pass
        """)
    # Run py.test on it with the coverage plugin's report option enabled.
    result = testdir.runpytest("--cover-report=annotate")
    assert result.ret == 0
    assert result.stdout.fnmatch_lines([
        '*Processing Coverage*'
        ])
    # The run should have produced a .coverage data file.
    coveragefile = testdir.tmpdir.join(".coverage")
    assert coveragefile.check()
    # XXX try loading it?
<commit_after>"""Tests of the test-runner plugins."""
import py
import unittest
from nose.plugins import PluginTester
from coverage.runners.noseplugin import Coverage
class TestCoverage(PluginTester, unittest.TestCase):
    """Test the nose plugin."""
    # PluginTester configuration: the command-line flag that activates the
    # plugin, the plugin instances to register, and extra nose arguments.
    activate = '--with-coverage' # enables the plugin
    plugins = [Coverage()]
    args = ['--cover-report=report']
    @py.test.mark.skipif(True) # "requires nose test runner"
    def test_output(self):
        # self.output is the captured nose run output provided by PluginTester.
        assert "Processing Coverage..." in self.output, (
            "got: %s" % self.output)
    def makeSuite(self):
        # Suite run by PluginTester; a deliberately failing test case so the
        # plugin has something to report on.
        class TC(unittest.TestCase):
            def runTest(self):
                raise ValueError("Coverage down")
        return unittest.TestSuite([TC()])
pytest_plugins = ['pytester']
def test_functional(testdir):
    """Test the py.test plugin."""
    # Create a throwaway test module in a temp dir (pytester fixture).
    testdir.makepyfile("""
        def f():
            x = 42
        def test_whatever():
            pass
        """)
    # Run py.test on it with the coverage plugin's report option enabled.
    result = testdir.runpytest("--cover-report=annotate")
    assert result.ret == 0
    assert result.stdout.fnmatch_lines([
        '*Processing Coverage*'
        ])
    # The run should have produced a .coverage data file.
    coveragefile = testdir.tmpdir.join(".coverage")
    assert coveragefile.check()
    # XXX try loading it?
# Keep test_functional from running in nose: nose would otherwise collect it
# but cannot supply the pytest-only 'testdir' fixture.
test_functional.__test__ = False
|